diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 75a4b5815e..e8f632af23 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,9 +1,11 @@ + + - [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc). -- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO). - [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes. - [ ] You submit test cases (unit or integration tests) that back your changes. - [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only). -- [ ] You provide your full name and an email address registered with your GitHub account. If you’re a first-time submitter, make sure you have completed the [Contributor’s License Agreement form](https://support.springsource.com/spring_committer_signup). \ No newline at end of file diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml new file mode 100644 index 0000000000..a5f764579a --- /dev/null +++ b/.github/workflows/project.yml @@ -0,0 +1,40 @@ +# GitHub Actions to automate GitHub issues for Spring Data Project Management + +name: Spring Data GitHub Issues + +on: + issues: + types: [opened, edited, reopened] + issue_comment: + types: [created] + pull_request_target: + types: [opened, edited, reopened] + +jobs: + Inbox: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null && !contains(join(github.event.issue.labels.*.name, ', '), 'dependency-upgrade') && !contains(github.event.issue.title, 'Release ') + steps: + - name: Create or Update Issue Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Pull-Request: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null + steps: + - name: Create or Update Pull Request Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Feedback-Provided: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback') + steps: + - name: Update Project Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} diff --git a/.gitignore b/.gitignore index 3b34d16a80..27b7a78896 100644 --- 
a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,9 @@ src/ant/.ant-targets-upload-dist.xml
 atlassian-ide-plugin.xml
 /.gradle/
 /.idea/
+*.graphml
+build/
+node_modules
+node
+package-lock.json
+.mvn/.develocity
diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml
new file mode 100644
index 0000000000..e0857eaa25
--- /dev/null
+++ b/.mvn/extensions.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<extensions>
+	<extension>
+		<groupId>io.spring.develocity.conventions</groupId>
+		<artifactId>develocity-conventions-maven-extension</artifactId>
+		<version>0.0.22</version>
+	</extension>
+</extensions>
diff --git a/.mvn/jvm.config b/.mvn/jvm.config
new file mode 100644
index 0000000000..32599cefea
--- /dev/null
+++ b/.mvn/jvm.config
@@ -0,0 +1,10 @@
+--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED
+--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
+--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED
+--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED
diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar
new file mode 100755
index 0000000000..01e6799737
Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
new file mode 100755
index 0000000000..5f3193b363
--- /dev/null
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -0,0 +1,2 @@
+#Thu Nov 07 09:47:19 CET 2024
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index fd3e646ae0..0000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-language: java
-
-jdk:
-  - oraclejdk8
-
-before_script:
-  - mongod --version
-
-env:
-  matrix:
-    - PROFILE=ci
-    - PROFILE=mongo-next
-    - PROFILE=mongo3
-    - PROFILE=mongo3-next
-    - PROFILE=mongo31
-    - PROFILE=mongo32
-    - PROFILE=mongo33-next
-
-# Current MongoDB version is 2.4.2 as of 2016-04, see https://github.com/travis-ci/travis-ci/issues/3694
-# apt-get starts a MongoDB instance so it's not started using before_script
-addons:
-  apt:
-    sources:
-    - mongodb-3.2-precise
-    packages:
-    - mongodb-org-server
-    - mongodb-org-shell
-
-sudo: false
-
-cache:
-  directories:
-  - $HOME/.m2
-
-install: true
-
-script: "mvn clean dependency:list test -P${PROFILE} -Dsort"
diff --git a/CI.adoc b/CI.adoc
new file mode 100644
index 0000000000..057100a955
--- /dev/null
+++ b/CI.adoc
@@ -0,0 +1,43 @@
+= Continuous Integration
+
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
+
+== Running CI tasks locally
+
+Since this pipeline is purely Docker-based, it's easy to:
+
+* Debug what went wrong on your local machine.
+* Test out a tweak to your test routine before sending it out.
+* Experiment against a new image before submitting your pull request.
+
+All of these use cases are great reasons to essentially run what the CI server does on your local machine.
+
+IMPORTANT: To do this you must have Docker installed on your machine.
+
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash`
++
+This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
++
+2. `cd spring-data-mongodb-github`
++
+Next, run the tests from inside the container:
++
+3. `./mvnw clean dependency:list test -Dsort -Dbundlor.enabled=false -B` (or with whatever profile you need to test out)
+
+Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.
+
+If you need to package things up, do this:
+
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash`
++
+This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
++
+2. `cd spring-data-mongodb-github`
++
+Next, package things from inside the container by running:
++
+3. `./mvnw clean dependency:list package -Dsort -Dbundlor.enabled=false -B`
+
+NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc
deleted file mode 100644
index f64fb1b7a5..0000000000
--- a/CODE_OF_CONDUCT.adoc
+++ /dev/null
@@ -1,27 +0,0 @@
-= Contributor Code of Conduct
-
-As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
-
-We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information, such as physical or electronic addresses,
-  without explicit permission
-* Other unethical or unprofessional conduct
-
-Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
-
-By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.
-
-This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
-All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
-Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.
-
-This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
\ No newline at end of file
diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc
index f007591467..740e8bd0bb 100644
--- a/CONTRIBUTING.adoc
+++ b/CONTRIBUTING.adoc
@@ -1,3 +1,3 @@
 = Spring Data contribution guidelines
-You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
+You can find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
diff --git a/Jenkinsfile b/Jenkinsfile
new file mode 100644
index 0000000000..0e83b47e2f
--- /dev/null
+++ b/Jenkinsfile
@@ -0,0 +1,216 @@
+def p = [:]
+node {
+	checkout scm
+	p = readProperties interpolate: true, file: 'ci/pipeline.properties'
+}
+
+pipeline {
+	agent none
+
+	triggers {
+		pollSCM 'H/10 * * * *'
+		upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
+	}
+
+	options {
+		disableConcurrentBuilds()
+		buildDiscarder(logRotator(numToKeepStr: '14'))
+	}
+
+	stages {
+		stage("Docker images") {
+			parallel {
+				stage('Publish JDK (Java 17) + MongoDB 6.0') {
+					when {
+						anyOf {
+							changeset "ci/openjdk17-mongodb-6.0/**"
+							changeset "ci/pipeline.properties"
+						}
+					}
+					agent { label 'data' }
+					options { timeout(time: 30, unit: 'MINUTES') }
+
+					steps {
+						script {
+							def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk17-mongodb-6.0/")
+							docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+								image.push()
+							}
+						}
+					}
+				}
+				stage('Publish JDK (Java 17) + MongoDB 7.0') {
+					when {
+						anyOf {
+							changeset "ci/openjdk17-mongodb-7.0/**"
+							changeset "ci/pipeline.properties"
+						}
+					}
+					agent { label 'data' }
+					options { timeout(time: 30, unit: 'MINUTES') }
+
+					steps {
+						script {
+							def image = docker.build("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.7.0.version']} ci/openjdk17-mongodb-7.0/")
+							docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+								image.push()
+							}
+						}
+					}
+				}
+				stage('Publish JDK (Java.next) + MongoDB 8.0') {
+					when {
+						anyOf {
+							changeset "ci/openjdk23-mongodb-8.0/**"
+							changeset "ci/pipeline.properties"
+						}
+					}
+					agent { label 'data' }
+					options { timeout(time: 30, unit: 'MINUTES') }
+
+					steps {
+						script {
+							def image = docker.build("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.8.0.version']} ci/openjdk23-mongodb-8.0/")
+							docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+								image.push()
+							}
+						}
+					}
+				}
+			}
+		}
+
+		stage("test: baseline (main)") {
+			when {
+				beforeAgent(true)
+				anyOf {
+					branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
+					not { triggeredBy 'UpstreamCause' }
+				}
+			}
+			agent {
+				label 'data'
+			}
+			options { timeout(time: 30, unit: 'MINUTES') }
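+
+			// Note: the credentials() helper below binds the credential IDs read from ci/pipeline.properties.
+			// For username/password credentials Jenkins additionally exposes ARTIFACTORY_USR and
+			// ARTIFACTORY_PSW, which the "Release to artifactory" stage further down hands to Maven.
+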
environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + + stage("Test other configurations") { + when { + beforeAgent(true) + allOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + parallel { + stage("test: MongoDB 7.0 (main)") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + + stage("test: MongoDB 8.0") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + } + } + + stage('Release to artifactory') { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 20, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Pci,artifactory " + + "-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " + + "-Dartifactory.server=${p['artifactory.url']} " + + "-Dartifactory.username=${ARTIFACTORY_USR} " + + 
"-Dartifactory.password=${ARTIFACTORY_PSW} " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + + "-Dartifactory.build-name=spring-data-mongodb " + + "-Dartifactory.build-number=spring-data-mongodb-${BRANCH_NAME}-build-${BUILD_NUMBER} " + + "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb " + + "-Dmaven.test.skip=true clean deploy -U -B" + } + } + } + } + } + } + + post { + changed { + script { + emailext( + subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", + mimeType: 'text/html', + recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']], + body: "${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}") + } + } + } +} diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..ff77379631 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.adoc b/README.adoc new file mode 100644 index 0000000000..61b956fbfc --- /dev/null +++ b/README.adoc @@ -0,0 +1,231 @@ +image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] + += Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring Data MongoDB"] + +The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. + +The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. +The Spring Data MongoDB project provides integration with the MongoDB document database. +Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer. + +[[code-of-conduct]] +== Code of Conduct + +This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. 
+
+[[getting-started]]
+== Getting Started
+
+Here is a quick teaser of an application using Spring Data Repositories in Java:
+
+[source,java]
+----
+public interface PersonRepository extends CrudRepository<Person, Long> {
+
+  List<Person> findByLastname(String lastname);
+
+  List<Person> findByFirstnameLike(String firstname);
+}
+
+@Service
+public class MyService {
+
+  private final PersonRepository repository;
+
+  public MyService(PersonRepository repository) {
+    this.repository = repository;
+  }
+
+  public void doWork() {
+
+    repository.deleteAll();
+
+    Person person = new Person();
+    person.setFirstname("Oliver");
+    person.setLastname("Gierke");
+    repository.save(person);
+
+    List<Person> lastNameResults = repository.findByLastname("Gierke");
+    List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
+  }
+}
+
+@Configuration
+@EnableMongoRepositories
+class ApplicationConfig extends AbstractMongoClientConfiguration {
+
+  @Override
+  protected String getDatabaseName() {
+    return "springdata";
+  }
+}
+----
+
+[[maven-configuration]]
+=== Maven configuration
+
+Add the Maven dependency:
+
+[source,xml]
+----
+<dependency>
+  <groupId>org.springframework.data</groupId>
+  <artifactId>spring-data-mongodb</artifactId>
+  <version>${version}</version>
+</dependency>
+----
+
+If you'd rather use the latest snapshots of the upcoming major version, use our Maven snapshot repository
+and declare the appropriate dependency version.
+
+[source,xml]
+----
+<dependency>
+  <groupId>org.springframework.data</groupId>
+  <artifactId>spring-data-mongodb</artifactId>
+  <version>${version}-SNAPSHOT</version>
+</dependency>
+
+<repository>
+  <id>spring-snapshot</id>
+  <name>Spring Snapshot Repository</name>
+  <url>https://repo.spring.io/snapshot</url>
+</repository>
+----
+
+[[upgrading]]
+== Upgrading
+
+Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki].
+Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to.
+
+[[getting-help]]
+== Getting Help
+
+Having trouble with Spring Data? We’d love to help!
+
+* Check the
+https://docs.spring.io/spring-data/mongodb/reference/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs]
+* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation.
+If you are just starting out with Spring, try one of the https://spring.io/guides[guides].
+* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features.
+* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`].
+* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].
+
+[[reporting-issues]]
+== Reporting Issues
+
+Spring Data uses GitHub as its issue tracking system to record bugs and feature requests.
+If you want to raise an issue, please follow the recommendations below:
+
+* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem.
+* If the issue does not already exist, https://github.com/spring-projects/spring-data-mongodb/issues/new[create a new issue].
+* Please provide as much information as possible with the issue report: we'd like to know the version of Spring Data that you are using, the JVM version, the stack trace, etc.
+* If you need to paste code or include a stack trace, use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.
+
+[[guides]]
+== Guides
+
+The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
+
+* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
+* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
+
+[[examples]]
+== Examples
+
+* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
+
+[[building-from-source]]
+== Building from Source
+
+You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io]
+and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.
+
+NOTE: Configuration for Gradle is similar to Maven.
+
+The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
+Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
+to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
+to build a reactive one.
+
+However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
+and, minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
+
+In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
+and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].
+
+Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to
+your MongoDB installation directory (e.g. `MONGODB_HOME`).
+
+To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set]
+is required.
+
+To run the MongoDB server, enter the following command from the command line:
+
+[source,bash]
+----
+$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
+...
+"msg":"Successfully connected to host"
+----
+
+Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".
+
+Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
+the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).
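+
+If you want to double-check that the server is reachable before initializing the replica set, a quick ping works.
+This is only a sketch: it assumes the `mongosh` shell that ships with recent MongoDB distributions is on your `PATH`
+(older distributions can use the legacy `mongo` shell the same way):
+
+[source,bash]
+----
+# Should print { ok: 1 } when the server is up on the default port
+$ mongosh --port 27017 --eval "db.runCommand({ ping: 1 })"
+----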
+
+You need to initialize the MongoDB replica set only once, when the MongoDB server is started for the first time.
+To initialize the replica set, start a mongo client:
+
+[source,bash]
+----
+$ $MONGODB_HOME/bin/mongo
+MongoDB server version: 6.0.0
+...
+----
+
+Then enter the following command:
+
+[source,bash]
+----
+mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
+----
+
+Finally, on UNIX-based systems (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
+In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):
+
+[source,bash]
+----
+$ ulimit -n 32768
+----
+
+You can use `ulimit -a` to verify that the `ulimit` for "_open files_" was set appropriately.
+
+Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:
+
+[source,bash]
+----
+ $ ./mvnw clean install
+----
+
+If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above].
+
+_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
+the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
+
+=== Building reference documentation
+
+Building the documentation also builds the project without running tests.
+
+[source,bash]
+----
+ $ ./mvnw clean install -Pantora
+----
+
+The generated documentation is available from `target/antora/site/index.html`.
+
+[[license]]
+== License
+
+Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
diff --git a/README.md b/README.md
deleted file mode 100644
index 54d625fdf4..0000000000
--- a/README.md
+++ /dev/null
@@ -1,147 +0,0 @@
-# Spring Data MongoDB
-
-The primary goal of the [Spring Data](http://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
-
-The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.
-
-## Getting Help
-
-For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:
-
-* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
-* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
-* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
-* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb).
-
-If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/).
- - -## Quick Start - -### Maven configuration - -Add the Maven dependency: - -```xml - - org.springframework.data - spring-data-mongodb - 1.9.1.RELEASE - -``` - -If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. - -```xml - - org.springframework.data - spring-data-mongodb - 1.10.0.BUILD-SNAPSHOT - - - - spring-libs-snapshot - Spring Snapshot Repository - http://repo.spring.io/libs-snapshot - -``` - -### MongoTemplate - -MongoTemplate is the central support class for Mongo database operations. It provides: - -* Basic POJO mapping support to and from BSON -* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.) -* Connection affinity callback -* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions). - -### Spring Data repositories - -To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface. - -For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below: - -```java -public interface PersonRepository extends CrudRepository { - - List findByLastname(String lastname); - - List findByFirstnameLike(String firstname); -} -``` - -The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them. - -You can have Spring automatically create a proxy for the interface by using the following JavaConfig: - -```java -@Configuration -@EnableMongoRepositories -class ApplicationConfig extends AbstractMongoConfiguration { - - @Override - public Mongo mongo() throws Exception { - return new MongoClient(); - } - - @Override - protected String getDatabaseName() { - return "springdata"; - } -} -``` - -This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML: - -```xml - - - - - - - - - - - -``` - -This will find the repository interface and register a proxy object in the container. You can use it as shown below: - -```java -@Service -public class MyService { - - private final PersonRepository repository; - - @Autowired - public MyService(PersonRepository repository) { - this.repository = repository; - } - - public void doWork() { - - repository.deleteAll(); - - Person person = new Person(); - person.setFirstname("Oliver"); - person.setLastname("Gierke"); - person = repository.save(person); - - List lastNameResults = repository.findByLastname("Gierke"); - List firstNameResults = repository.findByFirstnameLike("Oli*"); - } -} -``` - -## Contributing to Spring Data - -Here are some ways for you to get involved in the community: - -* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate. 
-* Create [JIRA](https://jira.springframework.org/browse/DATADOC) tickets for bugs and new features and comment and vote on the ones that you are interested in.
-* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
-* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io.
-
-Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
diff --git a/SECURITY.adoc b/SECURITY.adoc
new file mode 100644
index 0000000000..9c518d999a
--- /dev/null
+++ b/SECURITY.adoc
@@ -0,0 +1,9 @@
+# Security Policy
+
+## Supported Versions
+
+Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions.
+
+## Reporting a Vulnerability
+
+Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly.
diff --git a/ci/README.adoc b/ci/README.adoc
new file mode 100644
index 0000000000..f1c11d8496
--- /dev/null
+++ b/ci/README.adoc
@@ -0,0 +1,39 @@
+== Running CI tasks locally
+
+Since Concourse is built on top of Docker, it's easy to:
+
+* Debug what went wrong on your local machine.
+* Test out a tweak to your `test.sh` script before sending it out.
+* Experiment against a new image before submitting your pull request.
+
+All of these use cases are great reasons to essentially run what Concourse does on your local machine.
+
+IMPORTANT: To do this you must have Docker installed on your machine.
+
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
++
+This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
++
+Next, run the `test.sh` script from inside the container:
++
+2. `PROFILE=none spring-data-mongodb-github/ci/test.sh`
+
+Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.
+
+If you need to test the `build.sh` script, do this:
+
+1. `mkdir /tmp/spring-data-mongodb-artifactory`
+2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
++
+This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
+artifactory output directory at `spring-data-mongodb-artifactory`.
++
+Next, run the `build.sh` script from inside the container:
++
+3. `spring-data-mongodb-github/ci/build.sh`
+
+IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything.
+It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
+and deliver them to Artifactory.
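+
+To sanity-check what `build.sh` produced, you can list the staged artifacts from the host. The path below is just
+the bind-mount source used in the `docker run` command above, so adjust it if you chose a different directory:
+
+[source,bash]
+----
+# Lists the jars staged for the artifactory-resource to pick up
+$ find /tmp/spring-data-mongodb-artifactory -name '*.jar' | head
+----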
+
+NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
diff --git a/ci/openjdk17-mongodb-6.0/Dockerfile b/ci/openjdk17-mongodb-6.0/Dockerfile
new file mode 100644
index 0000000000..fd2580e23a
--- /dev/null
+++ b/ci/openjdk17-mongodb-6.0/Dockerfile
@@ -0,0 +1,25 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+ENV MONGO_VERSION=${MONGODB}
+
+RUN set -eux; \
+	sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+	sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+	sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+	sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+	apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+	# MongoDB 6.0 release signing key
+	wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
+	# Needed when MongoDB creates a 6.0 folder.
+	echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
+	echo ${TZ} > /etc/timezone
+
+RUN apt-get update && \
+	apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+	apt-get clean && \
+	rm -rf /var/lib/apt/lists/*
diff --git a/ci/openjdk17-mongodb-7.0/Dockerfile b/ci/openjdk17-mongodb-7.0/Dockerfile
new file mode 100644
index 0000000000..5701ab9fbc
--- /dev/null
+++ b/ci/openjdk17-mongodb-7.0/Dockerfile
@@ -0,0 +1,25 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+ENV MONGO_VERSION=${MONGODB}
+
+RUN set -eux; \
+	sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+	sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+	sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+	sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+	apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+	# MongoDB 7.0 release signing key
+	wget -qO - https://www.mongodb.org/static/pgp/server-7.0.asc | apt-key add - && \
+	# Needed when MongoDB creates a 7.0 folder.
+	echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/7.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-7.0.list && \
+	echo ${TZ} > /etc/timezone
+
+RUN apt-get update && \
+	apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+	apt-get clean && \
+	rm -rf /var/lib/apt/lists/*
diff --git a/ci/openjdk23-mongodb-8.0/Dockerfile b/ci/openjdk23-mongodb-8.0/Dockerfile
new file mode 100644
index 0000000000..0cb80001bf
--- /dev/null
+++ b/ci/openjdk23-mongodb-8.0/Dockerfile
@@ -0,0 +1,25 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+ENV MONGO_VERSION=${MONGODB}
+
+RUN set -eux; \
+	sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+	sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+	sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+	sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+	apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+	# MongoDB 8.0 release signing key
+	wget -qO - https://www.mongodb.org/static/pgp/server-8.0.asc | apt-key add - && \
+	# Needed when MongoDB creates an 8.0 folder.
+	echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu noble/mongodb-org/8.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-8.0.list && \
+	echo ${TZ} > /etc/timezone
+
+RUN apt-get update && \
+	apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+	apt-get clean && \
+	rm -rf /var/lib/apt/lists/*
diff --git a/ci/pipeline.properties b/ci/pipeline.properties
new file mode 100644
index 0000000000..9eb163fde7
--- /dev/null
+++ b/ci/pipeline.properties
@@ -0,0 +1,31 @@
+# Java versions
+java.main.tag=17.0.13_11-jdk-focal
+java.next.tag=23.0.1_11-jdk-noble
+
+# Docker container images - standard
+docker.java.main.image=library/eclipse-temurin:${java.main.tag}
+docker.java.next.image=library/eclipse-temurin:${java.next.tag}
+
+# Supported versions of MongoDB
+docker.mongodb.6.0.version=6.0.10
+docker.mongodb.7.0.version=7.0.2
+docker.mongodb.8.0.version=8.0.0
+
+# Supported versions of Redis
+docker.redis.6.version=6.2.13
+docker.redis.7.version=7.2.4
+
+# Docker environment settings
+docker.java.inside.basic=-v $HOME:/tmp/jenkins-home
+docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home
+
+# Credentials
+docker.registry=
+docker.credentials=hub.docker.com-springbuildmaster
+docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com
+docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token
+artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c
+artifactory.url=https://repo.spring.io
+artifactory.repository.snapshot=libs-snapshot-local
+develocity.access-key=gradle_enterprise_secret_access_key
+jenkins.user.name=spring-builds+jenkins
diff --git a/ci/start-replica.sh b/ci/start-replica.sh
new file mode 100755
index 0000000000..9124976f39
--- /dev/null
+++ b/ci/start-replica.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+mkdir -p /tmp/mongodb/db /tmp/mongodb/log
+mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &
+sleep 10
+mongosh --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});"
+sleep 15
diff --git a/etc/formatting.xml b/etc/formatting.xml
deleted file mode 100644
index b5515c19e2..0000000000
--- a/etc/formatting.xml
+++ /dev/null
@@ -1,291 +0,0 @@
-[291 lines of Eclipse formatter settings XML elided; the element content was not preserved in this extraction]
diff --git a/lombok.config b/lombok.config
new file mode 100644
index 0000000000..e50c7ea439
--- /dev/null
+++ b/lombok.config
@@ -0,0 +1,2 @@
+lombok.nonNull.exceptionType = IllegalArgumentException
+lombok.log.fieldName = LOG
diff --git a/mvnw b/mvnw
new file mode 100755
index 0000000000..8b9da3b8b6
--- /dev/null
+++ b/mvnw
@@ -0,0 +1,286 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven2 Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+# JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+# M2_HOME - location of maven2's installed home dir
+# MAVEN_OPTS - parameters passed to the Java VM when running Maven
+# e.g. to debug Maven itself, use
+# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+  if [ -f /etc/mavenrc ] ; then
+    . /etc/mavenrc
+  fi
+
+  if [ -f "$HOME/.mavenrc" ] ; then
+    . "$HOME/.mavenrc"
+  fi
+
+fi
+
+# OS specific support. $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+mingw=false
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  MINGW*) mingw=true;;
+  Darwin*) darwin=true
+    # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
+    # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
+    if [ -z "$JAVA_HOME" ]; then
+      if [ -x "/usr/libexec/java_home" ]; then
+        export JAVA_HOME="`/usr/libexec/java_home`"
+      else
+        export JAVA_HOME="/Library/Java/Home"
+      fi
+    fi
+    ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+  if [ -r /etc/gentoo-release ] ; then
+    JAVA_HOME=`java-config --jre-home`
+  fi
+fi
+
+if [ -z "$M2_HOME" ] ; then
+  ## resolve links - $0 may be a link to maven's home
+  PRG="$0"
+
+  # need this for relative symlinks
+  while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+      PRG="$link"
+    else
+      PRG="`dirname "$PRG"`/$link"
+    fi
+  done
+
+  saveddir=`pwd`
+
+  M2_HOME=`dirname "$PRG"`/..
+ + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + # TODO classpath? +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 
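+    # The default jarUrl below can be overridden by a wrapperUrl entry in
+    # .mvn/wrapper/maven-wrapper.properties, which the loop that follows reads;
+    # e.g. (illustrative mirror, not part of this repository):
+    #   wrapperUrl=https://mirror.example.com/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar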
+ fi + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + wget "$jarUrl" -O "$wrapperJarPath" + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + curl -o "$wrapperJarPath" "$jarUrl" + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100755 index 0000000000..fef5a8f7f9 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,161 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. 
See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven2 Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" +FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + echo Found %WRAPPER_JAR% +) else ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... + echo Downloading from: %DOWNLOAD_URL% + powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" + echo Finished downloading %WRAPPER_JAR% +) +@REM End of extension + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml index 3a9b4f5ad7..9f4b6bc897 100644 --- a/pom.xml +++ b/pom.xml @@ -1,36 +1,34 @@ - + 4.0.0 org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 4.5.0-SNAPSHOT pom Spring Data MongoDB MongoDB support for Spring Data - http://projects.spring.io/spring-data-mongodb + https://spring.io/projects/spring-data-mongodb org.springframework.data.build spring-data-parent - 1.9.0.BUILD-SNAPSHOT + 3.5.0-SNAPSHOT spring-data-mongodb - spring-data-mongodb-cross-store - spring-data-mongodb-log4j spring-data-mongodb-distribution multi spring-data-mongodb - 1.13.0.BUILD-SNAPSHOT - 2.14.0 - 2.13.0 + 3.5.0-SNAPSHOT + 5.4.0 + 1.19 @@ -39,7 +37,7 @@ Oliver Gierke ogierke at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Project Lead @@ -50,7 +48,7 @@ Thomas Risberg trisberg at vmware.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -61,7 +59,7 @@ Mark Pollack mpollack at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -72,7 +70,7 @@ Jon Brisbin jbrisbin at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -83,7 +81,7 @@ Thomas Darimont tdarimont at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -94,7 +92,18 @@ Christoph Strobl cstrobl at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io + + Developer + + +1 + + + mpaluch + Mark Paluch + mpaluch at pivotal.io + Pivotal + https://www.pivotal.io Developer @@ -102,118 +111,66 @@ - - - - mongo-next - - 2.15.0-SNAPSHOT - - - - - mongo-snapshots - https://oss.sonatype.org/content/repositories/snapshots - - - - - - + + 
scm:git:https://github.com/spring-projects/spring-data-mongodb.git + scm:git:git@github.com:spring-projects/spring-data-mongodb.git + https://github.com/spring-projects/spring-data-mongodb + - mongo3 - - 3.0.4 - - - + + GitHub + https://github.com/spring-projects/spring-data-mongodb/issues + + - - mongo3-next - - 3.0.5-SNAPSHOT - - + jmh - mongo-snapshots - https://oss.sonatype.org/content/repositories/snapshots + jitpack.io + https://jitpack.io - - - - - - mongo31 - - 3.1.1 - - - - - - - mongo32 - - 3.2.2 - - - - - mongo33-next + mongo-4.x - 3.3.0-SNAPSHOT + 4.11.1 + 1.8.0 - - - - mongo-snapshots - https://oss.sonatype.org/content/repositories/snapshots - - - - - - - release - - - - org.jfrog.buildinfo - artifactory-maven-plugin - false - - - - - - - org.mongodb - mongo-java-driver - ${mongo} - - + + + + + org.mongodb + mongodb-driver-bom + ${mongo} + pom + import + + + + - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-snapshot + https://repo.spring.io/snapshot + + true + + + false + + + + spring-milestone + https://repo.spring.io/milestone - - - spring-plugins-release - https://repo.spring.io/plugins-release - - - diff --git a/settings.xml b/settings.xml new file mode 100644 index 0000000000..b3227cc110 --- /dev/null +++ b/settings.xml @@ -0,0 +1,29 @@ + + + + + spring-plugins-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-snapshot + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-milestone + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + + \ No newline at end of file diff --git a/spring-data-mongodb-cross-store/aop.xml b/spring-data-mongodb-cross-store/aop.xml deleted file mode 100644 index d11b1549e8..0000000000 --- a/spring-data-mongodb-cross-store/aop.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml deleted file mode 100644 index 6554045e11..0000000000 --- a/spring-data-mongodb-cross-store/pom.xml +++ /dev/null @@ -1,139 +0,0 @@ - - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-cross-store - Spring Data MongoDB - Cross-Store Support - - - 2.0.0 - 3.6.10.Final - - - - - - - org.springframework - spring-beans - - - commons-logging - commons-logging - - - - - org.springframework - spring-tx - - - org.springframework - spring-aspects - - - org.springframework - spring-orm - - - - - org.springframework.data - spring-data-mongodb - 1.10.0.BUILD-SNAPSHOT - - - - org.aspectj - aspectjrt - ${aspectj} - - - - - org.eclipse.persistence - javax.persistence - ${jpa} - true - - - - - org.hibernate - hibernate-entitymanager - ${hibernate} - test - - - hsqldb - hsqldb - 1.8.0.10 - test - - - javax.validation - validation-api - 1.0.0.GA - test - - - org.hibernate - hibernate-validator - 4.0.2.GA - test - - - - - - - - org.codehaus.mojo - aspectj-maven-plugin - 1.6 - - - org.aspectj - aspectjrt - ${aspectj} - - - org.aspectj - aspectjtools - ${aspectj} - - - - - - compile - test-compile - - - - - true - - - org.springframework - spring-aspects - - - ${source.level} - ${source.level} - ${source.level} - aop.xml - - - - - - diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java 
b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java deleted file mode 100644 index e1c0c358d2..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright 2011-2016 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore; - -import javax.persistence.EntityManagerFactory; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.data.crossstore.ChangeSet; -import org.springframework.data.crossstore.ChangeSetBacked; -import org.springframework.data.crossstore.ChangeSetPersister; -import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.util.ClassUtils; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.MongoException; - -/** - * @author Thomas Risberg - * @author Oliver Gierke - * @author Alex Vengrovsk - * @author Mark Paluch - */ -public class MongoChangeSetPersister implements ChangeSetPersister { - - private static final String ENTITY_CLASS = "_entity_class"; - private static final String ENTITY_ID = "_entity_id"; - private static final String ENTITY_FIELD_NAME = "_entity_field_name"; - private static final String ENTITY_FIELD_CLASS = "_entity_field_class"; - - private final Logger log = LoggerFactory.getLogger(getClass()); - - private MongoTemplate mongoTemplate; - private EntityManagerFactory entityManagerFactory; - - public void setMongoTemplate(MongoTemplate mongoTemplate) { - this.mongoTemplate = mongoTemplate; - } - - public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { - this.entityManagerFactory = entityManagerFactory; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet) - */ - public void getPersistentState(Class entityClass, Object id, final ChangeSet changeSet) - throws DataAccessException, NotFoundException { - - if (id == null) { - log.debug("Unable to load MongoDB data for null id"); - return; - } - - String collName = getCollectionNameForEntity(entityClass); - - final DBObject dbk = new BasicDBObject(); - dbk.put(ENTITY_ID, id); - dbk.put(ENTITY_CLASS, entityClass.getName()); - if (log.isDebugEnabled()) { - log.debug("Loading MongoDB data for {}", dbk); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - for (DBObject dbo : 
collection.find(dbk)) { - String key = (String) dbo.get(ENTITY_FIELD_NAME); - if (log.isDebugEnabled()) { - log.debug("Processing key: {}", key); - } - if (!changeSet.getValues().containsKey(key)) { - String className = (String) dbo.get(ENTITY_FIELD_CLASS); - if (className == null) { - throw new DataIntegrityViolationException( - "Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available"); - } - Class clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader()); - Object value = mongoTemplate.getConverter().read(clazz, dbo); - if (log.isDebugEnabled()) { - log.debug("Adding to ChangeSet: {}", key); - } - changeSet.set(key, value); - } - } - return null; - } - }); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet) - */ - public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException { - if (log.isDebugEnabled()) { - log.debug("getPersistentId called on {}", entity); - } - if (entityManagerFactory == null) { - throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null"); - } - - return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet) - */ - public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException { - if (cs == null) { - log.debug("Flush: changeset was null, nothing to flush."); - return 0L; - } - - if (log.isDebugEnabled()) { - log.debug("Flush: changeset: {}", cs.getValues()); - } - - String collName = getCollectionNameForEntity(entity.getClass()); - if (mongoTemplate.getCollection(collName) == null) { - mongoTemplate.createCollection(collName); - } - - for (String key : cs.getValues().keySet()) { - if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) { - Object value = cs.getValues().get(key); - final DBObject dbQuery = new BasicDBObject(); - dbQuery.put(ENTITY_ID, getPersistentId(entity, cs)); - dbQuery.put(ENTITY_CLASS, entity.getClass().getName()); - dbQuery.put(ENTITY_FIELD_NAME, key); - DBObject dbId = mongoTemplate.execute(collName, new CollectionCallback() { - public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException { - return collection.findOne(dbQuery); - } - }); - if (value == null) { - if (log.isDebugEnabled()) { - log.debug("Flush: removing: {}", dbQuery); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - collection.remove(dbQuery); - return null; - } - }); - } else { - final DBObject dbDoc = new BasicDBObject(); - dbDoc.putAll(dbQuery); - if (log.isDebugEnabled()) { - log.debug("Flush: saving: {}", dbQuery); - } - mongoTemplate.getConverter().write(value, dbDoc); - dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName()); - if (dbId != null) { - dbDoc.put("_id", dbId.get("_id")); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - collection.save(dbDoc); - return null; - } - }); - } - } - } - return 0L; - } - - /** - * Returns the collection the 
given entity type shall be persisted to. - * - * @param entityClass must not be {@literal null}. - * @return - */ - private String getCollectionNameForEntity(Class entityClass) { - return mongoTemplate.getCollectionName(entityClass); - } -} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj deleted file mode 100644 index 1a0a101c63..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore; - -import java.lang.reflect.Field; - -import javax.persistence.EntityManager; -import javax.persistence.Transient; -import javax.persistence.Entity; - -import org.aspectj.lang.JoinPoint; -import org.aspectj.lang.reflect.FieldSignature; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.crossstore.RelatedDocument; -import org.springframework.data.mongodb.crossstore.DocumentBacked; -import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization; -import org.springframework.data.crossstore.ChangeSet; -import org.springframework.data.crossstore.ChangeSetPersister; -import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException; -import org.springframework.data.crossstore.HashMapChangeSet; -import org.springframework.transaction.support.TransactionSynchronizationManager; - -/** - * Aspect to turn an object annotated with @Document into a persistent document using Mongo. 
- * - * @author Thomas Risberg - */ -public aspect MongoDocumentBacking { - - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class); - - // Aspect shared config - private ChangeSetPersister changeSetPersister; - - public void setChangeSetPersister(ChangeSetPersister changeSetPersister) { - this.changeSetPersister = changeSetPersister; - } - - // ITD to introduce N state to Annotated objects - declare parents : (@Entity *) implements DocumentBacked; - - // The annotated fields that will be persisted in MongoDB rather than with JPA - declare @field: @RelatedDocument * (@Entity+ *).*:@Transient; - - // ------------------------------------------------------------------------- - // Advise user-defined constructors of ChangeSetBacked objects to create a new - // backing ChangeSet - // ------------------------------------------------------------------------- - pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) : - execution((DocumentBacked+).new(..)) && - !execution((DocumentBacked+).new(ChangeSet)) && - this(entity); - - pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) : - execution((DocumentBacked+).new(ChangeSet)) && - this(entity) && - args(cs); - - protected pointcut entityFieldGet(DocumentBacked entity) : - get(@RelatedDocument * DocumentBacked+.*) && - this(entity) && - !get(* DocumentBacked.*); - - protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) : - set(@RelatedDocument * DocumentBacked+.*) && - this(entity) && - args(newVal) && - !set(* DocumentBacked.*); - - // intercept EntityManager.merge calls - public pointcut entityManagerMerge(EntityManager em, Object entity) : - call(* EntityManager.merge(Object)) && - target(em) && - args(entity); - - // intercept EntityManager.remove calls - // public pointcut entityManagerRemove(EntityManager em, Object entity) : - // call(* EntityManager.remove(Object)) && - // target(em) && - // args(entity); - - // move changeSet from detached entity to the newly merged persistent object - Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) { - Object mergedEntity = proceed(em, entity); - if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) { - ((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet(); - } - return mergedEntity; - } - - // clear changeSet from removed entity - // Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) { - // if (entity instanceof DocumentBacked) { - // removeChangeSetValues((DocumentBacked)entity); - // } - // return proceed(em, entity); - // } - - private static void removeChangeSetValues(DocumentBacked entity) { - LOGGER.debug("Removing all change-set values for " + entity); - ChangeSet nulledCs = new HashMapChangeSet(); - DocumentBacked documentEntity = (DocumentBacked) entity; - @SuppressWarnings("unchecked") - ChangeSetPersister changeSetPersister = (ChangeSetPersister) documentEntity.itdChangeSetPersister; - try { - changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(), - documentEntity.getChangeSet()); - } catch (DataAccessException e) { - } catch (NotFoundException e) { - } - for (String key : entity.getChangeSet().getValues().keySet()) { - nulledCs.set(key, null); - } - entity.setChangeSet(nulledCs); - } - - before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) { - LOGGER.debug("User-defined constructor 
called on DocumentBacked object of class " + entity.getClass()); - // Populate all ITD fields - entity.setChangeSet(new HashMapChangeSet()); - entity.itdChangeSetPersister = changeSetPersister; - entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity); - // registerTransactionSynchronization(entity); - } - - private static void registerTransactionSynchronization(DocumentBacked entity) { - if (TransactionSynchronizationManager.isSynchronizationActive()) { - if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Adding transaction synchronization for " + entity); - } - TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization); - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Transaction synchronization already active for " + entity); - } - } - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Transaction synchronization is not active for " + entity); - } - } - } - - // ------------------------------------------------------------------------- - // ChangeSet-related mixins - // ------------------------------------------------------------------------- - // Introduced field - @Transient - private ChangeSet DocumentBacked.changeSet; - - @Transient - private ChangeSetPersister DocumentBacked.itdChangeSetPersister; - - @Transient - private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization; - - public void DocumentBacked.setChangeSet(ChangeSet cs) { - this.changeSet = cs; - } - - public ChangeSet DocumentBacked.getChangeSet() { - return changeSet; - } - - // Flush the entity state to the persistent store - public void DocumentBacked.flush() { - Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet); - itdChangeSetPersister.persistState(this, this.changeSet); - } - - public Object DocumentBacked.get_persistent_id() { - return itdChangeSetPersister.getPersistentId(this, this.changeSet); - } - - // lifecycle methods - @javax.persistence.PostPersist - public void DocumentBacked.itdPostPersist() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName()); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PreUpdate - public void DocumentBacked.itdPreUpdate() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PostUpdate - public void DocumentBacked.itdPostUpdate() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PostRemove - public void DocumentBacked.itdPostRemove() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - removeChangeSetValues(this); - } - - @javax.persistence.PostLoad - public void DocumentBacked.itdPostLoad() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - /** - * delegates field reads to the state accessors instance - */ - Object around(DocumentBacked entity): 
entityFieldGet(entity) { - Field f = field(thisJoinPoint); - String propName = f.getName(); - LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet()); - if (entity.getChangeSet().getValues().get(propName) == null) { - try { - this.changeSetPersister - .getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet()); - } catch (NotFoundException e) { - } - } - Object fValue = entity.getChangeSet().getValues().get(propName); - if (fValue != null) { - return fValue; - } - return proceed(entity); - } - - /** - * delegates field writes to the state accessors instance - */ - Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) { - Field f = field(thisJoinPoint); - String propName = f.getName(); - LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]"); - entity.getChangeSet().set(propName, newVal); - return proceed(entity, newVal); - } - - Field field(JoinPoint joinPoint) { - FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature(); - return fieldSignature.getField(); - } -} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java deleted file mode 100644 index 7209091339..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java +++ /dev/null @@ -1,5 +0,0 @@ -/** - * Infrastructure for Spring Data's MongoDB cross store support. - */ -package org.springframework.data.mongodb.crossstore; - diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java deleted file mode 100644 index 4f2451a15b..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.crossstore.test.Address; -import org.springframework.data.mongodb.crossstore.test.Person; -import org.springframework.data.mongodb.crossstore.test.Resume; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; - -import com.mongodb.DBObject; - -/** - * Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}). - * - * @author Thomas Risberg - * @author Oliver Gierke - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml") -public class CrossStoreMongoTests { - - @Autowired - MongoTemplate mongoTemplate; - - @PersistenceContext - EntityManager entityManager; - - @Autowired - PlatformTransactionManager transactionManager; - TransactionTemplate txTemplate; - - @Before - public void setUp() { - - txTemplate = new TransactionTemplate(transactionManager); - - clearData(Person.class); - - Address address = new Address(12, "MAin St.", "Boston", "MA", "02101"); - - Resume resume = new Resume(); - resume.addEducation("Skanstulls High School, 1975"); - resume.addEducation("Univ. 
of Stockholm, 1980"); - resume.addJob("DiMark, DBA, 1990-2000"); - resume.addJob("VMware, Developer, 2007-"); - - final Person person = new Person("Thomas", 20); - person.setAddress(address); - person.setResume(resume); - person.setId(1L); - - txTemplate.execute(new TransactionCallback() { - public Void doInTransaction(TransactionStatus status) { - entityManager.persist(person); - return null; - } - }); - } - - @After - public void tearDown() { - txTemplate.execute(new TransactionCallback() { - public Void doInTransaction(TransactionStatus status) { - entityManager.remove(entityManager.find(Person.class, 1L)); - return null; - } - }); - } - - private void clearData(Class domainType) { - - String collectionName = mongoTemplate.getCollectionName(domainType); - mongoTemplate.dropCollection(collectionName); - } - - @Test - @Transactional - public void testReadJpaToMongoEntityRelationship() { - - Person found = entityManager.find(Person.class, 1L); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found.getResume()); - Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs()); - } - - @Test - @Transactional - public void testUpdatedJpaToMongoEntityRelationship() { - - Person found = entityManager.find(Person.class, 1L); - found.setAge(44); - found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006"); - - entityManager.merge(found); - - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found.getResume()); - Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; " - + "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs()); - } - - @Test - public void testMergeJpaEntityWithMongoDocument() { - - final Person detached = entityManager.find(Person.class, 1L); - entityManager.detach(detached); - detached.getResume().addJob("TargetRx, Developer, 2000-2005"); - - Person merged = txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - Person result = entityManager.merge(detached); - entityManager.flush(); - return result; - } - }); - - Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - final Person updated = entityManager.find(Person.class, 1L); - Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - } - - @Test - public void testRemoveJpaEntityWithMongoDocument() { - - txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - Person p2 = new Person("Thomas", 20); - Resume r2 = new Resume(); - r2.addEducation("Skanstulls High School, 1975"); - r2.addJob("DiMark, DBA, 1990-2000"); - p2.setResume(r2); - p2.setId(2L); - entityManager.persist(p2); - Person p3 = new Person("Thomas", 20); - Resume r3 = new Resume(); - r3.addEducation("Univ. 
of Stockholm, 1980"); - r3.addJob("VMware, Developer, 2007-"); - p3.setResume(r3); - p3.setId(3L); - entityManager.persist(p3); - return null; - } - }); - txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - final Person found2 = entityManager.find(Person.class, 2L); - entityManager.remove(found2); - return null; - } - }); - - boolean weFound3 = false; - - for (DBObject dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) { - Assert.assertTrue(!dbo.get("_entity_id").equals(2L)); - if (dbo.get("_entity_id").equals(3L)) { - weFound3 = true; - } - } - Assert.assertTrue(weFound3); - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java deleted file mode 100644 index 072239d21d..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore.test; - -public class Address { - - private Integer streetNumber; - private String streetName; - private String city; - private String state; - private String zip; - - public Address(Integer streetNumber, String streetName, String city, String state, String zip) { - super(); - this.streetNumber = streetNumber; - this.streetName = streetName; - this.city = city; - this.state = state; - this.zip = zip; - } - - public Integer getStreetNumber() { - return streetNumber; - } - - public void setStreetNumber(Integer streetNumber) { - this.streetNumber = streetNumber; - } - - public String getStreetName() { - return streetName; - } - - public void setStreetName(String streetName) { - this.streetName = streetName; - } - - public String getCity() { - return city; - } - - public void setCity(String city) { - this.city = city; - } - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public String getZip() { - return zip; - } - - public void setZip(String zip) { - this.zip = zip; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java deleted file mode 100644 index a05ca31407..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore.test; - -import javax.persistence.Entity; -import javax.persistence.Id; - -import org.springframework.data.mongodb.crossstore.RelatedDocument; - -@Entity -public class Person { - - @Id - Long id; - - private String name; - - private int age; - - private java.util.Date birthDate; - - @RelatedDocument - private Address address; - - @RelatedDocument - private Resume resume; - - public Person() { - } - - public Person(String name, int age) { - this.name = name; - this.age = age; - this.birthDate = new java.util.Date(); - } - - public void birthday() { - ++age; - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public int getAge() { - return age; - } - - public void setAge(int age) { - this.age = age; - } - - public java.util.Date getBirthDate() { - return birthDate; - } - - public void setBirthDate(java.util.Date birthDate) { - this.birthDate = birthDate; - } - - public Resume getResume() { - return resume; - } - - public void setResume(Resume resume) { - this.resume = resume; - } - - public Address getAddress() { - return address; - } - - public void setAddress(Address address) { - this.address = address; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java deleted file mode 100644 index eb714bf3b5..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-package org.springframework.data.mongodb.crossstore.test;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.bson.types.ObjectId;
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Document;
-
-@Document
-public class Resume {
-
-	private static final Log LOGGER = LogFactory.getLog(Resume.class);
-
-	@Id
-	private ObjectId id;
-
-	private String education = "";
-
-	private String jobs = "";
-
-	public String getId() {
-		return id.toString();
-	}
-
-	public String getEducation() {
-		return education;
-	}
-
-	public void addEducation(String education) {
-		LOGGER.debug("Adding education " + education);
-		this.education = this.education + (this.education.length() > 0 ? "; " : "") + education;
-	}
-
-	public String getJobs() {
-		return jobs;
-	}
-
-	public void addJob(String job) {
-		LOGGER.debug("Adding job " + job);
-		this.jobs = this.jobs + (this.jobs.length() > 0 ? "; " : "") + job;
-	}
-
-	@Override
-	public String toString() {
-		return "Resume [education=" + education + ", jobs=" + jobs + "]";
-	}
-
-}
diff --git a/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml b/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml
deleted file mode 100644
index 878fff47ba..0000000000
--- a/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml
+++ /dev/null
@@ -1,15 +0,0 @@
[15 deleted lines: a JPA persistence.xml declaring a persistence unit with provider org.hibernate.ejb.HibernatePersistence and managed class org.springframework.data.mongodb.crossstore.test.Person; the XML markup did not survive extraction]
diff --git a/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml b/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml
deleted file mode 100644
index 9468296658..0000000000
--- a/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml
+++ /dev/null
@@ -1,72 +0,0 @@
[72 deleted lines of Spring XML application-context configuration for the cross-store tests; the XML markup did not survive extraction]
diff --git a/spring-data-mongodb-cross-store/src/test/resources/logback.xml b/spring-data-mongodb-cross-store/src/test/resources/logback.xml
deleted file mode 100644
index 5ecc71909e..0000000000
--- a/spring-data-mongodb-cross-store/src/test/resources/logback.xml
+++ /dev/null
@@ -1,18 +0,0 @@
[18 deleted lines: a logback.xml with a console appender using the pattern "%d %5p %40.40c:%4L - %m%n"; the XML markup did not survive extraction]
diff --git a/spring-data-mongodb-cross-store/template.mf b/spring-data-mongodb-cross-store/template.mf
deleted file mode 100644
index 8561ad900d..0000000000
--- a/spring-data-mongodb-cross-store/template.mf
+++ /dev/null
@@ -1,18 +0,0 @@
-Bundle-SymbolicName: org.springframework.data.mongodb.crossstore
-Bundle-Name: Spring Data MongoDB Cross Store Support
-Bundle-Vendor: Pivotal Software, Inc.
-Bundle-ManifestVersion: 2 -Import-Package: - sun.reflect;version="0";resolution:=optional -Export-Template: - org.springframework.data.mongodb.crossstore.*;version="${project.version}" -Import-Template: - com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}", - javax.persistence.*;version="${jpa:[=.=.=,+1.0.0)}", - org.aspectj.*;version="${aspectj:[1.0.0, 2.0.0)}", - org.bson.*;version="0", - org.slf4j.*;version="${slf4j:[=.=.=,+1.0.0)}", - org.springframework.*;version="${spring:[=.=.=.=,+1.0.0)}", - org.springframework.data.*;version="${springdata.commons:[=.=.=.=,+1.0.0)}", - org.springframework.data.mongodb.*;version="${project.version:[=.=.=.=,+1.0.0)}", - org.w3c.dom.*;version="0" diff --git a/spring-data-mongodb-distribution/package.json b/spring-data-mongodb-distribution/package.json new file mode 100644 index 0000000000..4689506b3f --- /dev/null +++ b/spring-data-mongodb-distribution/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "antora": "3.2.0-alpha.6", + "@antora/atlas-extension": "1.0.0-alpha.2", + "@antora/collector-extension": "1.0.0-alpha.7", + "@asciidoctor/tabs": "1.0.0-beta.6", + "@springio/antora-extensions": "1.13.0", + "@springio/asciidoctor-extensions": "1.0.0-alpha.11" + } +} diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 2d02722262..58c63dfc97 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -13,30 +15,62 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 4.5.0-SNAPSHOT ../pom.xml ${basedir}/.. - SDMONGO + ${project.basedir}/../src/main/antora/antora-playbook.yml + + + ${project.basedir}/../src/main/antora/resources/antora-resources + true + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.4.0 + + + timestamp-property + + timestamp-property + + validate + + current.year + yyyy + + + + org.apache.maven.plugins - maven-assembly-plugin + maven-resources-plugin + + + + resources + + + - org.codehaus.mojo - wagon-maven-plugin + org.apache.maven.plugins + maven-assembly-plugin - org.asciidoctor - asciidoctor-maven-plugin + org.antora + antora-maven-plugin + diff --git a/spring-data-mongodb-log4j/README.md b/spring-data-mongodb-log4j/README.md deleted file mode 100644 index ed4c96d3ad..0000000000 --- a/spring-data-mongodb-log4j/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# MongoDB Log4J Appender - -This module sets up a Log4J appender that puts logging events in MongoDB. It is fully configurable -and connects directly to the MongoDB server using the driver. It has no dependency on any Spring package. - -To use it, configure a host, port, (optionally) applicationId, and database property in your Log4J configuration: - - log4j.appender.stdout=org.springframework.data.mongodb.log4j.MongoLog4jAppender - log4j.appender.stdout.layout=org.apache.log4j.PatternLayout - log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - <%m>%n - log4j.appender.stdout.host = localhost - log4j.appender.stdout.port = 27017 - log4j.appender.stdout.database = logs - log4j.appender.stdout.collectionPattern = %c - log4j.appender.stdout.applicationId = my.application - log4j.appender.stdout.warnOrHigherWriteConcern = FSYNC_SAFE - -It will even support properties in your MDC (so long as they're Strings or support .toString()). - -The collection name is configurable as well. If you don't specify anything, it will use the Category name. 
-If you want to specify a collection name, you can give it a Log4J pattern layout format string which will have -the following additional MDC variables in the context when the collection name is rendered: - - "year" = Calendar.YEAR - "month" = Calendar.MONTH + 1 - "day" = Calendar.DAY_OF_MONTH - "hour" = Calendar.HOUR_OF_DAY - "applicationId" = configured applicationId - -An example log entry might look like: - - { - "_id" : ObjectId("4d89341a8ef397e06940d5cd"), - "applicationId" : "my.application", - "name" : "org.springframework.data.mongodb.log4j.MongoLog4jAppenderIntegrationTests", - "level" : "DEBUG", - "timestamp" : ISODate("2011-03-23T16:53:46.778Z"), - "properties" : { - "property" : "one" - }, - "message" : "DEBUG message" - } - -To set WriteConcern levels for WARN or higher messages, set warnOrHigherWriteConcern to one of the following: - -* FSYNC_SAFE -* NONE -* NORMAL -* REPLICAS_SAFE -* SAFE - -[http://api.mongodb.org/java/2.5-pre-/com/mongodb/WriteConcern.html#field_detail](http://api.mongodb.org/java/2.5-pre-/com/mongodb/WriteConcern.html#field_detail) \ No newline at end of file diff --git a/spring-data-mongodb-log4j/pom.xml b/spring-data-mongodb-log4j/pom.xml deleted file mode 100644 index ee5e3336db..0000000000 --- a/spring-data-mongodb-log4j/pom.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-log4j - Spring Data MongoDB - Log4J Appender - - - 1.2.16 - - - - - - - log4j - log4j - ${log4j} - - - - - diff --git a/spring-data-mongodb-log4j/src/main/java/org/springframework/data/mongodb/log4j/MongoLog4jAppender.java b/spring-data-mongodb-log4j/src/main/java/org/springframework/data/mongodb/log4j/MongoLog4jAppender.java deleted file mode 100644 index 10883f5835..0000000000 --- a/spring-data-mongodb-log4j/src/main/java/org/springframework/data/mongodb/log4j/MongoLog4jAppender.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.log4j; - -import java.net.UnknownHostException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Map; - -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.Level; -import org.apache.log4j.MDC; -import org.apache.log4j.PatternLayout; -import org.apache.log4j.spi.LoggingEvent; - -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.WriteConcern; - -/** - * Log4j appender writing log entries into a MongoDB instance. 
- * - * @author Jon Brisbin - * @author Oliver Gierke - */ -public class MongoLog4jAppender extends AppenderSkeleton { - - public static final String LEVEL = "level"; - public static final String NAME = "name"; - public static final String APP_ID = "applicationId"; - public static final String TIMESTAMP = "timestamp"; - public static final String PROPERTIES = "properties"; - public static final String TRACEBACK = "traceback"; - public static final String MESSAGE = "message"; - public static final String YEAR = "year"; - public static final String MONTH = "month"; - public static final String DAY = "day"; - public static final String HOUR = "hour"; - - protected String host = "localhost"; - protected int port = 27017; - protected String database = "logs"; - protected String collectionPattern = "%c"; - protected PatternLayout collectionLayout = new PatternLayout(collectionPattern); - protected String applicationId = System.getProperty("APPLICATION_ID", null); - protected WriteConcern warnOrHigherWriteConcern = WriteConcern.SAFE; - protected WriteConcern infoOrLowerWriteConcern = WriteConcern.NORMAL; - protected Mongo mongo; - protected DB db; - - public MongoLog4jAppender() { - } - - public MongoLog4jAppender(boolean isActive) { - super(isActive); - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public String getDatabase() { - return database; - } - - public void setDatabase(String database) { - this.database = database; - } - - public String getCollectionPattern() { - return collectionPattern; - } - - public void setCollectionPattern(String collectionPattern) { - this.collectionPattern = collectionPattern; - this.collectionLayout = new PatternLayout(collectionPattern); - } - - public String getApplicationId() { - return applicationId; - } - - public void setApplicationId(String applicationId) { - this.applicationId = applicationId; - } - - public void setWarnOrHigherWriteConcern(String wc) { - this.warnOrHigherWriteConcern = WriteConcern.valueOf(wc); - } - - public String getWarnOrHigherWriteConcern() { - return warnOrHigherWriteConcern.toString(); - } - - public String getInfoOrLowerWriteConcern() { - return infoOrLowerWriteConcern.toString(); - } - - public void setInfoOrLowerWriteConcern(String wc) { - this.infoOrLowerWriteConcern = WriteConcern.valueOf(wc); - } - - protected void connectToMongo() throws UnknownHostException { - this.mongo = new Mongo(host, port); - this.db = mongo.getDB(database); - } - - /* - * (non-Javadoc) - * @see org.apache.log4j.AppenderSkeleton#append(org.apache.log4j.spi.LoggingEvent) - */ - @Override - @SuppressWarnings({ "unchecked" }) - protected void append(final LoggingEvent event) { - if (null == db) { - try { - connectToMongo(); - } catch (UnknownHostException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - BasicDBObject dbo = new BasicDBObject(); - if (null != applicationId) { - dbo.put(APP_ID, applicationId); - MDC.put(APP_ID, applicationId); - } - dbo.put(NAME, event.getLogger().getName()); - dbo.put(LEVEL, event.getLevel().toString()); - Calendar tstamp = Calendar.getInstance(); - tstamp.setTimeInMillis(event.getTimeStamp()); - dbo.put(TIMESTAMP, tstamp.getTime()); - - // Copy properties into document - Map props = event.getProperties(); - if (null != props && !props.isEmpty()) { - BasicDBObject propsDbo = new BasicDBObject(); - for (Map.Entry entry : 
props.entrySet()) { - propsDbo.put(entry.getKey().toString(), entry.getValue().toString()); - } - dbo.put(PROPERTIES, propsDbo); - } - - // Copy traceback info (if there is any) into the document - String[] traceback = event.getThrowableStrRep(); - if (null != traceback && traceback.length > 0) { - BasicDBList tbDbo = new BasicDBList(); - tbDbo.addAll(Arrays.asList(traceback)); - dbo.put(TRACEBACK, tbDbo); - } - - // Put the rendered message into the document - dbo.put(MESSAGE, event.getRenderedMessage()); - - // Insert the document - Calendar now = Calendar.getInstance(); - MDC.put(YEAR, now.get(Calendar.YEAR)); - MDC.put(MONTH, String.format("%1$02d", now.get(Calendar.MONTH) + 1)); - MDC.put(DAY, String.format("%1$02d", now.get(Calendar.DAY_OF_MONTH))); - MDC.put(HOUR, String.format("%1$02d", now.get(Calendar.HOUR_OF_DAY))); - - String coll = collectionLayout.format(event); - - MDC.remove(YEAR); - MDC.remove(MONTH); - MDC.remove(DAY); - MDC.remove(HOUR); - if (null != applicationId) { - MDC.remove(APP_ID); - } - - WriteConcern wc; - if (event.getLevel().isGreaterOrEqual(Level.WARN)) { - wc = warnOrHigherWriteConcern; - } else { - wc = infoOrLowerWriteConcern; - } - db.getCollection(coll).insert(dbo, wc); - } - - /* - * (non-Javadoc) - * @see org.apache.log4j.AppenderSkeleton#close() - */ - public void close() { - - if (mongo != null) { - mongo.close(); - } - } - - /* - * (non-Javadoc) - * @see org.apache.log4j.AppenderSkeleton#requiresLayout() - */ - public boolean requiresLayout() { - return true; - } -} diff --git a/spring-data-mongodb-log4j/src/main/java/org/springframework/data/mongodb/log4j/package-info.java b/spring-data-mongodb-log4j/src/main/java/org/springframework/data/mongodb/log4j/package-info.java deleted file mode 100644 index 06a59f17f3..0000000000 --- a/spring-data-mongodb-log4j/src/main/java/org/springframework/data/mongodb/log4j/package-info.java +++ /dev/null @@ -1,5 +0,0 @@ -/** - * Infrastructure for to use MongoDB as a logging sink. - */ -package org.springframework.data.mongodb.log4j; - diff --git a/spring-data-mongodb-log4j/src/test/java/org/springframework/data/mongodb/log4j/MongoLog4jAppenderIntegrationTests.java b/spring-data-mongodb-log4j/src/test/java/org/springframework/data/mongodb/log4j/MongoLog4jAppenderIntegrationTests.java deleted file mode 100644 index e8109d093b..0000000000 --- a/spring-data-mongodb-log4j/src/test/java/org/springframework/data/mongodb/log4j/MongoLog4jAppenderIntegrationTests.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.log4j; - -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; - -import java.util.Calendar; - -import org.apache.log4j.Logger; -import org.apache.log4j.MDC; -import org.junit.Before; -import org.junit.Test; - -import com.mongodb.DB; -import com.mongodb.DBCursor; -import com.mongodb.Mongo; - -/** - * Integration tests for {@link MongoLog4jAppender}. - * - * @author Jon Brisbin - * @author Oliver Gierke - */ -public class MongoLog4jAppenderIntegrationTests { - - static final String NAME = MongoLog4jAppenderIntegrationTests.class.getName(); - - private static final Logger log = Logger.getLogger(NAME); - Mongo mongo; - DB db; - String collection; - - @Before - public void setUp() throws Exception { - - mongo = new Mongo("localhost", 27017); - db = mongo.getDB("logs"); - - Calendar now = Calendar.getInstance(); - collection = String.valueOf(now.get(Calendar.YEAR)) + String.format("%1$02d", now.get(Calendar.MONTH) + 1); - db.getCollection(collection).drop(); - } - - @Test - public void testLogging() { - - log.debug("DEBUG message"); - log.info("INFO message"); - log.warn("WARN message"); - log.error("ERROR message"); - - DBCursor msgs = db.getCollection(collection).find(); - - assertThat(msgs.count(), is(4)); - } - - @Test - public void testProperties() { - - MDC.put("property", "one"); - log.debug("DEBUG message"); - } -} diff --git a/spring-data-mongodb-log4j/src/test/resources/log4j.properties b/spring-data-mongodb-log4j/src/test/resources/log4j.properties deleted file mode 100644 index 88459b3ffa..0000000000 --- a/spring-data-mongodb-log4j/src/test/resources/log4j.properties +++ /dev/null @@ -1,13 +0,0 @@ -log4j.rootCategory=INFO, stdout - -log4j.appender.stdout=org.springframework.data.mongodb.log4j.MongoLog4jAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - <%m>%n -log4j.appender.stdout.host = localhost -log4j.appender.stdout.port = 27017 -log4j.appender.stdout.database = logs -log4j.appender.stdout.collectionPattern = %X{year}%X{month} -log4j.appender.stdout.applicationId = my.application -log4j.appender.stdout.warnOrHigherWriteConcern = FSYNC_SAFE - -log4j.category.org.springframework.data.mongodb=DEBUG diff --git a/spring-data-mongodb-log4j/template.mf b/spring-data-mongodb-log4j/template.mf deleted file mode 100644 index e7d182b113..0000000000 --- a/spring-data-mongodb-log4j/template.mf +++ /dev/null @@ -1,9 +0,0 @@ -Bundle-SymbolicName: org.springframework.data.mongodb.log4j -Bundle-Name: Spring Data Mongo DB Log4J Appender -Bundle-Vendor: Pivotal Software, Inc. -Bundle-ManifestVersion: 2 -Import-Package: - sun.reflect;version="0";resolution:=optional -Import-Template: - com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}", - org.apache.log4j.*;version="${log4j:[=.=.=,+1.0.0)}" diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index edfa519fad..b842a2def3 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -11,18 +13,44 @@ org.springframework.data spring-data-mongodb-parent - 1.10.0.BUILD-SNAPSHOT + 4.5.0-SNAPSHOT ../pom.xml - 1.0.0.GA 1.3 - 1.5 + 1.7.8 + spring.data.mongodb + ${basedir}/.. 
+ 1.01 + + + org.mongodb + mongodb-driver-core + + + + org.mongodb + mongodb-driver-sync + true + + + + org.mongodb + mongodb-driver-reactivestreams + true + + + + org.mongodb + mongodb-crypt + true + + org.springframework @@ -63,6 +91,12 @@ querydsl-mongodb ${querydsl} true + + + org.mongodb + mongo-java-driver + + @@ -79,40 +113,74 @@ true - - javax.enterprise - cdi-api - ${cdi} - provided + com.google.code.findbugs + jsr305 + 3.0.2 true - javax.el - el-api - ${cdi} + io.projectreactor + reactor-core + true + + + + io.projectreactor + reactor-test + true + + + + org.awaitility + awaitility + ${awaitility} test - org.apache.openwebbeans.test - cditest-owb - ${webbeans} + io.reactivex.rxjava3 + rxjava + ${rxjava3} + true + + + + + + javax.interceptor + javax.interceptor-api + 1.2.1 + test + + + + jakarta.enterprise + jakarta.enterprise.cdi-api + ${cdi} + provided + true + + + + jakarta.annotation + jakarta.annotation-api + ${jakarta-annotation-api} test - javax.servlet - servlet-api - 2.5 + org.apache.openwebbeans + openwebbeans-se + ${webbeans} test - javax.validation - validation-api + jakarta.validation + jakarta.validation-api ${validation} true @@ -125,38 +193,44 @@ - org.hibernate - hibernate-validator - 4.2.0.Final - test + io.micrometer + micrometer-observation + true - joda-time - joda-time - ${jodatime} + io.micrometer + micrometer-tracing + true + + + + org.hibernate.validator + hibernate-validator + 7.0.1.Final test - org.threeten - threetenbp - ${threetenbp} + jakarta.el + jakarta.el-api + 4.0.0 + provided true - com.fasterxml.jackson.core - jackson-databind - ${jackson} + org.glassfish + jakarta.el + 4.0.2 + provided true - org.slf4j - jul-to-slf4j - ${slf4j} - test + com.fasterxml.jackson.core + jackson-databind + true @@ -172,11 +246,137 @@ test + + de.schauderhaft.degraph + degraph-check + 0.1.4 + test + + + + edu.umd.cs.mtc + multithreadedtc + ${multithreadedtc} + test + + + + org.junit-pioneer + junit-pioneer + 0.5.3 + test + + + + org.junit.platform + junit-platform-launcher + test + + + + org.testcontainers + junit-jupiter + ${testcontainers} + test + + + + org.testcontainers + mongodb + ${testcontainers} + test + + + + jakarta.transaction + jakarta.transaction-api + 2.0.0 + test + + + + + org.jetbrains.kotlin + kotlin-stdlib + true + + + + org.jetbrains.kotlin + kotlin-reflect + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-core + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-reactor + true + + + + io.mockk + mockk-jvm + ${mockk} + test + + + + io.micrometer + micrometer-test + test + + + com.github.tomakehurst + wiremock-jre8-standalone + + + + + io.micrometer + micrometer-tracing-test + test + + + + io.micrometer + micrometer-tracing-integration-test + test + + + + + org.jmolecules + jmolecules-ddd + ${jmolecules} + test + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh} + + + + + com.mysema.maven apt-maven-plugin @@ -195,8 +395,11 @@ test-process - target/generated-test-sources - org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + target/generated-test-sources + + + org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + @@ -205,26 +408,28 @@ org.apache.maven.plugins maven-surefire-plugin - 2.12 + false false **/*Tests.java **/PerformanceTests.java + **/ReactivePerformanceTests.java - src/test/resources/logging.properties + ${mongo} + ${env.MONGO_VERSION} + + src/test/resources/logging.properties + + true - - - listener - 
org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener - - + + diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java new file mode 100644 index 0000000000..3b0c72cc0b --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java @@ -0,0 +1,181 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.TearDown; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +public class ProjectionsBenchmark extends AbstractMicrobenchmark { + + private static final String DB_NAME = "projections-benchmark"; + private static final String COLLECTION_NAME = "projections"; + + private MongoTemplate template; + private MongoClient client; + private MongoCollection mongoCollection; + + private Person source; + + private FindWithQuery asPerson; + private FindWithQuery asDtoProjection; + private FindWithQuery asClosedProjection; + private FindWithQuery asOpenProjection; + + private TerminatingFind asPersonWithFieldsRestriction; + private Document fields = new Document("firstname", 1); + + @Setup + public void setUp() { + + client = MongoClients.create(); + template = new MongoTemplate(client, DB_NAME); + + source = new Person(); + source.firstname = "luke"; + source.lastname = "skywalker"; + + source.address = new Address(); + source.address.street = "melenium falcon 1"; + source.address.city = "deathstar"; + + template.save(source, COLLECTION_NAME); + + asPerson = template.query(Person.class).inCollection(COLLECTION_NAME); + asDtoProjection = template.query(Person.class).inCollection(COLLECTION_NAME).as(DtoProjection.class); + asClosedProjection = template.query(Person.class).inCollection(COLLECTION_NAME).as(ClosedProjection.class); + asOpenProjection = template.query(Person.class).inCollection(COLLECTION_NAME).as(OpenProjection.class); + + asPersonWithFieldsRestriction = template.query(Person.class).inCollection(COLLECTION_NAME) + .matching(new BasicQuery(new Document(), fields)); + + mongoCollection = 
client.getDatabase(DB_NAME).getCollection(COLLECTION_NAME); + } + + @TearDown + public void tearDown() { + + client.getDatabase(DB_NAME).drop(); + client.close(); + } + + /** + * Set the baseline for comparison by using the plain MongoDB java driver api without any additional fluff. + * + * @return + */ + @Benchmark // DATAMONGO-1733 + public Object baseline() { + return mongoCollection.find().first(); + } + + /** + * Read into the domain type including all fields. + * + * @return + */ + @Benchmark // DATAMONGO-1733 + public Object readIntoDomainType() { + return asPerson.all(); + } + + /** + * Read into the domain type but restrict query to only return one field. + * + * @return + */ + @Benchmark // DATAMONGO-1733 + public Object readIntoDomainTypeRestrictingToOneField() { + return asPersonWithFieldsRestriction.all(); + } + + /** + * Read into dto projection that only needs to map one field back. + * + * @return + */ + @Benchmark // DATAMONGO-1733 + public Object readIntoDtoProjectionWithOneField() { + return asDtoProjection.all(); + } + + /** + * Read into closed interface projection. + * + * @return + */ + @Benchmark // DATAMONGO-1733 + public Object readIntoClosedProjectionWithOneField() { + return asClosedProjection.all(); + } + + /** + * Read into an open projection backed by the mapped domain object. + * + * @return + */ + @Benchmark // DATAMONGO-1733 + public Object readIntoOpenProjection() { + return asOpenProjection.all(); + } + + static class Person { + + @Id String id; + String firstname; + String lastname; + Address address; + } + + static class Address { + + String city; + String street; + } + + static class DtoProjection { + + @Field("firstname") String name; + } + + static interface ClosedProjection { + + String getFirstname(); + } + + static interface OpenProjection { + + @Value("#{target.firstname}") + String name(); + } + +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java new file mode 100644 index 0000000000..53f64f2a50 --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java @@ -0,0 +1,148 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
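Outside the JMH harness, the projection variants benchmarked in ProjectionsBenchmark above all reduce to the same fluent `ExecutableFindOperation` calls used in its `setUp()`. A sketch of the closed-projection case (assuming the sample sits in the benchmark's package so the package-private types resolve):

```java
import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;

class ProjectionQuerySample {

	// Fetches Person documents but only materializes what ClosedProjection exposes,
	// mirroring the asClosedProjection setup in ProjectionsBenchmark above (sketch).
	List<ProjectionsBenchmark.ClosedProjection> firstnames(MongoTemplate template) {

		return template.query(ProjectionsBenchmark.Person.class) //
				.inCollection("projections") //
				.as(ProjectionsBenchmark.ClosedProjection.class) //
				.all();
	}
}
```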
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.types.ObjectId; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * @author Christoph Strobl + */ +@State(Scope.Benchmark) +public class DbRefMappingBenchmark extends AbstractMicrobenchmark { + + private static final String DB_NAME = "dbref-loading-benchmark"; + + private MongoClient client; + private MongoTemplate template; + + private Query queryObjectWithDBRef; + private Query queryObjectWithDBRefList; + + @Setup + public void setUp() throws Exception { + + client = MongoClients.create(); + template = new MongoTemplate(client, DB_NAME); + + List refObjects = new ArrayList<>(); + for (int i = 0; i < 1; i++) { + RefObject o = new RefObject(); + template.save(o); + refObjects.add(o); + } + + ObjectWithDBRef singleDBRef = new ObjectWithDBRef(); + singleDBRef.ref = refObjects.iterator().next(); + template.save(singleDBRef); + + ObjectWithDBRef multipleDBRefs = new ObjectWithDBRef(); + multipleDBRefs.refList = refObjects; + template.save(multipleDBRefs); + + queryObjectWithDBRef = query(where("id").is(singleDBRef.id)); + queryObjectWithDBRefList = query(where("id").is(multipleDBRefs.id)); + } + + @TearDown + public void tearDown() { + + client.getDatabase(DB_NAME).drop(); + client.close(); + } + + @Benchmark // DATAMONGO-1720 + public ObjectWithDBRef readSingleDbRef() { + return template.findOne(queryObjectWithDBRef, ObjectWithDBRef.class); + } + + @Benchmark // DATAMONGO-1720 + public ObjectWithDBRef readMultipleDbRefs() { + return template.findOne(queryObjectWithDBRefList, ObjectWithDBRef.class); + } + + static class ObjectWithDBRef { + + private @Id ObjectId id; + private @DBRef RefObject ref; + private @DBRef List refList; + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public RefObject getRef() { + return ref; + } + + public void setRef(RefObject ref) { + this.ref = ref; + } + + public List getRefList() { + return refList; + } + + public void setRefList(List refList) { + this.refList = refList; + } + } + + static class RefObject { + + private @Id String id; + private String someValue; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSomeValue() { + return someValue; + } + + public void setSomeValue(String someValue) { + this.someValue = someValue; + } + } +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java new file mode 100644 index 0000000000..00d2e7034a --- /dev/null +++ 
b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java @@ -0,0 +1,248 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.platform.commons.annotation.Testable; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; + +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * @author Christoph Strobl + */ +@State(Scope.Benchmark) +@Testable +public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { + + private static final String DB_NAME = "mapping-mongo-converter-benchmark"; + + private MongoClient client; + private MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private Document documentWith2Properties, documentWith2PropertiesAnd1Nested; + private Customer objectWith2PropertiesAnd1Nested; + + private Document documentWithFlatAndComplexPropertiesPlusListAndMap; + private SlightlyMoreComplexObject objectWithFlatAndComplexPropertiesPlusListAndMap; + + @Setup + public void setUp() throws Exception { + + client = MongoClients.create(); + + this.mappingContext = new MongoMappingContext(); + this.mappingContext.setInitialEntitySet(Collections.singleton(Customer.class)); + this.mappingContext.afterPropertiesSet(); + + DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoClientDatabaseFactory(client, DB_NAME)); + + this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); + this.converter.setCustomConversions(new MongoCustomConversions(Collections.emptyList())); + this.converter.afterPropertiesSet(); + + // just a flat document + this.documentWith2Properties = new Document("firstname", "Dave").append("lastname", "Matthews"); + + // document with a nested one + Document address = new Document("zipCode", "ABCDE").append("city", "Some Place"); + this.documentWith2PropertiesAnd1Nested = new Document("firstname", "Dave").// + append("lastname", "Matthews").// + append("address", address); + + // object equivalent of documentWith2PropertiesAnd1Nested + 
this.objectWith2PropertiesAnd1Nested = new Customer("Dave", "Matthews", new Address("zipCode", "City")); + + // a bit more challenging object with list & map conversion. + objectWithFlatAndComplexPropertiesPlusListAndMap = new SlightlyMoreComplexObject(); + objectWithFlatAndComplexPropertiesPlusListAndMap.id = UUID.randomUUID().toString(); + objectWithFlatAndComplexPropertiesPlusListAndMap.addressList = Arrays.asList(new Address("zip-1", "city-1"), + new Address("zip-2", "city-2")); + objectWithFlatAndComplexPropertiesPlusListAndMap.customer = objectWith2PropertiesAnd1Nested; + objectWithFlatAndComplexPropertiesPlusListAndMap.customerMap = new LinkedHashMap<>(); + objectWithFlatAndComplexPropertiesPlusListAndMap.customerMap.put("dave", objectWith2PropertiesAnd1Nested); + objectWithFlatAndComplexPropertiesPlusListAndMap.customerMap.put("deborah", + new Customer("Deborah Anne", "Dyer", new Address("?", "london"))); + objectWithFlatAndComplexPropertiesPlusListAndMap.customerMap.put("eddie", + new Customer("Eddie", "Vedder", new Address("??", "Seattle"))); + objectWithFlatAndComplexPropertiesPlusListAndMap.intOne = Integer.MIN_VALUE; + objectWithFlatAndComplexPropertiesPlusListAndMap.intTwo = Integer.MAX_VALUE; + objectWithFlatAndComplexPropertiesPlusListAndMap.location = new Point(-33.865143, 151.209900); + objectWithFlatAndComplexPropertiesPlusListAndMap.renamedField = "supercalifragilisticexpialidocious"; + objectWithFlatAndComplexPropertiesPlusListAndMap.stringOne = "¯\\_(ツ)_/¯"; + objectWithFlatAndComplexPropertiesPlusListAndMap.stringTwo = " (╯°□°)╯︵ ┻━┻"; + + // JSON equivalent of objectWithFlatAndComplexPropertiesPlusListAndMap + documentWithFlatAndComplexPropertiesPlusListAndMap = Document.parse( + "{ \"_id\" : \"517f6aee-e9e0-44f0-88ed-f3694a019f27\", \"intOne\" : -2147483648, \"intTwo\" : 2147483647, \"stringOne\" : \"¯\\\\_(ツ)_/¯\", \"stringTwo\" : \" (╯°□°)╯︵ ┻━┻\", \"explicit-field-name\" : \"supercalifragilisticexpialidocious\", \"location\" : { \"x\" : -33.865143, \"y\" : 151.2099 }, \"objectWith2PropertiesAnd1Nested\" : { \"firstname\" : \"Dave\", \"lastname\" : \"Matthews\", \"address\" : { \"zipCode\" : \"zipCode\", \"city\" : \"City\" } }, \"addressList\" : [{ \"zipCode\" : \"zip-1\", \"city\" : \"city-1\" }, { \"zipCode\" : \"zip-2\", \"city\" : \"city-2\" }], \"customerMap\" : { \"dave\" : { \"firstname\" : \"Dave\", \"lastname\" : \"Matthews\", \"address\" : { \"zipCode\" : \"zipCode\", \"city\" : \"City\" } }, \"deborah\" : { \"firstname\" : \"Deborah Anne\", \"lastname\" : \"Dyer\", \"address\" : { \"zipCode\" : \"?\", \"city\" : \"london\" } }, \"eddie\" : { \"firstname\" : \"Eddie\", \"lastname\" : \"Vedder\", \"address\" : { \"zipCode\" : \"??\", \"city\" : \"Seattle\" } } }, \"_class\" : \"org.springframework.data.mongodb.core.convert.MappingMongoConverterBenchmark$SlightlyMoreComplexObject\" }"); + + } + + @TearDown + public void tearDown() { + + client.getDatabase(DB_NAME).drop(); + client.close(); + } + + @Benchmark // DATAMONGO-1720 + public Customer readObjectWith2Properties() { + return converter.read(Customer.class, documentWith2Properties); + } + + @Benchmark // DATAMONGO-1720 + public Customer readObjectWith2PropertiesAnd1NestedObject() { + return converter.read(Customer.class, documentWith2PropertiesAnd1Nested); + } + + @Benchmark // DATAMONGO-1720 + public Document writeObjectWith2PropertiesAnd1NestedObject() { + + Document sink = new Document(); + converter.write(objectWith2PropertiesAnd1Nested, sink); + return sink; + } + + @Benchmark // DATAMONGO-1720 
+ public Object readObjectWithListAndMapsOfComplexType() { + return converter.read(SlightlyMoreComplexObject.class, documentWithFlatAndComplexPropertiesPlusListAndMap); + } + + @Benchmark // DATAMONGO-1720 + public Object writeObjectWithListAndMapsOfComplexType() { + + Document sink = new Document(); + converter.write(objectWithFlatAndComplexPropertiesPlusListAndMap, sink); + return sink; + } + + static class Customer { + + private @Id ObjectId id; + private final String firstname, lastname; + private final Address address; + + public Customer(String firstname, String lastname, Address address) { + this.firstname = firstname; + this.lastname = lastname; + this.address = address; + } + } + + static class Address { + private String zipCode, city; + + public Address(String zipCode, String city) { + this.zipCode = zipCode; + this.city = city; + } + + public String getZipCode() { + return zipCode; + } + + public String getCity() { + return city; + } + } + + static class SlightlyMoreComplexObject { + + @Id String id; + int intOne, intTwo; + String stringOne, stringTwo; + @Field("explicit-field-name") String renamedField; + Point location; + Customer customer; + List
addressList; + Map customerMap; + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SlightlyMoreComplexObject)) { + return false; + } + SlightlyMoreComplexObject that = (SlightlyMoreComplexObject) o; + if (intOne != that.intOne) { + return false; + } + if (intTwo != that.intTwo) { + return false; + } + if (!ObjectUtils.nullSafeEquals(id, that.id)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringOne, that.stringOne)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringTwo, that.stringTwo)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(renamedField, that.renamedField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(location, that.location)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(customer, that.customer)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(addressList, that.addressList)) { + return false; + } + return ObjectUtils.nullSafeEquals(customerMap, that.customerMap); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(id); + result = 31 * result + intOne; + result = 31 * result + intTwo; + result = 31 * result + ObjectUtils.nullSafeHashCode(stringOne); + result = 31 * result + ObjectUtils.nullSafeHashCode(stringTwo); + result = 31 * result + ObjectUtils.nullSafeHashCode(renamedField); + result = 31 * result + ObjectUtils.nullSafeHashCode(location); + result = 31 * result + ObjectUtils.nullSafeHashCode(customer); + result = 31 * result + ObjectUtils.nullSafeHashCode(addressList); + result = 31 * result + ObjectUtils.nullSafeHashCode(customerMap); + return result; + } + } +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java new file mode 100644 index 0000000000..615500904d --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java @@ -0,0 +1,327 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
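Stripped of the harness, each measurement in MappingMongoConverterBenchmark above is a single `MappingMongoConverter` call. A round trip in isolation (sketch; the stand-in `Person` type replaces the benchmark's package-private `Customer`):

```java
import org.bson.Document;

import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

class ConverterRoundTrip {

	// Stand-in domain type; the benchmark uses its own Customer class (assumption).
	static class Person {
		String firstname, lastname;
	}

	// One read and one write, exactly what the benchmark methods above time (sketch).
	Document roundTrip(MappingMongoConverter converter) {

		Person person = converter.read(Person.class, new Document("firstname", "Dave").append("lastname", "Matthews"));

		Document sink = new Document();
		converter.write(person, sink);
		return sink;
	}
}
```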
+ */ +package org.springframework.data.mongodb.microbenchmark; + +import java.io.File; +import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.Collection; +import java.util.Date; + +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.results.RunResult; +import org.openjdk.jmh.results.format.ResultFormatType; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.ChainedOptionsBuilder; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.TimeValue; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ResourceUtils; +import org.springframework.util.StringUtils; + +/** + * @author Christoph Strobl + */ +@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS, time = 2) +@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS, time = 2) +@Fork(AbstractMicrobenchmark.FORKS) +@State(Scope.Thread) +public class AbstractMicrobenchmark { + + static final int WARMUP_ITERATIONS = 5; + static final int MEASUREMENT_ITERATIONS = 10; + static final int FORKS = 1; + static final String[] JVM_ARGS = { "-server", "-XX:+HeapDumpOnOutOfMemoryError", "-Xms1024m", "-Xmx1024m", + "-XX:MaxDirectMemorySize=1024m" }; + + private final StandardEnvironment environment = new StandardEnvironment(); + + /** + * Run matching {@link org.openjdk.jmh.annotations.Benchmark} methods with options collected from + * {@link org.springframework.core.env.Environment}. + * + * @throws Exception + * @see #options(String) + */ + public void run() throws Exception { + + String includes = includes(); + + if (!includes.contains(org.springframework.util.ClassUtils.getShortName(getClass()))) { + return; + } + + publishResults(new Runner(options(includes).build()).run()); + } + + /** + * Get the regex for all benchmarks to be included in the run. By default every benchmark within classes matching the + * current ones short name.
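In practice that means a single benchmark can be selected per run. Since options are resolved through Spring's `StandardEnvironment`, a plain JVM system property is enough, with `#` separating class and method names as the rest of this javadoc explains (sketch; all property values illustrative):

```java
import org.springframework.data.mongodb.core.convert.MappingMongoConverterBenchmark;

public class RunSingleBenchmark {

	public static void main(String[] args) throws Exception {

		// Restrict the run to one benchmark method; '#' separates class and method.
		System.setProperty("benchmark", "MappingMongoConverterBenchmark#readObjectWith2Properties");

		// Optional overrides, read by the getters further down (values illustrative).
		System.setProperty("warmupIterations", "3");
		System.setProperty("measurementIterations", "5");
		System.setProperty("benchmarkReportDir", "target/benchmark-reports"); // JSON report target

		// Optional result publication; 'mongodb:' URIs select MongoResultsWriter,
		// anything else an HTTP POST (see ResultsWriter.forUri later in this diff).
		System.setProperty("publishTo", "mongodb://localhost:27017/benchmarks");

		new MappingMongoConverterBenchmark().run();
	}
}
```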
+ * The {@literal benchmark} command line argument allows overriding the defaults using {@code #} as class / method + * name separator. + * + * @return never {@literal null}. + * @see org.springframework.util.ClassUtils#getShortName(Class) + */ + protected String includes() { + + String tests = environment.getProperty("benchmark", String.class); + + if (!StringUtils.hasText(tests)) { + return ".*" + org.springframework.util.ClassUtils.getShortName(getClass()) + ".*"; + } + + if (!tests.contains("#")) { + return ".*" + tests + ".*"; + } + + String[] args = tests.split("#"); + return ".*" + args[0] + "." + args[1]; + } + + /** + * Collect all options for the {@link Runner}. + * + * @param includes regex for matching benchmarks to be included in the run. + * @return never {@literal null}. + * @throws Exception + */ + protected ChainedOptionsBuilder options(String includes) throws Exception { + + ChainedOptionsBuilder optionsBuilder = new OptionsBuilder().include(includes).jvmArgs(jvmArgs()); + + optionsBuilder = warmup(optionsBuilder); + optionsBuilder = measure(optionsBuilder); + optionsBuilder = forks(optionsBuilder); + optionsBuilder = report(optionsBuilder); + + return optionsBuilder; + } + + /** + * JVM args to apply to {@link Runner} via its {@link org.openjdk.jmh.runner.options.Options}. + * + * @return {@link #JVM_ARGS} by default. + */ + protected String[] jvmArgs() { + + String[] args = new String[JVM_ARGS.length]; + System.arraycopy(JVM_ARGS, 0, args, 0, JVM_ARGS.length); + return args; + } + + /** + * Read {@code warmupIterations} property from {@link org.springframework.core.env.Environment}. + * + * @return -1 if not set. + */ + protected int getWarmupIterations() { + return environment.getProperty("warmupIterations", Integer.class, -1); + } + + /** + * Read {@code measurementIterations} property from {@link org.springframework.core.env.Environment}. + * + * @return -1 if not set. + */ + protected int getMeasurementIterations() { + return environment.getProperty("measurementIterations", Integer.class, -1); + + } + + /** + * Read {@code forks} property from {@link org.springframework.core.env.Environment}. + * + * @return -1 if not set. + */ + protected int getForksCount() { + return environment.getProperty("forks", Integer.class, -1); + } + + /** + * Read {@code benchmarkReportDir} property from {@link org.springframework.core.env.Environment}. + * + * @return {@literal null} if not set. + */ + protected String getReportDirectory() { + return environment.getProperty("benchmarkReportDir"); + } + + /** + * Read {@code measurementTime} property from {@link org.springframework.core.env.Environment}. + * + * @return -1 if not set. + */ + protected long getMeasurementTime() { + return environment.getProperty("measurementTime", Long.class, -1L); + } + + /** + * Read {@code warmupTime} property from {@link org.springframework.core.env.Environment}. + * + * @return -1 if not set. + */ + protected long getWarmupTime() { + return environment.getProperty("warmupTime", Long.class, -1L); + } + + /** + * {@code project.version_yyyy-MM-dd_ClassName.json} eg. 
+ * {@literal 1.11.0.BUILD-SNAPSHOT_2017-03-07_MappingMongoConverterBenchmark.json} + * + * @return + */ + protected String reportFilename() { + + StringBuilder sb = new StringBuilder(); + + if (environment.containsProperty("project.version")) { + + sb.append(environment.getProperty("project.version")); + sb.append("_"); + } + + sb.append(new SimpleDateFormat("yyyy-MM-dd").format(new Date())); + sb.append("_"); + sb.append(org.springframework.util.ClassUtils.getShortName(getClass())); + sb.append(".json"); + return sb.toString(); + } + + /** + * Apply measurement options to {@link ChainedOptionsBuilder}. + * + * @param optionsBuilder must not be {@literal null}. + * @return {@link ChainedOptionsBuilder} with options applied. + * @see #getMeasurementIterations() + * @see #getMeasurementTime() + */ + private ChainedOptionsBuilder measure(ChainedOptionsBuilder optionsBuilder) { + + int measurementIterations = getMeasurementIterations(); + long measurementTime = getMeasurementTime(); + + if (measurementIterations > 0) { + optionsBuilder = optionsBuilder.measurementIterations(measurementIterations); + } + + if (measurementTime > 0) { + optionsBuilder = optionsBuilder.measurementTime(TimeValue.seconds(measurementTime)); + } + + return optionsBuilder; + } + + /** + * Apply warmup options to {@link ChainedOptionsBuilder}. + * + * @param optionsBuilder must not be {@literal null}. + * @return {@link ChainedOptionsBuilder} with options applied. + * @see #getWarmupIterations() + * @see #getWarmupTime() + */ + private ChainedOptionsBuilder warmup(ChainedOptionsBuilder optionsBuilder) { + + int warmupIterations = getWarmupIterations(); + long warmupTime = getWarmupTime(); + + if (warmupIterations > 0) { + optionsBuilder = optionsBuilder.warmupIterations(warmupIterations); + } + + if (warmupTime > 0) { + optionsBuilder = optionsBuilder.warmupTime(TimeValue.seconds(warmupTime)); + } + + return optionsBuilder; + } + + /** + * Apply forks option to {@link ChainedOptionsBuilder}. + * + * @param optionsBuilder must not be {@literal null}. + * @return {@link ChainedOptionsBuilder} with options applied. + * @see #getForksCount() + */ + private ChainedOptionsBuilder forks(ChainedOptionsBuilder optionsBuilder) { + + int forks = getForksCount(); + + if (forks <= 0) { + return optionsBuilder; + } + + return optionsBuilder.forks(forks); + } + + /** + * Apply report option to {@link ChainedOptionsBuilder}. + * + * @param optionsBuilder must not be {@literal null}. + * @return {@link ChainedOptionsBuilder} with options applied. + * @throws IOException if report file cannot be created. + * @see #getReportDirectory() + */ + private ChainedOptionsBuilder report(ChainedOptionsBuilder optionsBuilder) throws IOException { + + String reportDir = getReportDirectory(); + + if (!StringUtils.hasText(reportDir)) { + return optionsBuilder; + } + + String reportFilePath = reportDir + (reportDir.endsWith(File.separator) ? "" : File.separator) + reportFilename(); + File file = ResourceUtils.getFile(reportFilePath); + + if (file.exists()) { + file.delete(); + } else { + + file.getParentFile().mkdirs(); + file.createNewFile(); + } + + optionsBuilder.resultFormat(ResultFormatType.JSON); + optionsBuilder.result(reportFilePath); + + return optionsBuilder; + } + + /** + * Publish results to an external system. + * + * @param results must not be {@literal null}. 
+ */ + private void publishResults(Collection results) { + + if (CollectionUtils.isEmpty(results) || !environment.containsProperty("publishTo")) { + return; + } + + String uri = environment.getProperty("publishTo"); + try { + ResultsWriter.forUri(uri).write(results); + } catch (Exception e) { + System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e)); + } + } +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java new file mode 100644 index 0000000000..af56908755 --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java @@ -0,0 +1,88 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.microbenchmark; + + +import java.io.IOException; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.URL; +import java.net.URLConnection; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Collection; + +import org.openjdk.jmh.results.RunResult; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.util.CollectionUtils; + +/** + * {@link ResultsWriter} implementation of {@link URLConnection}. 
+ * + * @since 2.0 + */ +class HttpResultsWriter implements ResultsWriter { + + private final String url; + + HttpResultsWriter(String url) { + this.url = url; + } + + @Override + public void write(Collection results) { + + if (CollectionUtils.isEmpty(results)) { + return; + } + + try { + doWrite(results); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void doWrite(Collection results) throws IOException { + StandardEnvironment env = new StandardEnvironment(); + + String projectVersion = env.getProperty("project.version", "unknown"); + String gitBranch = env.getProperty("git.branch", "unknown"); + String gitDirty = env.getProperty("git.dirty", "no"); + String gitCommitId = env.getProperty("git.commit.id", "unknown"); + + HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection(); + connection.setConnectTimeout((int) Duration.ofSeconds(1).toMillis()); + connection.setReadTimeout((int) Duration.ofSeconds(1).toMillis()); + connection.setDoOutput(true); + connection.setRequestMethod("POST"); + + connection.setRequestProperty("Content-Type", "application/json"); + connection.addRequestProperty("X-Project-Version", projectVersion); + connection.addRequestProperty("X-Git-Branch", gitBranch); + connection.addRequestProperty("X-Git-Dirty", gitDirty); + connection.addRequestProperty("X-Git-Commit-Id", gitCommitId); + + try (OutputStream output = connection.getOutputStream()) { + output.write(ResultsWriter.jsonifyResults(results).getBytes(StandardCharsets.UTF_8)); + } + + if (connection.getResponseCode() >= 400) { + throw new IllegalStateException( + String.format("Status %d %s", connection.getResponseCode(), connection.getResponseMessage())); + } + } +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java new file mode 100644 index 0000000000..2114d2a06a --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java @@ -0,0 +1,133 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.microbenchmark; + +import java.util.Collection; +import java.util.Date; +import java.util.List; + +import org.bson.Document; +import org.openjdk.jmh.results.RunResult; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBObject; +import com.mongodb.ConnectionString; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoDatabase; + +/** + * MongoDB specific {@link ResultsWriter} implementation. 
+ * + * @author Christoph Strobl + * @since 2.0 + */ +class MongoResultsWriter implements ResultsWriter { + + private final String uri; + + MongoResultsWriter(String uri) { + this.uri = uri; + } + + @Override + public void write(Collection results) { + + Date now = new Date(); + StandardEnvironment env = new StandardEnvironment(); + + String projectVersion = env.getProperty("project.version", "unknown"); + String gitBranch = env.getProperty("git.branch", "unknown"); + String gitDirty = env.getProperty("git.dirty", "no"); + String gitCommitId = env.getProperty("git.commit.id", "unknown"); + + ConnectionString connectionString = new ConnectionString(this.uri); + MongoClient client = MongoClients.create(this.uri); + + String dbName = StringUtils.hasText(connectionString.getDatabase()) ? connectionString.getDatabase() + : "spring-data-mongodb-benchmarks"; + MongoDatabase db = client.getDatabase(dbName); + + for (Document dbo : (List) Document.parse(ResultsWriter.jsonifyResults(results))) { + + String collectionName = extractClass(dbo.get("benchmark").toString()); + + Document sink = new Document(); + sink.append("_version", projectVersion); + sink.append("_branch", gitBranch); + sink.append("_commit", gitCommitId); + sink.append("_dirty", gitDirty); + sink.append("_method", extractBenchmarkName(dbo.get("benchmark").toString())); + sink.append("_date", now); + sink.append("_snapshot", projectVersion.toLowerCase().contains("snapshot")); + + sink.putAll(dbo); + + db.getCollection(collectionName).insertOne(fixDocumentKeys(sink)); + } + + client.close(); + } + + /** + * Replace {@code .} by {@code ,}. + * + * @param doc + * @return + */ + private Document fixDocumentKeys(Document doc) { + + Document sanitized = new Document(); + + for (Object key : doc.keySet()) { + + Object value = doc.get(key); + if (value instanceof Document) { + value = fixDocumentKeys((Document) value); + } else if (value instanceof BasicDBObject) { + value = fixDocumentKeys(new Document((BasicDBObject) value)); + } + + if (key instanceof String) { + + String newKey = (String) key; + if (newKey.contains(".")) { + newKey = newKey.replace('.', ','); + } + + sanitized.put(newKey, value); + } else { + sanitized.put(ObjectUtils.nullSafeToString(key).replace('.', ','), value); + } + } + + return sanitized; + } + + private static String extractClass(String source) { + + String tmp = source.substring(0, source.lastIndexOf('.')); + return tmp.substring(tmp.lastIndexOf(".") + 1); + } + + private static String extractBenchmarkName(String source) { + return source.substring(source.lastIndexOf(".") + 1); + } + +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java new file mode 100644 index 0000000000..95da1750bc --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java @@ -0,0 +1,64 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.microbenchmark; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.nio.charset.StandardCharsets; +import java.util.Collection; + +import org.openjdk.jmh.results.RunResult; +import org.openjdk.jmh.results.format.ResultFormatFactory; +import org.openjdk.jmh.results.format.ResultFormatType; + +/** + * @author Christoph Strobl + * @since 2.0 + */ +interface ResultsWriter { + + /** + * Write the {@link RunResult}s. + * + * @param results can be {@literal null}. + */ + void write(Collection results); + + /** + * Get the uri specific {@link ResultsWriter}. + * + * @param uri must not be {@literal null}. + * @return + */ + static ResultsWriter forUri(String uri) { + return uri.startsWith("mongodb:") ? new MongoResultsWriter(uri) : new HttpResultsWriter(uri); + } + + /** + * Convert {@link RunResult}s to JMH Json representation. + * + * @param results + * @return json string representation of results. + */ + static String jsonifyResults(Collection results) { + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, StandardCharsets.UTF_8)) + .writeOut(results); + + return new String(baos.toByteArray(), StandardCharsets.UTF_8); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java new file mode 100644 index 0000000000..1f6875c080 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java @@ -0,0 +1,149 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Arrays; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json}) + * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter + * binding of placeholders like {@code ?0} is delayed upon first call on the target {@link Document} via + * {@link #toDocument()}. + *
+ * <br />
+ * <pre class="code">
+ * $toUpper : $name                -> { '$toUpper' : '$name' }
+ *
+ * { '$toUpper' : '$name' }        -> { '$toUpper' : '$name' }
+ *
+ * { '$toUpper' : '?0' }, "$name"  -> { '$toUpper' : '$name' }
+ * </pre>
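A concrete usage sketch of the transformation just illustrated, relying only on the constructor, `bind(...)`, and `toDocument()` declared below:

```java
import org.bson.Document;

import org.springframework.data.mongodb.BindableMongoExpression;

class ExpressionSample {

	Document upperCaseName() {

		// Parsing and placeholder binding are deferred until toDocument() is invoked.
		BindableMongoExpression expression = new BindableMongoExpression("{ '$toUpper' : '?0' }", null).bind("$name");

		return expression.toDocument(); // -> { "$toUpper" : "$name" }
	}
}
```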
+ * + * Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry} + * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}. + * + * @author Christoph Strobl + * @author Giacomo Baso + * @since 3.2 + */ +public class BindableMongoExpression implements MongoExpression { + + private final String expressionString; + + private final @Nullable CodecRegistryProvider codecRegistryProvider; + + private final @Nullable Object[] args; + + private final Lazy target; + + /** + * Create a new instance of {@link BindableMongoExpression}. + * + * @param expression must not be {@literal null}. + * @param args can be {@literal null}. + */ + public BindableMongoExpression(String expression, @Nullable Object[] args) { + this(expression, null, args); + } + + /** + * Create a new instance of {@link BindableMongoExpression}. + * + * @param expression must not be {@literal null}. + * @param codecRegistryProvider can be {@literal null}. + * @param args can be {@literal null}. + */ + public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider, + @Nullable Object[] args) { + + Assert.notNull(expression, "Expression must not be null"); + + this.expressionString = expression; + this.codecRegistryProvider = codecRegistryProvider; + this.args = args; + this.target = Lazy.of(this::parse); + } + + /** + * Provide the {@link CodecRegistry} used to convert expressions. + * + * @param codecRegistry must not be {@literal null}. + * @return new instance of {@link BindableMongoExpression}. + */ + public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) { + return new BindableMongoExpression(expressionString, () -> codecRegistry, args); + } + + /** + * Provide the arguments to bind to the placeholders via their index. + * + * @param args must not be {@literal null}. + * @return new instance of {@link BindableMongoExpression}. + */ + public BindableMongoExpression bind(Object... args) { + return new BindableMongoExpression(expressionString, codecRegistryProvider, args); + } + + @Override + public Document toDocument() { + return target.get(); + } + + @Override + public String toString() { + return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args=" + + Arrays.toString(args) + '}'; + } + + private Document parse() { + + String expression = wrapJsonIfNecessary(expressionString); + + if (ObjectUtils.isEmpty(args)) { + + if (codecRegistryProvider == null) { + return Document.parse(expression); + } + + return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class) + .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); + } + + ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec() + : new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry()); + return codec.decode(expression, args); + } + + private static String wrapJsonIfNecessary(String json) { + + if(!StringUtils.hasText(json)) { + return json; + } + + String raw = json.trim(); + return (raw.startsWith("{") && raw.endsWith("}")) ? 
raw : "{%s}".formatted(raw); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java index c0243db316..b36382a58e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,13 +19,13 @@ import org.springframework.dao.DataAccessException; -import com.mongodb.BulkWriteError; -import com.mongodb.BulkWriteException; -import com.mongodb.BulkWriteResult; +import com.mongodb.MongoBulkWriteException; +import com.mongodb.bulk.BulkWriteError; +import com.mongodb.bulk.BulkWriteResult; /** * Is thrown when errors occur during bulk operations. - * + * * @author Tobias Trelle * @author Oliver Gierke * @since 1.9 @@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException { private final BulkWriteResult result; /** - * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}. - * + * Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}. + * * @param message must not be {@literal null}. * @param source must not be {@literal null}. */ - public BulkOperationException(String message, BulkWriteException source) { + public BulkOperationException(String message, MongoBulkWriteException source) { super(message, source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java deleted file mode 100644 index 6d21b8cf7b..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2010-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb; - -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.data.authentication.UserCredentials; - -/** - * Exception being thrown in case we cannot connect to a MongoDB instance. 
- * - * @author Oliver Gierke - */ -public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException { - - private final UserCredentials credentials; - private final String database; - - private static final long serialVersionUID = 1172099106475265589L; - - public CannotGetMongoDbConnectionException(String msg, Throwable cause) { - super(msg, cause); - this.database = null; - this.credentials = UserCredentials.NO_CREDENTIALS; - } - - public CannotGetMongoDbConnectionException(String msg) { - this(msg, null, UserCredentials.NO_CREDENTIALS); - } - - public CannotGetMongoDbConnectionException(String msg, String database, UserCredentials credentials) { - super(msg); - this.database = database; - this.credentials = credentials; - } - - /** - * Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance. - * - * @return - */ - public UserCredentials getCredentials() { - return this.credentials; - } - - /** - * Returns the name of the database trying to be accessed. - * - * @return - */ - public String getDatabase() { - return database; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java new file mode 100644 index 0000000000..53acf65470 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.NonTransientDataAccessException; +import org.springframework.lang.Nullable; + +/** + * {@link NonTransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data + * access failures such as reading data using an already closed session. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ClientSessionException extends NonTransientDataAccessException { + + /** + * Constructor for {@link ClientSessionException}. + * + * @param msg the detail message. Must not be {@literal null}. + */ + public ClientSessionException(String msg) { + super(msg); + } + + /** + * Constructor for {@link ClientSessionException}. + * + * @param msg the detail message. Can be {@literal null}. + * @param cause the root cause. Can be {@literal null}. 
+ */ + public ClientSessionException(@Nullable String msg, @Nullable Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java new file mode 100644 index 0000000000..53515f9fcd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java @@ -0,0 +1,74 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Optional; + +import org.bson.codecs.Codec; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.util.Assert; + +/** + * Provider interface to obtain {@link CodecRegistry} from the underlying MongoDB Java driver. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +@FunctionalInterface +public interface CodecRegistryProvider { + + /** + * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver. + * + * @return never {@literal null}. + * @throws IllegalStateException if {@link CodecRegistry} cannot be obtained. + */ + CodecRegistry getCodecRegistry(); + + /** + * Checks if a {@link Codec} is registered for a given type. + * + * @param type must not be {@literal null}. + * @return true if {@link #getCodecRegistry()} holds a {@link Codec} for given type. + * @throws IllegalStateException if {@link CodecRegistry} cannot be obtained. + */ + default boolean hasCodecFor(Class type) { + return getCodecFor(type).isPresent(); + } + + /** + * Get the {@link Codec} registered for the given {@literal type} or an {@link Optional#empty() empty Optional} + * instead. + * + * @param type must not be {@literal null}. + * @param + * @return never {@literal null}. + * @throws IllegalArgumentException if {@literal type} is {@literal null}. + */ + default Optional> getCodecFor(Class type) { + + Assert.notNull(type, "Type must not be null"); + + try { + return Optional.of(getCodecRegistry().get(type)); + } catch (CodecConfigurationException e) { + // ignore + } + return Optional.empty(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..c07e2dbe4a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java @@ -0,0 +1,59 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.Set; + +import org.springframework.lang.Nullable; + +/** + * Default implementation of {@link MongoTransactionOptions} using {@literal mongo:} as {@link #getLabelPrefix() label + * prefix} creating {@link SimpleMongoTransactionOptions} out of a given argument {@link Map}. Uses + * {@link SimpleMongoTransactionOptions#KNOWN_KEYS} to validate entries in arguments to resolve and errors on unknown + * entries. + * + * @author Christoph Strobl + * @since 4.3 + */ +enum DefaultMongoTransactionOptionsResolver implements MongoTransactionOptionsResolver { + + INSTANCE; + + private static final String PREFIX = "mongo:"; + + @Override + public MongoTransactionOptions convert(Map options) { + + validateKeys(options.keySet()); + return SimpleMongoTransactionOptions.of(options); + } + + @Nullable + @Override + public String getLabelPrefix() { + return PREFIX; + } + + private static void validateKeys(Set keys) { + + if (!SimpleMongoTransactionOptions.KNOWN_KEYS.containsAll(keys)) { + + throw new IllegalArgumentException("Transaction labels contained invalid values. Has to be one of %s" + .formatted(SimpleMongoTransactionOptions.KNOWN_KEYS)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java index a67b6c7a91..f95a3c5310 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java index 8835009b1e..3fc3f82fbf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,8 +25,10 @@ public class LazyLoadingException extends UncategorizedDataAccessException { private static final long serialVersionUID = -7089224903873220037L; /** - * @param msg - * @param cause + * Constructor for LazyLoadingException. + * + * @param msg the detail message. + * @param cause the exception thrown by underlying data access API. */ public LazyLoadingException(String msg, Throwable cause) { super(msg, cause); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java index db9a4f30c6..72b2794d05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,10 +20,10 @@ /** * Helper class featuring helper methods for working with MongoDb collections. - *
- * <p/>
+ * <br />
+ * <br />
* Mainly intended for internal use within the framework. - * + * * @author Thomas Risberg * @since 1.0 */ @@ -38,7 +38,7 @@ private MongoCollectionUtils() { /** * Obtains the collection name to use for the provided class - * + * * @param entityClass The class to determine the preferred collection name for * @return The preferred collection name */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java new file mode 100644 index 0000000000..1fcd5de516 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java @@ -0,0 +1,112 @@ +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; + +/** + * Interface for factories creating {@link MongoDatabase} instances. + * + * @author Mark Pollack + * @author Thomas Darimont + * @author Christoph Strobl + * @since 3.0 + */ +public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider { + + /** + * Obtain a {@link MongoDatabase} from the underlying factory. + * + * @return never {@literal null}. + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase() throws DataAccessException; + + /** + * Obtain a {@link MongoDatabase} instance to access the database with the given name. + * + * @param dbName must not be {@literal null}. + * @return never {@literal null}. + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + + /** + * Exposes a shared {@link MongoExceptionTranslator}. + * + * @return will never be {@literal null}. + */ + PersistenceExceptionTranslator getExceptionTranslator(); + + /** + * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver. + * + * @return never {@literal null}. + */ + @Override + default CodecRegistry getCodecRegistry() { + return getMongoDatabase().getCodecRegistry(); + } + + /** + * Obtain a {@link ClientSession} for given ClientSessionOptions. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + ClientSession getSession(ClientSessionOptions options); + + /** + * Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase} + * instances that are aware and bound to a new session with given {@link ClientSessionOptions options}. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 2.1 + */ + default MongoDatabaseFactory withSession(ClientSessionOptions options) { + return withSession(getSession(options)); + } + + /** + * Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase} + * instances that are aware and bound to the given session. + * + * @param session must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + MongoDatabaseFactory withSession(ClientSession session); + + /** + * Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an + * {@link ClientSession#hasActiveTransaction() active transaction}. + * + * @return {@literal true} if there's an active transaction, {@literal false} otherwise. + * @since 2.1.3 + */ + default boolean isTransactionActive() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java new file mode 100644 index 0000000000..f73f9fb7ed --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -0,0 +1,227 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.support.ResourceHolderSynchronization; +import org.springframework.transaction.support.TransactionSynchronization; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; + +/** + * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining + * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and + * {@link com.mongodb.client.MongoCollection} suitable for transactional usage. + *
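+ * A minimal usage sketch; the {@code factory} variable is assumed to be configured elsewhere:
+ * <pre class="code">
+ * MongoDatabase database = MongoDatabaseUtils.getDatabase(factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
+ * </pre>
+ *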
+ * Note: Intended for internal usage only. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Shadow's Edge - Brent Weeks + * @since 2.1 + */ +public class MongoDatabaseUtils { + + /** + * Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory} using + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(MongoDatabaseFactory factory) { + return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(null, factory, sessionSynchronization); + } + + /** + * Obtain the {@link MongoDatabase database} with the given name from the given {@link MongoDatabaseFactory factory} using + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory) { + return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the {@link MongoDatabase database} with the given name from the given {@link MongoDatabaseFactory factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(dbName, factory, sessionSynchronization); + } + + private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + + Assert.notNull(factory, "Factory must not be null"); + + if (sessionSynchronization == SessionSynchronization.NEVER + || !TransactionSynchronizationManager.isSynchronizationActive()) { + return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + ClientSession session = doGetSession(factory, sessionSynchronization); + + if (session == null) { + return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + MongoDatabaseFactory factoryToUse = factory.withSession(session); + return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase(); + } + + /** + * Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active + * transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory + * resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active + * transaction}. + * + * @param dbFactory the resource to check transactions for. Must not be {@literal null}. + * @return {@literal true} if the factory has an ongoing transaction. 
+ * @since 2.1.3 + */ + public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) { + + if (dbFactory.isTransactionActive()) { + return true; + } + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory); + return resourceHolder != null && resourceHolder.hasActiveTransaction(); + } + + @Nullable + private static ClientSession doGetSession(MongoDatabaseFactory dbFactory, + SessionSynchronization sessionSynchronization) { + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory); + + // check for native MongoDB transaction + if (resourceHolder != null && (resourceHolder.hasSession() || resourceHolder.isSynchronizedWithTransaction())) { + + if (!resourceHolder.hasSession()) { + resourceHolder.setSession(createClientSession(dbFactory)); + } + + return resourceHolder.getSession(); + } + + if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) { + return null; + } + + // init a non native MongoDB transaction by registering a MongoSessionSynchronization + + resourceHolder = new MongoResourceHolder(createClientSession(dbFactory), dbFactory); + resourceHolder.getRequiredSession().startTransaction(); + + TransactionSynchronizationManager + .registerSynchronization(new MongoSessionSynchronization(resourceHolder, dbFactory)); + resourceHolder.setSynchronizedWithTransaction(true); + TransactionSynchronizationManager.bindResource(dbFactory, resourceHolder); + + return resourceHolder.getSession(); + } + + private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) { + return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + } + + /** + * MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when + * participating in a non-native MongoDB transaction, such as a Jta or JDBC transaction. 
+ * + * @author Christoph Strobl + * @since 2.1 + */ + private static class MongoSessionSynchronization extends ResourceHolderSynchronization { + + private final MongoResourceHolder resourceHolder; + + MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) { + + super(resourceHolder, dbFactory); + this.resourceHolder = resourceHolder; + } + + @Override + protected boolean shouldReleaseBeforeCompletion() { + return false; + } + + @Override + protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) { + + if (resourceHolder.hasActiveTransaction()) { + resourceHolder.getRequiredSession().commitTransaction(); + } + } + + @Override + public void afterCompletion(int status) { + + if (status == TransactionSynchronization.STATUS_ROLLED_BACK && this.resourceHolder.hasActiveTransaction()) { + resourceHolder.getRequiredSession().abortTransaction(); + } + + super.afterCompletion(status); + } + + @Override + protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) { + + if (resourceHolder.hasActiveSession()) { + resourceHolder.getRequiredSession().close(); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java deleted file mode 100644 index 6e760e6a54..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb; - -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.core.MongoExceptionTranslator; - -import com.mongodb.DB; - -/** - * Interface for factories creating {@link DB} instances. - * - * @author Mark Pollack - * @author Thomas Darimont - */ -public interface MongoDbFactory { - - /** - * Creates a default {@link DB} instance. - * - * @return - * @throws DataAccessException - */ - DB getDb() throws DataAccessException; - - /** - * Creates a {@link DB} instance to access the database with the given name. - * - * @param dbName must not be {@literal null} or empty. - * @return - * @throws DataAccessException - */ - DB getDb(String dbName) throws DataAccessException; - - /** - * Exposes a shared {@link MongoExceptionTranslator}. - * - * @return will never be {@literal null}. 
- */ - PersistenceExceptionTranslator getExceptionTranslator(); -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java new file mode 100644 index 0000000000..a087439d72 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +/** + * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when + * passed on to the driver. + *
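+ * A short illustrative sketch (the expression is chosen arbitrarily):
+ * <pre class="code">
+ * MongoExpression expression = MongoExpression.create("{ '$toUpper' : '$name' }");
+ * org.bson.Document document = expression.toDocument();
+ * </pre>
+ *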
+ * A set of predefined {@link MongoExpression expressions}, including a + * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method + * like expressions (eg. {@code toUpper(name)}) are available via the + * {@link org.springframework.data.mongodb.core.aggregation Aggregation API}. + * + * @author Christoph Strobl + * @since 3.2 + * @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators + * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators + * @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators + * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators + * @see org.springframework.data.mongodb.core.aggregation.ConvertOperators + * @see org.springframework.data.mongodb.core.aggregation.DateOperators + * @see org.springframework.data.mongodb.core.aggregation.ObjectOperators + * @see org.springframework.data.mongodb.core.aggregation.SetOperators + * @see org.springframework.data.mongodb.core.aggregation.StringOperators + */ +@FunctionalInterface +public interface MongoExpression { + + /** + * Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}).
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document} + * if necessary. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MongoExpression}. + */ + static MongoExpression create(String expression) { + return new BindableMongoExpression(expression, null); + } + + /** + * Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0}) + * that will be resolved on first call of {@link #toDocument()}.
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document} + * if necessary. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MongoExpression}. + */ + static MongoExpression create(String expression, Object... args) { + return new BindableMongoExpression(expression, args); + } + + /** + * Obtain the native {@link org.bson.Document} representation. + * + * @return never {@literal null}. + */ + org.bson.Document toDocument(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java new file mode 100644 index 0000000000..39c4815d47 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java @@ -0,0 +1,81 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Arrays; +import java.util.function.Consumer; + +import org.springframework.data.domain.ManagedTypes; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public final class MongoManagedTypes implements ManagedTypes { + + private final ManagedTypes delegate; + + private MongoManagedTypes(ManagedTypes types) { + this.delegate = types; + } + + /** + * Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}. + * + * @param managedTypes + * @return + */ + public static MongoManagedTypes from(ManagedTypes managedTypes) { + return new MongoManagedTypes(managedTypes); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}. + * + * @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}. + */ + public static MongoManagedTypes from(Class... types) { + return fromIterable(Arrays.asList(types)); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of + * {@link Class types}. + * + * @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be + * {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized the given, required {@link Iterable} of {@link Class + * types}. + */ + public static MongoManagedTypes fromIterable(Iterable> types) { + return from(ManagedTypes.fromIterable(types)); + } + + /** + * Factory method to return an empty {@link MongoManagedTypes} object. + * + * @return an empty {@link MongoManagedTypes} object. 
+ */ + public static MongoManagedTypes empty() { + return from(ManagedTypes.empty()); + } + + @Override + public void forEach(Consumer> action) { + delegate.forEach(action); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java new file mode 100644 index 0000000000..a1e8344a9f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.support.ResourceHolderSupport; + +import com.mongodb.client.ClientSession; + +/** + * MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}. + * {@link MongoTransactionManager} binds instances of this class to the thread. + *
+ * Note: Intended for internal usage only. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see MongoTransactionManager + * @see org.springframework.data.mongodb.core.MongoTemplate + */ +class MongoResourceHolder extends ResourceHolderSupport { + + private @Nullable ClientSession session; + private MongoDatabaseFactory dbFactory; + + /** + * Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}. + * + * @param session the associated {@link ClientSession}. Can be {@literal null}. + * @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}. + */ + MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) { + + this.session = session; + this.dbFactory = dbFactory; + } + + /** + * @return the associated {@link ClientSession}. Can be {@literal null}. + */ + @Nullable + ClientSession getSession() { + return session; + } + + /** + * @return the required associated {@link ClientSession}. + * @throws IllegalStateException if no {@link ClientSession} is associated with this {@link MongoResourceHolder}. + * @since 2.1.3 + */ + ClientSession getRequiredSession() { + + ClientSession session = getSession(); + + if (session == null) { + throw new IllegalStateException("No session available"); + } + + return session; + } + + /** + * @return the associated {@link MongoDatabaseFactory}. + */ + public MongoDatabaseFactory getDbFactory() { + return dbFactory; + } + + /** + * Set the {@link ClientSession} to guard. + * + * @param session can be {@literal null}. + */ + public void setSession(@Nullable ClientSession session) { + this.session = session; + } + + /** + * Only set the timeout if it does not match the {@link TransactionDefinition#TIMEOUT_DEFAULT default timeout}. + * + * @param seconds + */ + void setTimeoutIfNotDefaulted(int seconds) { + + if (seconds != TransactionDefinition.TIMEOUT_DEFAULT) { + setTimeoutInSeconds(seconds); + } + } + + /** + * @return {@literal true} if session is not {@literal null}. + */ + boolean hasSession() { + return session != null; + } + + /** + * @return {@literal true} if the session is active and has not been closed. + */ + boolean hasActiveSession() { + + if (!hasSession()) { + return false; + } + + return hasServerSession() && !getRequiredSession().getServerSession().isClosed(); + } + + /** + * @return {@literal true} if the session has an active transaction. + * @since 2.1.3 + * @see #hasActiveSession() + */ + boolean hasActiveTransaction() { + + if (!hasActiveSession()) { + return false; + } + + return getRequiredSession().hasActiveTransaction(); + } + + /** + * @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated + * that is accessible via {@link ClientSession#getServerSession()}. + */ + boolean hasServerSession() { + + try { + return getRequiredSession().getServerSession() != null; + } catch (IllegalStateException serverSessionClosed) { + // ignore + } + + return false; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java new file mode 100644 index 0000000000..645b3508db --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; + +/** + * A simple interface for obtaining a {@link ClientSession} to be consumed by + * {@link org.springframework.data.mongodb.core.MongoOperations} and MongoDB native operations that support causal + * consistency and transactions. + * + * @author Christoph Strobl + * @currentRead Shadow's Edge - Brent Weeks + * @since 2.1 + */ +@FunctionalInterface +public interface MongoSessionProvider { + + /** + * Obtain a {@link ClientSession} with with given options. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @throws org.springframework.dao.DataAccessException + */ + ClientSession getSession(ClientSessionOptions options); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java new file mode 100644 index 0000000000..4215479f62 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; + +/** + * A specific {@link ClientSessionException} related to issues with a transaction such as aborted or non existing + * transactions. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class MongoTransactionException extends ClientSessionException { + + /** + * Constructor for {@link MongoTransactionException}. + * + * @param msg the detail message. Must not be {@literal null}. + */ + public MongoTransactionException(String msg) { + super(msg); + } + + /** + * Constructor for {@link ClientSessionException}. + * + * @param msg the detail message. Can be {@literal null}. + * @param cause the root cause. Can be {@literal null}. 
+ */ + public MongoTransactionException(@Nullable String msg, @Nullable Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java new file mode 100644 index 0000000000..eda657f5f1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java @@ -0,0 +1,493 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.support.AbstractPlatformTransactionManager; +import org.springframework.transaction.support.DefaultTransactionStatus; +import org.springframework.transaction.support.ResourceTransactionManager; +import org.springframework.transaction.support.SmartTransactionObject; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.transaction.support.TransactionSynchronizationUtils; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.MongoException; +import com.mongodb.TransactionOptions; +import com.mongodb.client.ClientSession; + +/** + * A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages + * {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}. + *
+ * Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread. + *
+ * {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal + * consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction() + * commit} or {@link ClientSession#abortTransaction() abort} a transaction. + *
+ * Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via + * {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard + * {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as + * {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly. + *
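+ * A minimal programmatic setup sketch ({@code databaseFactory} is assumed to be available):
+ * <pre class="code">
+ * MongoTransactionManager txManager = new MongoTransactionManager(databaseFactory);
+ * TransactionTemplate txTemplate = new TransactionTemplate(txManager);
+ * </pre>
+ *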
+ * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override + * {@link #doCommit(MongoTransactionObject)} to implement the + * Retry Commit Operation + * behavior as outlined in the MongoDB reference manual. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Shadow's Edge - Brent Weeks + * @since 2.1 + * @see MongoDB Transaction Documentation + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + */ +public class MongoTransactionManager extends AbstractPlatformTransactionManager + implements ResourceTransactionManager, InitializingBean { + + private @Nullable MongoDatabaseFactory databaseFactory; + private MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; + + /** + * Create a new {@link MongoTransactionManager} for bean-style usage.
+ * Note: The {@link MongoDatabaseFactory db factory} has to be + * {@link #setDatabaseFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a + * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
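+ * <br />
+ * A sketch of that bean-style lifecycle ({@code databaseFactory} is assumed to be injected):
+ * <pre class="code">
+ * MongoTransactionManager txManager = new MongoTransactionManager();
+ * txManager.setDatabaseFactory(databaseFactory);
+ * txManager.afterPropertiesSet();
+ * </pre>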
+ * Optionally it is possible to set default {@link TransactionOptions transaction options} defining + * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. + * + * @see #setDatabaseFactory(MongoDatabaseFactory) + * @see #setTransactionSynchronization(int) + */ + public MongoTransactionManager() { + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}. + * + * @param databaseFactory must not be {@literal null}. + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param options can be {@literal null}. + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. + * @since 4.3 + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, MongoTransactionOptions defaultTransactionOptions) { + + Assert.notNull(databaseFactory, "MongoDatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); + + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; + } + + @Override + protected Object doGetTransaction() throws TransactionException { + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager + .getResource(getRequiredDbFactory()); + return new MongoTransactionObject(resourceHolder); + } + + @Override + protected boolean isExistingTransaction(Object transaction) throws TransactionException { + return extractMongoTransaction(transaction).hasResourceHolder(); + } + + @Override + protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + + MongoResourceHolder resourceHolder = newResourceHolder(definition, + ClientSessionOptions.builder().causallyConsistent(true).build()); + mongoTransactionObject.setResourceHolder(resourceHolder); + + if (logger.isDebugEnabled()) { + logger + .debug(String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + try { + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition).mergeWith(options); + mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); + } catch (MongoException ex) { + throw new TransactionSystemException(String.format("Could not start 
Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + } + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + resourceHolder.setSynchronizedWithTransaction(true); + TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder); + } + + @Override + protected Object doSuspend(Object transaction) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + mongoTransactionObject.setResourceHolder(null); + + return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory()); + } + + @Override + protected void doResume(@Nullable Object transaction, Object suspendedResources) { + TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources); + } + + @Override + protected final void doCommit(DefaultTransactionStatus status) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to commit transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + try { + doCommit(mongoTransactionObject); + } catch (Exception ex) { + + throw new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + } + } + + /** + * Customization hook to perform an actual commit of the given transaction.
+ * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding + * {@literal error labels}.
+ * By default those labels are ignored, nevertheless one might check for + * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the + * commit.
+ * <pre class="code">
+	 * do {
+	 *     try {
+	 *         transactionObject.commitTransaction();
+	 *         break;
+	 *     } catch (MongoException ex) {
+	 *         if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
+	 *             throw ex;
+	 *         }
+	 *     }
+	 *     Thread.sleep(500);
+	 * } while (--retries > 0);
+	 * 
+ * </pre>
+ * + * @param transactionObject never {@literal null}. + * @throws Exception in case of transaction errors. + */ + protected void doCommit(MongoTransactionObject transactionObject) throws Exception { + transactionObject.commitTransaction(); + } + + @Override + protected void doRollback(DefaultTransactionStatus status) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to abort transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + try { + mongoTransactionObject.abortTransaction(); + } catch (MongoException ex) { + + throw new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + } + } + + @Override + protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException { + + MongoTransactionObject transactionObject = extractMongoTransaction(status); + transactionObject.getRequiredResourceHolder().setRollbackOnly(); + } + + @Override + protected void doCleanupAfterCompletion(Object transaction) { + + Assert.isInstanceOf(MongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class, + transaction.getClass())); + + MongoTransactionObject mongoTransactionObject = (MongoTransactionObject) transaction; + + // Remove the connection holder from the thread. + TransactionSynchronizationManager.unbindResource(getRequiredDbFactory()); + mongoTransactionObject.getRequiredResourceHolder().clear(); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to release Session %s after transaction.", + debugString(mongoTransactionObject.getSession()))); + } + + mongoTransactionObject.closeSession(); + } + + /** + * Set the {@link MongoDatabaseFactory} that this instance should manage transactions for. + * + * @param databaseFactory must not be {@literal null}. + */ + public void setDatabaseFactory(MongoDatabaseFactory databaseFactory) { + + Assert.notNull(databaseFactory, "DbFactory must not be null"); + this.databaseFactory = databaseFactory; + } + + /** + * Set the {@link TransactionOptions} to be applied when starting transactions. + * + * @param options can be {@literal null}. + */ + public void setOptions(@Nullable TransactionOptions options) { + this.options = MongoTransactionOptions.of(options); + } + + /** + * Get the {@link MongoDatabaseFactory} that this instance manages transactions for. + * + * @return can be {@literal null}. + */ + @Nullable + public MongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + @Override + public MongoDatabaseFactory getResourceFactory() { + return getRequiredDbFactory(); + } + + @Override + public void afterPropertiesSet() { + getRequiredDbFactory(); + } + + private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) { + + MongoDatabaseFactory dbFactory = getResourceFactory(); + + MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory); + resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition)); + + return resourceHolder; + } + + /** + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. 
+ */ + private MongoDatabaseFactory getRequiredDbFactory() { + + Assert.state(databaseFactory != null, + "MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required"); + + return databaseFactory; + } + + private static MongoTransactionObject extractMongoTransaction(Object transaction) { + + Assert.isInstanceOf(MongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class, + transaction.getClass())); + + return (MongoTransactionObject) transaction; + } + + private static MongoTransactionObject extractMongoTransaction(DefaultTransactionStatus status) { + + Assert.isInstanceOf(MongoTransactionObject.class, status.getTransaction(), + () -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class, + status.getTransaction().getClass())); + + return (MongoTransactionObject) status.getTransaction(); + } + + private static String debugString(@Nullable ClientSession session) { + + if (session == null) { + return "null"; + } + + String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()), + Integer.toHexString(session.hashCode())); + + try { + if (session.getServerSession() != null) { + debugString += String.format("id = %s, ", session.getServerSession().getIdentifier()); + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } else { + debugString += "id = n/a"; + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } + } catch (RuntimeException e) { + debugString += String.format("error = %s", e.getMessage()); + } + + debugString += "]"; + + return debugString; + } + + /** + * MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by + * {@link MongoTransactionManager}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see MongoResourceHolder + */ + protected static class MongoTransactionObject implements SmartTransactionObject { + + private @Nullable MongoResourceHolder resourceHolder; + + MongoTransactionObject(@Nullable MongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * Set the {@link MongoResourceHolder}. + * + * @param resourceHolder can be {@literal null}. + */ + void setResourceHolder(@Nullable MongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * @return {@literal true} if a {@link MongoResourceHolder} is set. + */ + final boolean hasResourceHolder() { + return resourceHolder != null; + } + + /** + * Start a MongoDB transaction optionally given {@link TransactionOptions}. 
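+	 * Delegates to {@link ClientSession#startTransaction(TransactionOptions)} when options are given and to
+	 * {@link ClientSession#startTransaction()} otherwise.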
+ * + * @param options can be {@literal null} + */ + void startTransaction(@Nullable TransactionOptions options) { + + ClientSession session = getRequiredSession(); + if (options != null) { + session.startTransaction(options); + } else { + session.startTransaction(); + } + } + + /** + * Commit the transaction. + */ + public void commitTransaction() { + getRequiredSession().commitTransaction(); + } + + /** + * Rollback (abort) the transaction. + */ + public void abortTransaction() { + getRequiredSession().abortTransaction(); + } + + /** + * Close a {@link ClientSession} without regard to its transactional state. + */ + void closeSession() { + + ClientSession session = getRequiredSession(); + if (session.getServerSession() != null && !session.getServerSession().isClosed()) { + session.close(); + } + } + + @Nullable + public ClientSession getSession() { + return resourceHolder != null ? resourceHolder.getSession() : null; + } + + private MongoResourceHolder getRequiredResourceHolder() { + + Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O"); + return resourceHolder; + } + + private ClientSession getRequiredSession() { + + ClientSession session = getSession(); + Assert.state(session != null, "A Session is required but it turned out to be null"); + return session; + } + + @Override + public boolean isRollbackOnly() { + return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); + } + + @Override + public void flush() { + TransactionSynchronizationUtils.triggerFlush(); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java new file mode 100644 index 0000000000..e411bd5d2d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java @@ -0,0 +1,204 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.WriteConcernAware; +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.TransactionOptions; +import com.mongodb.WriteConcern; + +/** + * Options to be applied within a specific transaction scope. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptions + extends TransactionMetadata, ReadConcernAware, ReadPreferenceAware, WriteConcernAware { + + /** + * Value Object representing empty options enforcing client defaults. Returns {@literal null} for all getter methods. 
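+	 * For example, merging any options with {@code NONE} yields the original options instance (see
+	 * {@link #mergeWith(MongoTransactionOptions)}):
+	 *
+	 * <pre>
+	 * MongoTransactionOptions options = MongoTransactionOptions.of(TransactionOptions.builder().build());
+	 * options.mergeWith(MongoTransactionOptions.NONE); // returns options itself
+	 * </pre>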
+ */ + MongoTransactionOptions NONE = new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + return null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return null; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return null; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return null; + } + }; + + /** + * Merge current options with given ones. Will return first non {@literal null} value from getters whereas the + * {@literal this} has precedence over the given fallbackOptions. + * + * @param fallbackOptions can be {@literal null}. + * @return new instance of {@link MongoTransactionOptions} or this if {@literal fallbackOptions} is {@literal null} or + * {@link #NONE}. + */ + default MongoTransactionOptions mergeWith(@Nullable MongoTransactionOptions fallbackOptions) { + + if (fallbackOptions == null || MongoTransactionOptions.NONE.equals(fallbackOptions)) { + return this; + } + + return new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + return MongoTransactionOptions.this.hasMaxCommitTime() ? MongoTransactionOptions.this.getMaxCommitTime() + : fallbackOptions.getMaxCommitTime(); + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return MongoTransactionOptions.this.hasReadConcern() ? MongoTransactionOptions.this.getReadConcern() + : fallbackOptions.getReadConcern(); + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return MongoTransactionOptions.this.hasReadPreference() ? MongoTransactionOptions.this.getReadPreference() + : fallbackOptions.getReadPreference(); + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return MongoTransactionOptions.this.hasWriteConcern() ? MongoTransactionOptions.this.getWriteConcern() + : fallbackOptions.getWriteConcern(); + } + }; + } + + /** + * Apply the current options using the given mapping {@link Function} and return its result. + * + * @param mappingFunction + * @return result of the mapping function. + */ + default T map(Function mappingFunction) { + return mappingFunction.apply(this); + } + + /** + * @return MongoDB driver native {@link TransactionOptions}. + * @see MongoTransactionOptions#map(Function) + */ + @Nullable + default TransactionOptions toDriverOptions() { + + return map(it -> { + + if (MongoTransactionOptions.NONE.equals(it)) { + return null; + } + + TransactionOptions.Builder builder = TransactionOptions.builder(); + if (it.hasMaxCommitTime()) { + builder.maxCommitTime(it.getMaxCommitTime().toMillis(), TimeUnit.MILLISECONDS); + } + if (it.hasReadConcern()) { + builder.readConcern(it.getReadConcern()); + } + if (it.hasReadPreference()) { + builder.readPreference(it.getReadPreference()); + } + if (it.hasWriteConcern()) { + builder.writeConcern(it.getWriteConcern()); + } + return builder.build(); + }); + } + + /** + * Factory method to wrap given MongoDB driver native {@link TransactionOptions} into {@link MongoTransactionOptions}. + * + * @param options + * @return {@link MongoTransactionOptions#NONE} if given object is {@literal null}. + */ + static MongoTransactionOptions of(@Nullable TransactionOptions options) { + + if (options == null) { + return NONE; + } + + return new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + + Long millis = options.getMaxCommitTime(TimeUnit.MILLISECONDS); + return millis != null ? 
Duration.ofMillis(millis) : null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return options.getReadConcern(); + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return options.getReadPreference(); + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return options.getWriteConcern(); + } + + @Nullable + @Override + public TransactionOptions toDriverOptions() { + return options; + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..b73b079a99 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java @@ -0,0 +1,114 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.stream.Collectors; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A {@link TransactionOptionResolver} reading MongoDB specific {@link MongoTransactionOptions transaction options} from + * a {@link TransactionDefinition}. Implementations of {@link MongoTransactionOptions} may choose a specific + * {@link #getLabelPrefix() prefix} for {@link TransactionAttribute#getLabels() transaction attribute labels} to avoid + * evaluating non-store specific ones. + *

+ * {@link TransactionAttribute#getLabels() Labels} evaluated by default are expected to follow a property style,
+ * using {@code =} to separate key and value pairs.
+ *

+ * By default {@link #resolve(TransactionDefinition)} will filter labels by the {@link #getLabelPrefix() prefix} and + * strip the prefix from the label before handing the pruned {@link Map} to the {@link #convert(Map)} function. + *
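+ * For instance, with the default {@literal mongo:} prefix a single label is processed as follows (sketch):
+ *
+ * <pre>
+ * "mongo:readConcern=majority"        // label as declared on the transaction definition
+ *   -> "readConcern=majority"         // after prefix filtering and stripping
+ *   -> convert(Map.of("readConcern", "majority"))
+ * </pre>
+ *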

+ * A transaction definition with labels targeting MongoDB may look like the following: + *

+ * + * @Transactional(label = { "mongo:readConcern=majority" }) + * + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptionsResolver extends TransactionOptionResolver { + + /** + * Obtain the default {@link MongoTransactionOptionsResolver} implementation using a {@literal mongo:} + * {@link #getLabelPrefix() prefix}. + * + * @return instance of default {@link MongoTransactionOptionsResolver} implementation. + */ + static MongoTransactionOptionsResolver defaultResolver() { + return DefaultMongoTransactionOptionsResolver.INSTANCE; + } + + /** + * Get the prefix used to filter applicable {@link TransactionAttribute#getLabels() labels}. + * + * @return {@literal null} if no label defined. + */ + @Nullable + String getLabelPrefix(); + + /** + * Resolve {@link MongoTransactionOptions} from a given {@link TransactionDefinition} by evaluating + * {@link TransactionAttribute#getLabels()} labels if possible. + *

+ * Splits applicable labels property style using {@literal =} as deliminator and removes a potential + * {@link #getLabelPrefix() prefix} before calling {@link #convert(Map)} with filtered label values. + * + * @param definition + * @return {@link MongoTransactionOptions#NONE} in case the given {@link TransactionDefinition} is not a + * {@link TransactionAttribute} if no matching {@link TransactionAttribute#getLabels() labels} could be found. + * @throws IllegalArgumentException for options that do not map to valid transactions options or malformatted labels. + */ + @Override + default MongoTransactionOptions resolve(TransactionDefinition definition) { + + if (!(definition instanceof TransactionAttribute attribute)) { + return MongoTransactionOptions.NONE; + } + + if (attribute.getLabels().isEmpty()) { + return MongoTransactionOptions.NONE; + } + + Map attributeMap = attribute.getLabels().stream() + .filter(it -> !StringUtils.hasText(getLabelPrefix()) || it.startsWith(getLabelPrefix())) + .map(it -> StringUtils.hasText(getLabelPrefix()) ? it.substring(getLabelPrefix().length()) : it).map(it -> { + + String[] kvPair = StringUtils.split(it, "="); + Assert.isTrue(kvPair != null && kvPair.length == 2, + () -> "No value present for transaction option %s".formatted(kvPair != null ? kvPair[0] : it)); + return kvPair; + }) + + .collect(Collectors.toMap(it -> it[0].trim(), it -> it[1].trim())); + + return attributeMap.isEmpty() ? MongoTransactionOptions.NONE : convert(attributeMap); + } + + /** + * Convert the given {@link Map} into an instance of {@link MongoTransactionOptions}. + * + * @param options never {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException for invalid options. + */ + MongoTransactionOptions convert(Map options); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java new file mode 100644 index 0000000000..f2a6714a95 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java @@ -0,0 +1,100 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Interface for factories creating reactive {@link MongoDatabase} instances. 
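+ * A usage sketch (the factory instance and database name are illustrative):
+ *
+ * <pre>
+ * Mono<MongoDatabase> database = factory.getMongoDatabase("shop");
+ * </pre>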
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.0 + */ +public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { + + /** + * Creates a default {@link MongoDatabase} instance. + * + * @return never {@literal null}. + * @throws DataAccessException + */ + Mono getMongoDatabase() throws DataAccessException; + + /** + * Obtain a {@link MongoDatabase} instance to access the database with the given name. + * + * @param dbName must not be {@literal null} or empty. + * @return never {@literal null}. + * @throws DataAccessException + */ + Mono getMongoDatabase(String dbName) throws DataAccessException; + + /** + * Exposes a shared {@link MongoExceptionTranslator}. + * + * @return will never be {@literal null}. + */ + PersistenceExceptionTranslator getExceptionTranslator(); + + /** + * Get the underlying {@link CodecRegistry} used by the reactive MongoDB Java driver. + * + * @return never {@literal null}. + */ + CodecRegistry getCodecRegistry(); + + /** + * Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + Mono getSession(ClientSessionOptions options); + + /** + * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoDatabaseFactory} returning + * {@link MongoDatabase} instances that are aware and bound to the given session. + * + * @param session must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + ReactiveMongoDatabaseFactory withSession(ClientSession session); + + /** + * Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a + * {@link com.mongodb.reactivestreams.client.ClientSession} that has an + * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}. + * + * @return {@literal true} if there's an active transaction, {@literal false} otherwise. + * @since 2.2 + */ + default boolean isTransactionActive() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java new file mode 100644 index 0000000000..f397818a4c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -0,0 +1,266 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; +import reactor.util.context.Context; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.NoTransactionException; +import org.springframework.transaction.reactive.ReactiveResourceSynchronization; +import org.springframework.transaction.reactive.TransactionSynchronization; +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.support.ResourceHolderSynchronization; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for + * obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection} + * suitable for transactional usage. + *
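+ * A lookup sketch ({@code factory} is assumed to be a configured {@link ReactiveMongoDatabaseFactory}; within a
+ * transaction the emitted {@link MongoDatabase} is bound to the transactional {@link ClientSession}):
+ *
+ * <pre>
+ * Mono<MongoDatabase> database = ReactiveMongoDatabaseUtils.getDatabase(factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
+ * </pre>
+ *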
+ * Note: Intended for internal usage only.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @author Mathieu Ouellet
+ * @since 2.2
+ */
+public class ReactiveMongoDatabaseUtils {
+
+	/**
+	 * Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a
+	 * {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a
+	 * {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the
+	 * {@link ReactiveMongoDatabaseFactory resource} and if the associated
+	 * {@link com.mongodb.reactivestreams.client.ClientSession} has an
+	 * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
+	 *
+	 * @param databaseFactory the resource to check transactions for. Must not be {@literal null}.
+	 * @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction.
+	 */
+	public static Mono<Boolean> isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) {
+
+		if (databaseFactory.isTransactionActive()) {
+			return Mono.just(true);
+		}
+
+		return TransactionSynchronizationManager.forCurrentTransaction() //
+				.map(it -> {
+
+					ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory);
+					return holder != null && holder.hasActiveTransaction();
+				}) //
+				.onErrorResume(NoTransactionException.class, e -> Mono.just(false));
+	}
+
+	/**
+	 * Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory} using
+	 * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
+	 *
+	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
+	 * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
+	 *
+	 * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
+	 * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
+	 */
+	public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory) {
+		return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
+	}
+
+	/**
+	 * Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory}.
+	 *
+	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
+	 * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
+	 *
+	 * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
+	 * @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
+	 * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
+	 */
+	public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory,
+			SessionSynchronization sessionSynchronization) {
+		return doGetMongoDatabase(null, factory, sessionSynchronization);
+	}
+
+	/**
+	 * Obtain the {@link MongoDatabase database} with given name from the given {@link ReactiveMongoDatabaseFactory
+	 * factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
+	 *
+	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
+	 * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
+	 *
+	 * @param dbName the name of the {@link MongoDatabase} to get.
+	 * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
+	 * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
+	 */
+	public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) {
+		return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
+	}
+
+	/**
+	 * Obtain the {@link MongoDatabase database} with given name from the given {@link ReactiveMongoDatabaseFactory
+	 * factory}.
+	 *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(String dbName, ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(dbName, factory, sessionSynchronization); + } + + private static Mono doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + + Assert.notNull(factory, "DatabaseFactory must not be null"); + + if (sessionSynchronization == SessionSynchronization.NEVER) { + return getMongoDatabaseOrDefault(dbName, factory); + } + + return TransactionSynchronizationManager.forCurrentTransaction() + .filter(TransactionSynchronizationManager::isSynchronizationActive) // + .flatMap(synchronizationManager -> { + + return doGetSession(synchronizationManager, factory, sessionSynchronization) // + .flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it))); + }) // + .onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory)) + .switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory)); + } + + private static Mono getMongoDatabaseOrDefault(@Nullable String dbName, + ReactiveMongoDatabaseFactory factory) { + return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + private static Mono doGetSession(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) { + + final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager + .getResource(dbFactory); + + // check for native MongoDB transaction + if (registeredHolder != null + && (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) { + + return registeredHolder.hasSession() ? 
Mono.just(registeredHolder.getSession()) + : createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent); + } + + if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) { + return Mono.empty(); + } + + // init a non native MongoDB transaction by registering a MongoSessionSynchronization + return createClientSession(dbFactory).map(session -> { + + ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory); + newHolder.getRequiredSession().startTransaction(); + + synchronizationManager + .registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory)); + newHolder.setSynchronizedWithTransaction(true); + synchronizationManager.bindResource(dbFactory, newHolder); + + return newHolder.getSession(); + }); + } + + private static Mono createClientSession(ReactiveMongoDatabaseFactory dbFactory) { + return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + } + + /** + * MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when + * participating in a non-native MongoDB transaction, such as a R2CBC transaction. + * + * @author Mark Paluch + * @since 2.2 + */ + private static class MongoSessionSynchronization + extends ReactiveResourceSynchronization { + + private final ReactiveMongoResourceHolder resourceHolder; + + MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) { + + super(resourceHolder, dbFactory, synchronizationManager); + this.resourceHolder = resourceHolder; + } + + @Override + protected boolean shouldReleaseBeforeCompletion() { + return false; + } + + @Override + protected Mono processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) { + + if (isTransactionActive(resourceHolder)) { + return Mono.from(resourceHolder.getRequiredSession().commitTransaction()); + } + + return Mono.empty(); + } + + @Override + public Mono afterCompletion(int status) { + + return Mono.defer(() -> { + + if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) { + + return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) // + .then(super.afterCompletion(status)); + } + + return super.afterCompletion(status); + }); + } + + @Override + protected Mono releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) { + + return Mono.fromRunnable(() -> { + if (resourceHolder.hasActiveSession()) { + resourceHolder.getRequiredSession().close(); + } + }); + } + + private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) { + + if (!resourceHolder.hasSession()) { + return false; + } + + return resourceHolder.getRequiredSession().hasActiveTransaction(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java new file mode 100644 index 0000000000..33caa5e7fe --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java @@ -0,0 +1,155 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.lang.Nullable; +import org.springframework.transaction.support.ResourceHolderSupport; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds + * instances of this class to the subscriber context. + *
+ * Note: Intended for internal usage only. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + * @see ReactiveMongoTransactionManager + * @see ReactiveMongoTemplate + */ +class ReactiveMongoResourceHolder extends ResourceHolderSupport { + + private @Nullable ClientSession session; + private ReactiveMongoDatabaseFactory databaseFactory; + + /** + * Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}. + * + * @param session the associated {@link ClientSession}. Can be {@literal null}. + * @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}. + */ + ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) { + + this.session = session; + this.databaseFactory = databaseFactory; + } + + /** + * @return the associated {@link ClientSession}. Can be {@literal null}. + */ + @Nullable + ClientSession getSession() { + return session; + } + + /** + * @return the required associated {@link ClientSession}. + * @throws IllegalStateException if no session is associated. + */ + ClientSession getRequiredSession() { + + ClientSession session = getSession(); + + if (session == null) { + throw new IllegalStateException("No ClientSession associated"); + } + return session; + } + + /** + * @return the associated {@link ReactiveMongoDatabaseFactory}. + */ + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + /** + * Set the {@link ClientSession} to guard. + * + * @param session can be {@literal null}. + */ + public void setSession(@Nullable ClientSession session) { + this.session = session; + } + + /** + * @return {@literal true} if session is not {@literal null}. + */ + boolean hasSession() { + return session != null; + } + + /** + * If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a + * {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current + * bound session is returned. + * + * @param session + * @return + */ + @Nullable + public ClientSession setSessionIfAbsent(@Nullable ClientSession session) { + + if (!hasSession()) { + setSession(session); + } + + return session; + } + + /** + * @return {@literal true} if the session is active and has not been closed. + */ + boolean hasActiveSession() { + + if (!hasSession()) { + return false; + } + + return hasServerSession() && !getRequiredSession().getServerSession().isClosed(); + } + + /** + * @return {@literal true} if the session has an active transaction. + * @see #hasActiveSession() + */ + boolean hasActiveTransaction() { + + if (!hasActiveSession()) { + return false; + } + + return getRequiredSession().hasActiveTransaction(); + } + + /** + * @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated + * that is accessible via {@link ClientSession#getServerSession()}. 
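+	 *         A server session that is no longer accessible (the driver throwing an {@link IllegalStateException}) is
+	 *         reported as {@literal false}.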
+ */ + boolean hasServerSession() { + + try { + return getRequiredSession().getServerSession() != null; + } catch (IllegalStateException serverSessionClosed) { + // ignore + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java new file mode 100644 index 0000000000..2c65c26b79 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java @@ -0,0 +1,501 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.reactive.AbstractReactiveTransactionManager; +import org.springframework.transaction.reactive.GenericReactiveTransaction; +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.support.SmartTransactionObject; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.MongoException; +import com.mongodb.TransactionOptions; +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages + * {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
+ * Binds a {@link ClientSession} from the specified + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber + * {@link reactor.util.context.Context}.
+ * {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a + * {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start}, + * {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or + * {@link ClientSession#abortTransaction() abort} a transaction.
+ * Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead + * of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring + * classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly. + *
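+ * A programmatic usage sketch ({@code databaseFactory}, {@code template}, {@code order} and the {@code Order} type
+ * are assumed to exist; {@code TransactionalOperator} ships with {@code spring-tx}):
+ *
+ * <pre>
+ * ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
+ * TransactionalOperator operator = TransactionalOperator.create(txManager);
+ *
+ * Mono<Order> saved = template.insert(order).as(operator::transactional);
+ * </pre>
+ *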
+ * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override + * {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the + * Retry Commit Operation + * behavior as outlined in the MongoDB reference manual. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see MongoDB Transaction Documentation + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) + */ +public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean { + + private @Nullable ReactiveMongoDatabaseFactory databaseFactory; + private @Nullable MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; + + /** + * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
+	 * Note: The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
+	 * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory) set} before using the instance. Use this constructor
+	 * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
+	 *
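+	 * A bean-style setup sketch ({@code databaseFactory} is assumed to be available):
+	 *
+	 * <pre>
+	 * ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager();
+	 * txManager.setDatabaseFactory(databaseFactory); // required before first use
+	 * txManager.afterPropertiesSet();
+	 * </pre>
+	 *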
+ * Optionally it is possible to set default {@link TransactionOptions transaction options} defining + * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. + * + * @see #setDatabaseFactory(ReactiveMongoDatabaseFactory) + */ + public ReactiveMongoTransactionManager() { + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory}. + * + * @param databaseFactory must not be {@literal null}. + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param options can be {@literal null}. + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. + * @since 4.3 + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, + @Nullable MongoTransactionOptions defaultTransactionOptions) { + + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); + + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; + } + + @Override + protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager) + throws TransactionException { + + ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager + .getResource(getRequiredDatabaseFactory()); + return new ReactiveMongoTransactionObject(resourceHolder); + } + + @Override + protected boolean isExistingTransaction(Object transaction) throws TransactionException { + return extractMongoTransaction(transaction).hasResourceHolder(); + } + + @Override + protected Mono doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction, + TransactionDefinition definition) throws TransactionException { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + + Mono holder = newResourceHolder(definition, + ClientSessionOptions.builder().causallyConsistent(true).build()); + + return holder.doOnNext(resourceHolder -> { + + mongoTransactionObject.setResourceHolder(resourceHolder); + + if (logger.isDebugEnabled()) { + logger.debug( + String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + 
}).doOnNext(resourceHolder -> { + + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition) + .mergeWith(options); + mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + })// + .onErrorMap( + ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex)) + .doOnSuccess(resourceHolder -> { + + synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder); + }).then(); + }); + } + + @Override + protected Mono doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction) + throws TransactionException { + + return Mono.fromSupplier(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + mongoTransactionObject.setResourceHolder(null); + + return synchronizationManager.unbindResource(getRequiredDatabaseFactory()); + }); + } + + @Override + protected Mono doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction, + Object suspendedResources) { + return Mono + .fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources)); + } + + @Override + protected final Mono doCommit(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) throws TransactionException { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to commit transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> { + return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + }); + }); + } + + /** + * Customization hook to perform an actual commit of the given transaction.
+ * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding + * {@literal error labels}.
+ * By default those labels are ignored, nevertheless one might check for + * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the + * commit. + * + * @param synchronizationManager reactive synchronization manager. + * @param transactionObject never {@literal null}. + */ + protected Mono doCommit(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoTransactionObject transactionObject) { + return transactionObject.commitTransaction(); + } + + @Override + protected Mono doRollback(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to abort transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> { + return Mono + .error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex)); + }); + }); + } + + @Override + protected Mono doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) throws TransactionException { + + return Mono.fromRunnable(() -> { + ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status); + transactionObject.getRequiredResourceHolder().setRollbackOnly(); + }); + } + + @Override + protected Mono doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager, + Object transaction) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + transaction.getClass())); + + return Mono.fromRunnable(() -> { + ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction; + + // Remove the connection holder from the thread. + synchronizationManager.unbindResource(getRequiredDatabaseFactory()); + mongoTransactionObject.getRequiredResourceHolder().clear(); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to release Session %s after transaction.", + debugString(mongoTransactionObject.getSession()))); + } + + mongoTransactionObject.closeSession(); + }); + } + + /** + * Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for. + * + * @param databaseFactory must not be {@literal null}. + */ + public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) { + + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + this.databaseFactory = databaseFactory; + } + + /** + * Set the {@link TransactionOptions} to be applied when starting transactions. + * + * @param options can be {@literal null}. + */ + public void setOptions(@Nullable TransactionOptions options) { + this.options = MongoTransactionOptions.of(options); + } + + /** + * Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for. + * + * @return can be {@literal null}. 
+ */ + @Nullable + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + @Override + public void afterPropertiesSet() { + getRequiredDatabaseFactory(); + } + + private Mono newResourceHolder(TransactionDefinition definition, + ClientSessionOptions options) { + + ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory(); + + return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory)); + } + + /** + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. + */ + private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() { + + Assert.state(databaseFactory != null, + "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required"); + + return databaseFactory; + } + + private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + transaction.getClass())); + + return (ReactiveMongoTransactionObject) transaction; + } + + private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(), + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + status.getTransaction().getClass())); + + return (ReactiveMongoTransactionObject) status.getTransaction(); + } + + private static String debugString(@Nullable ClientSession session) { + + if (session == null) { + return "null"; + } + + String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()), + Integer.toHexString(session.hashCode())); + + try { + if (session.getServerSession() != null) { + debugString += String.format("id = %s, ", session.getServerSession().getIdentifier()); + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } else { + debugString += "id = n/a"; + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } + } catch (RuntimeException e) { + debugString += String.format("error = %s", e.getMessage()); + } + + debugString += "]"; + + return debugString; + } + + /** + * MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by + * {@link ReactiveMongoTransactionManager}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see ReactiveMongoResourceHolder + */ + protected static class ReactiveMongoTransactionObject implements SmartTransactionObject { + + private @Nullable ReactiveMongoResourceHolder resourceHolder; + + ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * Set the {@link MongoResourceHolder}. + * + * @param resourceHolder can be {@literal null}. + */ + void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * @return {@literal true} if a {@link MongoResourceHolder} is set. + */ + final boolean hasResourceHolder() { + return resourceHolder != null; + } + + /** + * Start a MongoDB transaction optionally given {@link TransactionOptions}. + * + * @param options can be {@literal null} + */ + void startTransaction(@Nullable TransactionOptions options) { + + ClientSession session = getRequiredSession(); + if (options != null) { + session.startTransaction(options); + } else { + session.startTransaction(); + } + } + + /** + * Commit the transaction. + */ + public Mono commitTransaction() { + return Mono.from(getRequiredSession().commitTransaction()); + } + + /** + * Rollback (abort) the transaction. + */ + public Mono abortTransaction() { + return Mono.from(getRequiredSession().abortTransaction()); + } + + /** + * Close a {@link ClientSession} without regard to its transactional state. + */ + void closeSession() { + + ClientSession session = getRequiredSession(); + if (session.getServerSession() != null && !session.getServerSession().isClosed()) { + session.close(); + } + } + + @Nullable + public ClientSession getSession() { + return resourceHolder != null ? resourceHolder.getSession() : null; + } + + private ReactiveMongoResourceHolder getRequiredResourceHolder() { + + Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O"); + return resourceHolder; + } + + private ClientSession getRequiredSession() { + + ClientSession session = getSession(); + Assert.state(session != null, "A Session is required but it turned out to be null"); + return session; + } + + @Override + public boolean isRollbackOnly() { + return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); + } + + @Override + public void flush() { + throw new UnsupportedOperationException("flush() not supported"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java new file mode 100644 index 0000000000..93dbf5db69 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java @@ -0,0 +1,207 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.util.Optional; +import java.util.function.BiFunction; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.springframework.core.MethodClassKey; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ConcurrentReferenceHashMap; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.WriteConcern; +import com.mongodb.session.ClientSession; + +/** + * {@link MethodInterceptor} implementation looking up and invoking an alternative target method having + * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base. + *
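+ * For example (conceptual sketch; {@code proxy}, {@code target}, {@code session} and {@code filter} are
+ * illustrative), a call made through the session-bound proxy is redirected to the matching {@code ClientSession}
+ * overload of the target when one exists:
+ *
+ * <pre>
+ * proxy.countDocuments(filter);           // caller-facing invocation
+ * target.countDocuments(session, filter); // actual invocation, session prepended to the arguments
+ * </pre>
+ *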
+ * The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that may return new instances of itself
+ * (e.g. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)}) and decorates them
+ * if not already proxied.
+ *
+ * @param <D> Type of the actual Mongo Database.
+ * @param <C> Type of the actual Mongo Collection.
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 2.1
+ */
+public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
+
+	private static final MethodCache METHOD_CACHE = new MethodCache();
+
+	private final ClientSession session;
+	private final ClientSessionOperator<C> collectionDecorator;
+	private final ClientSessionOperator<D> databaseDecorator;
+	private final Object target;
+	private final Class<?> targetType;
+	private final Class<?> collectionType;
+	private final Class<?> databaseType;
+	private final Class<? extends ClientSession> sessionType;
+
+	/**
+	 * Create a new SessionAwareMethodInterceptor for the given target.
+	 *
+	 * @param session the {@link ClientSession} to be used on invocation.
+	 * @param target the original target object.
+	 * @param sessionType the type of the {@link ClientSession} overload to look up.
+	 * @param databaseType the MongoDB database type.
+	 * @param databaseDecorator a {@link ClientSessionOperator} used to create the proxy for an imperative / reactive
+	 *          {@code MongoDatabase}.
+	 * @param collectionType the MongoDB collection type.
+	 * @param collectionDecorator a {@link ClientSessionOperator} used to create the proxy for an imperative / reactive
+	 *          {@code MongoCollection}.
+	 * @param <T> target object type.
+	 */
+	public <T> SessionAwareMethodInterceptor(ClientSession session, T target, Class<? extends ClientSession> sessionType,
+			Class<D> databaseType, ClientSessionOperator<D> databaseDecorator, Class<C> collectionType,
+			ClientSessionOperator<C> collectionDecorator) {
+
+		Assert.notNull(session, "ClientSession must not be null");
+		Assert.notNull(target, "Target must not be null");
+		Assert.notNull(sessionType, "SessionType must not be null");
+		Assert.notNull(databaseType, "Database type must not be null");
+		Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null");
+		Assert.notNull(collectionType, "Collection type must not be null");
+		Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null");
+
+		this.session = session;
+		this.target = target;
+		this.databaseType = ClassUtils.getUserClass(databaseType);
+		this.collectionType = ClassUtils.getUserClass(collectionType);
+		this.collectionDecorator = collectionDecorator;
+		this.databaseDecorator = databaseDecorator;
+
+		this.targetType = ClassUtils.isAssignable(databaseType, target.getClass()) ? databaseType : collectionType;
+		this.sessionType = sessionType;
+	}
+
+	@Nullable
+	@Override
+	public Object invoke(MethodInvocation methodInvocation) throws Throwable {
+
+		if (requiresDecoration(methodInvocation.getMethod())) {
+
+			Object target = methodInvocation.proceed();
+			if (target instanceof Proxy) {
+				return target;
+			}
+
+			return decorate(target);
+		}
+
+		if (!requiresSession(methodInvocation.getMethod())) {
+			return methodInvocation.proceed();
+		}
+
+		Optional<Method> targetMethod = METHOD_CACHE.lookup(methodInvocation.getMethod(), targetType, sessionType);
+
+		return !targetMethod.isPresent() ?
				methodInvocation.proceed()
				: ReflectionUtils.invokeMethod(targetMethod.get(), target,
						prependSessionToArguments(session, methodInvocation));
	}

	private boolean requiresDecoration(Method method) {

		return ClassUtils.isAssignable(databaseType, method.getReturnType())
				|| ClassUtils.isAssignable(collectionType, method.getReturnType());
	}

	@SuppressWarnings("unchecked")
	protected Object decorate(Object target) {

		return ClassUtils.isAssignable(databaseType, target.getClass()) ? databaseDecorator.apply(session, (D) target)
				: collectionDecorator.apply(session, (C) target);
	}

	private static boolean requiresSession(Method method) {

		return method.getParameterCount() == 0
				|| !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0]);
	}

	private static Object[] prependSessionToArguments(ClientSession session, MethodInvocation invocation) {

		Object[] args = new Object[invocation.getArguments().length + 1];

		args[0] = session;
		System.arraycopy(invocation.getArguments(), 0, args, 1, invocation.getArguments().length);

		return args;
	}

	/**
	 * Simple {@link Method} to {@link Method} caching facility for {@link ClientSession} overloaded targets.
	 *
	 * @since 2.1
	 * @author Christoph Strobl
	 */
	static class MethodCache {

		private final ConcurrentReferenceHashMap<MethodClassKey, Optional<Method>> cache = new ConcurrentReferenceHashMap<>();

		/**
		 * Lookup the target {@link Method}.
		 *
		 * @param method the source method.
		 * @param targetClass the target type to inspect.
		 * @param sessionType the {@link ClientSession} type of the overload.
		 * @return the {@link ClientSession} accepting overload if present.
		 */
		Optional<Method> lookup(Method method, Class<?> targetClass, Class<? extends ClientSession> sessionType) {

			return cache.computeIfAbsent(new MethodClassKey(method, targetClass),
					val -> Optional.ofNullable(findTargetWithSession(method, targetClass, sessionType)));
		}

		@Nullable
		private Method findTargetWithSession(Method sourceMethod, Class<?> targetType,
				Class<? extends ClientSession> sessionType) {

			Class<?>[] argTypes = sourceMethod.getParameterTypes();
			Class<?>[] args = new Class<?>[argTypes.length + 1];
			args[0] = sessionType;
			System.arraycopy(argTypes, 0, args, 1, argTypes.length);

			return ReflectionUtils.findMethod(targetType, sourceMethod.getName(), args);
		}

		/**
		 * Check whether the cache contains an entry for {@link Method} and {@link Class}.
		 *
		 * @param method the source method.
		 * @param targetClass the target type.
		 * @return {@literal true} if an entry is present.
		 */
		boolean contains(Method method, Class<?> targetClass) {
			return cache.containsKey(new MethodClassKey(method, targetClass));
		}
	}

	/**
	 * Represents an operation upon two operands of the same type, producing a result of the same type as the operands,
	 * additionally accepting a {@link ClientSession}. This is a specialization of {@link BiFunction} for the case where
	 * the operands and the result are all of the same type.
	 *
	 * @param <T> the type of the operands and result of the operator
	 */
	public interface ClientSessionOperator<T> extends BiFunction<ClientSession, T, T> {}
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java
new file mode 100644
index 0000000000..07b5c31586
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
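// ---- Editor's note (not part of the diff): usage sketch ----
// How a session-bound MongoDatabase proxy might be assembled with the interceptor above.
// Spring Data wires this internally; "session" and "database" are assumed inputs, and the
// collection decorator is kept as identity for brevity (real code decorates recursively).
static MongoDatabase sessionBound(ClientSession session, MongoDatabase database) {

	ProxyFactory factory = new ProxyFactory();
	factory.setTarget(database);
	factory.setInterfaces(MongoDatabase.class);
	factory.setOpaque(true);
	factory.addAdvice(new SessionAwareMethodInterceptor<>(session, database, ClientSession.class, MongoDatabase.class,
			(s, db) -> sessionBound(s, db), MongoCollection.class, (s, collection) -> collection));

	return (MongoDatabase) factory.getProxy();
}
// ---- end editor's note ----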
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + +/** + * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to + * participate if any. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see MongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) + */ +public enum SessionSynchronization { + + /** + * Synchronize with any transaction even with empty transactions and initiate a MongoDB transaction when doing so by + * registering a MongoDB specific {@link org.springframework.transaction.support.ResourceHolderSynchronization}. + */ + ALWAYS, + + /** + * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}. + */ + ON_ACTUAL_TRANSACTION, + + /** + * Do not participate in ongoing transactions. + * + * @since 3.2.5 + */ + NEVER +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java new file mode 100644 index 0000000000..b52fc0bd71 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java @@ -0,0 +1,154 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.Function; +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; + +/** + * Trivial implementation of {@link MongoTransactionOptions}. 
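// ---- Editor's note (not part of the diff): usage sketch ----
// Opting a template out of transaction participation via the enum above, assuming an
// existing MongoDatabaseFactory "databaseFactory" and MappingMongoConverter "converter".
MongoTemplate template = new MongoTemplate(databaseFactory, converter);
template.setSessionSynchronization(SessionSynchronization.NEVER);
// ---- end editor's note ----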
+ *
+ * @author Christoph Strobl
+ * @since 4.3
+ */
+class SimpleMongoTransactionOptions implements MongoTransactionOptions {
+
+	static final Set<String> KNOWN_KEYS = Arrays.stream(OptionKey.values()).map(OptionKey::getKey)
+			.collect(Collectors.toSet());
+
+	private final Duration maxCommitTime;
+	private final ReadConcern readConcern;
+	private final ReadPreference readPreference;
+	private final WriteConcern writeConcern;
+
+	static SimpleMongoTransactionOptions of(Map<String, String> options) {
+		return new SimpleMongoTransactionOptions(options);
+	}
+
+	private SimpleMongoTransactionOptions(Map<String, String> options) {
+
+		this.maxCommitTime = doGetMaxCommitTime(options);
+		this.readConcern = doGetReadConcern(options);
+		this.readPreference = doGetReadPreference(options);
+		this.writeConcern = doGetWriteConcern(options);
+	}
+
+	@Nullable
+	@Override
+	public Duration getMaxCommitTime() {
+		return maxCommitTime;
+	}
+
+	@Nullable
+	@Override
+	public ReadConcern getReadConcern() {
+		return readConcern;
+	}
+
+	@Nullable
+	@Override
+	public ReadPreference getReadPreference() {
+		return readPreference;
+	}
+
+	@Nullable
+	@Override
+	public WriteConcern getWriteConcern() {
+		return writeConcern;
+	}
+
+	@Override
+	public String toString() {
+
+		return "SimpleMongoTransactionOptions{" + "maxCommitTime=" + maxCommitTime + ", readConcern=" + readConcern
+				+ ", readPreference=" + readPreference + ", writeConcern=" + writeConcern + '}';
+	}
+
+	@Nullable
+	private static Duration doGetMaxCommitTime(Map<String, String> options) {
+
+		return getValue(options, OptionKey.MAX_COMMIT_TIME, value -> {
+
+			Duration timeout = Duration.parse(value);
+			Assert.isTrue(!timeout.isNegative(), "%s cannot be negative".formatted(OptionKey.MAX_COMMIT_TIME));
+			return timeout;
+		});
+	}
+
+	@Nullable
+	private static ReadConcern doGetReadConcern(Map<String, String> options) {
+		return getValue(options, OptionKey.READ_CONCERN, value -> new ReadConcern(ReadConcernLevel.fromString(value)));
+	}
+
+	@Nullable
+	private static ReadPreference doGetReadPreference(Map<String, String> options) {
+		return getValue(options, OptionKey.READ_PREFERENCE, ReadPreference::valueOf);
+	}
+
+	@Nullable
+	private static WriteConcern doGetWriteConcern(Map<String, String> options) {
+
+		return getValue(options, OptionKey.WRITE_CONCERN, value -> {
+
+			WriteConcern writeConcern = WriteConcern.valueOf(value);
+			if (writeConcern == null) {
+				throw new IllegalArgumentException("'%s' is not a valid WriteConcern".formatted(value));
+			}
+			return writeConcern;
+		});
+	}
+
+	@Nullable
+	private static <T> T getValue(Map<String, String> options, OptionKey key, Function<String, T> convertFunction) {
+
+		String value = options.get(key.getKey());
+		return value != null ? convertFunction.apply(value) : null;
+	}
+
+	enum OptionKey {
+
+		MAX_COMMIT_TIME("maxCommitTime"), //
+		READ_CONCERN("readConcern"), //
+		READ_PREFERENCE("readPreference"), //
+		WRITE_CONCERN("writeConcern");
+
+		final String key;
+
+		OptionKey(String key) {
+			this.key = key;
+		}
+
+		public String getKey() {
+			return key;
+		}
+
+		@Override
+		public String toString() {
+			return getKey();
+		}
+	}
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java
new file mode 100644
index 0000000000..a3d600270f
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
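// ---- Editor's note (not part of the diff): usage sketch ----
// Shape of the input parsed by SimpleMongoTransactionOptions above; the keys match
// OptionKey. of(..) is package-private, so this is illustrative only.
Map<String, String> raw = Map.of( //
		"maxCommitTime", "PT5S", // ISO-8601 Duration, must not be negative
		"readConcern", "majority", // a ReadConcernLevel name
		"readPreference", "primary", // resolved via ReadPreference.valueOf(..)
		"writeConcern", "majority"); // resolved via WriteConcern.valueOf(..)
MongoTransactionOptions txOptions = SimpleMongoTransactionOptions.of(raw);
// ---- end editor's note ----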
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.data.util.Version;
+import org.springframework.util.StringUtils;
+
+import com.mongodb.MongoDriverInformation;
+
+/**
+ * Class that exposes the Spring Data MongoDB specific information like the current {@link Version} or
+ * {@link MongoDriverInformation driver information}.
+ *
+ * @author Christoph Strobl
+ * @since 3.0
+ */
+public class SpringDataMongoDB {
+
+	private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class);
+
+	private static final Version FALLBACK_VERSION = new Version(3);
+	private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
+			.builder(MongoDriverInformation.builder().build()).driverName("spring-data").build();
+
+	/**
+	 * Obtain the Spring Data MongoDB specific driver information.
+	 *
+	 * @return never {@literal null}.
+	 */
+	public static MongoDriverInformation driverInformation() {
+		return DRIVER_INFORMATION;
+	}
+
+	/**
+	 * Fetches the "Implementation-Version" manifest attribute from the jar file.
+	 *
+ * Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the + * version in all environments. In this case the current Major version is returned as a fallback. + * + * @return never {@literal null}. + */ + public static Version version() { + + Package pkg = SpringDataMongoDB.class.getPackage(); + String versionString = (pkg != null ? pkg.getImplementationVersion() : null); + + if (!StringUtils.hasText(versionString)) { + + LOGGER.debug("Unable to find Spring Data MongoDB version."); + return FALLBACK_VERSION; + } + + try { + return Version.parse(versionString); + } catch (Exception e) { + LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString)); + } + + return FALLBACK_VERSION; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java new file mode 100644 index 0000000000..cd5f58d5b1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; + +import org.springframework.lang.Nullable; + +/** + * MongoDB-specific transaction metadata. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface TransactionMetadata { + + /** + * @return the maximum commit time. Can be {@literal null} if not configured. + */ + @Nullable + Duration getMaxCommitTime(); + + /** + * @return {@literal true} if the max commit time is configured; {@literal false} otherwise. + */ + default boolean hasMaxCommitTime() { + return getMaxCommitTime() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java new file mode 100644 index 0000000000..37c7e3686b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
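// ---- Editor's note (not part of the diff): usage sketch ----
// Reading the module version with the fallback semantics described above.
Version version = SpringDataMongoDB.version(); // falls back to major version 3 when no manifest is readable
MongoDriverInformation info = SpringDataMongoDB.driverInformation(); // adds "spring-data" to the driver name
// ---- end editor's note ----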
+ */
+package org.springframework.data.mongodb;
+
+import org.springframework.lang.Nullable;
+import org.springframework.transaction.TransactionDefinition;
+
+/**
+ * Interface that defines a resolver for {@link TransactionMetadata} based on a {@link TransactionDefinition}.
+ * Transaction metadata is used to enrich the MongoDB transaction with additional information.
+ *
+ * @author Christoph Strobl
+ * @since 4.3
+ */
+interface TransactionOptionResolver<T extends TransactionMetadata> {
+
+	/**
+	 * Resolves the transaction metadata from a given {@link TransactionDefinition}.
+	 *
+	 * @param definition the {@link TransactionDefinition}.
+	 * @return the resolved {@link TransactionMetadata} or {@literal null} if the resolver cannot resolve any metadata.
+	 */
+	@Nullable
+	T resolve(TransactionDefinition definition);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java
new file mode 100644
index 0000000000..5446170ff9
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb;
+
+import org.springframework.dao.TransientDataAccessException;
+
+/**
+ * {@link TransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data
+ * access failures such as reading data using an already closed session.
+ *
+ * @author Christoph Strobl
+ * @since 4.4
+ */
+public class TransientClientSessionException extends TransientMongoDbException {
+
+	/**
+	 * Constructor for {@link TransientClientSessionException}.
+	 *
+	 * @param msg the detail message.
+	 * @param cause the root cause.
+	 */
+	public TransientClientSessionException(String msg, Throwable cause) {
+		super(msg, cause);
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java
new file mode 100644
index 0000000000..cad05ca17c
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * Root of the hierarchy of MongoDB specific data access exceptions that are considered transient such as + * {@link com.mongodb.MongoException MongoExceptions} carrying {@link com.mongodb.MongoException#hasErrorLabel(String) + * specific labels}. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientMongoDbException extends TransientDataAccessException { + + /** + * Constructor for {@link TransientMongoDbException}. + * + * @param msg the detail message. + * @param cause the root cause. + */ + public TransientMongoDbException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java index 7d33b0871a..bec05d0d68 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,12 +16,13 @@ package org.springframework.data.mongodb; import org.springframework.dao.UncategorizedDataAccessException; +import org.springframework.lang.Nullable; public class UncategorizedMongoDbException extends UncategorizedDataAccessException { private static final long serialVersionUID = -2336595514062364929L; - public UncategorizedMongoDbException(String msg, Throwable cause) { + public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) { super(msg, cause); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java new file mode 100644 index 0000000000..2254b3c9a8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
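// ---- Editor's note (not part of the diff): usage sketch ----
// The hierarchy above extends Spring's TransientDataAccessException, signalling that a
// retry may succeed. "template" and "order" are assumed; a single naive retry shown.
try {
	template.insert(order);
} catch (TransientMongoDbException e) {
	// e.g. a MongoException carrying the "TransientTransactionError" label
	template.insert(order);
}
// ---- end editor's note ----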
+ */
+package org.springframework.data.mongodb.aot;
+
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.springframework.aot.generate.GenerationContext;
+import org.springframework.aot.hint.TypeReference;
+import org.springframework.core.annotation.AnnotatedElementUtils;
+import org.springframework.core.annotation.MergedAnnotations;
+import org.springframework.data.annotation.Reference;
+import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory;
+import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor;
+import org.springframework.data.mongodb.core.mapping.DBRef;
+import org.springframework.data.mongodb.core.mapping.DocumentReference;
+
+/**
+ * @author Christoph Strobl
+ * @since 4.0
+ */
+public class LazyLoadingProxyAotProcessor {
+
+	private boolean generalLazyLoadingProxyContributed = false;
+
+	public void registerLazyLoadingProxyIfNeeded(Class<?> type, GenerationContext generationContext) {
+
+		Set<Field> refFields = getFieldsWithAnnotationPresent(type, Reference.class);
+		if (refFields.isEmpty()) {
+			return;
+		}
+
+		refFields.stream() //
+				.filter(LazyLoadingProxyAotProcessor::isLazyLoading) //
+				.forEach(field -> {
+
+					if (!generalLazyLoadingProxyContributed) {
+						generationContext.getRuntimeHints().proxies().registerJdkProxy(
+								TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class),
+								TypeReference.of(org.springframework.aop.SpringProxy.class),
+								TypeReference.of(org.springframework.aop.framework.Advised.class),
+								TypeReference.of(org.springframework.core.DecoratingProxy.class));
+						generalLazyLoadingProxyContributed = true;
+					}
+
+					if (field.getType().isInterface()) {
+
+						List<Class<?>> interfaces = new ArrayList<>(
+								Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces()));
+						interfaces.add(org.springframework.aop.SpringProxy.class);
+						interfaces.add(org.springframework.aop.framework.Advised.class);
+						interfaces.add(org.springframework.core.DecoratingProxy.class);
+
+						generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new));
+					} else {
+
+						Class<?> proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(),
+								LazyLoadingInterceptor::none);
+
+						// see: spring-projects/spring-framework/issues/29309
+						generationContext.getRuntimeHints().reflection().registerType(proxyClass,
+								MongoAotReflectionHelper::cglibProxyReflectionMemberAccess);
+					}
+				});
+	}
+
+	private static boolean isLazyLoading(Field field) {
+		if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) {
+			return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy();
+		}
+		if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) {
+			return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy();
+		}
+		return false;
+	}
+
+	private static Set<Field> getFieldsWithAnnotationPresent(Class<?> type, Class<? extends Annotation> annotation) {
+
+		Set<Field> fields = new LinkedHashSet<>();
+		for (Field field : type.getDeclaredFields()) {
+			if (MergedAnnotations.from(field).get(annotation).isPresent()) {
+				fields.add(field);
+			}
+		}
+		return fields;
+	}
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java
new file mode 100644
index 0000000000..2fe27a2c9e
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.aot;
+
+import java.util.function.Predicate;
+
+import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
+import org.springframework.data.util.ReactiveWrappers;
+import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary;
+import org.springframework.data.util.TypeUtils;
+import org.springframework.lang.Nullable;
+import org.springframework.util.ClassUtils;
+
+/**
+ * Collection of {@link Predicate predicates} to determine dynamic library aspects during AOT computation. Intended for
+ * internal usage only.
+ *
+ * @author Christoph Strobl
+ * @since 4.0
+ */
+public class MongoAotPredicates {
+
+	public static final Predicate<Class<?>> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type)
+			|| TypeUtils.type(type).isPartOf("org.bson");
+	public static final Predicate<ReactiveLibrary> IS_REACTIVE_LIBRARY_AVAILABLE = ReactiveWrappers::isAvailable;
+	public static final Predicate<ClassLoader> IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils
+			.isPresent("com.mongodb.client.MongoClient", classLoader);
+	public static final Predicate<ClassLoader> IS_REACTIVE_CLIENT_PRESENT = (classLoader) -> ClassUtils
+			.isPresent("com.mongodb.reactivestreams.client.MongoClient", classLoader);
+
+	/**
+	 * @return {@literal true} if Project Reactor is present.
+	 */
+	public static boolean isReactorPresent() {
+		return IS_REACTIVE_LIBRARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR);
+	}
+
+	/**
+	 * @param classLoader can be {@literal null}.
+	 * @return {@literal true} if the {@link com.mongodb.client.MongoClient} is present.
+	 * @since 4.0
+	 */
+	public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) {
+		return IS_SYNC_CLIENT_PRESENT.test(classLoader);
+	}
+
+	/**
+	 * @param classLoader can be {@literal null}.
+	 * @return {@literal true} if the {@link com.mongodb.reactivestreams.client.MongoClient} is present.
+	 * @since 4.3
+	 */
+	public static boolean isReactiveClientPresent(@Nullable ClassLoader classLoader) {
+		return IS_REACTIVE_CLIENT_PRESENT.test(classLoader);
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java
new file mode 100644
index 0000000000..ff8d04b382
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2024-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
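// ---- Editor's note (not part of the diff): usage sketch ----
// Guarding AOT contributions on classpath state with the predicates above; "hints" and
// "classLoader" are assumed to come from a RuntimeHintsRegistrar implementation.
if (MongoAotPredicates.isReactorPresent() && MongoAotPredicates.isReactiveClientPresent(classLoader)) {
	hints.reflection().registerType(com.mongodb.reactivestreams.client.MongoClient.class,
			MemberCategory.INVOKE_PUBLIC_METHODS);
}
// ---- end editor's note ----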
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.TypeHint.Builder; + +/** + * @author Christoph Strobl + */ +public final class MongoAotReflectionHelper { + + public static void cglibProxyReflectionMemberAccess(Builder builder) { + + builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, + MemberCategory.DECLARED_FIELDS); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java new file mode 100644 index 0000000000..a33f20ffb6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java @@ -0,0 +1,56 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.data.mongodb.aot;
+
+import org.springframework.aot.generate.GenerationContext;
+import org.springframework.core.ResolvableType;
+import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor;
+import org.springframework.data.mongodb.MongoManagedTypes;
+import org.springframework.lang.Nullable;
+import org.springframework.util.ClassUtils;
+
+/**
+ * @author Christoph Strobl
+ * @since 2022/06
+ */
+class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor {
+
+	private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor();
+
+	public MongoManagedTypesBeanRegistrationAotProcessor() {
+		setModuleIdentifier("mongo");
+	}
+
+	@Override
+	protected boolean isMatch(@Nullable Class<?> beanType, @Nullable String beanName) {
+		return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName);
+	}
+
+	protected boolean isMongoManagedTypes(@Nullable Class<?> beanType) {
+		return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType);
+	}
+
+	@Override
+	protected void contributeType(ResolvableType type, GenerationContext generationContext) {
+
+		if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) {
+			return;
+		}
+
+		super.contributeType(type, generationContext);
+		lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext);
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java
new file mode 100644
index 0000000000..538fe4e812
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.aot; + +import static org.springframework.data.mongodb.aot.MongoAotPredicates.*; + +import java.util.Arrays; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.UnixServerAddress; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * {@link RuntimeHintsRegistrar} for repository types and entity callbacks. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.0 + */ +class MongoRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class), + TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + registerTransactionProxyHints(hints, classLoader); + registerMongoCompatibilityAdapterHints(hints, classLoader); + + if (isReactorPresent()) { + + hints.reflection() + .registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class), + TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class), + TypeReference.of(ReactiveAfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + } + + private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + if (MongoAotPredicates.isSyncClientPresent(classLoader) + && ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) { + + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + } + } + + @SuppressWarnings("deprecation") + private static void registerMongoCompatibilityAdapterHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection() // + 
.registerType(MongoClientSettings.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MongoClientSettings.Builder.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(IndexOptions.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(ServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(UnixServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) // + .registerType(TypeReference.of("com.mongodb.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.build.MongoDriverVersion"), MemberCategory.PUBLIC_FIELDS); + + if (MongoAotPredicates.isSyncClientPresent(classLoader)) { + + hints.reflection() // + .registerType(MongoDatabase.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReduceIterable.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + + if (MongoAotPredicates.isReactiveClientPresent(classLoader)) { + + hints.reflection() // + .registerType(com.mongodb.reactivestreams.client.MongoDatabase.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReducePublisher.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java new file mode 100644 index 0000000000..93033417fb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java @@ -0,0 +1,111 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SpringDataMongoDB; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}. + * + * @author Christoph Strobl + * @since 2.1 + * @see MongoConfigurationSupport + */ +@Configuration(proxyBeanMethods = false) +public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport { + + /** + * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a + * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
+	 * Override {@link #mongoClientSettings()} to configure connection details.
+	 *
+	 * @return never {@literal null}.
+	 * @see #mongoClientSettings()
+	 * @see #configureClientSettings(Builder)
+	 */
+	public MongoClient mongoClient() {
+		return createMongoClient(mongoClientSettings());
+	}
+
+	/**
+	 * Creates a {@link MongoTemplate}.
+	 *
+	 * @see #mongoDbFactory()
+	 * @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
+	 */
+	@Bean
+	public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
+		return new MongoTemplate(databaseFactory, converter);
+	}
+
+	/**
+	 * Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the
+	 * {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
+	 *
+	 * @see #mongoClient()
+	 * @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
+	 */
+	@Bean
+	public MongoDatabaseFactory mongoDbFactory() {
+		return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
+	}
+
+	/**
+	 * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
+	 * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get
+	 * {@link #customConversions()} applied.
+	 *
+	 * @see #customConversions()
+	 * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
+	 * @see #mongoDbFactory()
+	 */
+	@Bean
+	public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
+			MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
+
+		DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
+		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
+		converter.setCustomConversions(customConversions);
+		converter.setCodecRegistryProvider(databaseFactory);
+
+		return converter;
+	}
+
+	/**
+	 * Create the {@link MongoClient} instance with the given {@link MongoClientSettings}.
+	 *
+	 * @return never {@literal null}.
+	 * @since 3.0
+	 */
+	protected MongoClient createMongoClient(MongoClientSettings settings) {
+		return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java
deleted file mode 100644
index b3915c7530..0000000000
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
- * Copyright 2011-2015 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
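// ---- Editor's note (not part of the diff): usage sketch ----
// A minimal concrete configuration built on AbstractMongoClientConfiguration above;
// the database name "orders" is an assumption for illustration.
@Configuration(proxyBeanMethods = false)
class ApplicationMongoConfiguration extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "orders";
	}
}
// ---- end editor's note ----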
- */ -package org.springframework.data.mongodb.config; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.type.filter.AnnotationTypeFilter; -import org.springframework.data.annotation.Persistent; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; -import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; -import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.data.mongodb.core.convert.CustomConversions; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.support.CachingIsNewStrategyFactory; -import org.springframework.data.support.IsNewStrategyFactory; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; - -import com.mongodb.Mongo; -import com.mongodb.MongoClient; - -/** - * Base class for Spring Data MongoDB configuration using JavaConfig. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Ryan Tenney - * @author Christoph Strobl - */ -@Configuration -public abstract class AbstractMongoConfiguration { - - /** - * Return the name of the database to connect to. - * - * @return must not be {@literal null}. - */ - protected abstract String getDatabaseName(); - - /** - * Return the name of the authentication database to use. Defaults to {@literal null} and will turn into the value - * returned by {@link #getDatabaseName()} later on effectively. - * - * @return - * @deprecated since 1.7. {@link MongoClient} should hold authentication data within - * {@link MongoClient#getCredentialsList()} - */ - @Deprecated - protected String getAuthenticationDatabaseName() { - return null; - } - - /** - * Return the {@link Mongo} instance to connect to. Annotate with {@link Bean} in case you want to expose a - * {@link Mongo} instance to the {@link org.springframework.context.ApplicationContext}. - * - * @return - * @throws Exception - */ - public abstract Mongo mongo() throws Exception; - - /** - * Creates a {@link MongoTemplate}. - * - * @return - * @throws Exception - */ - @Bean - public MongoTemplate mongoTemplate() throws Exception { - return new MongoTemplate(mongoDbFactory(), mappingMongoConverter()); - } - - /** - * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link Mongo} instance - * configured in {@link #mongo()}. 
- * - * @see #mongo() - * @see #mongoTemplate() - * @return - * @throws Exception - */ - @Bean - public MongoDbFactory mongoDbFactory() throws Exception { - return new SimpleMongoDbFactory(mongo(), getDatabaseName(), getUserCredentials(), getAuthenticationDatabaseName()); - } - - /** - * Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration - * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending - * {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is - * overriden to implement alternate behaviour. - * - * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for - * entities. - */ - protected String getMappingBasePackage() { - - Package mappingBasePackage = getClass().getPackage(); - return mappingBasePackage == null ? null : mappingBasePackage.getName(); - } - - /** - * Return {@link UserCredentials} to be used when connecting to the MongoDB instance or {@literal null} if none shall - * be used. - * - * @return - * @deprecated since 1.7. {@link MongoClient} should hold authentication data within - * {@link MongoClient#getCredentialsList()} - */ - @Deprecated - protected UserCredentials getUserCredentials() { - return null; - } - - /** - * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. - * - * @see #getMappingBasePackage() - * @return - * @throws ClassNotFoundException - */ - @Bean - public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { - - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(getInitialEntitySet()); - mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); - mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); - - return mappingContext; - } - - /** - * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. - * - * @return - * @throws ClassNotFoundException - */ - @Bean - public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { - return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext())); - } - - /** - * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These - * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and - * {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default. - * - * @return must not be {@literal null}. - */ - @Bean - public CustomConversions customConversions() { - return new CustomConversions(Collections.emptyList()); - } - - /** - * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and - * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. 
-	 *
-	 * @see #customConversions()
-	 * @see #mongoMappingContext()
-	 * @see #mongoDbFactory()
-	 * @return
-	 * @throws Exception
-	 */
-	@Bean
-	public MappingMongoConverter mappingMongoConverter() throws Exception {
-
-		DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
-		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
-		converter.setCustomConversions(customConversions());
-
-		return converter;
-	}
-
-	/**
-	 * Scans the mapping base package for classes annotated with {@link Document}.
-	 *
-	 * @see #getMappingBasePackage()
-	 * @return
-	 * @throws ClassNotFoundException
-	 */
-	protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
-
-		String basePackage = getMappingBasePackage();
-		Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();
-
-		if (StringUtils.hasText(basePackage)) {
-			ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
-					false);
-			componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
-			componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));
-
-			for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
-				initialEntitySet.add(ClassUtils.forName(candidate.getBeanClassName(),
-						AbstractMongoConfiguration.class.getClassLoader()));
-			}
-		}
-
-		return initialEntitySet;
-	}
-
-	/**
-	 * Configures whether to abbreviate field names for domain objects by configuring a
-	 * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
-	 * customization needs, consider overriding {@link #mappingMongoConverter()}.
-	 *
-	 * @return
-	 */
-	protected boolean abbreviateFieldNames() {
-		return false;
-	}
-
-	/**
-	 * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created.
-	 *
-	 * @return
-	 * @since 1.5
-	 */
-	protected FieldNamingStrategy fieldNamingStrategy() {
-		return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
-				: PropertyNameFieldNamingStrategy.INSTANCE;
-	}
-}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java
new file mode 100644
index 0000000000..f93c4ae708
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.SpringDataMongoDB; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Base class for reactive Spring Data MongoDB configuration using JavaConfig. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + * @see MongoConfigurationSupport + */ +@Configuration(proxyBeanMethods = false) +public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport { + + /** + * Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want + * to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.<br />
+ * Override {@link #mongoClientSettings()} to configure connection details. + * + * @return never {@literal null}. + * @see #mongoClientSettings() + * @see #configureClientSettings(Builder) + */ + public MongoClient reactiveMongoClient() { + return createReactiveMongoClient(mongoClientSettings()); + } + + /** + * Creates {@link ReactiveMongoOperations}. + * + * @see #reactiveMongoDbFactory() + * @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext) + * @return never {@literal null}. + */ + @Bean + public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory, + MappingMongoConverter mongoConverter) { + return new ReactiveMongoTemplate(databaseFactory, mongoConverter); + } + + /** + * Creates a {@link ReactiveMongoDatabaseFactory} to be used by the {@link ReactiveMongoOperations}. Will use the + * {@link MongoClient} instance configured in {@link #reactiveMongoClient()}. + * + * @see #reactiveMongoClient() + * @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter) + * @return never {@literal null}. + */ + @Bean + public ReactiveMongoDatabaseFactory reactiveMongoDbFactory() { + return new SimpleReactiveMongoDatabaseFactory(reactiveMongoClient(), getDatabaseName()); + } + + /** + * Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. + * + * @see #customConversions() + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) + * @see #reactiveMongoDbFactory() + * @return never {@literal null}. + */ + @Bean + public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(customConversions); + converter.setCodecRegistryProvider(databaseFactory); + + return converter; + } + + /** + * Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClient createReactiveMongoClient(MongoClientSettings settings) { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java index e3da277512..584fbfba30 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
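For reference, the `AbstractReactiveMongoConfiguration` base class introduced above is consumed by subclassing; a minimal sketch (the database name and connection string are illustrative assumptions, not part of this change):

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;

@Configuration
class AppReactiveMongoConfiguration extends AbstractReactiveMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		return "app-db"; // illustrative database name
	}

	@Override
	protected void configureClientSettings(MongoClientSettings.Builder builder) {
		// optional hook inherited from MongoConfigurationSupport (defined later in this diff)
		builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017"));
	}
}
```

Since `reactiveMongoClient()` is deliberately not annotated with `@Bean`, annotate the override if the client should be exposed to the `ApplicationContext`.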
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,17 +17,18 @@ /** * Constants to declare bean names used by the namespace configuration. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Martin Baumgartner + * @author Christoph Strobl */ public abstract class BeanNames { public static final String MAPPING_CONTEXT_BEAN_NAME = "mongoMappingContext"; static final String INDEX_HELPER_BEAN_NAME = "indexCreationHelper"; - static final String MONGO_BEAN_NAME = "mongo"; + static final String MONGO_BEAN_NAME = "mongoClient"; static final String DB_FACTORY_BEAN_NAME = "mongoDbFactory"; static final String VALIDATING_EVENT_LISTENER_BEAN_NAME = "validatingMongoEventListener"; static final String IS_NEW_STRATEGY_FACTORY_BEAN_NAME = "isNewStrategyFactory"; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java new file mode 100644 index 0000000000..b070a0190f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.ConnectionString; + +/** + * Parse a {@link String} to a {@link com.mongodb.ConnectionString}. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class ConnectionStringPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String connectionString) { + + if (!StringUtils.hasText(connectionString)) { + return; + } + + setValue(new ConnectionString(connectionString)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java index fff1b9f3df..d6ce19f3ee 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
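To illustrate the `ConnectionStringPropertyEditor` just added, a sketch using only the standard `java.beans.PropertyEditor` contract (the connection string is an arbitrary example):

```java
import org.springframework.data.mongodb.config.ConnectionStringPropertyEditor;

import com.mongodb.ConnectionString;

class ConnectionStringEditorExample {

	public static void main(String[] args) {

		ConnectionStringPropertyEditor editor = new ConnectionStringPropertyEditor();
		editor.setAsText("mongodb://localhost:27017/app-db"); // null/blank input is ignored by the editor
		ConnectionString connectionString = (ConnectionString) editor.getValue();
		System.out.println(connectionString.getDatabase()); // "app-db"
	}
}
```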
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,7 +28,7 @@ /** * Annotation to enable auditing in MongoDB via annotation configuration. - * + * * @author Thomas Darimont * @author Oliver Gierke */ @@ -41,30 +41,30 @@ /** * Configures the {@link AuditorAware} bean to be used to lookup the current principal. - * - * @return + * + * @return empty {@link String} by default. */ String auditorAwareRef() default ""; /** * Configures whether the creation and modification dates are set. Defaults to {@literal true}. - * - * @return + * + * @return {@literal true} by default. */ boolean setDates() default true; /** * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. - * - * @return + * + * @return {@literal true} by default. */ boolean modifyOnCreate() default true; /** - * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be - * used for setting creation and modification dates. - * - * @return + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. + * + * @return empty {@link String} by default. */ String dateTimeProviderRef() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java new file mode 100644 index 0000000000..21fadf86c6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java @@ -0,0 +1,70 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.context.annotation.Import; +import org.springframework.data.auditing.DateTimeProvider; +import org.springframework.data.domain.ReactiveAuditorAware; + +/** + * Annotation to enable auditing in MongoDB using reactive infrastructure via annotation configuration. + * + * @author Mark Paluch + * @since 3.1 + */ +@Inherited +@Documented +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Import(ReactiveMongoAuditingRegistrar.class) +public @interface EnableReactiveMongoAuditing { + + /** + * Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal. + * + * @return empty {@link String} by default. 
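The `@EnableMongoAuditing` attributes documented above are typically wired as follows; a sketch in which the bean name and the `AuditorAware` implementation are assumptions:

```java
import java.util.Optional;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.mongodb.config.EnableMongoAuditing;

@Configuration
@EnableMongoAuditing(auditorAwareRef = "currentAuditor")
class AuditingConfiguration {

	@Bean
	AuditorAware<String> currentAuditor() {
		return () -> Optional.of("system"); // resolve the current principal here
	}
}
```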
+ */ + String auditorAwareRef() default ""; + + /** + * Configures whether the creation and modification dates are set. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean setDates() default true; + + /** + * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean modifyOnCreate() default true; + + /** + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. + * + * @return empty {@link String} by default. + */ + String dateTimeProviderRef() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoJsonConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java similarity index 71% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoJsonConfiguration.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java index cb29dca57d..3b10019cc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoJsonConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,19 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.core; +package org.springframework.data.mongodb.config; import org.springframework.context.annotation.Bean; import org.springframework.data.mongodb.core.geo.GeoJsonModule; -import org.springframework.data.web.config.SpringDataWebConfigurationMixin; +import org.springframework.data.web.config.SpringDataJacksonModules; /** * Configuration class to expose {@link GeoJsonModule} as a Spring bean. - * + * * @author Oliver Gierke + * @author Jens Schauder */ -@SpringDataWebConfigurationMixin -public class GeoJsonConfiguration { +public class GeoJsonConfiguration implements SpringDataJacksonModules { @Bean public GeoJsonModule geoJsonModule() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java index 83da976e1f..b86da91dad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
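The reactive variant introduced above differs only in the auditor abstraction; a sketch under the same assumptions:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.ReactiveAuditorAware;
import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing;

import reactor.core.publisher.Mono;

@Configuration
@EnableReactiveMongoAuditing(auditorAwareRef = "currentAuditor")
class ReactiveAuditingConfiguration {

	@Bean
	ReactiveAuditorAware<String> currentAuditor() {
		return () -> Mono.just("system"); // emit the current principal here
	}
}
```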
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,15 +29,11 @@ /** * {@link BeanDefinitionParser} to parse {@code gridFsTemplate} elements into {@link BeanDefinition}s. - * + * * @author Martin Baumgartner */ class GridFsTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -46,10 +42,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java index 3aae756891..164b4defb6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +18,10 @@ import static org.springframework.data.mongodb.config.BeanNames.*; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Set; import org.springframework.beans.BeanMetadataElement; -import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; @@ -51,33 +48,38 @@ import org.springframework.core.type.filter.TypeFilter; import org.springframework.data.annotation.Persistent; import org.springframework.data.config.BeanComponentDefinitionBuilder; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; -import org.springframework.data.mongodb.core.convert.CustomConversions; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; /** * Bean definition parser for the {@code mapping-converter} element. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Maciej Walkowiak * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author Zied Yaich + * @author Tomasz Forys */ public class MappingMongoConverterParser implements BeanDefinitionParser { private static final String BASE_PACKAGE = "base-package"; - private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator", + private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator", MappingMongoConverterParser.class.getClassLoader()); /* (non-Javadoc) @@ -93,12 +95,12 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE); id = StringUtils.hasText(id) ? 
id : DEFAULT_CONVERTER_BEAN_NAME; + boolean autoIndexCreationEnabled = isAutoIndexCreationEnabled(element); + parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element)); BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext); - String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id); - - createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element); + String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id, autoIndexCreationEnabled); // Need a reference to a Mongo instance String dbFactoryRef = element.getAttribute("db-factory-ref"); @@ -120,27 +122,34 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { converterBuilder.addPropertyValue("customConversions", conversionsDefinition); } - try { - registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME); - } catch (NoSuchBeanDefinitionException ignored) { - if (!StringUtils.hasText(dbFactoryRef)) { - dbFactoryRef = DB_FACTORY_BEAN_NAME; - } + if (!registry.containsBeanDefinition("indexOperationsProvider")) { + + BeanDefinitionBuilder indexOperationsProviderBuilder = BeanDefinitionBuilder + .genericBeanDefinition("org.springframework.data.mongodb.core.DefaultIndexOperationsProvider"); + indexOperationsProviderBuilder.addConstructorArgReference(dbFactoryRef); + indexOperationsProviderBuilder.addConstructorArgValue(BeanDefinitionBuilder + .genericBeanDefinition(QueryMapper.class).addConstructorArgReference(id).getBeanDefinition()); + parserContext.registerBeanComponent( + new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider")); + } + + if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) { + BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoPersistentEntityIndexCreator.class); indexHelperBuilder.addConstructorArgReference(ctxRef); - indexHelperBuilder.addConstructorArgReference(dbFactoryRef); + indexHelperBuilder.addConstructorArgReference("indexOperationsProvider"); indexHelperBuilder.addDependsOn(ctxRef); - parserContext.registerBeanComponent(new BeanComponentDefinition(indexHelperBuilder.getBeanDefinition(), - INDEX_HELPER_BEAN_NAME)); + parserContext.registerBeanComponent( + new BeanComponentDefinition(indexHelperBuilder.getBeanDefinition(), INDEX_HELPER_BEAN_NAME)); } BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext); - if (validatingMongoEventListener != null) { - parserContext.registerBeanComponent(new BeanComponentDefinition(validatingMongoEventListener, - VALIDATING_EVENT_LISTENER_BEAN_NAME)); + if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) { + parserContext.registerBeanComponent( + new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME)); } parserContext.registerBeanComponent(new BeanComponentDefinition(converterBuilder.getBeanDefinition(), id)); @@ -148,18 +157,20 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { return null; } + @Nullable private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) { String disableValidation = element.getAttribute("disable-validation"); - boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation); + boolean 
validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation); if (!validationDisabled) { BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(); - RuntimeBeanReference validator = getValidator(builder, parserContext); + RuntimeBeanReference validator = getValidator(element, parserContext); if (validator != null) { builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class); + builder.getRawBeanDefinition().setSource(element); builder.addConstructorArgValue(validator); return builder.getBeanDefinition(); @@ -169,6 +180,7 @@ private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element ele return null; } + @Nullable private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) { if (!JSR_303_PRESENT) { @@ -180,13 +192,39 @@ private RuntimeBeanReference getValidator(Object source, ParserContext parserCon validatorDef.setSource(source); validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef); - parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName)); return new RuntimeBeanReference(validatorName); } + private static boolean isAutoIndexCreationEnabled(Element element) { + + String autoIndexCreation = element.getAttribute("auto-index-creation"); + return StringUtils.hasText(autoIndexCreation) && Boolean.parseBoolean(autoIndexCreation); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribute}. + * + * @return the mapping context bean name. + * @deprecated since 4.3. Use + * {@link #potentiallyCreateMappingContext(Element, ParserContext, BeanDefinition, String, boolean)} + * instead. + */ + @Deprecated(since = "4.3", forRemoval = true) public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, - BeanDefinition conversionsDefinition, String converterId) { + @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) { + return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribute}. + * + * @return the mapping context bean name. 
+ */ + public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, + @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) { String ctxRef = element.getAttribute("mapping-context-ref"); @@ -200,7 +238,7 @@ public static String potentiallyCreateMappingContext(Element element, ParserCont BeanDefinitionBuilder mappingContextBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoMappingContext.class); - Set<String> classesToAdd = getInititalEntityClasses(element); + Set<String> classesToAdd = getInitialEntityClasses(element); if (classesToAdd != null) { mappingContextBuilder.addPropertyValue("initialEntitySet", classesToAdd); @@ -214,6 +252,8 @@ public static String potentiallyCreateMappingContext(Element element, ParserCont mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition); } + mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation); + parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder); ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME @@ -233,7 +273,7 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont && Boolean.parseBoolean(abbreviateFieldNames); if (fieldNamingStrategyReferenced && abbreviationActivated) { - context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!", + context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured", element); return; } @@ -251,6 +291,7 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont } } + @Nullable private BeanDefinition getCustomConversions(Element element, ParserContext parserContext) { List<Element> customConvertersElements = DomUtils.getChildElementsByTagName(element, "custom-converters"); @@ -258,10 +299,10 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse if (customConvertersElements.size() == 1) { Element customerConvertersElement = customConvertersElements.get(0); - ManagedList<BeanMetadataElement> converterBeans = new ManagedList<BeanMetadataElement>(); + ManagedList<BeanMetadataElement> converterBeans = new ManagedList<>(); List<Element> converterElements = DomUtils.getChildElementsByTagName(customerConvertersElement, "converter"); - if (converterElements != null) { + if (!ObjectUtils.isEmpty(converterElements)) { for (Element listenerElement : converterElements) { converterBeans.add(parseConverter(listenerElement, parserContext)); } @@ -274,12 +315,10 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse provider.addExcludeFilter(new NegatingFilter(new AssignableTypeFilter(Converter.class), new AssignableTypeFilter(GenericConverter.class))); - for (BeanDefinition candidate : provider.findCandidateComponents(packageToScan)) { - converterBeans.add(candidate); - } + converterBeans.addAll(provider.findCandidateComponents(packageToScan)); } - BeanDefinitionBuilder conversionsBuilder = BeanDefinitionBuilder.rootBeanDefinition(CustomConversions.class); + BeanDefinitionBuilder conversionsBuilder = BeanDefinitionBuilder.rootBeanDefinition(MongoCustomConversions.class); conversionsBuilder.addConstructorArgValue(converterBeans); AbstractBeanDefinition conversionsBean = conversionsBuilder.getBeanDefinition(); @@ -293,7 +332,8 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse return null; } - private static Set<String> getInititalEntityClasses(Element element) { + 
@Nullable + private static Set<String> getInitialEntityClasses(Element element) { String basePackage = element.getAttribute(BASE_PACKAGE); @@ -306,7 +346,7 @@ private static Set<String> getInititalEntityClasses(Element element) { componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); - Set<String> classes = new ManagedSet<String>(); + Set<String> classes = new ManagedSet<>(); for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { classes.add(candidate.getBeanClassName()); } @@ -314,6 +354,7 @@ private static Set<String> getInititalEntityClasses(Element element) { return classes; } + @Nullable public BeanMetadataElement parseConverter(Element element, ParserContext parserContext) { String converterRef = element.getAttribute("ref"); @@ -327,28 +368,14 @@ public BeanMetadataElement parseConverter(Element element, ParserContext parserC return beanDef; } - parserContext.getReaderContext().error( - "Element must specify 'ref' or contain a bean definition for the converter", element); + parserContext.getReaderContext() + .error("Element must specify 'ref' or contain a bean definition for the converter", element); return null; } - public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context, - Element element) { - - BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder - .rootBeanDefinition(MappingContextIsNewStrategyFactory.class); - mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef); - - BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context); - context.registerBeanComponent(builder.getComponent(mappingContextStrategyFactoryBuilder, - IS_NEW_STRATEGY_FACTORY_BEAN_NAME)); - - return IS_NEW_STRATEGY_FACTORY_BEAN_NAME; - } - /** * {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches. - * + * * @author Oliver Gierke */ private static class NegatingFilter implements TypeFilter { @@ -357,19 +384,18 @@ private static class NegatingFilter implements TypeFilter { /** * Creates a new {@link NegatingFilter} with the given delegates. - * + * * @param filters */ public NegatingFilter(TypeFilter... 
filters) { - Assert.notNull(filters); - this.delegates = new HashSet<TypeFilter>(Arrays.asList(filters)); + + Assert.notNull(filters, "TypeFilters must not be null"); + + this.delegates = Set.of(filters); } - /* - * (non-Javadoc) - * @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory) - */ - public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory) throws IOException { + public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory) + throws IOException { for (TypeFilter delegate : delegates) { if (delegate.match(metadataReader, metadataReaderFactory)) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java index e4d6aedd78..4e05fe6c39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.BeanNames.*; +import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.RootBeanDefinition; @@ -26,40 +27,36 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; import org.springframework.util.StringUtils; + import org.w3c.dom.Element; /** - * {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on - * an entity. - * + * {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information 
+ * * @author Oliver Gierke + * @author Mark Paluch */ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ + private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono", + MongoAuditingRegistrar.class.getClassLoader()); + @Override protected Class getBeanClass(Element element) { - return AuditingEventListener.class; + return AuditingEntityCallback.class; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId() - */ @Override protected boolean shouldGenerateId() { return true; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ @Override protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { @@ -80,7 +77,24 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit mappingContextRef); parser.parse(element, parserContext); - builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(), - parserContext.extractSource(element))); + AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(), + parserContext.extractSource(element)); + builder.addConstructorArgValue(isNewAwareAuditingHandler); + + if (PROJECT_REACTOR_AVAILABLE) { + registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler, + parserContext.extractSource(element)); + } + } + + private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, + AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) { + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); + + builder.addConstructorArgValue(isNewAwareAuditingHandler); + builder.getRawBeanDefinition().setSource(source); + + registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java index 5e64972830..37e509a38a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,116 +15,74 @@ */ package org.springframework.data.mongodb.config; -import static org.springframework.beans.factory.config.BeanDefinition.*; -import static org.springframework.data.mongodb.config.BeanNames.*; - import java.lang.annotation.Annotation; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; -import org.springframework.core.type.AnnotationMetadata; +import org.springframework.core.Ordered; import org.springframework.data.auditing.IsNewAwareAuditingHandler; import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; import org.springframework.data.auditing.config.AuditingConfiguration; import org.springframework.data.config.ParsingUtils; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener; -import org.springframework.data.support.IsNewStrategyFactory; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; import org.springframework.util.Assert; /** * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation. - * + * * @author Thomas Darimont * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ -class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { +class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered { - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class<? extends Annotation> getAnnotation() { return EnableMongoAuditing.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() - */ @Override protected String getAuditingHandlerBeanName() { return "mongoAuditingHandler"; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override - public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) { - - Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { - defaultDependenciesIfNecessary(registry, annotationMetadata); - super.registerBeanDefinitions(annotationMetadata, registry); + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration) - */ 
@Override protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - Assert.notNull(configuration, "AuditingConfiguration must not be null!"); + Assert.notNull(configuration, "AuditingConfiguration must not be null"); - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class); - builder.addConstructorArgReference(MAPPING_CONTEXT_BEAN_NAME); - return configureDefaultAuditHandlerAttributes(configuration, builder); + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, BeanDefinitionRegistry registry) { - Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder - .rootBeanDefinition(AuditingEventListener.class); - listenerBeanDefinitionBuilder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition( - getAuditingHandlerBeanName(), registry)); + .rootBeanDefinition(AuditingEntityCallback.class); + listenerBeanDefinitionBuilder + .addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(), - AuditingEventListener.class.getName(), registry); + AuditingEntityCallback.class.getName(), registry); } - /** - * Register default bean definitions for a {@link MongoMappingContext} and an {@link IsNewStrategyFactory} in case we - * don't find beans with the assumed names in the registry. - * - * @param registry the {@link BeanDefinitionRegistry} to use to register the components into. - * @param source the source which the registered components shall be registered with - */ - private void defaultDependenciesIfNecessary(BeanDefinitionRegistry registry, Object source) { - - if (!registry.containsBeanDefinition(MAPPING_CONTEXT_BEAN_NAME)) { - - RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class); - definition.setRole(ROLE_INFRASTRUCTURE); - definition.setSource(source); - - registry.registerBeanDefinition(MAPPING_CONTEXT_BEAN_NAME, definition); - } + @Override + public int getOrder() { + return Ordered.LOWEST_PRECEDENCE; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java index 5bceb62b50..501c00b9d6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,16 +29,12 @@ /** * Parser for {@code mongo-client} definitions. - * + * * @author Christoph Strobl * @since 1.7 */ public class MongoClientParser implements BeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ public BeanDefinition parse(Element element, ParserContext parserContext) { Object source = parserContext.extractSource(element); @@ -50,10 +46,11 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { ParsingUtils.setPropertyValue(builder, element, "port", "port"); ParsingUtils.setPropertyValue(builder, element, "host", "host"); - ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials"); + ParsingUtils.setPropertyValue(builder, element, "credential", "credential"); + ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet"); + ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString"); - MongoParsingUtils.parseMongoClientOptions(element, builder); - MongoParsingUtils.parseReplicaSet(element, builder); + MongoParsingUtils.parseMongoClientSettings(element, builder); String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME; @@ -62,22 +59,34 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId); parserContext.registerBeanComponent(mongoComponent); - BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils - .getServerAddressPropertyEditorBuilder()); + BeanComponentDefinition connectionStringPropertyEditor = helper + .getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder()); + parserContext.registerBeanComponent(connectionStringPropertyEditor); + + BeanComponentDefinition serverAddressPropertyEditor = helper + .getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder()); parserContext.registerBeanComponent(serverAddressPropertyEditor); - BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils - .getWriteConcernPropertyEditorBuilder()); + BeanComponentDefinition writeConcernEditor = helper + .getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder()); parserContext.registerBeanComponent(writeConcernEditor); - BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils - .getReadPreferencePropertyEditorBuilder()); + BeanComponentDefinition readConcernEditor = helper + .getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder()); + parserContext.registerBeanComponent(readConcernEditor); + + BeanComponentDefinition readPreferenceEditor = helper + .getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder()); parserContext.registerBeanComponent(readPreferenceEditor); - BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils - .getMongoCredentialPropertyEditor()); + BeanComponentDefinition credentialsEditor = helper + 
.getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor()); parserContext.registerBeanComponent(credentialsEditor); + BeanComponentDefinition uuidRepresentationEditor = helper + .getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder()); + parserContext.registerBeanComponent(uuidRepresentationEditor); + parserContext.popAndRegisterContainingComponent(); return mongoComponent.getBeanDefinition(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java new file mode 100644 index 0000000000..0594f6176c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -0,0 +1,240 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.bson.UuidRepresentation; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.type.filter.AnnotationTypeFilter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; +import org.springframework.data.mapping.model.FieldNamingStrategy; +import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; +import org.springframework.data.mongodb.MongoManagedTypes; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; + +/** + * Base class for Spring Data MongoDB to be extended for JavaConfiguration usage. + * + * @author Mark Paluch + * @since 2.0 + */ +public abstract class MongoConfigurationSupport { + + /** + * Return the name of the database to connect to. + * + * @return must not be {@literal null}. + */ + protected abstract String getDatabaseName(); + + /** + * Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the + * configuration class' (the concrete class, not this one here) by default. 
So if you have a + * {@code com.acme.AppConfig} extending {@link MongoConfigurationSupport} the base package will be considered + * {@code com.acme} unless the method is overridden to implement alternate behavior. + * + * @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning + * for entities. + * @since 1.10 + */ + protected Collection getMappingBasePackages() { + + Package mappingBasePackage = getClass().getPackage(); + return Collections.singleton(mappingBasePackage == null ? null : mappingBasePackage.getName()); + } + + /** + * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. + * + * @see #getMappingBasePackages() + * @return + */ + @Bean + public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions, + MongoManagedTypes mongoManagedTypes) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setManagedTypes(mongoManagedTypes); + mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); + mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); + mappingContext.setAutoIndexCreation(autoIndexCreation()); + + return mappingContext; + } + + /** + * @return new instance of {@link MongoManagedTypes}. + * @throws ClassNotFoundException + * @since 4.0 + */ + @Bean + public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException { + return MongoManagedTypes.fromIterable(getInitialEntitySet()); + } + + /** + * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These + * {@link CustomConversions} will be registered with the + * {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link MongoMappingContext}. + * Returns an empty {@link MongoCustomConversions} instance by default. + *
+ * NOTE: Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB + * native simple types and register custom {@link Converter converters}. + * + * @return must not be {@literal null}. + */ + @Bean + public MongoCustomConversions customConversions() { + return MongoCustomConversions.create(this::configureConverters); + } + + /** + * Configuration hook for {@link MongoCustomConversions} creation. + * + * @param converterConfigurationAdapter never {@literal null}. + * @since 2.3 + * @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs() + * @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs() + */ + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + + } + + /** + * Scans the mapping base package for classes annotated with {@link Document}. By default, it scans for entities in + * all packages returned by {@link #getMappingBasePackages()}. + * + * @see #getMappingBasePackages() + * @return + * @throws ClassNotFoundException + */ + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + + Set<Class<?>> initialEntitySet = new HashSet<Class<?>>(); + + for (String basePackage : getMappingBasePackages()) { + initialEntitySet.addAll(scanForEntities(basePackage)); + } + + return initialEntitySet; + } + + /** + * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}. + * + * @param basePackage must not be {@literal null}. + * @return + * @throws ClassNotFoundException + * @since 1.10 + */ + protected Set<Class<?>> scanForEntities(String basePackage) throws ClassNotFoundException { + + if (!StringUtils.hasText(basePackage)) { + return Collections.emptySet(); + } + + Set<Class<?>> initialEntitySet = new HashSet<Class<?>>(); + + if (StringUtils.hasText(basePackage)) { + + ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( + false); + componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); + + for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { + + initialEntitySet + .add(ClassUtils.forName(candidate.getBeanClassName(), MongoConfigurationSupport.class.getClassLoader())); + } + } + + return initialEntitySet; + } + + /** + * Configures whether to abbreviate field names for domain objects by configuring a + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. + * + * @return + */ + protected boolean abbreviateFieldNames() { + return false; + } + + /** + * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created. + * + * @return + * @since 1.5 + */ + protected FieldNamingStrategy fieldNamingStrategy() { + return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy() + : PropertyNameFieldNamingStrategy.INSTANCE; + } + + /** + * Configure whether to automatically create indices for domain types by deriving the + * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not. + * + * @return {@literal false} by default.<br />
+ + /** + * Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}. + * Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClientSettings mongoClientSettings() { + + MongoClientSettings.Builder builder = MongoClientSettings.builder(); + builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY); + configureClientSettings(builder); + return builder.build(); + } + + /** + * Configure {@link MongoClientSettings} via its {@link Builder} API. + * + * @param builder never {@literal null}. + * @since 3.0 + */ + protected void configureClientSettings(MongoClientSettings.Builder builder) { + // customization hook + } +}
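Taken together, these hooks are usually exercised from one concrete configuration class. A minimal sketch, assuming a subclass of AbstractMongoClientConfiguration; the class name, package, database name, and pool sizes here are illustrative, not part of the patch above:

import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;

@Configuration
class AppMongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "app-db"; // illustrative database name
	}

	// Scan a fixed package instead of deriving one from this class' package.
	@Override
	protected Collection<String> getMappingBasePackages() {
		return List.of("com.acme.domain");
	}

	// Converter hook: opt into the driver-native java.time codecs.
	@Override
	protected void configureConverters(MongoConverterConfigurationAdapter adapter) {
		adapter.useNativeDriverJavaTimeCodecs();
	}

	// Re-enable automatic index creation, which defaults to false as of 3.x.
	@Override
	protected boolean autoIndexCreation() {
		return true;
	}

	// Client hook: tune the driver through the MongoClientSettings.Builder.
	@Override
	protected void configureClientSettings(MongoClientSettings.Builder builder) {
		builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017/app-db"))
				.applyToConnectionPoolSettings(pool -> pool.maxSize(50).maxConnectionIdleTime(60, TimeUnit.SECONDS));
	}
}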
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java index a008d1a6b0..b8f23a35af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,9 @@ package org.springframework.data.mongodb.config; import java.beans.PropertyEditorSupport; +import java.lang.reflect.Method; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -23,15 +26,19 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.springframework.lang.Nullable; +import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; import com.mongodb.MongoCredential; /** * Parse a {@link String} to a Collection of {@link MongoCredential}.
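For orientation, a usage sketch of the editor and the credential format it accepts. The values are illustrative, and it assumes the parsed credentials are exposed via getValue() as a List (the editor collects them into a list before publishing the value):

import java.util.List;

import org.springframework.data.mongodb.config.MongoCredentialPropertyEditor;

import com.mongodb.MongoCredential;

class CredentialParsingExample {

	@SuppressWarnings("unchecked")
	static List<MongoCredential> parse() {

		MongoCredentialPropertyEditor editor = new MongoCredentialPropertyEditor();
		// 'username:password@database', optionally followed by '?uri.authMechanism=...';
		// username/password segments are URL-decoded, so 'warp%20speed' becomes 'warp speed'
		editor.setAsText("jon:warp%20speed@admin?uri.authMechanism=SCRAM-SHA-256");

		return (List<MongoCredential>) editor.getValue();
	}
}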
- * + * @author Christoph Strobl * @author Oliver Gierke + * @author Stephen Tyler Conrad + * @author Mark Paluch * @since 1.7 */ public class MongoCredentialPropertyEditor extends PropertyEditorSupport { @@ -39,23 +46,19 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport { private static final Pattern GROUP_PATTERN = Pattern.compile("(\\\\?')(.*?)\\1"); private static final String AUTH_MECHANISM_KEY = "uri.authMechanism"; - private static final String USERNAME_PASSWORD_DELIMINATOR = ":"; - private static final String DATABASE_DELIMINATOR = "@"; - private static final String OPTIONS_DELIMINATOR = "?"; - private static final String OPTION_VALUE_DELIMINATOR = "&"; - - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ + private static final String USERNAME_PASSWORD_DELIMITER = ":"; + private static final String DATABASE_DELIMITER = "@"; + private static final String OPTIONS_DELIMITER = "?"; + private static final String OPTION_VALUE_DELIMITER = "&"; + @Override - public void setAsText(String text) throws IllegalArgumentException { + public void setAsText(@Nullable String text) throws IllegalArgumentException { if (!StringUtils.hasText(text)) { return; } - List<MongoCredential> credentials = new ArrayList<MongoCredential>(); + List<MongoCredential> credentials = new ArrayList<>(); for (String credentialString : extractCredentialsString(text)) { @@ -73,12 +76,23 @@ public void setAsText(String text) throws IllegalArgumentException { verifyUserNamePresent(userNameAndPassword); credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0])); - } else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) { + } else if ("MONGODB-CR".equals(authMechanism)) { verifyUsernameAndPasswordPresent(userNameAndPassword); verifyDatabasePresent(database); - credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database, - userNameAndPassword[1].toCharArray())); + + Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class, + "createMongoCRCredential", String.class, String.class, char[].class); + + if (createCRCredentialMethod == null) { + throw new IllegalArgumentException("MONGODB-CR is no longer supported."); + } + + MongoCredential credential = MongoCredential.class + .cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database, + userNameAndPassword[1].toCharArray())); + credentials.add(credential); + } else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) { verifyUserNamePresent(userNameAndPassword); @@ -95,9 +109,15 @@ public void setAsText(String text) throws IllegalArgumentException { verifyDatabasePresent(database); credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database, userNameAndPassword[1].toCharArray())); + } else if (MongoCredential.SCRAM_SHA_256_MECHANISM.equals(authMechanism)) { + + verifyUsernameAndPasswordPresent(userNameAndPassword); + verifyDatabasePresent(database); + credentials.add(MongoCredential.createScramSha256Credential(userNameAndPassword[0], database, + userNameAndPassword[1].toCharArray())); } else { throw new IllegalArgumentException( - String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism)); + String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism)); } } } else { @@ -115,7 +135,7 @@ public void setAsText(String text) throws IllegalArgumentException { private List<String> extractCredentialsString(String source) {
Matcher matcher = GROUP_PATTERN.matcher(source); - List<String> list = new ArrayList<String>(); + List<String> list = new ArrayList<>(); while (matcher.find()) { @@ -132,40 +152,51 @@ private List<String> extractCredentialsString(String source) { private static String[] extractUserNameAndPassword(String text) { - int index = text.lastIndexOf(DATABASE_DELIMINATOR); + int index = text.lastIndexOf(DATABASE_DELIMITER); + + index = index != -1 ? index : text.lastIndexOf(OPTIONS_DELIMITER); - index = index != -1 ? index : text.lastIndexOf(OPTIONS_DELIMINATOR); + if (index == -1) { + return new String[] {}; + } - return index == -1 ? new String[] {} : text.substring(0, index).split(USERNAME_PASSWORD_DELIMINATOR); + return Arrays.stream(text.substring(0, index).split(USERNAME_PASSWORD_DELIMITER)) + .map(MongoCredentialPropertyEditor::decodeParameter).toArray(String[]::new); } private static String extractDB(String text) { - int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR); + int dbSeparationIndex = text.lastIndexOf(DATABASE_DELIMITER); - if (dbSeperationIndex == -1) { + if (dbSeparationIndex == -1) { return ""; } - String tmp = text.substring(dbSeperationIndex + 1); - int optionsSeperationIndex = tmp.lastIndexOf(OPTIONS_DELIMINATOR); + String tmp = text.substring(dbSeparationIndex + 1); + int optionsSeparationIndex = tmp.lastIndexOf(OPTIONS_DELIMITER); - return optionsSeperationIndex > -1 ? tmp.substring(0, optionsSeperationIndex) : tmp; + return optionsSeparationIndex > -1 ? tmp.substring(0, optionsSeparationIndex) : tmp; } private static Properties extractOptions(String text) { - int optionsSeperationIndex = text.lastIndexOf(OPTIONS_DELIMINATOR); - int dbSeperationIndex = text.lastIndexOf(OPTIONS_DELIMINATOR); + int optionsSeparationIndex = text.lastIndexOf(OPTIONS_DELIMITER); + int dbSeparationIndex = text.lastIndexOf(DATABASE_DELIMITER); - if (optionsSeperationIndex == -1 || dbSeperationIndex > optionsSeperationIndex) { + if (optionsSeparationIndex == -1 || dbSeparationIndex > optionsSeparationIndex) { return new Properties(); } Properties properties = new Properties(); - for (String option : text.substring(optionsSeperationIndex + 1).split(OPTION_VALUE_DELIMINATOR)) { + for (String option : text.substring(optionsSeparationIndex + 1).split(OPTION_VALUE_DELIMITER)) { + String[] optionArgs = option.split("="); + + if (optionArgs.length == 1) { + throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0])); + } + properties.put(optionArgs[0], optionArgs[1]); } @@ -178,21 +209,25 @@ private static void verifyUsernameAndPasswordPresent(String[] source) { if (source.length != 2) { throw new IllegalArgumentException( - "Credentials need to specify username and password like in 'username:password@database'!"); + "Credentials need to specify username and password like in 'username:password@database'"); } } private static void verifyDatabasePresent(String source) { if (!StringUtils.hasText(source)) { - throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!"); + throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'"); } } private static void verifyUserNamePresent(String[] source) { if (source.length == 0 || !StringUtils.hasText(source[0])) { - throw new IllegalArgumentException("Credentials need to specify username!"); + throw new IllegalArgumentException("Credentials need to specify username"); } } + + private static String decodeParameter(String it) { + return
URLDecoder.decode(it, StandardCharsets.UTF_8); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java index 8e1245984f..2e733cc79f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +18,6 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.MongoParsingUtils.*; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; import org.springframework.beans.factory.BeanDefinitionStoreException; @@ -30,43 +28,29 @@ import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.data.authentication.UserCredentials; import org.springframework.data.config.BeanComponentDefinitionBuilder; -import org.springframework.data.mongodb.core.MongoFactoryBean; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.MongoClientFactoryBean; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.lang.Nullable; import org.springframework.util.StringUtils; import org.w3c.dom.Element; -import com.mongodb.Mongo; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; +import com.mongodb.ConnectionString; /** * {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s. 
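As the rewritten parser below shows, the uri, client-uri, and connection-string attributes all funnel into a single driver ConnectionString bean. The programmatic equivalent is roughly the following sketch (the URI is illustrative):

import com.mongodb.ConnectionString;

class ConnectionStringExample {

	static ConnectionString fromUriAttribute() {
		// what a <mongo:db-factory connection-string="..."/> attribute is translated into
		ConnectionString connectionString = new ConnectionString("mongodb://user:secret@localhost:27017/app-db");
		// connectionString.getDatabase() then yields the database name, here "app-db"
		return connectionString;
	}
}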
- * + * @author Jon Brisbin * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl * @author Viktor Khoroshko + * @author Mark Paluch */ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser { - private static final Set<String> MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES; - - static { - - Set<String> mongoUriAllowedAdditionalAttributes = new HashSet<String>(); - mongoUriAllowedAdditionalAttributes.add("id"); - mongoUriAllowedAdditionalAttributes.add("write-concern"); + private static final Set<String> MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Set.of("id", "write-concern"); - MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -75,18 +59,15 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { // Common setup - BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class); + BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder + .genericBeanDefinition(SimpleMongoClientDatabaseFactory.class); setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern"); - BeanDefinition mongoUri = getMongoUri(element, parserContext); + BeanDefinition mongoUri = getConnectionString(element, parserContext); if (mongoUri != null) { @@ -96,10 +77,9 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext); - String mongoRef = element.getAttribute("mongo-ref"); - String dbname = element.getAttribute("dbname"); + String mongoRef = element.getAttribute("mongo-client-ref"); - BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext); + String dbname = element.getAttribute("dbname"); // Defaulting if (StringUtils.hasText(mongoRef)) { @@ -109,8 +89,6 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa } dbFactoryBuilder.addConstructorArgValue(StringUtils.hasText(dbname) ? dbname : "db"); - dbFactoryBuilder.addConstructorArgValue(userCredentials); - dbFactoryBuilder.addConstructorArgValue(element.getAttribute("authentication-dbname")); BeanDefinitionBuilder writeConcernPropertyEditorBuilder = getWriteConcernPropertyEditorBuilder(); @@ -122,16 +100,16 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa } /** - * Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the - * {@link Mongo} instance was registered under.
- * + * Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the + * name under which the {@link com.mongodb.client.MongoClient} instance was registered. + * + * @param element must not be {@literal null}. + * @param parserContext must not be {@literal null}. + * @return never {@literal null}. */ private BeanDefinition registerMongoBeanDefinition(Element element, ParserContext parserContext) { - BeanDefinitionBuilder mongoBuilder = BeanDefinitionBuilder.genericBeanDefinition(MongoFactoryBean.class); + BeanDefinitionBuilder mongoBuilder = BeanDefinitionBuilder.genericBeanDefinition(MongoClientFactoryBean.class); setPropertyValue(mongoBuilder, element, "host"); setPropertyValue(mongoBuilder, element, "port"); @@ -139,42 +117,28 @@ private BeanDefinition registerMongoBeanDefinition(Element element, ParserContex } /** - * Returns a {@link BeanDefinition} for a {@link UserCredentials} object. - * - * @param element - * @return the {@link BeanDefinition} or {@literal null} if neither username nor password given. - */ - private BeanDefinition getUserCredentialsBeanDefinition(Element element, ParserContext context) { - - String username = element.getAttribute("username"); - String password = element.getAttribute("password"); - - if (!StringUtils.hasText(username) && !StringUtils.hasText(password)) { - return null; - } - - BeanDefinitionBuilder userCredentialsBuilder = BeanDefinitionBuilder.genericBeanDefinition(UserCredentials.class); - userCredentialsBuilder.addConstructorArgValue(StringUtils.hasText(username) ? username : null); - userCredentialsBuilder.addConstructorArgValue(StringUtils.hasText(password) ? password : null); - - return getSourceBeanDefinition(userCredentialsBuilder, context, element); - } - - /** - * Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured - * attributes.
+ * Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes.
* Errors when the configured element contains {@literal uri} or {@literal client-uri} along with other attributes except * {@literal write-concern} and/or {@literal id}. - * + * @param element must not be {@literal null}. * @param parserContext * @return {@literal null} in case no connection string / URI attribute is defined. */ - private BeanDefinition getMongoUri(Element element, ParserContext parserContext) { + @Nullable + private BeanDefinition getConnectionString(Element element, ParserContext parserContext) { - boolean hasClientUri = element.hasAttribute("client-uri"); + String type = null; - if (!hasClientUri && !element.hasAttribute("uri")) { + if (element.hasAttribute("client-uri")) { + type = "client-uri"; + } else if (element.hasAttribute("connection-string")) { + type = "connection-string"; + } else if (element.hasAttribute("uri")) { + type = "uri"; + } + + if (!StringUtils.hasText(type)) { return null; } @@ -188,16 +152,12 @@ private BeanDefinition getMongoUri(Element element, ParserContext parserContext) if (element.getAttributes().getLength() > allowedAttributesCount) { - parserContext.getReaderContext().error( - "Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!", + parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually", parserContext.extractSource(element)); } - Class<?> type = hasClientUri ? MongoClientURI.class : MongoURI.class; - String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type); - builder.addConstructorArgValue(uri); + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class); + builder.addConstructorArgValue(element.getAttribute(type)); return builder.getBeanDefinition(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java index f32a16d542..af1ffbbb02 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java @@ -1,69 +1,78 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.config; - -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.parsing.BeanComponentDefinition; -import org.springframework.beans.factory.parsing.CompositeComponentDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.BeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.data.mongodb.core.MongoAdmin; -import org.springframework.data.mongodb.monitor.*; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -public class MongoJmxParser implements BeanDefinitionParser { - - public BeanDefinition parse(Element element, ParserContext parserContext) { - String name = element.getAttribute("mongo-ref"); - if (!StringUtils.hasText(name)) { - name = "mongo"; - } - registerJmxComponents(name, element, parserContext); - return null; - } - - protected void registerJmxComponents(String mongoRefName, Element element, ParserContext parserContext) { - Object eleSource = parserContext.extractSource(element); - - CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(element.getTagName(), eleSource); - - createBeanDefEntry(AssertMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(BackgroundFlushingMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(BtreeIndexCounters.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(ConnectionMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(GlobalLockMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(MemoryMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(OperationCounters.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(ServerInfo.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(MongoAdmin.class, compositeDef, mongoRefName, eleSource, parserContext); - - parserContext.registerComponent(compositeDef); - - } - - protected void createBeanDefEntry(Class clazz, CompositeComponentDefinition compositeDef, String mongoRefName, - Object eleSource, ParserContext parserContext) { - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(clazz); - builder.getRawBeanDefinition().setSource(eleSource); - builder.addConstructorArgReference(mongoRefName); - BeanDefinition assertDef = builder.getBeanDefinition(); - String assertName = parserContext.getReaderContext().registerWithGeneratedName(assertDef); - compositeDef.addNestedComponent(new BeanComponentDefinition(assertDef, assertName)); - } - -} +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.parsing.BeanComponentDefinition; +import org.springframework.beans.factory.parsing.CompositeComponentDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.xml.BeanDefinitionParser; +import org.springframework.beans.factory.xml.ParserContext; +import org.springframework.data.mongodb.core.MongoAdmin; +import org.springframework.data.mongodb.monitor.*; +import org.springframework.util.StringUtils; +import org.w3c.dom.Element; + +/** + * @author Mark Pollack + * @author Thomas Risberg + * @author John Brisbin + * @author Oliver Gierke + * @author Christoph Strobl + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +public class MongoJmxParser implements BeanDefinitionParser { + + public BeanDefinition parse(Element element, ParserContext parserContext) { + String name = element.getAttribute("mongo-ref"); + if (!StringUtils.hasText(name)) { + name = BeanNames.MONGO_BEAN_NAME; + } + registerJmxComponents(name, element, parserContext); + return null; + } + + protected void registerJmxComponents(String mongoRefName, Element element, ParserContext parserContext) { + Object eleSource = parserContext.extractSource(element); + + CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(element.getTagName(), eleSource); + + createBeanDefEntry(AssertMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(BackgroundFlushingMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(BtreeIndexCounters.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(ConnectionMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(GlobalLockMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(MemoryMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(OperationCounters.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(ServerInfo.class, compositeDef, mongoRefName, eleSource, parserContext); + createBeanDefEntry(MongoAdmin.class, compositeDef, mongoRefName, eleSource, parserContext); + + parserContext.registerComponent(compositeDef); + + } + + protected void createBeanDefEntry(Class clazz, CompositeComponentDefinition compositeDef, String mongoRefName, + Object eleSource, ParserContext parserContext) { + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(clazz); + builder.getRawBeanDefinition().setSource(eleSource); + builder.addConstructorArgReference(mongoRefName); + BeanDefinition assertDef = builder.getBeanDefinition(); + String assertName = parserContext.getReaderContext().registerWithGeneratedName(assertDef); + compositeDef.addNestedComponent(new BeanComponentDefinition(assertDef, assertName)); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java index 377eedb446..47519ca615 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java @@ -1,11 +1,11 @@ 
/* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,21 +19,16 @@ /** * {@link org.springframework.beans.factory.xml.NamespaceHandler} for Mongo DB configuration. - * + * @author Oliver Gierke * @author Martin Baumgartner * @author Christoph Strobl */ public class MongoNamespaceHandler extends NamespaceHandlerSupport { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.NamespaceHandler#init() - */ public void init() { registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser()); - registerBeanDefinitionParser("mongo", new MongoParser()); registerBeanDefinitionParser("mongo-client", new MongoClientParser()); registerBeanDefinitionParser("db-factory", new MongoDbFactoryParser()); registerBeanDefinitionParser("jmx", new MongoJmxParser()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParser.java deleted file mode 100644 index 604214f261..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParser.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.config; - -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.parsing.BeanComponentDefinition; -import org.springframework.beans.factory.parsing.CompositeComponentDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.BeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.data.config.BeanComponentDefinitionBuilder; -import org.springframework.data.config.ParsingUtils; -import org.springframework.data.mongodb.core.MongoFactoryBean; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * Parser for {@code <mongo>} definitions.
- * - * @author Mark Pollack - * @author Oliver Gierke - * @author Christoph Strobl - */ -public class MongoParser implements BeanDefinitionParser { - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ - public BeanDefinition parse(Element element, ParserContext parserContext) { - - Object source = parserContext.extractSource(element); - String id = element.getAttribute("id"); - - BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(MongoFactoryBean.class); - ParsingUtils.setPropertyValue(builder, element, "port", "port"); - ParsingUtils.setPropertyValue(builder, element, "host", "host"); - ParsingUtils.setPropertyValue(builder, element, "write-concern", "writeConcern"); - - MongoParsingUtils.parseMongoOptions(element, builder); - MongoParsingUtils.parseReplicaSet(element, builder); - - String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME; - - parserContext.pushContainingComponent(new CompositeComponentDefinition("Mongo", source)); - - BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId); - parserContext.registerBeanComponent(mongoComponent); - BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils - .getServerAddressPropertyEditorBuilder()); - parserContext.registerBeanComponent(serverAddressPropertyEditor); - BeanComponentDefinition writeConcernPropertyEditor = helper.getComponent(MongoParsingUtils - .getWriteConcernPropertyEditorBuilder()); - parserContext.registerBeanComponent(writeConcernPropertyEditor); - - parserContext.popAndRegisterContainingComponent(); - - return mongoComponent.getBeanDefinition(); - } - -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java index 29c92097a7..95b56b58f3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java @@ -1,206 +1,261 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.config; - -import static org.springframework.data.config.ParsingUtils.*; - -import java.util.Map; - -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.CustomEditorConfigurer; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.ManagedMap; -import org.springframework.beans.factory.xml.BeanDefinitionParser; -import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean; -import org.springframework.data.mongodb.core.MongoOptionsFactoryBean; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - * Utility methods for {@link BeanDefinitionParser} implementations for MongoDB. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -@SuppressWarnings("deprecation") -abstract class MongoParsingUtils { - - private MongoParsingUtils() {} - - /** - * Parses the mongo replica-set element. - * - * @param parserContext the parser context - * @param element the mongo element - * @param mongoBuilder the bean definition builder to populate - * @return - */ - static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) { - setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds"); - } - - /** - * Parses the {@code mongo:options} sub-element. Populates the given attribute factory with the proper attributes. - * - * @return true if parsing actually occured, {@literal false} otherwise - */ - static boolean parseMongoOptions(Element element, BeanDefinitionBuilder mongoBuilder) { - - Element optionsElement = DomUtils.getChildElementByTagName(element, "options"); - - if (optionsElement == null) { - return false; - } - - BeanDefinitionBuilder optionsDefBuilder = BeanDefinitionBuilder - .genericBeanDefinition(MongoOptionsFactoryBean.class); - - setPropertyValue(optionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost"); - setPropertyValue(optionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier", - "threadsAllowedToBlockForConnectionMultiplier"); - setPropertyValue(optionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime"); - setPropertyValue(optionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout"); - setPropertyValue(optionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout"); - setPropertyValue(optionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive"); - setPropertyValue(optionsDefBuilder, optionsElement, "auto-connect-retry", "autoConnectRetry"); - setPropertyValue(optionsDefBuilder, optionsElement, "max-auto-connect-retry-time", "maxAutoConnectRetryTime"); - setPropertyValue(optionsDefBuilder, optionsElement, "write-number", "writeNumber"); - setPropertyValue(optionsDefBuilder, optionsElement, "write-timeout", "writeTimeout"); - setPropertyValue(optionsDefBuilder, optionsElement, "write-fsync", "writeFsync"); - setPropertyValue(optionsDefBuilder, optionsElement, "slave-ok", "slaveOk"); - setPropertyValue(optionsDefBuilder, optionsElement, "ssl", "ssl"); - setPropertyReference(optionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory"); - - mongoBuilder.addPropertyValue("mongoOptions", optionsDefBuilder.getBeanDefinition()); - return true; - } - - /** - * Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper - * attributes. 
- * - * @param element must not be {@literal null}. - * @param mongoClientBuilder must not be {@literal null}. - * @return - * @since 1.7 - */ - public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) { - - Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options"); - - if (optionsElement == null) { - return false; - } - - BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder - .genericBeanDefinition(MongoClientOptionsFactoryBean.class); - - setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier", - "threadsAllowedToBlockForConnectionMultiplier"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl"); - setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory"); - - mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition()); - - return true; - } - - /** - * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a - * {@link WriteConcernPropertyEditor}. - * - * @return - */ - static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() { - - Map> customEditors = new ManagedMap>(); - customEditors.put("com.mongodb.WriteConcern", WriteConcernPropertyEditor.class); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); - builder.addPropertyValue("customEditors", customEditors); - - return builder; - } - - /** - * One should only register one bean definition but want to have the convenience of using - * AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the - * container. 
- */ - static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() { - - Map customEditors = new ManagedMap(); - customEditors.put("com.mongodb.ServerAddress[]", - "org.springframework.data.mongodb.config.ServerAddressPropertyEditor"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); - builder.addPropertyValue("customEditors", customEditors); - return builder; - } - - /** - * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a - * {@link ReadPreferencePropertyEditor}. - * - * @return - * @since 1.7 - */ - static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() { - - Map> customEditors = new ManagedMap>(); - customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); - builder.addPropertyValue("customEditors", customEditors); - - return builder; - } - - /** - * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a - * {@link MongoCredentialPropertyEditor}. - * - * @return - * @since 1.7 - */ - static BeanDefinitionBuilder getMongoCredentialPropertyEditor() { - - Map> customEditors = new ManagedMap>(); - customEditors.put("com.mongodb.MongoCredential[]", MongoCredentialPropertyEditor.class); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); - builder.addPropertyValue("customEditors", customEditors); - - return builder; - } -} +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.springframework.data.config.ParsingUtils.*; + +import java.util.Map; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.config.CustomEditorConfigurer; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionValidationException; +import org.springframework.beans.factory.support.ManagedMap; +import org.springframework.beans.factory.xml.BeanDefinitionParser; +import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean; +import org.springframework.data.mongodb.core.MongoServerApiFactoryBean; +import org.springframework.util.StringUtils; +import org.springframework.util.xml.DomUtils; +import org.w3c.dom.Element; + +/** + * Utility methods for {@link BeanDefinitionParser} implementations for MongoDB. + * + * @author Mark Pollack + * @author Oliver Gierke + * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch + */ +abstract class MongoParsingUtils { + + private MongoParsingUtils() {} + + /** + * Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper + * attributes. 
+ * + * @param element + * @param mongoClientBuilder + * @return + * @since 3.0 + */ + public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) { + + Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings"); + if (settingsElement == null) { + return false; + } + + BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder + .genericBeanDefinition(MongoClientSettingsFactoryBean.class); + + setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation"); + + // SocketSettings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize"); + + // Server Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency", + "serverHeartbeatFrequencyMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency", + "serverMinHeartbeatFrequencyMS"); + + // Cluster Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout", + "clusterServerSelectionTimeoutMS"); + + // Connection Pool Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time", + "poolMaxConnectionLifeTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time", + "poolMaxConnectionIdleTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay", + "poolMaintenanceInitialDelayMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency", + "poolMaintenanceFrequencyMS"); + + // SSL Settings + 
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed", + "sslInvalidHostNameAllowed"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider"); + + // Field level encryption + setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings"); + + // ServerAPI + if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) { + + MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean(); + serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version")); + try { + clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject()); + } catch (Exception exception) { + throw new BeanDefinitionValidationException("Non parsable server-api.", exception); + } + } else { + setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi"); + } + + // and the rest + + mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition()); + + return true; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link WriteConcernPropertyEditor}. + * + * @return + */ + static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() { + + Map> customEditors = new ManagedMap>(); + customEditors.put("com.mongodb.WriteConcern", WriteConcernPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ReadConcernPropertyEditor}. + * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() { + + Map> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * One should only register one bean definition but want to have the convenience of using + * AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the + * container. + */ + static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() { + + Map customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ServerAddress[]", + "org.springframework.data.mongodb.config.ServerAddressPropertyEditor"); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ReadPreferencePropertyEditor}. 
+ * + * @return never {@literal null}. + * @since 1.7 + */ + static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() { + + Map<String, Class<?>> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link MongoCredentialPropertyEditor}. + * + * @return never {@literal null}. + * @since 1.7 + */ + static BeanDefinitionBuilder getMongoCredentialPropertyEditor() { + + Map<String, Class<?>> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.MongoCredential[]", MongoCredentialPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ConnectionStringPropertyEditor}. + * + * @return never {@literal null}. + * @since 3.0 + */ + static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() { + + Map<String, Class<?>> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link UUidRepresentationPropertyEditor}. + * + * @return never {@literal null}. + * @since 3.0 + */ + static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() { + + Map<String, Class<?>> customEditors = new ManagedMap<>(); + customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java index cfc5fdabca..1e1b11356f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,16 +33,12 @@ /** * {@link BeanDefinitionParser} to parse {@code template} elements into {@link BeanDefinition}s.
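All of the builder methods above follow one pattern: they emit a CustomEditorConfigurer bean definition that maps a target type to its property editor. A standalone sketch of the same registration done directly in Java; note that the programmatic API keys by Class rather than by class name, and the example class is hypothetical:

import java.beans.PropertyEditor;
import java.util.HashMap;
import java.util.Map;

import org.springframework.beans.factory.config.CustomEditorConfigurer;
import org.springframework.data.mongodb.config.WriteConcernPropertyEditor;

import com.mongodb.WriteConcern;

class EditorRegistrationExample {

	static CustomEditorConfigurer writeConcernEditors() {

		// map the target type to the editor class, as the bean definitions above do via string keys
		Map<Class<?>, Class<? extends PropertyEditor>> customEditors = new HashMap<>();
		customEditors.put(WriteConcern.class, WriteConcernPropertyEditor.class);

		CustomEditorConfigurer configurer = new CustomEditorConfigurer();
		configurer.setCustomEditors(customEditors);
		return configurer;
	}
}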
- * + * * @author Martin Baumgartner * @author Oliver Gierke */ class MongoTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -51,10 +47,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java new file mode 100644 index 0000000000..e46701a7f3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java @@ -0,0 +1,53 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; + +/** + * Simple helper to be able to wire the {@link PersistentEntities} from a {@link MappingMongoConverter} bean available + * in the application context. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + * @since 3.1 + */ +public class PersistentEntitiesFactoryBean implements FactoryBean { + + private final MappingMongoConverter converter; + + /** + * Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link MappingMongoConverter}. + * + * @param converter must not be {@literal null}. 
+ */ + public PersistentEntitiesFactoryBean(MappingMongoConverter converter) { + this.converter = converter; + } + + @Override + public PersistentEntities getObject() { + return PersistentEntities.of(converter.getMappingContext()); + } + + @Override + public Class<?> getObjectType() { + return PersistentEntities.class; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java new file mode 100644 index 0000000000..80cf404434 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.lang.annotation.Annotation; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; +import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; +import org.springframework.data.auditing.config.AuditingConfiguration; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.util.Assert; + +/** + * {@link ImportBeanDefinitionRegistrar} to enable the {@link EnableReactiveMongoAuditing} annotation.
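The registrar is driven by the annotation it backs; a typical activation sketch (the fixed auditor value is illustrative):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.ReactiveAuditorAware;
import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing;

import reactor.core.publisher.Mono;

@Configuration
@EnableReactiveMongoAuditing
class AuditingConfig {

	// consulted by the auditing handler to populate @CreatedBy/@LastModifiedBy
	@Bean
	ReactiveAuditorAware<String> auditorAware() {
		return () -> Mono.just("system");
	}
}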
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 3.1
+ */
+class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
+
+	@Override
+	protected Class<? extends Annotation> getAnnotation() {
+		return EnableReactiveMongoAuditing.class;
+	}
+
+	@Override
+	protected String getAuditingHandlerBeanName() {
+		return "reactiveMongoAuditingHandler";
+	}
+
+	@Override
+	protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
+			BeanDefinitionRegistry registry) {
+		builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
+	}
+
+	@Override
+	protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
+
+		Assert.notNull(configuration, "AuditingConfiguration must not be null");
+
+		return configureDefaultAuditHandlerAttributes(configuration,
+				BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class));
+	}
+
+	@Override
+	protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
+			BeanDefinitionRegistry registry) {
+
+		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
+		Assert.notNull(registry, "BeanDefinitionRegistry must not be null");
+
+		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
+
+		builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
+		builder.getRawBeanDefinition().setSource(auditingHandlerDefinition.getSource());
+
+		registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
+				registry);
+	}
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java
new file mode 100644
index 0000000000..60bf126ae7
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.config;
+
+import java.beans.PropertyEditorSupport;
+
+import org.springframework.lang.Nullable;
+import org.springframework.util.StringUtils;
+
+import com.mongodb.ReadConcern;
+import com.mongodb.ReadConcernLevel;
+
+/**
+ * Parse a {@link String} into a {@link ReadConcern}, resolving well-known values via
+ * {@link ReadConcernLevel#fromString(String)}.
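A minimal sketch of the editor contract just described, assuming the well-known "majority" level:

import com.mongodb.ReadConcern;

import org.springframework.data.mongodb.config.ReadConcernPropertyEditor;

class ReadConcernEditorSketch {

	public static void main(String[] args) {

		ReadConcernPropertyEditor editor = new ReadConcernPropertyEditor();
		editor.setAsText("majority"); // resolved through ReadConcernLevel.fromString(..)

		ReadConcern readConcern = (ReadConcern) editor.getValue();
		System.out.println(readConcern.asDocument());
	}
}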
+ * + * @author Christoph Strobl + * @since 3.0 + */ +public class ReadConcernPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String readConcernString) { + + if (!StringUtils.hasText(readConcernString)) { + return; + } + + setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java index e5509e0cec..5ed9b66619 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,22 +17,20 @@ import java.beans.PropertyEditorSupport; +import org.springframework.lang.Nullable; + import com.mongodb.ReadPreference; /** * Parse a {@link String} to a {@link ReadPreference}. - * + * * @author Christoph Strobl * @since 1.7 */ public class ReadPreferencePropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override - public void setAsText(String readPreferenceString) throws IllegalArgumentException { + public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException { if (readPreferenceString == null) { return; @@ -59,8 +57,8 @@ public void setAsText(String readPreferenceString) throws IllegalArgumentExcepti } else if ("NEAREST".equalsIgnoreCase(readPreferenceString)) { setValue(ReadPreference.nearest()); } else { - throw new IllegalArgumentException(String.format("Cannot find matching ReadPreference for %s", - readPreferenceString)); + throw new IllegalArgumentException( + String.format("Cannot find matching ReadPreference for %s", readPreferenceString)); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java index 32383ba49a..9c51900902 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
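Analogous to the read concern editor, a short sketch of the literals accepted by the ReadPreferencePropertyEditor above; "NEAREST" is one of the values it handles:

import com.mongodb.ReadPreference;

import org.springframework.data.mongodb.config.ReadPreferencePropertyEditor;

class ReadPreferenceEditorSketch {

	public static void main(String[] args) {

		ReadPreferencePropertyEditor editor = new ReadPreferencePropertyEditor();
		editor.setAsText("NEAREST"); // unknown literals raise IllegalArgumentException

		ReadPreference preference = (ReadPreference) editor.getValue();
		System.out.println(preference.getName()); // nearest
	}
}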
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,8 +21,9 @@ import java.util.HashSet; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -30,10 +31,11 @@ /** * Parse a {@link String} to a {@link ServerAddress} array. The format is host1:port1,host2:port2,host3:port3. - * + * * @author Mark Pollack * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl */ public class ServerAddressPropertyEditor extends PropertyEditorSupport { @@ -41,15 +43,11 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport { * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :. */ private static final String HOST_PORT_SPLIT_PATTERN = "(? 2) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source)); + } return null; } @@ -102,9 +105,13 @@ private ServerAddress parseServerAddress(String source) { return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port); } catch (UnknownHostException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0])); + } } catch (NumberFormatException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1])); + } } return null; @@ -112,13 +119,13 @@ private ServerAddress parseServerAddress(String source) { /** * Extract the host and port from the given {@link String}. - * + * * @param addressAndPortSource must not be {@literal null}. * @return */ private String[] extractHostAddressAndPort(String addressAndPortSource) { - Assert.notNull(addressAndPortSource, "Address and port source must not be null!"); + Assert.notNull(addressAndPortSource, "Address and port source must not be null"); String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN); String hostAddress = hostAndPort[0]; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java index b0fba90e2e..9f579b8fe9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
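For the ServerAddressPropertyEditor shown above, a usage sketch assuming locally resolvable hosts; the host values are illustrative:

import com.mongodb.ServerAddress;

import org.springframework.data.mongodb.config.ServerAddressPropertyEditor;

class ServerAddressEditorSketch {

	public static void main(String[] args) {

		ServerAddressPropertyEditor editor = new ServerAddressPropertyEditor();
		editor.setAsText("localhost:27017,127.0.0.1:27018"); // host1:port1,host2:port2 format

		ServerAddress[] addresses = (ServerAddress[]) editor.getValue();
		System.out.println(addresses.length); // 2; unparsable entries are logged and skipped
	}
}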
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,15 +21,11 @@ /** * Converter to create {@link WriteConcern} instances from String representations. - * + * * @author Oliver Gierke */ public class StringToWriteConcernConverter implements Converter { - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ public WriteConcern convert(String source) { WriteConcern writeConcern = WriteConcern.valueOf(source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java new file mode 100644 index 0000000000..b777969967 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java @@ -0,0 +1,41 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.bson.UuidRepresentation; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Parse a {@link String} to a {@link UuidRepresentation}. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class UUidRepresentationPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String value) { + + if (!StringUtils.hasText(value)) { + return; + } + + setValue(UuidRepresentation.valueOf(value)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java index a98d5f6cbe..ee0d09e555 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
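For the StringToWriteConcernConverter above, a sketch of both resolution paths; "myTag" is a hypothetical custom value:

import com.mongodb.WriteConcern;

import org.springframework.data.mongodb.config.StringToWriteConcernConverter;

class WriteConcernConversionSketch {

	public static void main(String[] args) {

		StringToWriteConcernConverter converter = new StringToWriteConcernConverter();

		WriteConcern majority = converter.convert("MAJORITY"); // resolves the well-known constant
		WriteConcern tagged = converter.convert("myTag"); // falls back to new WriteConcern("myTag")

		System.out.println(majority + " / " + tagged);
	}
}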
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,9 @@ import java.beans.PropertyEditorSupport; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + import com.mongodb.WriteConcern; /** @@ -24,16 +27,21 @@ * {@link WriteConcern#valueOf(String)}, use the well known {@link WriteConcern} value, otherwise pass the string as is * to the constructor of the write concern. There is no support for other constructor signatures when parsing from a * string value. - * + * * @author Mark Pollack + * @author Christoph Strobl */ public class WriteConcernPropertyEditor extends PropertyEditorSupport { /** - * Parse a string to a List + * Parse a string to a {@link WriteConcern}. */ @Override - public void setAsText(String writeConcernString) { + public void setAsText(@Nullable String writeConcernString) { + + if (!StringUtils.hasText(writeConcernString)) { + return; + } WriteConcern writeConcern = WriteConcern.valueOf(writeConcernString); if (writeConcern != null) { @@ -43,6 +51,5 @@ public void setAsText(String writeConcernString) { // pass on the string to the constructor setValue(new WriteConcern(writeConcernString)); } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java index f098200afe..5a1e5b725e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java @@ -1,5 +1,6 @@ /** * Spring XML namespace configuration for MongoDB specific repositories. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.config; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java new file mode 100644 index 0000000000..a00d95a9ad --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java @@ -0,0 +1,102 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; + +import org.bson.Document; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping; +import org.springframework.data.mongodb.core.aggregation.FieldLookupPolicy; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; + +/** + * Utility methods to map {@link org.springframework.data.mongodb.core.aggregation.Aggregation} pipeline definitions and + * create type-bound {@link AggregationOperationContext}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class AggregationUtil { + + final QueryMapper queryMapper; + final MappingContext, MongoPersistentProperty> mappingContext; + final Lazy untypedMappingContext; + + AggregationUtil(QueryMapper queryMapper, + MappingContext, MongoPersistentProperty> mappingContext) { + + this.queryMapper = queryMapper; + this.mappingContext = mappingContext; + this.untypedMappingContext = Lazy.of(() -> new TypeBasedAggregationOperationContext(Object.class, mappingContext, + queryMapper, FieldLookupPolicy.relaxed())); + } + + AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class inputType) { + + DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping(); + + if (domainTypeMapping == DomainTypeMapping.NONE) { + return Aggregation.DEFAULT_CONTEXT; + } + + FieldLookupPolicy lookupPolicy = domainTypeMapping == DomainTypeMapping.STRICT + && !aggregation.getPipeline().containsUnionWith() ? FieldLookupPolicy.strict() : FieldLookupPolicy.relaxed(); + + if (aggregation instanceof TypedAggregation ta) { + return new TypeBasedAggregationOperationContext(ta.getInputType(), mappingContext, queryMapper, lookupPolicy); + } + + if (inputType == null) { + return untypedMappingContext.get(); + } + + return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper, lookupPolicy); + } + + /** + * Extract and map the aggregation pipeline into a {@link List} of {@link Document}. + * + * @param aggregation + * @param context + * @return + */ + List createPipeline(Aggregation aggregation, AggregationOperationContext context) { + return aggregation.toPipeline(context); + } + + /** + * Extract the command and map the aggregation pipeline. 
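To make the mapping flow of AggregationUtil concrete before the parameter details below, a rough sketch; the class is package-private, so this only compiles from the same package, and Person is an assumed domain type:

import java.util.List;

import org.bson.Document;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregationUtilSketch {

	List<Document> renderPipeline() {

		MongoMappingContext mappingContext = new MongoMappingContext();
		QueryMapper queryMapper = new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext));

		// Person is an assumed domain type; the lookup policy follows the aggregation options.
		TypedAggregation<Person> aggregation = Aggregation.newAggregation(Person.class,
				Aggregation.match(Criteria.where("lastname").is("Doe")));

		AggregationUtil util = new AggregationUtil(queryMapper, mappingContext);
		AggregationOperationContext context = util.createAggregationContext(aggregation, null);

		return util.createPipeline(aggregation, context);
	}
}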
+ * + * @param aggregation + * @param context + * @return + */ + Document createCommand(String collection, Aggregation aggregation, AggregationOperationContext context) { + return aggregation.toDocument(collection, context); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java index d4762e738d..4820c2355c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,20 +17,36 @@ import java.util.List; +import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.Pair; -import com.mongodb.BulkWriteResult; +import com.mongodb.bulk.BulkWriteResult; /** - * Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB - * 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add - * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling * {@link #execute()}. - * + * + *

+ * <pre class="code">
+ * MongoOperations ops = …;
+ *
+ * ops.bulkOps(BulkMode.UNORDERED, Person.class)
+ * 				.insert(newPerson)
+ * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
+ * 				.execute();
+ * </pre>
+ * <p>
+ * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * * @author Tobias Trelle * @author Oliver Gierke + * @author Minsu Kim * @since 1.9 */ public interface BulkOperations { @@ -38,18 +54,18 @@ public interface BulkOperations { /** * Mode for bulk operation. **/ - public enum BulkMode { + enum BulkMode { /** Perform bulk operations in sequence. The first error will cancel processing. */ ORDERED, /** Perform bulk operations in parallel. Processing will continue on errors. */ UNORDERED - }; + } /** * Add a single insert to the bulk operation. - * + * * @param documents the document to insert, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}. */ @@ -57,7 +73,7 @@ public enum BulkMode { /** * Add a list of inserts to the bulk operation. - * + * * @param documents List of documents to insert, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}. */ @@ -65,63 +81,99 @@ public enum BulkMode { /** * Add a single update to the bulk operation. For the update request, only the first matching document is updated. - * - * @param query update criteria, must not be {@literal null}. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. * @param update {@link Update} operation to perform, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateOne(Query query, Update update); + default BulkOperations updateOne(Query query, Update update) { + return updateOne(query, (UpdateDefinition) update); + } + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link Update} operation to perform, must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 + */ + BulkOperations updateOne(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated. - * + * * @param updates Update operations to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateOne(List> updates); + BulkOperations updateOne(List> updates); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. 
+ */ + default BulkOperations updateMulti(Query query, Update update) { + return updateMulti(query, (UpdateDefinition) update); + } /** * Add a single update to the bulk operation. For the update request, all matching documents are updated. - * + * * @param query Update criteria. * @param update Update operation to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 */ - BulkOperations updateMulti(Query query, Update update); + BulkOperations updateMulti(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, all matching documents are updated. - * + * * @param updates Update operations to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateMulti(List> updates); + BulkOperations updateMulti(List> updates); /** * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, * else an insert. - * + * * @param query Update criteria. * @param update Update operation to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations upsert(Query query, Update update); + default BulkOperations upsert(Query query, Update update) { + return upsert(query, (UpdateDefinition) update); + } + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 + */ + BulkOperations upsert(Query query, UpdateDefinition update); /** * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty, * else an insert. - * + * * @param updates Updates/insert operations to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ BulkOperations upsert(List> updates); /** * Add a single remove operation to the bulk operation. - * + * * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}. */ @@ -129,17 +181,42 @@ public enum BulkMode { /** * Add a list of remove operations to the bulk operation. - * + * * @param removes the remove operations to perform, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}. */ BulkOperations remove(List removes); + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. 
+ * @since 2.2 + */ + default BulkOperations replaceOne(Query query, Object replacement) { + return replaceOne(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. + * @since 2.2 + */ + BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); + /** * Execute all bulk operations using the default write concern. - * + * * @return Result of the bulk operation providing counters for inserts/updates etc. - * @throws {@link BulkOperationException} if an error occurred during bulk processing. + * @throws org.springframework.data.mongodb.BulkOperationException if an error occurred during bulk processing. */ BulkWriteResult execute(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java new file mode 100644 index 0000000000..1f5509cd60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java @@ -0,0 +1,243 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
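Tying the BulkOperations contract together, a usage sketch of the updateMulti and replaceOne overloads introduced here; Person and the criteria are assumptions for illustration:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkOperationsSketch {

	// Person is an assumed domain type; criteria and values are illustrative.
	BulkWriteResult updateAndReplace(MongoOperations template, Person replacement) {

		return template.bulkOps(BulkMode.ORDERED, Person.class)
				.updateMulti(query(where("lastname").is("Doe")), new Update().set("active", true))
				.replaceOne(query(where("firstname").is("Joe")), replacement)
				.execute();
	}
}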
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.context.ApplicationEvent; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.util.Assert; + +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOneModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.WriteModel; + +/** + * Support class for bulk operations. + * + * @author Mark Paluch + * @since 4.1 + */ +abstract class BulkOperationsSupport { + + private final String collectionName; + + BulkOperationsSupport(String collectionName) { + + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + + this.collectionName = collectionName; + } + + /** + * Emit a {@link BeforeSaveEvent}. + * + * @param holder + */ + void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } + } + + /** + * Emit a {@link AfterSaveEvent}. 
+ * + * @param holder + */ + void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } + } + + WriteModel mapWriteModel(Object source, WriteModel writeModel) { + + if (writeModel instanceof UpdateOneModel model) { + + Bson sort = model.getOptions().getSort(); + if (sort instanceof Document sortDocument) { + model.getOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof UpdateManyModel model) { + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof DeleteOneModel model) { + return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof DeleteManyModel model) { + return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof ReplaceOneModel model) { + + Bson sort = model.getReplaceOptions().getSort(); + + if (sort instanceof Document sortDocument) { + model.getReplaceOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + return new ReplaceOneModel<>(getMappedQuery(model.getFilter()), model.getReplacement(), + model.getReplaceOptions()); + } + + return writeModel; + } + + private List mapUpdatePipeline(AggregationUpdate source) { + + Class type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class; + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, + updateMapper().getMappingContext(), queryMapper()); + + return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context); + } + + /** + * Emit a {@link ApplicationEvent} if event multicasting is enabled. + * + * @param event + */ + protected abstract void maybeEmitEvent(ApplicationEvent event); + + /** + * @return the {@link UpdateMapper} to use. + */ + protected abstract UpdateMapper updateMapper(); + + /** + * @return the {@link QueryMapper} to use. + */ + protected abstract QueryMapper queryMapper(); + + /** + * @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}. 
+ */ + protected abstract Optional> entity(); + + protected Bson getMappedUpdate(Bson update) { + return updateMapper().getMappedObject(update, entity()); + } + + protected Bson getMappedQuery(Bson query) { + return queryMapper().getMappedObject(query, entity()); + } + + protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { + + BulkWriteOptions options = new BulkWriteOptions(); + + return switch (bulkMode) { + case ORDERED -> options.ordered(true); + case UNORDERED -> options.ordered(false); + }; + } + + /** + * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}. + * @param update The {@link Update} to apply + * @param upsert flag to indicate if document should be upserted. + * @param multi flag to indicate if update might affect multiple documents. + * @return new instance of {@link UpdateOptions}. + */ + protected UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert, + boolean multi) { + + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update.hasArrayFilters()) { + List list = new ArrayList<>(update.getArrayFilters().size()); + for (ArrayFilter arrayFilter : update.getArrayFilters()) { + list.add(arrayFilter.asDocument()); + } + options.arrayFilters(list); + } + + if (!multi && filterQuery.isSorted()) { + options.sort(filterQuery.getSortObject()); + } + + filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + return options; + } + + /** + * Value object chaining together an actual source with its {@link WriteModel} representation. + * + * @author Christoph Strobl + */ + record SourceAwareWriteModelHolder(Object source, WriteModel model) { + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java new file mode 100644 index 0000000000..17b8835b7e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java @@ -0,0 +1,243 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.time.Instant; +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.bson.Document; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.messaging.Message; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.OperationType; + +/** + * {@link Message} implementation specific to MongoDB Change + * Streams. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi + * @since 2.1 + */ +public class ChangeStreamEvent { + + @SuppressWarnings("rawtypes") // + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument"); + + @SuppressWarnings("rawtypes") // + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange"); + + private final @Nullable ChangeStreamDocument raw; + + private final Class targetType; + private final MongoConverter converter; + + // accessed through CONVERTED_FULL_DOCUMENT_UPDATER. + private volatile @Nullable T convertedFullDocument; + + // accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER. + private volatile @Nullable T convertedFullDocumentBeforeChange; + + /** + * @param raw can be {@literal null}. + * @param targetType must not be {@literal null}. + * @param converter must not be {@literal null}. + */ + public ChangeStreamEvent(@Nullable ChangeStreamDocument raw, Class targetType, + MongoConverter converter) { + + this.raw = raw; + this.targetType = targetType; + this.converter = converter; + } + + /** + * Get the raw {@link ChangeStreamDocument} as emitted by the driver. + * + * @return can be {@literal null}. + */ + @Nullable + public ChangeStreamDocument getRaw() { + return raw; + } + + /** + * Get the {@link ChangeStreamDocument#getClusterTime() cluster time} as {@link Instant} the event was emitted at. + * + * @return can be {@literal null}. + */ + @Nullable + public Instant getTimestamp() { + + return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) + : null; + } + + /** + * Get the {@link ChangeStreamDocument#getClusterTime() cluster time}. + * + * @return can be {@literal null}. + * @since 2.2 + */ + @Nullable + public BsonTimestamp getBsonTimestamp() { + return raw != null ? raw.getClusterTime() : null; + } + + /** + * Get the {@link ChangeStreamDocument#getResumeToken() resume token} for this event. + * + * @return can be {@literal null}. + */ + @Nullable + public BsonValue getResumeToken() { + return raw != null ? raw.getResumeToken() : null; + } + + /** + * Get the {@link ChangeStreamDocument#getOperationType() operation type} for this event. + * + * @return can be {@literal null}. + */ + @Nullable + public OperationType getOperationType() { + return raw != null ? raw.getOperationType() : null; + } + + /** + * Get the database name the event was originated at. + * + * @return can be {@literal null}. + */ + @Nullable + public String getDatabaseName() { + return raw != null ? raw.getNamespace().getDatabaseName() : null; + } + + /** + * Get the collection name the event was originated at. + * + * @return can be {@literal null}. + */ + @Nullable + public String getCollectionName() { + return raw != null ? raw.getNamespace().getCollectionName() : null; + } + + /** + * Get the potentially converted {@link ChangeStreamDocument#getFullDocument()}. + * + * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocument()} is + * {@literal null}. 
+ */ + @Nullable + public T getBody() { + + if (raw == null || raw.getFullDocument() == null) { + return null; + } + + return getConvertedFullDocument(raw.getFullDocument()); + } + + /** + * Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being changed. + * + * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is + * {@literal null}. + * @since 4.0 + */ + @Nullable + public T getBodyBeforeChange() { + + if (raw == null || raw.getFullDocumentBeforeChange() == null) { + return null; + } + + return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange()); + } + + @SuppressWarnings("unchecked") + private T getConvertedFullDocumentBeforeChange(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER); + } + + @SuppressWarnings("unchecked") + private T getConvertedFullDocument(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER); + } + + private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater updater) { + + Object result = updater.get(this); + + if (result != null) { + return result; + } + + if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) { + + result = converter.read(targetType, fullDocument); + return updater.compareAndSet(this, null, result) ? result : updater.get(this); + } + + if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) { + + result = converter.getConversionService().convert(fullDocument, targetType); + return updater.compareAndSet(this, null, result) ? result : updater.get(this); + } + + throw new IllegalArgumentException( + String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType)); + } + + @Override + public String toString() { + return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ChangeStreamEvent that = (ChangeStreamEvent) o; + + if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.targetType, that.targetType); + } + + @Override + public int hashCode() { + int result = raw != null ? raw.hashCode() : 0; + result = 31 * result + ObjectUtils.nullSafeHashCode(targetType); + return result; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java new file mode 100644 index 0000000000..aaee3b76af --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java @@ -0,0 +1,444 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
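For orientation, a sketch that combines ChangeStreamOptions with reactive consumption of ChangeStreamEvent; the changeStream(..) entry point, the collection name, and the Person type are assumptions:

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;

import reactor.core.publisher.Flux;

class ChangeStreamSketch {

	Flux<ChangeStreamEvent<Person>> watchPersons(ReactiveMongoOperations template) {

		ChangeStreamOptions options = ChangeStreamOptions.builder()
				.returnFullDocumentOnUpdate() // populate fullDocument for update events
				.build();

		// Assumed entry point and collection name; getBody() is the converted fullDocument.
		return template.changeStream("persons", options, Person.class)
				.doOnNext(event -> System.out.println(event.getOperationType() + ": " + event.getBody()));
	}
}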
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.time.Instant; +import java.util.Arrays; +import java.util.Optional; + +import org.bson.BsonDocument; +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; + +/** + * Options applicable to MongoDB Change Streams. Intended + * to be used along with {@link org.springframework.data.mongodb.core.messaging.ChangeStreamRequest} in a sync world as + * well {@link ReactiveMongoOperations} if you prefer it that way. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi + * @since 2.1 + */ +public class ChangeStreamOptions { + + private @Nullable Object filter; + private @Nullable BsonValue resumeToken; + private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; + private @Nullable Collation collation; + private @Nullable Object resumeTimestamp; + private Resume resume = Resume.UNDEFINED; + + protected ChangeStreamOptions() {} + + /** + * @return {@link Optional#empty()} if not set. + */ + public Optional getFilter() { + return Optional.ofNullable(filter); + } + + /** + * @return {@link Optional#empty()} if not set. + */ + public Optional getResumeToken() { + return Optional.ofNullable(resumeToken); + } + + /** + * @return {@link Optional#empty()} if not set. + */ + public Optional getFullDocumentLookup() { + return Optional.ofNullable(fullDocumentLookup); + } + + /** + * @return {@link Optional#empty()} if not set. + * @since 4.0 + */ + public Optional getFullDocumentBeforeChangeLookup() { + return Optional.ofNullable(fullDocumentBeforeChangeLookup); + } + + /** + * @return {@link Optional#empty()} if not set. + */ + public Optional getCollation() { + return Optional.ofNullable(collation); + } + + /** + * @return {@link Optional#empty()} if not set. + */ + public Optional getResumeTimestamp() { + return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class)); + } + + /** + * @return {@link Optional#empty()} if not set. + * @since 2.2 + */ + public Optional getResumeBsonTimestamp() { + return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class)); + } + + /** + * @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}. + * @since 2.2 + */ + public boolean isStartAfter() { + return Resume.START_AFTER.equals(resume); + } + + /** + * @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}. + * @since 2.2 + */ + public boolean isResumeAfter() { + return Resume.RESUME_AFTER.equals(resume); + } + + /** + * @return empty {@link ChangeStreamOptions}. 
+ */ + public static ChangeStreamOptions empty() { + return ChangeStreamOptions.builder().build(); + } + + /** + * Obtain a shiny new {@link ChangeStreamOptionsBuilder} and start defining options in this fancy fluent way. Just + * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when done. + * + * @return new instance of {@link ChangeStreamOptionsBuilder}. + */ + public static ChangeStreamOptionsBuilder builder() { + return new ChangeStreamOptionsBuilder(); + } + + private static T asTimestampOfType(Object timestamp, Class targetType) { + return targetType.cast(doGetTimestamp(timestamp, targetType)); + } + + private static Object doGetTimestamp(Object timestamp, Class targetType) { + + if (ClassUtils.isAssignableValue(targetType, timestamp)) { + return timestamp; + } + + if (timestamp instanceof Instant instant) { + return new BsonTimestamp((int) instant.getEpochSecond(), 0); + } + + if (timestamp instanceof BsonTimestamp bsonTimestamp) { + return Instant.ofEpochSecond(bsonTimestamp.getTime()); + } + + throw new IllegalArgumentException( + "o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was " + + ObjectUtils.nullSafeClassName(timestamp)); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ChangeStreamOptions that = (ChangeStreamOptions) o; + + if (!ObjectUtils.nullSafeEquals(this.filter, that.filter)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.resumeToken, that.resumeToken)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.resumeTimestamp, that.resumeTimestamp)) { + return false; + } + return resume == that.resume; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(filter); + result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken); + result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup); + result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup); + result = 31 * result + ObjectUtils.nullSafeHashCode(collation); + result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp); + result = 31 * result + ObjectUtils.nullSafeHashCode(resume); + return result; + } + + /** + * @author Christoph Strobl + * @since 2.2 + */ + enum Resume { + + UNDEFINED, + + /** + * @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument) + */ + START_AFTER, + + /** + * @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument) + */ + RESUME_AFTER + } + + /** + * Builder for creating {@link ChangeStreamOptions}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class ChangeStreamOptionsBuilder { + + private @Nullable Object filter; + private @Nullable BsonValue resumeToken; + private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; + private @Nullable Collation collation; + private @Nullable Object resumeTimestamp; + private Resume resume = Resume.UNDEFINED; + + private ChangeStreamOptionsBuilder() {} + + /** + * Set the collation to use. 
+ * + * @param collation must not be {@literal null} nor {@literal empty}. + * @return this. + */ + public ChangeStreamOptionsBuilder collation(Collation collation) { + + Assert.notNull(collation, "Collation must not be null nor empty"); + + this.collation = collation; + return this; + } + + /** + * Set the filter to apply. + *
+ * Fields on aggregation expression root level are prefixed to map to fields contained in + * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, + * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken + * as given, during the mapping procedure. You may want to have a look at the + * structure of Change Events. + *
+ * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are + * mapped to domain type fields. + * + * @param filter the {@link Aggregation Aggregation pipeline} to apply for filtering events. Must not be + * {@literal null}. + * @return this. + */ + public ChangeStreamOptionsBuilder filter(Aggregation filter) { + + Assert.notNull(filter, "Filter must not be null"); + + this.filter = filter; + return this; + } + + /** + * Set the plain filter chain to apply. + * + * @param filter must not be {@literal null} nor contain {@literal null} values. + * @return this. + */ + public ChangeStreamOptionsBuilder filter(Document... filter) { + + Assert.noNullElements(filter, "Filter must not contain null values"); + + this.filter = Arrays.asList(filter); + return this; + } + + /** + * Set the resume token (typically a {@link org.bson.BsonDocument} containing a {@link org.bson.BsonBinary binary + * token}) after which to start with listening. + * + * @param resumeToken must not be {@literal null}. + * @return this. + */ + public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) { + + Assert.notNull(resumeToken, "ResumeToken must not be null"); + + this.resumeToken = resumeToken; + + if (this.resume == Resume.UNDEFINED) { + this.resume = Resume.RESUME_AFTER; + } + + return this; + } + + /** + * Set the {@link FullDocument} lookup to {@link FullDocument#UPDATE_LOOKUP}. + * + * @return this. + * @see #fullDocumentLookup(FullDocument) + */ + public ChangeStreamOptionsBuilder returnFullDocumentOnUpdate() { + return fullDocumentLookup(FullDocument.UPDATE_LOOKUP); + } + + /** + * Set the {@link FullDocument} lookup to use. + * + * @param lookup must not be {@literal null}. + * @return this. + */ + public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) { + + Assert.notNull(lookup, "Lookup must not be null"); + + this.fullDocumentLookup = lookup; + return this; + } + + /** + * Set the {@link FullDocumentBeforeChange} lookup to use. + * + * @param lookup must not be {@literal null}. + * @return this. + * @since 4.0 + */ + public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) { + + Assert.notNull(lookup, "Lookup must not be null"); + + this.fullDocumentBeforeChangeLookup = lookup; + return this; + } + + /** + * Return the full document before being changed if it is available. + * + * @return this. + * @since 4.0 + * @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange) + */ + public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() { + return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE); + } + + /** + * Set the cluster time to resume from. + * + * @param resumeTimestamp must not be {@literal null}. + * @return this. + */ + public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) { + + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); + + this.resumeTimestamp = resumeTimestamp; + return this; + } + + /** + * Set the cluster time to resume from. + * + * @param resumeTimestamp must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) { + + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); + + this.resumeTimestamp = resumeTimestamp; + return this; + } + + /** + * Set the resume token after which to continue emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. 
+ * @since 2.2 + */ + public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) { + + resumeToken(resumeToken); + this.resume = Resume.RESUME_AFTER; + + return this; + } + + /** + * Set the resume token after which to start emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) { + + resumeToken(resumeToken); + this.resume = Resume.START_AFTER; + + return this; + } + + /** + * @return the built {@link ChangeStreamOptions} + */ + public ChangeStreamOptions build() { + + ChangeStreamOptions options = new ChangeStreamOptions(); + + options.filter = this.filter; + options.resumeToken = this.resumeToken; + options.fullDocumentLookup = this.fullDocumentLookup; + options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup; + options.collation = this.collation; + options.resumeTimestamp = this.resumeTimestamp; + options.resume = this.resume; + + return options; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java index 97b32f6119..c142aca173 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java @@ -1,26 +1,46 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import com.mongodb.DBCollection; -import com.mongodb.MongoException; -import org.springframework.dao.DataAccessException; - -public interface CollectionCallback { - - T doInCollection(DBCollection collection) throws MongoException, DataAccessException; - -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.springframework.dao.DataAccessException; +import org.springframework.lang.Nullable; + +import com.mongodb.MongoException; +import com.mongodb.client.MongoCollection; + +/** + * Callback interface for executing actions against a {@link MongoCollection}. 
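+ * <p>
+ * A minimal usage sketch; a {@code CollectionCallback} is typically handed to
+ * {@code MongoTemplate#execute(String, CollectionCallback)} (the collection name below is illustrative):
+ * <pre>{@code
+ * Long count = template.execute("people", collection -> collection.countDocuments());
+ * }</pre>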
+ *
+ * @author Mark Pollack
+ * @author Graeme Rocher
+ * @author Oliver Gierke
+ * @author John Brisbin
+ * @author Christoph Strobl
+ * @since 1.0
+ */
+public interface CollectionCallback<T> {
+
+	/**
+	 * @param collection never {@literal null}.
+	 * @return can be {@literal null}.
+	 * @throws MongoException if the underlying driver operation fails.
+	 * @throws DataAccessException in case of translated driver exceptions.
+	 */
+	@Nullable
+	T doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException;
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java
index 756e2863e4..5df30e0b92 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java
@@ -1,70 +1,1048 @@
-/*
- * Copyright 2010-2011 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-/**
- * Provides a simple wrapper to encapsulate the variety of settings you can use when creating a collection.
- *
- * @author Thomas Risberg
- */
-public class CollectionOptions {
-
-	private Integer maxDocuments;
-
-	private Integer size;
-
-	private Boolean capped;
-
-	/**
-	 * Constructs a new CollectionOptions instance.
-	 *
-	 * @param size the collection size in bytes, this data space is preallocated
-	 * @param maxDocuments the maximum number of documents in the collection.
-	 * @param capped true to created a "capped" collection (fixed size with auto-FIFO behavior based on insertion order),
-	 *          false otherwise.
-	 */
-	public CollectionOptions(Integer size, Integer maxDocuments, Boolean capped) {
-		super();
-		this.maxDocuments = maxDocuments;
-		this.size = size;
-		this.capped = capped;
-	}
-
-	public Integer getMaxDocuments() {
-		return maxDocuments;
-	}
-
-	public void setMaxDocuments(Integer maxDocuments) {
-		this.maxDocuments = maxDocuments;
-	}
-
-	public Integer getSize() {
-		return size;
-	}
-
-	public void setSize(Integer size) {
-		this.size = size;
-	}
-
-	public Boolean getCapped() {
-		return capped;
-	}
-
-	public void setCapped(Boolean capped) {
-		this.capped = capped;
-	}
-
-}
+/*
+ * Copyright 2010-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.StreamSupport; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.util.Optionals; +import org.springframework.lang.CheckReturnValue; +import org.springframework.lang.Contract; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.ValidationAction; +import com.mongodb.client.model.ValidationLevel; + +/** + * Provides a simple wrapper to encapsulate the variety of settings you can use when creating a collection. + * + * @author Thomas Risberg + * @author Christoph Strobl + * @author Mark Paluch + * @author Andreas Zink + * @author Ben Foster + * @author Ross Lawley + */ +public class CollectionOptions { + + private @Nullable Long maxDocuments; + private @Nullable Long size; + private @Nullable Boolean capped; + private @Nullable Collation collation; + private ValidationOptions validationOptions; + private @Nullable TimeSeriesOptions timeSeriesOptions; + private @Nullable CollectionChangeStreamOptions changeStreamOptions; + private @Nullable EncryptedFieldsOptions encryptedFieldsOptions; + + private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, + @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions, + @Nullable CollectionChangeStreamOptions changeStreamOptions, + @Nullable EncryptedFieldsOptions encryptedFieldsOptions) { + + this.maxDocuments = maxDocuments; + this.size = size; + this.capped = capped; + this.collation = collation; + this.validationOptions = validationOptions; + this.timeSeriesOptions = timeSeriesOptions; + this.changeStreamOptions = changeStreamOptions; + this.encryptedFieldsOptions = encryptedFieldsOptions; + } + + /** + * Create new {@link CollectionOptions} by just providing the {@link Collation} to use. + * + * @param collation must not be {@literal null}. + * @return new {@link CollectionOptions}. + * @since 2.0 + */ + public static CollectionOptions just(Collation collation) { + + Assert.notNull(collation, "Collation must not be null"); + + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null, null); + } + + /** + * Create new empty {@link CollectionOptions}. + * + * @return new {@link CollectionOptions}. 
+	 * @since 2.0
+	 */
+	public static CollectionOptions empty() {
+		return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null, null);
+	}
+
+	/**
+	 * Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
+	 * {@link #timeSeries(String, Function)}.
+	 *
+	 * @param timeField the name of the property which contains the date in each time series document. Must not be
+	 *          {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @see #timeSeries(TimeSeriesOptions)
+	 * @since 3.3
+	 */
+	public static CollectionOptions timeSeries(String timeField) {
+		return timeSeries(timeField, it -> it);
+	}
+
+	/**
+	 * Set up {@link CollectionOptions} for a Time Series collection.
+	 *
+	 * @param timeField the name of the field that contains the date in each time series document.
+	 * @param options a function to apply additional settings to {@link TimeSeriesOptions}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 4.4
+	 */
+	public static CollectionOptions timeSeries(String timeField, Function<TimeSeriesOptions, TimeSeriesOptions> options) {
+		return empty().timeSeries(options.apply(TimeSeriesOptions.timeSeries(timeField)));
+	}
+
+	/**
+	 * Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events.
+	 *
+	 * @return new instance of {@link CollectionOptions}.
+	 * @see #changeStream(CollectionChangeStreamOptions)
+	 * @see CollectionChangeStreamOptions#preAndPostImages(boolean)
+	 * @since 4.0
+	 */
+	public static CollectionOptions emitChangedRevisions() {
+		return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with the given {@code encryptedFields}.
+	 *
+	 * @param encryptedFieldsOptions can be {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 4.5.0
+	 */
+	@Contract("_ -> new")
+	@CheckReturnValue
+	public static CollectionOptions encryptedCollection(@Nullable EncryptedFieldsOptions encryptedFieldsOptions) {
+		return new CollectionOptions(null, null, null, null, ValidationOptions.NONE, null, null, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} reading encryption options from the given {@link MongoJsonSchema}.
+	 *
+	 * @param schema must not be {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 4.5.0
+	 */
+	@Contract("_ -> new")
+	@CheckReturnValue
+	public static CollectionOptions encryptedCollection(MongoJsonSchema schema) {
+		return encryptedCollection(EncryptedFieldsOptions.fromSchema(schema));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} building encryption options in a fluent style.
+	 *
+	 * @param optionsFunction must not be {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 4.5.0
+	 */
+	@Contract("_ -> new")
+	@CheckReturnValue
+	public static CollectionOptions encryptedCollection(
+			Function<EncryptedFieldsOptions, EncryptedFieldsOptions> optionsFunction) {
+		return encryptedCollection(optionsFunction.apply(new EncryptedFieldsOptions()));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}.
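+	 * <p>
+	 * A short sketch (the size value is illustrative):
+	 * <pre>{@code
+	 * CollectionOptions options = CollectionOptions.empty().capped().size(1024);
+	 * }</pre>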
+	 * NOTE: Using capped collections requires defining {@link #size(long)}.
+	 *
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.0
+	 */
+	public CollectionOptions capped() {
+		return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code maxDocuments} set to the given value.
+	 *
+	 * @param maxDocuments the maximum number of documents in the collection.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.0
+	 */
+	public CollectionOptions maxDocuments(long maxDocuments) {
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code size} set to the given value.
+	 *
+	 * @param size the collection size in bytes.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.0
+	 */
+	public CollectionOptions size(long size) {
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code collation} set to the given value.
+	 *
+	 * @param collation can be {@literal null}.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.0
+	 */
+	public CollectionOptions collation(@Nullable Collation collation) {
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to the given
+	 * {@link MongoJsonSchema}.
+	 *
+	 * @param schema must not be {@literal null}.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions schema(MongoJsonSchema schema) {
+		return validator(Validator.schema(schema));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to the given
+	 * {@link Validator}.
+	 *
+	 * @param validator can be {@literal null}.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions validator(@Nullable Validator validator) {
+		return validation(validationOptions.validator(validator));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to
+	 * {@link ValidationLevel#OFF}.
+	 *
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions disableValidation() {
+		return schemaValidationLevel(ValidationLevel.OFF);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to
+	 * {@link ValidationLevel#STRICT}.
+	 *
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions strictValidation() {
+		return schemaValidationLevel(ValidationLevel.STRICT);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to
+	 * {@link ValidationLevel#MODERATE}.
+	 *
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions moderateValidation() {
+		return schemaValidationLevel(ValidationLevel.MODERATE);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationAction} set to
+	 * {@link ValidationAction#WARN}.
+	 *
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions warnOnValidationError() {
+		return schemaValidationAction(ValidationAction.WARN);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationAction} set to
+	 * {@link ValidationAction#ERROR}.
+	 *
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions failOnValidationError() {
+		return schemaValidationAction(ValidationAction.ERROR);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to the given
+	 * {@link ValidationLevel}.
+	 *
+	 * @param validationLevel must not be {@literal null}.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) {
+
+		Assert.notNull(validationLevel, "ValidationLevel must not be null");
+		return validation(validationOptions.validationLevel(validationLevel));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with already given settings and {@code validationAction} set to the given
+	 * {@link ValidationAction}.
+	 *
+	 * @param validationAction must not be {@literal null}.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions schemaValidationAction(ValidationAction validationAction) {
+
+		Assert.notNull(validationAction, "ValidationAction must not be null");
+		return validation(validationOptions.validationAction(validationAction));
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with the given {@link ValidationOptions}.
+	 *
+	 * @param validationOptions must not be {@literal null}. Use {@link ValidationOptions#none()} to remove validation.
+	 * @return new {@link CollectionOptions}.
+	 * @since 2.1
+	 */
+	public CollectionOptions validation(ValidationOptions validationOptions) {
+
+		Assert.notNull(validationOptions, "ValidationOptions must not be null");
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
+	 *
+	 * @param timeSeriesOptions must not be {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 3.3
+	 */
+	public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
+
+		Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null");
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Create new {@link CollectionOptions} with the given {@link CollectionChangeStreamOptions}.
+	 *
+	 * @param changeStreamOptions must not be {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 4.0
+	 */
+	public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) {
+
+		Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null");
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Set the {@link EncryptedFieldsOptions} for collections using queryable encryption.
+	 *
+	 * @param encryptedFieldsOptions must not be {@literal null}.
+	 * @return new instance of {@link CollectionOptions}.
+	 * @since 4.5
+	 */
+	@Contract("_ -> new")
+	@CheckReturnValue
+	public CollectionOptions encrypted(EncryptedFieldsOptions encryptedFieldsOptions) {
+
+		Assert.notNull(encryptedFieldsOptions, "EncryptedFieldsOptions must not be null");
+		return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
+				changeStreamOptions, encryptedFieldsOptions);
+	}
+
+	/**
+	 * Get the max number of documents the collection should be limited to.
+	 *
+	 * @return {@link Optional#empty()} if not set.
+	 */
+	public Optional<Long> getMaxDocuments() {
+		return Optional.ofNullable(maxDocuments);
+	}
+
+	/**
+	 * Get the {@literal size} in bytes the collection should be limited to.
+	 *
+	 * @return {@link Optional#empty()} if not set.
+	 */
+	public Optional<Long> getSize() {
+		return Optional.ofNullable(size);
+	}
+
+	/**
+	 * Get if the collection should be capped.
+	 *
+	 * @return {@link Optional#empty()} if not set.
+	 * @since 2.0
+	 */
+	public Optional<Boolean> getCapped() {
+		return Optional.ofNullable(capped);
+	}
+
+	/**
+	 * Get the {@link Collation} settings.
+	 *
+	 * @return {@link Optional#empty()} if not set.
+	 * @since 2.0
+	 */
+	public Optional<Collation> getCollation() {
+		return Optional.ofNullable(collation);
+	}
+
+	/**
+	 * Get the {@link ValidationOptions} for the collection.
+	 *
+	 * @return {@link Optional#empty()} if not set.
+	 * @since 2.1
+	 */
+	public Optional<ValidationOptions> getValidationOptions() {
+		return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
+	}
+
+	/**
+	 * Get the {@link TimeSeriesOptions} if available.
+	 *
+	 * @return {@link Optional#empty()} if not specified.
+	 * @since 3.3
+	 */
+	public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
+		return Optional.ofNullable(timeSeriesOptions);
+	}
+
+	/**
+	 * Get the {@link CollectionChangeStreamOptions} if available.
+	 *
+	 * @return {@link Optional#empty()} if not specified.
+	 * @since 4.0
+	 */
+	public Optional<CollectionChangeStreamOptions> getChangeStreamOptions() {
+		return Optional.ofNullable(changeStreamOptions);
+	}
+
+	/**
+	 * Get the {@code encryptedFields} if available.
+	 *
+	 * @return {@link Optional#empty()} if not specified.
+	 * @since 4.5
+	 */
+	public Optional<EncryptedFieldsOptions> getEncryptedFieldsOptions() {
+		return Optional.ofNullable(encryptedFieldsOptions);
+	}
+
+	@Override
+	public String toString() {
+		return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped
+				+ ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions="
+				+ timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", encryptedFieldsOptions="
+				+ encryptedFieldsOptions + '}';
+	}
+
+	@Override
+	public boolean equals(@Nullable Object o) {
+		if (this == o) {
+			return true;
+		}
+		if (o == null || getClass() != o.getClass()) {
+			return false;
+		}
+
+		CollectionOptions that = (CollectionOptions) o;
+
+		if (!ObjectUtils.nullSafeEquals(maxDocuments, that.maxDocuments)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(size, that.size)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(capped, that.capped)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(collation, that.collation)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions)) {
+			return false;
+		}
+		return ObjectUtils.nullSafeEquals(encryptedFieldsOptions, that.encryptedFieldsOptions);
+	}
+
+	@Override
+	public int hashCode() {
+		int result = ObjectUtils.nullSafeHashCode(maxDocuments);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(size);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(capped);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(encryptedFieldsOptions);
+		return result;
+	}
+
+	/**
+	 * Encapsulation of validation options.
+	 *
+	 * @author Christoph Strobl
+	 * @author Andreas Zink
+	 * @since 2.1
+	 */
+	public static class ValidationOptions {
+
+		private static final ValidationOptions NONE = new ValidationOptions(null, null, null);
+
+		private final @Nullable Validator validator;
+		private final @Nullable ValidationLevel validationLevel;
+		private final @Nullable ValidationAction validationAction;
+
+		public ValidationOptions(@Nullable Validator validator, @Nullable ValidationLevel validationLevel,
+				@Nullable ValidationAction validationAction) {
+
+			this.validator = validator;
+			this.validationLevel = validationLevel;
+			this.validationAction = validationAction;
+		}
+
+		/**
+		 * Create an empty {@link ValidationOptions}.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public static ValidationOptions none() {
+			return NONE;
+		}
+
+		/**
+		 * Define the {@link Validator} to be used for document validation.
+		 *
+		 * @param validator can be {@literal null}.
+		 * @return new instance of {@link ValidationOptions}.
+ */ + public ValidationOptions validator(@Nullable Validator validator) { + return new ValidationOptions(validator, validationLevel, validationAction); + } + + /** + * Define the validation level to apply. + * + * @param validationLevel can be {@literal null}. + * @return new instance of {@link ValidationOptions}. + */ + public ValidationOptions validationLevel(ValidationLevel validationLevel) { + return new ValidationOptions(validator, validationLevel, validationAction); + } + + /** + * Define the validation action to take. + * + * @param validationAction can be {@literal null}. + * @return new instance of {@link ValidationOptions}. + */ + public ValidationOptions validationAction(ValidationAction validationAction) { + return new ValidationOptions(validator, validationLevel, validationAction); + } + + /** + * Get the {@link Validator} to use. + * + * @return never {@literal null}. + */ + public Optional getValidator() { + return Optional.ofNullable(validator); + } + + /** + * Get the {@code validationLevel} to apply. + * + * @return {@link Optional#empty()} if not set. + */ + public Optional getValidationLevel() { + return Optional.ofNullable(validationLevel); + } + + /** + * Get the {@code validationAction} to perform. + * + * @return {@link Optional#empty()} if not set. + */ + public Optional getValidationAction() { + return Optional.ofNullable(validationAction); + } + + /** + * @return {@literal true} if no arguments set. + */ + boolean isEmpty() { + return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel()); + } + + @Override + public String toString() { + + return "ValidationOptions{" + "validator=" + validator + ", validationLevel=" + validationLevel + + ", validationAction=" + validationAction + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ValidationOptions that = (ValidationOptions) o; + + if (!ObjectUtils.nullSafeEquals(validator, that.validator)) { + return false; + } + if (validationLevel != that.validationLevel) + return false; + return validationAction == that.validationAction; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(validator); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationLevel); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationAction); + return result; + } + } + + /** + * Encapsulation of Encryption options for collections. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class EncryptedFieldsOptions { + + private static final EncryptedFieldsOptions NONE = new EncryptedFieldsOptions(); + + private final @Nullable MongoJsonSchema schema; + private final List queryableProperties; + + EncryptedFieldsOptions() { + this(null, List.of()); + } + + private EncryptedFieldsOptions(@Nullable MongoJsonSchema schema, + List queryableProperties) { + + this.schema = schema; + this.queryableProperties = queryableProperties; + } + + /** + * @return {@link EncryptedFieldsOptions#NONE} + */ + public static EncryptedFieldsOptions none() { + return NONE; + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromSchema(MongoJsonSchema schema) { + return new EncryptedFieldsOptions(schema, List.of()); + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. 
+		 */
+		public static EncryptedFieldsOptions fromProperties(List<QueryableJsonSchemaProperty> properties) {
+			return new EncryptedFieldsOptions(null, List.copyOf(properties));
+		}
+
+		/**
+		 * Add a new {@link QueryableJsonSchemaProperty queryable property} for the given source property.
+		 *
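+		 * <p>
+		 * A rough sketch of declaring a queryable encrypted field. The property name and the
+		 * {@code QueryCharacteristics.equality()} factory used below are illustrative assumptions, not fixed API:
+		 * <pre>{@code
+		 * EncryptedFieldsOptions options = EncryptedFieldsOptions.fromProperties(List.of())
+		 * 		.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.string("ssn")), QueryCharacteristics.equality());
+		 * }</pre>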

+ * Please note that, a given {@link JsonSchemaProperty} may override options from a given {@link MongoJsonSchema} if + * set. + * + * @param property the queryable source - typically + * {@link org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty + * encrypted}. + * @param characteristics the query options to set. + * @return new instance of {@link EncryptedFieldsOptions}. + */ + @Contract("_, _ -> new") + @CheckReturnValue + public EncryptedFieldsOptions queryable(JsonSchemaProperty property, QueryCharacteristic... characteristics) { + + List targetPropertyList = new ArrayList<>(queryableProperties.size() + 1); + targetPropertyList.addAll(queryableProperties); + targetPropertyList.add(JsonSchemaProperty.queryable(property, List.of(characteristics))); + + return new EncryptedFieldsOptions(schema, targetPropertyList); + } + + public Document toDocument() { + return new Document("fields", selectPaths()); + } + + private List selectPaths() { + + Map fields = new LinkedHashMap<>(); + for (Document field : fromSchema()) { + fields.put(field.get("path", String.class), field); + } + for (Document field : fromProperties()) { + fields.put(field.get("path", String.class), field); + } + return List.copyOf(fields.values()); + } + + private List fromProperties() { + + if (queryableProperties.isEmpty()) { + return List.of(); + } + + List converted = new ArrayList<>(queryableProperties.size()); + for (QueryableJsonSchemaProperty property : queryableProperties) { + + Document field = new Document("path", property.getIdentifier()); + + if (!property.getTypes().isEmpty()) { + field.append("bsonType", property.getTypes().iterator().next().toBsonType().value()); + } + + if (property + .getTargetProperty() instanceof IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty encrypted) { + if (encrypted.getKeyId() != null) { + if (encrypted.getKeyId() instanceof String stringKey) { + field.append("keyId", + new BsonBinary(BsonBinarySubType.UUID_STANDARD, stringKey.getBytes(StandardCharsets.UTF_8))); + } else { + field.append("keyId", encrypted.getKeyId()); + } + } + } + + field.append("queries", StreamSupport.stream(property.getCharacteristics().spliterator(), false) + .map(QueryCharacteristic::toDocument).toList()); + + if (!field.containsKey("keyId")) { + field.append("keyId", BsonNull.VALUE); + } + + converted.add(field); + } + return converted; + } + + private List fromSchema() { + + if (schema == null) { + return List.of(); + } + + Document root = schema.schemaDocument(); + Map paths = new LinkedHashMap<>(); + collectPaths(root, null, paths); + + List fields = new ArrayList<>(); + if (!paths.isEmpty()) { + + for (Entry entry : paths.entrySet()) { + Document field = new Document("path", entry.getKey()); + field.append("keyId", entry.getValue().getOrDefault("keyId", BsonNull.VALUE)); + if (entry.getValue().containsKey("bsonType")) { + field.append("bsonType", entry.getValue().get("bsonType")); + } + field.put("queries", entry.getValue().get("queries")); + fields.add(field); + } + } + + return fields; + } + } + + private static void collectPaths(Document document, @Nullable String currentPath, Map paths) { + + if (document.containsKey("type") && document.get("type").equals("object")) { + Object o = document.get("properties"); + if (o == null) { + return; + } + + if (o instanceof Document properties) { + for (Entry entry : properties.entrySet()) { + if (entry.getValue() instanceof Document nested) { + + String path = currentPath == null ? 
entry.getKey() : (currentPath + "." + entry.getKey());
+					if (nested.containsKey("encrypt")) {
+						Document target = new Document(nested.get("encrypt", Document.class));
+						if (nested.containsKey("queries")) {
+							List<?> queries = nested.get("queries", List.class);
+							if (!queries.isEmpty() && queries.iterator().next() instanceof Document qd) {
+								target.putAll(qd);
+							}
+						}
+						paths.put(path, target);
+					} else {
+						collectPaths(nested, path, paths);
+					}
+				}
+			}
+		}
+	}
+	}
+
+	/**
+	 * Encapsulation of options applied to define a collection's change stream behaviour.
+	 *
+	 * @author Christoph Strobl
+	 * @since 4.0
+	 */
+	public static class CollectionChangeStreamOptions {
+
+		private final boolean preAndPostImages;
+
+		private CollectionChangeStreamOptions(boolean emitChangedRevisions) {
+			this.preAndPostImages = emitChangedRevisions;
+		}
+
+		/**
+		 * Output the version of a document before and after changes (the document pre- and post-images).
+		 *
+		 * @param emitChangedRevisions whether to emit the document pre- and post-images.
+		 * @return new instance of {@link CollectionChangeStreamOptions}.
+		 */
+		public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) {
+			return new CollectionChangeStreamOptions(emitChangedRevisions);
+		}
+
+		public boolean getPreAndPostImages() {
+			return preAndPostImages;
+		}
+
+		@Override
+		public String toString() {
+			return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}';
+		}
+
+		@Override
+		public boolean equals(@Nullable Object o) {
+			if (this == o) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+
+			CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o;
+
+			return preAndPostImages == that.preAndPostImages;
+		}
+
+		@Override
+		public int hashCode() {
+			return (preAndPostImages ? 1 : 0);
+		}
+	}
+
+	/**
+	 * Options applicable to Time Series collections.
+	 *
+	 * @author Christoph Strobl
+	 * @see <a href="https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
+	 * @since 3.3
+	 */
+	public static class TimeSeriesOptions {
+
+		private final String timeField;
+
+		private @Nullable final String metaField;
+
+		private final GranularityDefinition granularity;
+
+		private final Duration expireAfter;
+
+		private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity,
+				Duration expireAfter) {
+
+			Assert.hasText(timeField, "Time field must not be empty or null");
+
+			this.timeField = timeField;
+			this.metaField = metaField;
+			this.granularity = granularity;
+			this.expireAfter = expireAfter;
+		}
+
+		/**
+		 * Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}, that is,
+		 * the field that contains the date in each time series document.
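+		 * <p>
+		 * For example (the field names are illustrative):
+		 * <pre>{@code
+		 * TimeSeriesOptions options = TimeSeriesOptions.timeSeries("timestamp").metaField("sensorId");
+		 * }</pre>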
+		 * {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
+		 *
+		 * @param timeField must not be {@literal null}.
+		 * @return new instance of {@link TimeSeriesOptions}.
+		 */
+		public static TimeSeriesOptions timeSeries(String timeField) {
+			return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT, Duration.ofSeconds(-1));
+		}
+
+		/**
+		 * Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
+		 * nor the {@link TimeSeriesOptions#timeSeries(String) timeField}, nor point to an {@literal array} or
+		 * {@link java.util.Collection}.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param metaField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions metaField(String metaField) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized. + * Select one that is closest to the time span between incoming measurements. + * + * @return new instance of {@link TimeSeriesOptions}. + * @see Granularity + */ + public TimeSeriesOptions granularity(GranularityDefinition granularity) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Set the {@link Duration} for automatic removal of documents older than a specified value. + * + * @param ttl must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + * @see com.mongodb.client.model.CreateCollectionOptions#expireAfter(long, java.util.concurrent.TimeUnit) + * @since 4.4 + */ + public TimeSeriesOptions expireAfter(Duration ttl) { + return new TimeSeriesOptions(timeField, metaField, granularity, ttl); + } + + /** + * @return never {@literal null}. + */ + public String getTimeField() { + return timeField; + } + + /** + * @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via + * {@link org.springframework.util.StringUtils#hasText(String)}. + */ + @Nullable + public String getMetaField() { + return metaField; + } + + /** + * @return never {@literal null}. + */ + public GranularityDefinition getGranularity() { + return granularity; + } + + /** + * Get the {@link Duration} for automatic removal of documents. + * + * @return a {@link Duration#isNegative() negative} value if not specified. + * @since 4.4 + */ + public Duration getExpireAfter() { + return expireAfter; + } + + @Override + public String toString() { + + return "TimeSeriesOptions{" + "timeField='" + timeField + '\'' + ", metaField='" + metaField + '\'' + + ", granularity=" + granularity + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TimeSeriesOptions that = (TimeSeriesOptions) o; + + if (!ObjectUtils.nullSafeEquals(timeField, that.timeField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(metaField, that.metaField)) { + return false; + } + return ObjectUtils.nullSafeEquals(granularity, that.granularity); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(timeField); + result = 31 * result + ObjectUtils.nullSafeHashCode(metaField); + result = 31 * result + ObjectUtils.nullSafeHashCode(granularity); + return result; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java new file mode 100644 index 0000000000..f3769355c7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.util.Assert; + +import com.mongodb.client.MongoCollection; + +/** + * Interface for functional preparation of a {@link MongoCollection}. + * + * @author Mark Paluch + * @since 4.1 + */ +public interface CollectionPreparer { + + /** + * Returns a preparer that always returns its input collection. + * + * @return a preparer that always returns its input collection. + */ + static CollectionPreparer identity() { + return it -> it; + } + + /** + * Prepare the {@code collection}. + * + * @param collection the collection to prepare. + * @return the prepared collection. + */ + T prepare(T collection); + + /** + * Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies + * the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to + * the caller of the composed function. + * + * @param after the collection preparer to apply after this function is applied. + * @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after} + * preparer. + */ + default CollectionPreparer andThen(CollectionPreparer after) { + Assert.notNull(after, "After CollectionPreparer must not be null"); + return c -> after.prepare(prepare(c)); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java new file mode 100644 index 0000000000..644a3a54d1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java @@ -0,0 +1,182 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.Function; + +import org.bson.Document; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.client.MongoCollection; + +/** + * Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon + * {@link CollectionPreparer preparing a collection}. 
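+ * <p>
+ * Conceptually, a preparer is just a function over the collection; a sketch using the public
+ * {@code CollectionPreparer} contract (the {@code identity()} type witness below is assumed from its generic
+ * declaration):
+ * <pre>{@code
+ * CollectionPreparer<MongoCollection<Document>> preparer = CollectionPreparer.<MongoCollection<Document>> identity()
+ * 		.andThen(c -> c.withReadPreference(ReadPreference.secondaryPreferred()));
+ * MongoCollection<Document> prepared = preparer.prepare(collection);
+ * }</pre>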
+ * + * @author Mark Paluch + * @since 4.1 + */ +class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware { + + private final List sources; + + private CollectionPreparerSupport(List sources) { + this.sources = sources; + } + + T doPrepare(T collection, Function concernAccessor, BiFunction concernFunction, + Function preferenceAccessor, BiFunction preferenceFunction) { + + T collectionToUse = collection; + + for (Object source : sources) { + if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) { + + ReadConcern concern = rca.getReadConcern(); + if (concernAccessor.apply(collectionToUse) != concern) { + collectionToUse = concernFunction.apply(collectionToUse, concern); + } + break; + } + } + + for (Object source : sources) { + if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + + ReadPreference preference = rpa.getReadPreference(); + if (preferenceAccessor.apply(collectionToUse) != preference) { + collectionToUse = preferenceFunction.apply(collectionToUse, preference); + } + break; + } + } + + return collectionToUse; + } + + @Override + public boolean hasReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return true; + } + } + + return false; + } + + @Override + public ReadConcern getReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return rca.getReadConcern(); + } + } + + return null; + } + + @Override + public boolean hasReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return true; + } + } + + return false; + } + + @Override + public ReadPreference getReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return rpa.getReadPreference(); + } + } + + return null; + } + + static class CollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private CollectionPreparerDelegate(List sources) { + super(sources); + } + + public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static CollectionPreparerDelegate of(Object... mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (CollectionPreparerDelegate) mixedAwares[0]; + } + + return new CollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public MongoCollection prepare(MongoCollection collection) { + return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern, + MongoCollection::getReadPreference, MongoCollection::withReadPreference); + } + + } + + static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private ReactiveCollectionPreparerDelegate(List sources) { + super(sources); + } + + public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static ReactiveCollectionPreparerDelegate of(Object... 
mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (ReactiveCollectionPreparerDelegate) mixedAwares[0]; + } + + return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public com.mongodb.reactivestreams.client.MongoCollection prepare( + com.mongodb.reactivestreams.client.MongoCollection collection) { + return doPrepare(collection, // + com.mongodb.reactivestreams.client.MongoCollection::getReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::withReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::getReadPreference, + com.mongodb.reactivestreams.client.MongoCollection::withReadPreference); + } + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java new file mode 100644 index 0000000000..4fa6b3e97d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java @@ -0,0 +1,260 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.MetricConversion; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite + * to {@code $geoWithin}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +class CountQuery { + + private final Document source; + + private CountQuery(Document source) { + this.source = source; + } + + public static CountQuery of(Document source) { + return new CountQuery(source); + } + + /** + * Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query + * to be usable with {@code countDocuments()}. + * + * @return the query {@link Document} that can be used with {@code countDocuments()}. 
+ */ + public Document toQueryDocument() { + + if (!requiresRewrite(source)) { + return source; + } + + Document target = new Document(); + + for (Map.Entry entry : source.entrySet()) { + + if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) { + + target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and"))); + continue; + } + + if (entry.getValue() instanceof Collection collection && requiresRewrite(entry.getValue())) { + + target.put(entry.getKey(), rewriteCollection(collection)); + continue; + } + + if ("$and".equals(entry.getKey()) && target.containsKey("$and")) { + // Expect $and to be processed with Document and createGeoWithin. + continue; + } + + target.put(entry.getKey(), entry.getValue()); + } + + return target; + } + + /** + * @param valueToInspect + * @return {@code true} if the enclosing element needs to be rewritten. + */ + private boolean requiresRewrite(Object valueToInspect) { + + if (valueToInspect instanceof Document document) { + return requiresRewrite(document); + } + + if (valueToInspect instanceof Collection collection) { + return requiresRewrite(collection); + } + + return false; + } + + private boolean requiresRewrite(Collection collection) { + + for (Object o : collection) { + if (o instanceof Document document && requiresRewrite(document)) { + return true; + } + } + + return false; + } + + private boolean requiresRewrite(Document document) { + + if (containsNear(document)) { + return true; + } + + for (Object entry : document.values()) { + + if (requiresRewrite(entry)) { + return true; + } + } + + return false; + } + + private Collection rewriteCollection(Collection source) { + + Collection rewrittenCollection = new ArrayList<>(source.size()); + + for (Object item : source) { + if (item instanceof Document document && requiresRewrite(item)) { + rewrittenCollection.add(CountQuery.of(document).toQueryDocument()); + } else { + rewrittenCollection.add(item); + } + } + + return rewrittenCollection; + } + + /** + * Rewrite the near query for field {@code key} to {@code $geoWithin}. + * + * @param key the queried field. + * @param source source {@link Document}. + * @param $and potentially existing {@code $and} condition. + * @return the rewritten query {@link Document}. + */ + @SuppressWarnings("unchecked") + private static Document createGeoWithin(String key, Document source, @Nullable Object $and) { + + boolean spheric = source.containsKey("$nearSphere"); + Object $near = spheric ? source.get("$nearSphere") : source.get("$near"); + + Number maxDistance = getMaxDistance(source, $near, spheric); + + List $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance); + Document $geoWithinMax = new Document("$geoWithin", + new Document(spheric ? "$centerSphere" : "$center", $centerMax)); + + if (!containsNearWithMinDistance(source)) { + return new Document(key, $geoWithinMax); + } + + Number minDistance = (Number) source.get("$minDistance"); + List $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance); + Document $geoWithinMin = new Document("$geoWithin", + new Document(spheric ? 
"$centerSphere" : "$center", $centerMin)); + + List criteria; + + if ($and != null) { + if ($and instanceof Collection) { + Collection andElements = (Collection) $and; + criteria = new ArrayList<>(andElements.size() + 2); + criteria.addAll(andElements); + } else { + throw new IllegalArgumentException( + "Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: " + + $and); + } + } else { + criteria = new ArrayList<>(2); + } + + criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin)))); + criteria.add(new Document(key, $geoWithinMax)); + + return new Document("$and", criteria); + } + + private static Number getMaxDistance(Document source, Object $near, boolean spheric) { + + Number maxDistance = Double.MAX_VALUE; + + if (source.containsKey("$maxDistance")) { // legacy coordinate pair + return (Number) source.get("$maxDistance"); + } + + if ($near instanceof Document nearDoc) { + + if (nearDoc.containsKey("$maxDistance")) { + + maxDistance = (Number) nearDoc.get("$maxDistance"); + // geojson is in Meters but we need radians x/(6378.1*1000) + if (spheric && nearDoc.containsKey("$geometry")) { + maxDistance = MetricConversion.metersToRadians(maxDistance.doubleValue()); + } + } + } + + return maxDistance; + } + + private static boolean containsNear(Document source) { + return source.containsKey("$near") || source.containsKey("$nearSphere"); + } + + private static boolean containsNearWithMinDistance(Document source) { + + if (!containsNear(source)) { + return false; + } + + return source.containsKey("$minDistance"); + } + + private static Object toCenterCoordinates(Object value) { + + if (ObjectUtils.isArray(value)) { + return value; + } + + if (value instanceof Point point) { + return Arrays.asList(point.getX(), point.getY()); + } + + if (value instanceof Document document) { + + if (document.containsKey("x")) { + return Arrays.asList(document.get("x"), document.get("y")); + } + + if (document.containsKey("$geometry")) { + Document geoJsonPoint = document.get("$geometry", Document.class); + return geoJsonPoint.get("coordinates"); + } + } + + return value; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java index 2be9f33766..9b7408b0cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2010 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,69 @@ */ package org.springframework.data.mongodb.core; -import com.mongodb.DBCursor; +import java.util.function.Function; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ReadPreference; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; /** - * Simple callback interface to allow customization of a {@link DBCursor}. - * + * Simple callback interface to allow customization of a {@link FindIterable}. + * * @author Oliver Gierke + * @author Christoph Strobl */ -interface CursorPreparer { +public interface CursorPreparer extends ReadPreferenceAware { + + /** + * Default {@link CursorPreparer} just passing on the given {@link FindIterable}. + * + * @since 2.2 + */ + CursorPreparer NO_OP_PREPARER = (iterable -> iterable); /** * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. - * - * @param cursor + * + * @param iterable must not be {@literal null}. + * @return never {@literal null}. + */ + FindIterable prepare(FindIterable iterable); + + /** + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a + * {@link FindIterable} via the given {@link Function find} function. + * + * @param collection must not be {@literal null}. + * @param find must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. + * @since 2.2 + */ + default FindIterable initiateFind(MongoCollection collection, + Function, FindIterable> find) { + + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); + + if (hasReadPreference()) { + collection = collection.withReadPreference(getReadPreference()); + } + + return prepare(find.apply(collection)); + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none defined. + * @since 2.2 */ - DBCursor prepare(DBCursor cursor); + @Override + @Nullable + default ReadPreference getReadPreference() { + return null; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java index 4b72c6271e..9d588ad16d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java @@ -1,25 +1,44 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import com.mongodb.DB; -import com.mongodb.MongoException; -import org.springframework.dao.DataAccessException; - -public interface DbCallback<T> { - - T doInDB(DB db) throws MongoException, DataAccessException; -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.dao.DataAccessException; +import org.springframework.lang.Nullable; + +import com.mongodb.MongoException; +import com.mongodb.client.MongoDatabase; + +/** + * Callback interface for executing actions against a {@link MongoDatabase}. + * + * @author Mark Pollack + * @author Graeme Rocher + * @author Thomas Risberg + * @author Oliver Gierke + * @author John Brisbin + * @author Christoph Strobl + */ +public interface DbCallback<T> { + + /** + * @param db must not be {@literal null}. + * @return can be {@literal null}. + * @throws MongoException + * @throws DataAccessException + */ + @Nullable + T doInDB(MongoDatabase db) throws MongoException, DataAccessException; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbHolder.java deleted file mode 100644 index 35dcb37234..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbHolder.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.springframework.data.mongodb.core; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -import com.mongodb.DB; -import org.springframework.transaction.support.ResourceHolderSupport; -import org.springframework.util.Assert; - -class DbHolder extends ResourceHolderSupport { - private static final Object DEFAULT_KEY = new Object(); - - private final Map dbMap = new ConcurrentHashMap(); - - public DbHolder(DB db) { - addDB(db); - } - - public DbHolder(Object key, DB db) { - addDB(key, db); - } - - public DB getDB() { - return getDB(DEFAULT_KEY); - } - - public DB getDB(Object key) { - return this.dbMap.get(key); - } - - public DB getAnyDB() { - if (!this.dbMap.isEmpty()) { - return this.dbMap.values().iterator().next(); - } - return null; - } - - public void addDB(DB session) { - addDB(DEFAULT_KEY, session); - } - - public void addDB(Object key, DB session) { - Assert.notNull(key, "Key must not be null"); - Assert.notNull(session, "DB must not be null"); - this.dbMap.put(key, session); - } - - public DB removeDB(Object key) { - return this.dbMap.remove(key); - } - - public boolean containsDB(DB session) { - return this.dbMap.containsValue(session); - } - - public boolean isEmpty() { - return this.dbMap.isEmpty(); - } - - public boolean doesNotHoldNonDefaultDB() { - synchronized (this.dbMap) { - return this.dbMap.isEmpty() || (this.dbMap.size() == 1 && this.dbMap.containsKey(DEFAULT_KEY)); - } - } - -} diff --git
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java index 40f3bf77c9..52343522a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,201 +15,176 @@ */ package org.springframework.data.mongodb.core; -import java.util.Arrays; +import java.util.ArrayList; import java.util.List; - -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mongodb.BulkOperationException; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.Pair; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BulkWriteException; -import com.mongodb.BulkWriteOperation; -import com.mongodb.BulkWriteRequestBuilder; -import com.mongodb.BulkWriteResult; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; +import com.mongodb.MongoBulkWriteException; import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import 
com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.WriteModel; /** * Default implementation for {@link BulkOperations}. - * + * * @author Tobias Trelle * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + * @author Minsu Kim + * @author Jens Schauder + * @author Michail Nikolaev + * @author Roman Puchkovskiy + * @author Jacob Botuck * @since 1.9 */ -class DefaultBulkOperations implements BulkOperations { +class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations { private final MongoOperations mongoOperations; - private final BulkMode bulkMode; private final String collectionName; - private final Class entityType; + private final BulkOperationContext bulkOperationContext; + private final List models = new ArrayList<>(); - private PersistenceExceptionTranslator exceptionTranslator; - private WriteConcernResolver writeConcernResolver; - private WriteConcern defaultWriteConcern; - - private BulkWriteOperation bulk; + private @Nullable WriteConcern defaultWriteConcern; + private BulkWriteOptions bulkOptions; /** - * Creates a new {@link DefaultBulkOperations} for the given {@link MongoOperations}, {@link BulkMode}, collection - * name and {@link WriteConcern}. - * - * @param mongoOperations The underlying {@link MongoOperations}, must not be {@literal null}. - * @param bulkMode must not be {@literal null}. - * @param collectionName Name of the collection to work on, must not be {@literal null} or empty. - * @param entityType the entity type, can be {@literal null}. + * Creates a new {@link DefaultBulkOperations} for the given {@link MongoOperations}, collection name and + * {@link BulkOperationContext}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param bulkOperationContext must not be {@literal null}. + * @since 2.0 */ - DefaultBulkOperations(MongoOperations mongoOperations, BulkMode bulkMode, String collectionName, - Class entityType) { + DefaultBulkOperations(MongoOperations mongoOperations, String collectionName, + BulkOperationContext bulkOperationContext) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - Assert.notNull(bulkMode, "BulkMode must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + super(collectionName); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); this.mongoOperations = mongoOperations; - this.bulkMode = bulkMode; this.collectionName = collectionName; - this.entityType = entityType; - - this.exceptionTranslator = new MongoExceptionTranslator(); - this.writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; - - this.bulk = initBulkOperation(); - } - - /** - * Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}. - * - * @param exceptionTranslator can be {@literal null}. - */ - public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator; - } - - /** - * Configures the {@link WriteConcernResolver} to be used. Defaults to {@link DefaultWriteConcernResolver}. - * - * @param writeConcernResolver can be {@literal null}. 
- */ - public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) { - this.writeConcernResolver = writeConcernResolver == null ? DefaultWriteConcernResolver.INSTANCE - : writeConcernResolver; + this.bulkOperationContext = bulkOperationContext; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); } /** * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}. - * + * * @param defaultWriteConcern can be {@literal null}. */ - public void setDefaultWriteConcern(WriteConcern defaultWriteConcern) { + void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { this.defaultWriteConcern = defaultWriteConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object) - */ @Override public BulkOperations insert(Object document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); + + maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName)); + Object source = maybeInvokeBeforeConvertCallback(document); + addModel(source, new InsertOneModel<>(getMappedObject(source))); - bulk.insert((DBObject) mongoOperations.getConverter().convertToMongoType(document)); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List) - */ @Override public BulkOperations insert(List documents) { - Assert.notNull(documents, "Documents must not be null!"); + Assert.notNull(documents, "Documents must not be null"); - for (Object document : documents) { - insert(document); - } + documents.forEach(this::insert); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateOne(Query query, Update update) { + public BulkOperations updateOne(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return updateOne(Arrays.asList(Pair.of(query, update))); + return update(query, update, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List) - */ @Override - public BulkOperations updateOne(List> updates) { + public BulkOperations updateOne(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, false); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateMulti(Query query, Update update) { + public BulkOperations updateMulti(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + update(query, update, false, true); - return 
updateMulti(Arrays.asList(Pair.of(query, update))); + return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List) - */ @Override - public BulkOperations updateMulti(List> updates) { + public BulkOperations updateMulti(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, true); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - public BulkOperations upsert(Query query, Update update) { + public BulkOperations upsert(Query query, UpdateDefinition update) { return update(query, update, true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List) - */ @Override public BulkOperations upsert(List> updates) { @@ -220,28 +195,23 @@ public BulkOperations upsert(List> updates) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query) - */ @Override public BulkOperations remove(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); + + DeleteOptions deleteOptions = new DeleteOptions(); + query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); - bulk.find(query.getQueryObject()).remove(); + addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions)); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List) - */ @Override public BulkOperations remove(List removes) { - Assert.notNull(removes, "Removals must not be null!"); + Assert.notNull(removes, "Removals must not be null"); for (Query query : removes) { remove(query); @@ -250,78 +220,227 @@ public BulkOperations remove(List removes) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#executeBulk() - */ @Override - public BulkWriteResult execute() { + public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName)); + Object source = maybeInvokeBeforeConvertCallback(replacement); + addModel(source, new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(source), replaceOptions)); + + return this; + } - MongoAction action = new MongoAction(defaultWriteConcern, MongoActionOperation.BULK, collectionName, entityType, - null, null); - WriteConcern writeConcern = writeConcernResolver.resolve(action); + @Override + public com.mongodb.bulk.BulkWriteResult execute() { try { - return writeConcern == null ? 
bulk.execute() : bulk.execute(writeConcern); + com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo); - } catch (BulkWriteException o_O) { + Assert.state(result != null, "Result must not be null"); - DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O); - throw toThrow == null ? o_O : toThrow; + models.forEach(this::maybeEmitAfterSaveEvent); + models.forEach(this::maybeInvokeAfterSaveCallback); + return result; } finally { - this.bulk = initBulkOperation(); + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + } + + private BulkWriteResult bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + try { + + return collection.bulkWrite( // + models.stream() // + .map(this::extractAndMapWriteModel) // + .collect(Collectors.toList()), // + bulkOptions); + } catch (RuntimeException ex) { + + if (ex instanceof MongoBulkWriteException mongoBulkWriteException) { + + if (mongoBulkWriteException.getWriteConcernError() != null) { + throw new DataIntegrityViolationException(ex.getMessage(), ex); + } + throw new BulkOperationException(ex.getMessage(), mongoBulkWriteException); + } + + throw ex; + } + } + + private WriteModel extractAndMapWriteModel(SourceAwareWriteModelHolder it) { + + maybeEmitBeforeSaveEvent(it); + + if (it.model() instanceof InsertOneModel model) { + + Document target = model.getDocument(); + maybeInvokeBeforeSaveCallback(it.source(), target); + } else if (it.model() instanceof ReplaceOneModel model) { + + Document target = model.getReplacement(); + maybeInvokeBeforeSaveCallback(it.source(), target); } + + return mapWriteModel(it.source(), it.model()); } /** * Performs update and upsert bulk operations. - * + * * @param query the {@link Query} to determine documents to update. * @param update the {@link Update} to perform, must not be {@literal null}. * @param upsert whether to upsert. * @param multi whether to issue a multi-update. * @return the {@link BulkOperations} with the update registered. 
*/ - private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) { + private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - BulkWriteRequestBuilder builder = bulk.find(query.getQueryObject()); + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); - if (upsert) { + if (multi) { + addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options)); + } else { + addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options)); + } - if (multi) { - builder.upsert().update(update.getUpdateObject()); - } else { - builder.upsert().updateOne(update.getUpdateObject()); - } + return this; + } - } else { + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } - if (multi) { - builder.update(update.getUpdateObject()); - } else { - builder.updateOne(update.getUpdateObject()); - } + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } + + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } + + @Override + protected Optional> entity() { + return bulkOperationContext.entity(); + } + + private Document getMappedObject(Object source) { + + if (source instanceof Document document) { + return document; } - return this; + Document sink = new Document(); + + mongoOperations.getConverter().write(source, sink); + return sink; + } + + private void addModel(Object source, WriteModel model) { + models.add(new SourceAwareWriteModelHolder(source, model)); } - private final BulkWriteOperation initBulkOperation() { + private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel model) { - DBCollection collection = mongoOperations.getCollection(collectionName); + Document target = model.getDocument(); + maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel model) { - switch (bulkMode) { - case ORDERED: - return collection.initializeOrderedBulkOperation(); - case UNORDERED: - return collection.initializeUnorderedBulkOperation(); + Document target = model.getReplacement(); + maybeInvokeAfterSaveCallback(holder.source(), target); } + } + + private void publishEvent(MongoMappingEvent event) { + bulkOperationContext.publishEvent(event); + } - throw new IllegalStateException("BulkMode was null!"); + private Object maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName); } + + private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName); + } + + private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName); + } + + /** + * {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to + * {@link QueryMapper} and {@link UpdateMapper}. 
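Taken together, the methods above only queue `WriteModel`s; nothing hits the server until `execute()` replays the collected models as a single bulk write. A usage sketch against a `MongoTemplate` (the `Person` record and field names are made-up examples):

```java
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkOperationsSketch {

	// Hypothetical mapped entity.
	record Person(String name, int age) {}

	BulkWriteResult bulkUpdate(MongoTemplate template) {

		return template.bulkOps(BulkMode.UNORDERED, Person.class) //
				.insert(new Person("Ada", 36)) //
				.updateOne(Query.query(Criteria.where("name").is("Ada")), new Update().set("age", 37)) //
				.remove(Query.query(Criteria.where("name").is("Grace"))) //
				.execute(); // one bulkWrite call for all queued models
	}
}
```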
+ * + * @author Christoph Strobl + * @since 2.0 + */ + record BulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable EntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } + + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings("rawtypes") + public T callback(Class callbackType, T entity, String collectionName) { + + if (skipEntityCallbacks()) { + return entity; + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings("rawtypes") + public T callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return entity; + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + public void publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java index a1f2c96725..2057e2f046 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,169 +15,226 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.domain.Sort.Direction.*; - import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.List; +import org.bson.Document; import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; -import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.MongoException; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.IndexOptions; /** * Default implementation of {@link IndexOperations}. 
- * + * * @author Mark Pollack * @author Oliver Gierke * @author Komi Innocent * @author Christoph Strobl + * @author Mark Paluch */ public class DefaultIndexOperations implements IndexOperations { - private static final Double ONE = Double.valueOf(1); - private static final Double MINUS_ONE = Double.valueOf(-1); - private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); + private static final String PARTIAL_FILTER_EXPRESSION_KEY = "partialFilterExpression"; - private final MongoOperations mongoOperations; private final String collectionName; + private final QueryMapper mapper; + private final @Nullable Class type; + + private final MongoOperations mongoOperations; /** * Creates a new {@link DefaultIndexOperations}. - * - * @param mongoOperations must not be {@literal null}. + * + * @param mongoDbFactory must not be {@literal null}. * @param collectionName must not be {@literal null}. + * @param queryMapper must not be {@literal null}. + * @deprecated since 2.1. Please use + * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}. */ - public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName) { + @Deprecated + public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) { + this(mongoDbFactory, collectionName, queryMapper, null); + } - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - Assert.notNull(collectionName, "Collection name can not be null!"); + /** + * Creates a new {@link DefaultIndexOperations}. + * + * @param mongoDbFactory must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param queryMapper must not be {@literal null}. + * @param type Type used for mapping potential partial index filter expression. Can be {@literal null}. + * @since 1.10 + * @deprecated since 2.1. Please use + * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}. + */ + @Deprecated + public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper, + @Nullable Class type) { + + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); + Assert.notNull(collectionName, "Collection name can not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); - this.mongoOperations = mongoOperations; this.collectionName = collectionName; + this.mapper = queryMapper; + this.type = type; + this.mongoOperations = new MongoTemplate(mongoDbFactory); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) + /** + * Creates a new {@link DefaultIndexOperations}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @param type can be {@literal null}. 
+ * @since 2.1 */ - public void ensureIndex(final IndexDefinition indexDefinition) { - mongoOperations.execute(collectionName, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - DBObject indexOptions = indexDefinition.getIndexOptions(); - if (indexOptions != null) { - collection.createIndex(indexDefinition.getIndexKeys(), indexOptions); - } else { - collection.createIndex(indexDefinition.getIndexKeys()); - } - return null; - } - }); - } + public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class type) { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.IndexOperations#dropIndex(java.lang.String) - */ - public void dropIndex(final String name) { - mongoOperations.execute(collectionName, new CollectionCallback() { - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { - collection.dropIndex(name); - return null; - } - }); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + this.mongoOperations = mongoOperations; + this.mapper = new QueryMapper(mongoOperations.getConverter()); + this.collectionName = collectionName; + this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.IndexOperations#dropAllIndexes() - */ - public void dropAllIndexes() { - dropIndex("*"); - } + @Override + public String ensureIndex(IndexDefinition indexDefinition) { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.IndexOperations#resetIndexCache() - */ - @Deprecated - public void resetIndexCache() { - mongoOperations.execute(collectionName, new CollectionCallback() { - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { + return execute(collection -> { - ReflectiveDBCollectionInvoker.resetIndexCache(collection); - return null; - } + MongoPersistentEntity entity = lookupPersistentEntity(type, collectionName); + + IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); + + indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity); + indexOptions = addDefaultCollationIfRequired(indexOptions, entity); + + Document mappedKeys = mapper.getMappedSort(indexDefinition.getIndexKeys(), entity); + return collection.createIndex(mappedKeys, indexOptions); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.IndexOperations#getIndexInfo() - */ - public List getIndexInfo() { + @Nullable + private MongoPersistentEntity lookupPersistentEntity(@Nullable Class entityType, String collection) { + + if (entityType != null) { + return mapper.getMappingContext().getRequiredPersistentEntity(entityType); + } + + Collection> entities = mapper.getMappingContext().getPersistentEntities(); - return mongoOperations.execute(collectionName, new CollectionCallback>() { - public List doInCollection(DBCollection collection) throws MongoException, DataAccessException { - List dbObjectList = collection.getIndexInfo(); - return getIndexData(dbObjectList); + for (MongoPersistentEntity entity : entities) { + if (entity.getCollection().equals(collection)) { + return entity; } + } - private List getIndexData(List dbObjectList) { + return null; + } + + @Override + public void dropIndex(String name) { + + execute(collection -> { + collection.dropIndex(name); + return 
null; + }); - List indexInfoList = new ArrayList(); + } + + @Override + public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { - for (DBObject ix : dbObjectList) { + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); - DBObject keyDbObject = (DBObject) ix.get("key"); - int numberOfElements = keyDbObject.keySet().size(); + Document result = mongoOperations + .execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))); - List indexFields = new ArrayList(numberOfElements); + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null); + } + } - for (String key : keyDbObject.keySet()) { + @Override + public void dropAllIndexes() { + dropIndex("*"); + } - Object value = keyDbObject.get(key); + @Override + public List getIndexInfo() { - if (TWO_D_IDENTIFIERS.contains(value)) { - indexFields.add(IndexField.geo(key)); - } else if ("text".equals(value)) { + return execute(new CollectionCallback>() { - DBObject weights = (DBObject) ix.get("weights"); - for (String fieldName : weights.keySet()) { - indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString()))); - } + public List doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { - } else { + MongoCursor cursor = collection.listIndexes(Document.class).iterator(); + return getIndexData(cursor); + } - Double keyValue = new Double(value.toString()); + private List getIndexData(MongoCursor cursor) { - if (ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, ASC)); - } else if (MINUS_ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, DESC)); - } - } - } + int available = cursor.available(); + List indexInfoList = available > 0 ? new ArrayList<>(available) : new ArrayList<>(); - String name = ix.get("name").toString(); + while (cursor.hasNext()) { - boolean unique = ix.containsField("unique") ? (Boolean) ix.get("unique") : false; - boolean dropDuplicates = ix.containsField("dropDups") ? (Boolean) ix.get("dropDups") : false; - boolean sparse = ix.containsField("sparse") ? (Boolean) ix.get("sparse") : false; - String language = ix.containsField("default_language") ? 
(String) ix.get("default_language") : ""; - indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language)); + Document ix = cursor.next(); + IndexInfo indexInfo = IndexConverters.documentToIndexInfoConverter().convert(ix); + indexInfoList.add(indexInfo); } return indexInfoList; } }); } + + @Nullable + public T execute(CollectionCallback callback) { + + Assert.notNull(callback, "CollectionCallback must not be null"); + + return mongoOperations.execute(collectionName, callback); + } + + private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions, + @Nullable MongoPersistentEntity entity) { + + if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + return ops; + } + + Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + return ops.partialFilterExpression( + mapper.getMappedSort((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); + } + + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable MongoPersistentEntity entity) { + + if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { + return ops; + } + + return ops.collation(entity.getCollation().toMongoCollation()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java new file mode 100644 index 0000000000..e2471dbb14 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java @@ -0,0 +1,49 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.index.IndexOperationsProvider; + +/** + * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +class DefaultIndexOperationsProvider implements IndexOperationsProvider { + + private final MongoDatabaseFactory mongoDbFactory; + private final QueryMapper mapper; + + /** + * @param mongoDbFactory must not be {@literal null}. + * @param mapper must not be {@literal null}. 
+ */ + DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) { + + this.mongoDbFactory = mongoDbFactory; + this.mapper = mapper; + } + + @Override + public IndexOperations indexOps(String collectionName, Class type) { + return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java new file mode 100644 index 0000000000..59b7ccd63e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java @@ -0,0 +1,393 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Default implementation for {@link ReactiveBulkOperations}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.1 + */ +class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations { + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + private final ReactiveBulkOperationContext bulkOperationContext; + private final List> models = new ArrayList<>(); + + private @Nullable WriteConcern defaultWriteConcern; + + private BulkWriteOptions bulkOptions; + + /** + * Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and + * {@link ReactiveBulkOperationContext}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param bulkOperationContext must not be {@literal null}. + */ + DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName, + ReactiveBulkOperationContext bulkOperationContext) { + + super(collectionName); + + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + this.bulkOperationContext = bulkOperationContext; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + + /** + * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}. + * + * @param defaultWriteConcern can be {@literal null}. + */ + void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { + this.defaultWriteConcern = defaultWriteConcern; + } + + @Override + public ReactiveBulkOperations insert(Object document) { + + Assert.notNull(document, "Document must not be null"); + + this.models.add(Mono.just(document).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it))))); + + return this; + } + + @Override + public ReactiveBulkOperations insert(List documents) { + + Assert.notNull(documents, "Documents must not be null"); + + documents.forEach(this::insert); + + return this; + } + + @Override + public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, false); + return this; + } + + @Override + public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, true); + return this; + } + + @Override + public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) { + return update(query, update, true, true); + } + + @Override + public ReactiveBulkOperations remove(Query query) { + + Assert.notNull(query, "Query must not be null"); + + DeleteOptions deleteOptions = new DeleteOptions(); + query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); + + this.models.add(Mono.just(query) + .map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions)))); + + return this; + } + + @Override + public ReactiveBulkOperations remove(List removes) { + + 
Assert.notNull(removes, "Removals must not be null"); + + for (Query query : removes) { + remove(query); + } + + return this; + } + + @Override + public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + this.models.add(Mono.just(replacement).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, + new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(it), replaceOptions)))); + + return this; + } + + @Override + public Mono execute() { + + try { + return mongoOperations.execute(collectionName, this::bulkWriteTo).next(); + } finally { + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + } + + private Mono bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + Flux concat = Flux.concat(models).flatMapSequential(it -> { + + if (it.model() instanceof InsertOneModel iom) { + + Document target = iom.getDocument(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom))); + } else if (it.model() instanceof ReplaceOneModel rom) { + + Document target = rom.getReplacement(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom))); + } + + return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model()))); + }); + + MongoCollection theCollection = collection; + return concat.collectList().flatMap(it -> { + + return Mono + .from(theCollection + .bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions)) + .doOnSuccess(state -> { + it.forEach(this::maybeEmitAfterSaveEvent); + }).flatMap(state -> { + List> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList()); + + return Flux.concat(monos).then(Mono.just(state)); + }); + }); + } + + /** + * Performs update and upsert bulk operations. + * + * @param query the {@link Query} to determine documents to update. + * @param update the {@link Update} to perform, must not be {@literal null}. + * @param upsert whether to upsert. + * @param multi whether to issue a multi-update. + * @return the {@link BulkOperations} with the update registered. 
+ */ + private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); + + this.models.add(Mono.just(update).map(it -> { + if (multi) { + return new SourceAwareWriteModelHolder(update, + new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + } + return new SourceAwareWriteModelHolder(update, + new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + })); + + return this; + } + + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } + + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } + + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } + + @Override + protected Optional> entity() { + return bulkOperationContext.entity(); + } + + private Document getMappedObject(Object source) { + + if (source instanceof Document) { + return (Document) source; + } + + Document sink = new Document(); + + mongoOperations.getConverter().write(source, sink); + return sink; + } + + private Mono maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } + return Mono.just(holder.source()); + } + + private Mono maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName); + } + + private Mono maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName); + } + + private Mono maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName); + } + + /** + * {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as + * references to {@link QueryMapper} and {@link UpdateMapper}. 
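The reactive variant queues each model as a `Mono`, so the entity callbacks can be asynchronous, and nothing executes until the `Mono` returned by `execute()` is subscribed to. A sketch, assuming the `bulkOps` entry point on `ReactiveMongoTemplate` that accompanies this class (the `Person` record is again a made-up example):

```java
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

import reactor.core.publisher.Mono;

class ReactiveBulkOperationsSketch {

	record Person(String name, int age) {}

	Mono<BulkWriteResult> bulkUpdate(ReactiveMongoTemplate template) {

		return template.bulkOps(BulkMode.ORDERED, Person.class) //
				.insert(new Person("Ada", 36)) //
				.updateOne(Query.query(Criteria.where("name").is("Ada")), new Update().set("age", 37)) //
				.execute(); // deferred: runs only on subscription
	}
}
```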
+ * + * @author Christoph Strobl + * @since 2.0 + */ + record ReactiveBulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable ReactiveEntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } + + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings("rawtypes") + public Mono callback(Class callbackType, T entity, String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings("rawtypes") + public Mono callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + public void publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java new file mode 100644 index 0000000000..8e78f421f4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java @@ -0,0 +1,176 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; +import java.util.Optional; + +import org.bson.Document; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; + +import com.mongodb.client.model.IndexOptions; + +/** + * Default implementation of {@link ReactiveIndexOperations}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public class DefaultReactiveIndexOperations implements ReactiveIndexOperations { + + private static final String PARTIAL_FILTER_EXPRESSION_KEY = "partialFilterExpression"; + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + private final QueryMapper queryMapper; + private final Optional> type; + + /** + * Creates a new {@link DefaultReactiveIndexOperations}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param queryMapper must not be {@literal null}. + */ + public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, + QueryMapper queryMapper) { + this(mongoOperations, collectionName, queryMapper, Optional.empty()); + } + + /** + * Creates a new {@link DefaultReactiveIndexOperations}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param queryMapper must not be {@literal null}. + * @param type used for mapping potential partial index filter expression, must not be {@literal null}. + */ + public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, + QueryMapper queryMapper, Class type) { + this(mongoOperations, collectionName, queryMapper, Optional.of(type)); + } + + private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, + QueryMapper queryMapper, Optional> type) { + + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null"); + Assert.notNull(collectionName, "Collection must not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + this.queryMapper = queryMapper; + this.type = type; + } + + @Override + public Mono ensureIndex(IndexDefinition indexDefinition) { + + return mongoOperations.execute(collectionName, collection -> { + + MongoPersistentEntity entity = type + .map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val)) + .orElseGet(() -> lookupPersistentEntity(collectionName)); + + IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); + + indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity); + indexOptions = addDefaultCollationIfRequired(indexOptions, entity); + + return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions); + + }).next(); + } + + @Override + public Mono alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { + + return mongoOperations.execute(db -> { + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))) + .doOnNext(result -> { + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. 
Response was %s".formatted(name, result.toJson()), null); + } + }); + }).then(); + } + + @Nullable + private MongoPersistentEntity lookupPersistentEntity(String collection) { + + Collection> entities = queryMapper.getMappingContext().getPersistentEntities(); + + return entities.stream() // + .filter(entity -> entity.getCollection().equals(collection)) // + .findFirst() // + .orElse(null); + } + + @Override + public Mono dropIndex(String name) { + return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then(); + } + + @Override + public Mono dropAllIndexes() { + return dropIndex("*"); + } + + @Override + public Flux getIndexInfo() { + + return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) // + .map(IndexConverters.documentToIndexInfoConverter()::convert); + } + + private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions, + @Nullable MongoPersistentEntity entity) { + + if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + return ops; + } + + Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + return ops.partialFilterExpression( + queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); + } + + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable MongoPersistentEntity entity) { + + if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { + return ops; + } + + return ops.collation(entity.getCollation().toMongoCollation()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java index 52c7431bfb..b236b4df28 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,13 +20,16 @@ import static org.springframework.data.mongodb.core.query.Query.*; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; +import org.bson.Document; import org.bson.types.ObjectId; import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; import org.springframework.util.Assert; @@ -34,16 +37,20 @@ import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.DB; +import com.mongodb.BasicDBList; import com.mongodb.MongoException; +import com.mongodb.client.MongoDatabase; /** - * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}. - * + * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}. + * * @author Christoph Strobl * @author Oliver Gierke + * @author Mark Paluch * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. */ +@Deprecated class DefaultScriptOperations implements ScriptOperations { private static final String SCRIPT_COLLECTION_NAME = "system.js"; @@ -53,90 +60,74 @@ class DefaultScriptOperations implements ScriptOperations { /** * Creates new {@link DefaultScriptOperations} using given {@link MongoOperations}. - * + * * @param mongoOperations must not be {@literal null}. */ public DefaultScriptOperations(MongoOperations mongoOperations) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.ExecutableMongoScript) - */ @Override public NamedMongoScript register(ExecutableMongoScript script) { return register(new NamedMongoScript(generateScriptName(), script)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.NamedMongoScript) - */ @Override public NamedMongoScript register(NamedMongoScript script) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); mongoOperations.save(script, SCRIPT_COLLECTION_NAME); return script; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#execute(org.springframework.data.mongodb.core.script.ExecutableMongoScript, java.lang.Object[]) - */ @Override - public Object execute(final ExecutableMongoScript script, final Object... args) { + public Object execute(ExecutableMongoScript script, Object... 
args) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); return mongoOperations.execute(new DbCallback() { @Override - public Object doInDB(DB db) throws MongoException, DataAccessException { - return db.eval(script.getCode(), convertScriptArgs(args)); + public Object doInDB(MongoDatabase db) throws MongoException, DataAccessException { + + Document command = new Document("$eval", script.getCode()); + BasicDBList commandArgs = new BasicDBList(); + commandArgs.addAll(Arrays.asList(convertScriptArgs(false, args))); + command.append("args", commandArgs); + return db.runCommand(command).get("retval"); } }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#call(java.lang.String, java.lang.Object[]) - */ @Override - public Object call(final String scriptName, final Object... args) { + public Object call(String scriptName, Object... args) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); return mongoOperations.execute(new DbCallback() { @Override - public Object doInDB(DB db) throws MongoException, DataAccessException { - return db.eval(String.format("%s(%s)", scriptName, convertAndJoinScriptArgs(args))); + public Object doInDB(MongoDatabase db) throws MongoException, DataAccessException { + + return db.runCommand(new Document("eval", String.format("%s(%s)", scriptName, convertAndJoinScriptArgs(args)))) + .get("retval"); } }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#exists(java.lang.String) - */ @Override public boolean exists(String scriptName) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); - return mongoOperations.exists(query(where("name").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME); + return mongoOperations.exists(query(where(FieldName.ID.name()).is(scriptName)), NamedMongoScript.class, + SCRIPT_COLLECTION_NAME); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#getScriptNames() - */ @Override public Set getScriptNames() { @@ -155,31 +146,31 @@ public Set getScriptNames() { return scriptNames; } - private Object[] convertScriptArgs(Object... args) { + private Object[] convertScriptArgs(boolean quote, Object... args) { if (ObjectUtils.isEmpty(args)) { return args; } - List convertedValues = new ArrayList(args.length); + List convertedValues = new ArrayList<>(args.length); for (Object arg : args) { - convertedValues.add(arg instanceof String ? String.format("'%s'", arg) : this.mongoOperations.getConverter() - .convertToMongoType(arg)); + convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg) + : this.mongoOperations.getConverter().convertToMongoType(arg)); } return convertedValues.toArray(); } private String convertAndJoinScriptArgs(Object... args) { - return ObjectUtils.isEmpty(args) ? "" : StringUtils.arrayToCommaDelimitedString(convertScriptArgs(args)); + return ObjectUtils.isEmpty(args) ? "" : StringUtils.arrayToCommaDelimitedString(convertScriptArgs(true, args)); } /** * Generate a valid name for the {@literal JavaScript}. MongoDB requires an id of type String for scripts. Calling * scripts having {@link ObjectId} as id fails. Therefore we create a random UUID without {@code -} (as this won't * work) an prefix the result with {@link #SCRIPT_NAME_PREFIX}. 
- * + * @return */ private static String generateScriptName() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java index d925cba603..8b4de14e05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java index 86ba70dcb4..54f85051fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,26 @@ */ package org.springframework.data.mongodb.core; +import org.bson.Document; import org.springframework.dao.DataAccessException; -import com.mongodb.DBObject; import com.mongodb.MongoException; /** * An interface used by {@link MongoTemplate} for processing documents returned from a MongoDB query on a per-document - * basis. Implementations of this interface perform the actual work of prcoessing each document but don't need to worry - * about exception handling. {@MongoException}s will be caught and translated by the calling - * MongoTemplate - * - * An DocumentCallbackHandler is typically stateful: It keeps the result state within the object, to be available later - * for later inspection. - * + * basis. Implementations of this interface perform the actual work of processing each document but don't need to worry + * about exception handling. {@link MongoException}s will be caught and translated by the calling MongoTemplate. A + * DocumentCallbackHandler is typically stateful: it keeps the result state within the object, to be available + * for later inspection.
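A minimal sketch of such a stateful handler; the field name is an assumption for illustration, and MongoTemplate#executeQuery(Query, String, DocumentCallbackHandler) is the method that drives it once per returned document:

    import java.util.ArrayList;
    import java.util.List;

    import org.bson.Document;
    import org.springframework.data.mongodb.core.DocumentCallbackHandler;

    // Collects one field per processed document; the accumulated state can be
    // inspected after the query has completed.
    class NameCollectingHandler implements DocumentCallbackHandler {

        private final List<String> names = new ArrayList<>();

        @Override
        public void processDocument(Document document) {
            names.add(document.getString("name"));
        }

        List<String> getNames() {
            return names;
        }
    }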
+ * + * @author Mark Pollack - + * @author Graeme Rocher + * @author Oliver Gierke + * @author John Brisbin + * @author Christoph Strobl + * @since 1.0 */ public interface DocumentCallbackHandler { - void processDocument(DBObject dbObject) throws MongoException, DataAccessException; + void processDocument(Document document) throws MongoException, DataAccessException; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java new file mode 100644 index 0000000000..601b6898b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 3.3 + */ +public final class EncryptionAlgorithms { + + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + public static final String RANGE = "Range"; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java new file mode 100644 index 0000000000..94352ad65c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java @@ -0,0 +1,60 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.lang.Nullable; + +/** + * Delegate class to encapsulate lifecycle event configuration and publishing.
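As a side note on the EncryptionAlgorithms constants introduced above: they are typically referenced from the @Encrypted mapping annotation. A hedged sketch, where Patient and its ssn field are hypothetical:

    import org.springframework.data.mongodb.core.EncryptionAlgorithms;
    import org.springframework.data.mongodb.core.mapping.Encrypted;

    class Patient {

        // Deterministic encryption keeps the field queryable by equality;
        // the Random variant offers stronger protection but is not queryable.
        @Encrypted(algorithm = EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic)
        String ssn;
    }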
+ * + * @author Mark Paluch + * @since 4.0 + * @see ApplicationEventPublisher + */ +class EntityLifecycleEventDelegate { + + private @Nullable ApplicationEventPublisher publisher; + private boolean eventsEnabled = true; + + public void setPublisher(@Nullable ApplicationEventPublisher publisher) { + this.publisher = publisher; + } + + public boolean isEventsEnabled() { + return eventsEnabled; + } + + public void setEventsEnabled(boolean eventsEnabled) { + this.eventsEnabled = eventsEnabled; + } + + /** + * Publish an application event if event publishing is enabled. + * + * @param event the application event. + */ + public void publishEvent(Object event) { + + if (canPublishEvent()) { + publisher.publishEvent(event); + } + } + + private boolean canPublishEvent() { + return publisher != null && eventsEnabled; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java new file mode 100644 index 0000000000..38269787cb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -0,0 +1,1176 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.time.Duration; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.env.Environment; +import org.springframework.core.env.EnvironmentCapable; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.mapping.IdentifierAccessor; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DurationUtil; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.EntityProjectionIntrospector; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.TargetAware; +import org.springframework.data.util.Optionals; +import org.springframework.expression.spel.support.SimpleEvaluationContext; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.ValidationOptions; + +/** + * Common operations performed on an 
entity in its mapping metadata context. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + * @author Ben Foster + * @author Ross Lawley + * @since 2.1 + * @see MongoTemplate + * @see ReactiveMongoTemplate + */ +class EntityOperations { + + private static final String ID_FIELD = FieldName.ID.name(); + + private final MappingContext, MongoPersistentProperty> context; + private final QueryMapper queryMapper; + + private final EntityProjectionIntrospector introspector; + + private final MongoJsonSchemaMapper schemaMapper; + + private @Nullable Environment environment; + + EntityOperations(MongoConverter converter) { + this(converter, new QueryMapper(converter)); + } + + EntityOperations(MongoConverter converter, QueryMapper queryMapper) { + this(converter, converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory(), + queryMapper); + } + + EntityOperations(MongoConverter converter, + MappingContext, MongoPersistentProperty> context, + CustomConversions conversions, ProjectionFactory projectionFactory, QueryMapper queryMapper) { + this.context = context; + this.queryMapper = queryMapper; + this.introspector = EntityProjectionIntrospector.create(projectionFactory, + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and(((target, underlyingType) -> !conversions.isSimpleType(target))), + context); + this.schemaMapper = new MongoJsonSchemaMapper(converter); + if (converter instanceof EnvironmentCapable environmentCapable) { + this.environment = environmentCapable.getEnvironment(); + } + } + + /** + * Creates a new {@link Entity} for the given bean. + * + * @param entity must not be {@literal null}. + * @return new instance of {@link Entity}. + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + Entity forEntity(T entity) { + + Assert.notNull(entity, "Bean must not be null"); + + if (entity instanceof TargetAware targetAware) { + return new SimpleMappedEntity((Map) targetAware.getTarget(), this); + } + + if (entity instanceof String) { + return new UnmappedEntity(parse(entity.toString()), this); + } + + if (entity instanceof Map) { + return new SimpleMappedEntity((Map) entity, this); + } + + return MappedEntity.of(entity, context, this); + } + + /** + * Creates a new {@link AdaptibleEntity} for the given bean and {@link ConversionService}. + * + * @param entity must not be {@literal null}. + * @param conversionService must not be {@literal null}. + * @return new instance of {@link AdaptibleEntity}. + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + AdaptibleEntity forEntity(T entity, ConversionService conversionService) { + + Assert.notNull(entity, "Bean must not be null"); + Assert.notNull(conversionService, "ConversionService must not be null"); + + if (entity instanceof String) { + return new UnmappedEntity(parse(entity.toString()), this); + } + + if (entity instanceof Map) { + return new SimpleMappedEntity((Map) entity, this); + } + + return AdaptibleMappedEntity.of(entity, context, conversionService, this); + } + + /** + * @param source can be {@literal null}. + * @return {@literal true} if the given value is an {@literal array}, {@link Collection} or {@link Iterator}. + * @since 3.2 + */ + static boolean isCollectionLike(@Nullable Object source) { + + if (source == null) { + return false; + } + + return ObjectUtils.isArray(source) || source instanceof Collection || source instanceof Iterator; + } + + /** + * @param entityClass should not be null.
+ * @return the {@link MongoPersistentEntity#getCollection() collection name}. + */ + public String determineCollectionName(@Nullable Class entityClass) { + + if (entityClass == null) { + throw new InvalidDataAccessApiUsageException( + "No class parameter provided, entity collection can't be determined"); + } + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(entityClass); + + if (persistentEntity == null) { + throw new MappingException(String.format( + "Cannot determine collection name from type '%s'. Is it a store native type?", entityClass.getName())); + } + + return persistentEntity.getCollection(); + } + + public Query getByIdInQuery(Collection entities) { + + MultiValueMap byIds = new LinkedMultiValueMap<>(); + + entities.stream() // + .map(this::forEntity) // + .forEach(it -> byIds.add(it.getIdFieldName(), it.getId())); + + Criteria[] criterias = byIds.entrySet().stream() // + .map(it -> Criteria.where(it.getKey()).in(it.getValue())) // + .toArray(Criteria[]::new); + + return new Query(criterias.length == 1 ? criterias[0] : new Criteria().orOperator(criterias)); + } + + /** + * Returns the name of the identifier property. Considers mapping information but falls back to the MongoDB default of + * {@code _id} if no identifier property can be found. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + */ + public String getIdPropertyName(Class type) { + + Assert.notNull(type, "Type must not be null"); + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(type); + + if (persistentEntity != null && persistentEntity.getIdProperty() != null) { + return persistentEntity.getRequiredIdProperty().getName(); + } + + return ID_FIELD; + } + + /** + * Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties. + * + * @param domainType must not be {@literal null}. + * @return the name of the distanceField to use. {@literal dis} by default. + * @since 2.2 + */ + public String nearQueryDistanceFieldName(Class domainType) { + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(domainType); + if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) { + return "dis"; + } + + String distanceFieldName = "calculated-distance"; + int counter = 0; + while (persistentEntity.getPersistentProperty(distanceFieldName) != null) { + distanceFieldName += "-" + (counter++); + } + + return distanceFieldName; + } + + private static Document parse(String source) { + + try { + return Document.parse(source); + } catch (org.bson.json.JsonParseException o_O) { + throw new MappingException("Could not parse given String to save into a JSON document", o_O); + } catch (RuntimeException o_O) { + + // legacy 3.x exception + if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) { + throw new MappingException("Could not parse given String to save into a JSON document", o_O); + } + throw o_O; + } + } + + public TypedOperations forType(@Nullable Class entityClass) { + + if (entityClass != null) { + + MongoPersistentEntity entity = context.getPersistentEntity(entityClass); + + if (entity != null) { + return new TypedEntityOperations(entity, environment); + } + + } + return UntypedOperations.instance(); + } + + /** + * Introspect the given {@link Class result type} in the context of the {@link Class entity type} whether the returned + * type is a projection and what property paths are participating in the projection. 
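Returning to getByIdInQuery(...) above: when all given entities share the same id field, the criteria collapse to a single $in; with mixed id field names they are combined via $or. A hedged sketch of the single-field shape (ids and field name are illustrative):

    import static org.springframework.data.mongodb.core.query.Criteria.where;

    import java.util.List;

    import org.springframework.data.mongodb.core.query.Query;

    class ByIdQuerySketch {

        Query byIds() {
            // Renders as { "id" : { "$in" : [ "42", "43" ] } } before field mapping;
            // mixed id field names would use new Criteria().orOperator(...) instead.
            return new Query(where("id").in(List.of("42", "43")));
        }
    }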
+ * + * @param resultType the type to project on. Must not be {@literal null}. + * @param entityType the source domain type. Must not be {@literal null}. + * @return the introspection result. + * @since 3.4 + * @see EntityProjectionIntrospector#introspect(Class, Class) + */ + public EntityProjection introspectProjection(Class resultType, Class entityType) { + + MongoPersistentEntity persistentEntity = queryMapper.getMappingContext().getPersistentEntity(entityType); + if (persistentEntity == null && !resultType.isInterface() || ClassUtils.isAssignable(Document.class, resultType)) { + return (EntityProjection) EntityProjection.nonProjecting(resultType); + } + return introspector.introspect(resultType, entityType); + } + + /** + * Convert {@link CollectionOptions} to {@link CreateCollectionOptions} using {@link Class entityType} to obtain + * mapping metadata. + * + * @param collectionOptions + * @param entityType + * @return + * @since 3.4 + */ + public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, + Class entityType) { + + Optional collation = Optionals.firstNonEmpty( + () -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> forType(entityType).getCollation());// + + CreateCollectionOptions result = new CreateCollectionOptions(); + collation.map(Collation::toMongoCollation).ifPresent(result::collation); + + if (collectionOptions == null) { + return result; + } + + collectionOptions.getCapped().ifPresent(result::capped); + collectionOptions.getSize().ifPresent(result::sizeInBytes); + collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); + collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); + + collectionOptions.getValidationOptions().ifPresent(it -> { + + ValidationOptions validationOptions = new ValidationOptions(); + + it.getValidationAction().ifPresent(validationOptions::validationAction); + it.getValidationLevel().ifPresent(validationOptions::validationLevel); + + it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); + + result.validationOptions(validationOptions); + }); + + collectionOptions.getTimeSeriesOptions().map(forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> { + + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions( + it.getTimeField()); + + if (StringUtils.hasText(it.getMetaField())) { + options.metaField(it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); + } + + if (!it.getExpireAfter().isNegative()) { + result.expireAfter(it.getExpireAfter().toSeconds(), TimeUnit.SECONDS); + } + + result.timeSeriesOptions(options); + }); + + collectionOptions.getChangeStreamOptions() // + .map(CollectionOptions.CollectionChangeStreamOptions::getPreAndPostImages) // + .map(ChangeStreamPreAndPostImagesOptions::new) // + .ifPresent(result::changeStreamPreAndPostImagesOptions); + + collectionOptions.getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .filter(Predicate.not(Document::isEmpty)) // + .ifPresent(result::encryptedFields); + + return result; + } + + private Document getMappedValidator(Validator validator, Class domainType) { + + Document validationRules = validator.toDocument(); + + if (validationRules.containsKey("$jsonSchema")) { + return 
schemaMapper.mapSchema(validationRules, domainType); + } + + return queryMapper.getMappedObject(validationRules, context.getPersistentEntity(domainType)); + } + + /** + * A representation of information about an entity. + * + * @author Oliver Gierke + * @author Christoph Strobl + * @since 2.1 + */ + interface Entity { + + /** + * Returns the field name of the identifier of the entity. + * + * @return + */ + String getIdFieldName(); + + /** + * Returns the identifier of the entity. + * + * @return + */ + Object getId(); + + /** + * Returns the property value for {@code key}. + * + * @param key + * @return + * @since 4.1 + */ + @Nullable + Object getPropertyValue(String key); + + /** + * Returns the {@link Query} to find the entity by its identifier. + * + * @return + */ + Query getByIdQuery(); + + /** + * Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}. + * + * @return the {@link Query} to use for removing the entity. Never {@literal null}. + * @since 2.2 + */ + default Query getRemoveByQuery() { + return isVersionedEntity() ? getQueryForVersion() : getByIdQuery(); + } + + /** + * Returns the {@link Query} to find the entity in its current version. + * + * @return + */ + Query getQueryForVersion(); + + /** + * Maps the backing entity into a {@link MappedDocument} using the given {@link MongoWriter}. + * + * @param writer must not be {@literal null}. + * @return + */ + MappedDocument toMappedDocument(MongoWriter writer); + + /** + * Asserts that the identifier type is updatable in case it's not already set. + */ + default void assertUpdateableIdIfNotSet() {} + + /** + * Returns whether the entity is versioned, i.e. if it contains a version property. + * + * @return + */ + default boolean isVersionedEntity() { + return false; + } + + /** + * Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}. + * + * @return the entity version. Can be {@literal null}. + * @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check + * {@link #isVersionedEntity()}. + */ + @Nullable + Object getVersion(); + + /** + * Returns the underlying bean. + * + * @return + */ + T getBean(); + + /** + * Returns whether the entity is considered to be new. + * + * @return + * @since 2.1.2 + */ + boolean isNew(); + + /** + * @param sortObject + * @return + * @since 4.1 + * @throws IllegalStateException if a sort key yields {@literal null}. + */ + Map extractKeys(Document sortObject, Class sourceType); + + } + + /** + * Information and commands on an entity. + * + * @author Oliver Gierke + * @since 2.1 + */ + interface AdaptibleEntity extends Entity { + + /** + * Populates the identifier of the backing entity if it has an identifier property and there's no identifier + * currently present. + * + * @param id must not be {@literal null}. + * @return + */ + @Nullable + T populateIdIfNecessary(@Nullable Object id); + + /** + * Initializes the version property of the current entity if available. + * + * @return the entity with the version property updated if available. + */ + T initializeVersionProperty(); + + /** + * Increments the value of the version property if available. + * + * @return the entity with the version property incremented if available. + */ + T incrementVersion(); + + /** + * Returns the current version value if the entity has a version property. + * + * @return the current version or {@literal null} in case it's uninitialized.
+ * @throws IllegalStateException if the entity does not define a {@literal version} property. + */ + @Nullable + Number getVersion(); + } + + private static class UnmappedEntity> implements AdaptibleEntity { + + private final T map; + private final EntityOperations entityOperations; + + protected UnmappedEntity(T map, EntityOperations entityOperations) { + this.map = map; + this.entityOperations = entityOperations; + } + + @Override + public String getIdFieldName() { + return ID_FIELD; + } + + @Override + public Object getId() { + return getPropertyValue(ID_FIELD); + } + + @Override + public Object getPropertyValue(String key) { + return map.get(key); + } + + @Override + public Query getByIdQuery() { + return Query.query(Criteria.where(ID_FIELD).is(map.get(ID_FIELD))); + } + + @Nullable + @Override + public T populateIdIfNecessary(@Nullable Object id) { + + map.put(ID_FIELD, id); + + return map; + } + + @Override + public Query getQueryForVersion() { + throw new MappingException("Cannot query for version on plain Documents"); + } + + @Override + public MappedDocument toMappedDocument(MongoWriter writer) { + return MappedDocument.of(map instanceof Document document // + ? document // + : new Document(map)); + } + + @Override + public T initializeVersionProperty() { + return map; + } + + @Override + @Nullable + public Number getVersion() { + return null; + } + + @Override + public T incrementVersion() { + return map; + } + + @Override + public T getBean() { + return map; + } + + @Override + public boolean isNew() { + return map.get(ID_FIELD) != null; + } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(ID_FIELD, getId()); + } + + for (String key : sortObject.keySet()) { + + Object value = resolveValue(key, sourceEntity); + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable + private Object resolveValue(String key, @Nullable MongoPersistentEntity sourceEntity) { + + if (sourceEntity == null) { + return BsonUtils.resolveValue(map, key); + } + PropertyPath from = PropertyPath.from(key, sourceEntity.getTypeInformation()); + PersistentPropertyPath persistentPropertyPath = entityOperations.context + .getPersistentPropertyPath(from); + return BsonUtils.resolveValue(map, persistentPropertyPath.toDotPath(MongoPersistentProperty::getFieldName)); + } + } + + private static class SimpleMappedEntity> extends UnmappedEntity { + + protected SimpleMappedEntity(T map, EntityOperations entityOperations) { + super(map, entityOperations); + } + + @Override + @SuppressWarnings("unchecked") + public MappedDocument toMappedDocument(MongoWriter writer) { + + T bean = getBean(); + bean = (T) (bean instanceof Document document// + ? 
document // + : new Document(bean)); + Document document = new Document(); + writer.write(bean, document); + + return MappedDocument.of(document); + } + } + + private static class MappedEntity implements Entity { + + private final MongoPersistentEntity entity; + private final IdentifierAccessor idAccessor; + private final PersistentPropertyAccessor propertyAccessor; + private final EntityOperations entityOperations; + + protected MappedEntity(MongoPersistentEntity entity, IdentifierAccessor idAccessor, + PersistentPropertyAccessor propertyAccessor, EntityOperations entityOperations) { + + this.entity = entity; + this.idAccessor = idAccessor; + this.propertyAccessor = propertyAccessor; + this.entityOperations = entityOperations; + } + + private static MappedEntity of(T bean, + MappingContext, MongoPersistentProperty> context, + EntityOperations entityOperations) { + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); + IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); + + return new MappedEntity<>(entity, identifierAccessor, propertyAccessor, entityOperations); + } + + @Override + public String getIdFieldName() { + return entity.getRequiredIdProperty().getFieldName(); + } + + @Override + public Object getId() { + return idAccessor.getRequiredIdentifier(); + } + + @Override + public Object getPropertyValue(String key) { + return propertyAccessor.getProperty(entity.getRequiredPersistentProperty(key)); + } + + @Override + public Query getByIdQuery() { + + if (!entity.hasIdProperty()) { + throw new MappingException("No id property found for object of type " + entity.getType()); + } + + MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); + + return Query.query(Criteria.where(idProperty.getName()).is(getId())); + } + + @Override + public Query getQueryForVersion() { + + MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + + return new Query(Criteria.where(idProperty.getName()).is(getId())// + .and(versionProperty.getName()).is(getVersion())); + } + + @Override + public MappedDocument toMappedDocument(MongoWriter writer) { + + T bean = propertyAccessor.getBean(); + + Document document = new Document(); + writer.write(bean, document); + + if (document.containsKey(ID_FIELD) && document.get(ID_FIELD) == null) { + document.remove(ID_FIELD); + } + + return MappedDocument.of(document); + } + + public void assertUpdateableIdIfNotSet() { + + if (!entity.hasIdProperty()) { + return; + } + + MongoPersistentProperty property = entity.getRequiredIdProperty(); + Object propertyValue = idAccessor.getIdentifier(); + + if (propertyValue != null) { + return; + } + + if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { + throw new InvalidDataAccessApiUsageException( + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), + entity.getType().getName())); + } + } + + @Override + public boolean isVersionedEntity() { + return entity.hasVersionProperty(); + } + + @Override + @Nullable + public Object getVersion() { + return propertyAccessor.getProperty(entity.getRequiredVersionProperty()); + } + + @Override + public T getBean() { + return propertyAccessor.getBean(); + } + + @Override + public boolean isNew() { + return entity.isNew(propertyAccessor.getBean()); + } + + @Override + public Map 
extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(entity.getRequiredIdProperty().getName(), getId()); + } + + for (String key : sortObject.keySet()) { + + Object value; + if (key.indexOf('.') != -1) { + + // follow the path across nested levels. + // TODO: We should have a MongoDB-specific property path abstraction to allow diving into Document. + value = getNestedPropertyValue(key); + } else { + value = getPropertyValue(key); + } + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable + private Object getNestedPropertyValue(String key) { + + String[] segments = key.split("\\."); + Entity currentEntity = this; + Object currentValue = BsonNull.VALUE; + + for (int i = 0; i < segments.length; i++) { + + String segment = segments[i]; + currentValue = currentEntity.getPropertyValue(segment); + + if (i < segments.length - 1) { + currentEntity = entityOperations.forEntity(currentValue); + } + } + + return currentValue != null ? currentValue : BsonNull.VALUE; + } + } + + private static class AdaptibleMappedEntity extends MappedEntity implements AdaptibleEntity { + + private final MongoPersistentEntity entity; + private final ConvertingPropertyAccessor propertyAccessor; + private final IdentifierAccessor identifierAccessor; + + private AdaptibleMappedEntity(MongoPersistentEntity entity, IdentifierAccessor identifierAccessor, + ConvertingPropertyAccessor propertyAccessor, EntityOperations entityOperations) { + + super(entity, identifierAccessor, propertyAccessor, entityOperations); + + this.entity = entity; + this.propertyAccessor = propertyAccessor; + this.identifierAccessor = identifierAccessor; + } + + private static AdaptibleEntity of(T bean, + MappingContext, MongoPersistentProperty> context, + ConversionService conversionService, EntityOperations entityOperations) { + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); + IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); + + return new AdaptibleMappedEntity<>(entity, identifierAccessor, + new ConvertingPropertyAccessor<>(propertyAccessor, conversionService), entityOperations); + } + + @Nullable + @Override + public T populateIdIfNecessary(@Nullable Object id) { + + if (id == null) { + return propertyAccessor.getBean(); + } + + MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty == null) { + return propertyAccessor.getBean(); + } + + if (identifierAccessor.getIdentifier() != null) { + return propertyAccessor.getBean(); + } + + propertyAccessor.setProperty(idProperty, id); + return propertyAccessor.getBean(); + } + + @Override + @Nullable + public Number getVersion() { + + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + + return propertyAccessor.getProperty(versionProperty, Number.class); + } + + @Override + public T initializeVersionProperty() { + + if (!entity.hasVersionProperty()) { + return propertyAccessor.getBean(); + } + + MongoPersistentProperty versionProperty = 
entity.getRequiredVersionProperty(); + + propertyAccessor.setProperty(versionProperty, versionProperty.getType().isPrimitive() ? 1 : 0); + + return propertyAccessor.getBean(); + } + + @Override + public T incrementVersion() { + + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + Number version = getVersion(); + Number nextVersion = version == null ? 0 : version.longValue() + 1; + + propertyAccessor.setProperty(versionProperty, nextVersion); + + return propertyAccessor.getBean(); + } + } + + /** + * Type-specific operations abstraction. + * + * @author Mark Paluch + * @param + * @since 2.2 + */ + interface TypedOperations { + + /** + * Return the optional {@link Collation} for the underlying entity. + * + * @return + */ + Optional getCollation(); + + /** + * Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for + * the underlying entity. + * + * @return + */ + Optional getCollation(Query query); + + /** + * Derive the applicable {@link CollectionOptions} for the given type. + * + * @return never {@literal null}. + * @since 3.3 + */ + CollectionOptions getCollectionOptions(); + + /** + * Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially + * annotated field names. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options); + + /** + * @return the name of the id field. + * @since 4.1 + */ + default String getIdKeyName() { + return ID_FIELD; + } + } + + /** + * {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom + * conversions). + */ + enum UntypedOperations implements TypedOperations { + + INSTANCE; + + UntypedOperations() {} + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static TypedOperations instance() { + return (TypedOperations) INSTANCE; + } + + @Override + public Optional getCollation() { + return Optional.empty(); + } + + @Override + public Optional getCollation(Query query) { + + if (query == null) { + return Optional.empty(); + } + + return query.getCollation(); + } + + @Override + public CollectionOptions getCollectionOptions() { + return CollectionOptions.empty(); + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) { + return options; + } + } + + /** + * {@link TypedOperations} backed by {@link MongoPersistentEntity}. 
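Looking back at initializeVersionProperty() and incrementVersion() above, the optimistic-locking contract they implement can be sketched on a hypothetical entity:

    import org.springframework.data.annotation.Id;
    import org.springframework.data.annotation.Version;

    class Account {

        @Id String id;

        // null on a fresh instance; initialized to 0 (1 for primitive types) on the
        // first insert and bumped by one on every subsequent update.
        @Version Long version;
    }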
+ * + * @param + */ + static class TypedEntityOperations implements TypedOperations { + + private final MongoPersistentEntity entity; + + @Nullable private final Environment environment; + + protected TypedEntityOperations(MongoPersistentEntity entity, @Nullable Environment environment) { + + this.entity = entity; + this.environment = environment; + } + + @Override + public Optional getCollation() { + return Optional.ofNullable(entity.getCollation()); + } + + @Override + public Optional getCollation(Query query) { + + if (query.getCollation().isPresent()) { + return query.getCollation(); + } + + return Optional.ofNullable(entity.getCollation()); + } + + @Override + public CollectionOptions getCollectionOptions() { + + CollectionOptions collectionOptions = CollectionOptions.empty(); + if (entity.hasCollation()) { + collectionOptions = collectionOptions.collation(entity.getCollation()); + } + + if (entity.isAnnotationPresent(TimeSeries.class)) { + + TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + + if (entity.getPersistentProperty(timeSeries.timeField()) == null) { + throw new MappingException(String.format("Time series field '%s' does not exist in type %s", + timeSeries.timeField(), entity.getName())); + } + + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); + if (StringUtils.hasText(timeSeries.metaField())) { + + if (entity.getPersistentProperty(timeSeries.metaField()) == null) { + throw new MappingException( + String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName())); + } + + options = options.metaField(timeSeries.metaField()); + } + if (!Granularity.DEFAULT.equals(timeSeries.granularity())) { + options = options.granularity(timeSeries.granularity()); + } + + if (StringUtils.hasText(timeSeries.expireAfter())) { + + Duration timeout = computeIndexTimeout(timeSeries.expireAfter(), getEvaluationContextForEntity(entity)); + if (!timeout.isNegative()) { + options = options.expireAfter(timeout); + } + } + + collectionOptions = collectionOptions.timeSeries(options); + } + + return collectionOptions; + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) { + + TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField())); + + if (StringUtils.hasText(source.getMetaField())) { + target = target.metaField(mappedNameOrDefault(source.getMetaField())); + } + return target.granularity(source.getGranularity()).expireAfter(source.getExpireAfter()); + } + + @Override + public String getIdKeyName() { + return entity.getIdProperty().getName(); + } + + private String mappedNameOrDefault(String name) { + MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name); + return persistentProperty != null ? persistentProperty.getFieldName() : name; + } + + /** + * Get the {@link ValueEvaluationContext} for a given {@link PersistentEntity entity} or the default one. + * + * @param persistentEntity can be {@literal null} + * @return the context to use.
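To illustrate the time-series branch of getCollectionOptions() above, a sketch of the annotated entity it consumes; the type, field names, and values are assumptions, and expireAfter may also hold a SpEL expression that is evaluated through computeIndexTimeout(...):

    import java.time.Instant;

    import org.springframework.data.mongodb.core.mapping.TimeSeries;
    import org.springframework.data.mongodb.core.timeseries.Granularity;

    // Mapped onto CollectionOptions.TimeSeriesOptions when the collection is created.
    @TimeSeries(collection = "weather", timeField = "timestamp", metaField = "sensor",
            granularity = Granularity.MINUTES, expireAfter = "30d")
    class Measurement {

        Instant timestamp;
        String sensor;
        double value;
    }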
+ */ + private ValueEvaluationContext getEvaluationContextForEntity(@Nullable PersistentEntity persistentEntity) { + + if (persistentEntity instanceof BasicMongoPersistentEntity mongoEntity) { + return mongoEntity.getValueEvaluationContext(null); + } + + return ValueEvaluationContext.of(this.environment, SimpleEvaluationContext.forReadOnlyDataBinding().build()); + } + + /** + * Compute the index timeout value by evaluating a potential + * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value. + * + * @param timeoutValue must not be {@literal null}. + * @param evaluationContext must not be {@literal null}. + * @return never {@literal null} + * @throws IllegalArgumentException for invalid duration values. + */ + private static Duration computeIndexTimeout(String timeoutValue, ValueEvaluationContext evaluationContext) { + return DurationUtil.evaluate(timeoutValue, evaluationContext); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java new file mode 100644 index 0000000000..67ed188655 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java @@ -0,0 +1,123 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.stream.Stream; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; + +/** + * {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent + * API style.
+ * The starting {@literal domainType} is used for mapping the {@link Aggregation} provided via {@code by} into the + * MongoDB specific representation, as well as mapping back the resulting {@link org.bson.Document}. An alternative + * input type for mapping the {@link Aggregation} can be provided by using + * {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation}. + * + *
+ * <pre>
+ *     <code>
+ *         aggregateAndReturn(Jedi.class)
+ *             .by(newAggregation(Human.class, project("These are not the droids you are looking for")))
+ *             .all();
+ *     </code>
+ * </pre>
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public interface ExecutableAggregationOperation { + + /** + * Start creating an aggregation operation that returns results mapped to the given domain type.
+ Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different + * input type for the aggregation. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ExecutableAggregation}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ExecutableAggregation aggregateAndReturn(Class domainType); + + /** + * Collection override (Optional). + * + * @author Christoph Strobl + * @since 2.0 + */ + interface AggregationWithCollection { + + /** + * Explicitly set the name of the collection to perform the query on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link AggregationWithAggregation}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + AggregationWithAggregation inCollection(String collection); + } + + /** + * Trigger execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingAggregation { + + /** + * Apply pipeline operations as specified and get all matching elements. + * + * @return never {@literal null}. + */ + AggregationResults all(); + + /** + * Apply pipeline operations as specified and stream all matching elements.
+ Returns a {@link Stream} that wraps the MongoDB {@link com.mongodb.client.FindIterable}. + * + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + */ + Stream stream(); + } + + /** + * Define the aggregation with pipeline stages. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface AggregationWithAggregation { + + /** + * Set the aggregation to be used. + * + * @param aggregation must not be {@literal null}. + * @return new instance of {@link TerminatingAggregation}. + * @throws IllegalArgumentException if aggregation is {@literal null}. + */ + TerminatingAggregation by(Aggregation aggregation); + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface ExecutableAggregation extends AggregationWithCollection, AggregationWithAggregation {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java new file mode 100644 index 0000000000..ca5aa7a513 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java @@ -0,0 +1,111 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.stream.Stream; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link ExecutableAggregationOperation} operating directly on {@link MongoTemplate}.
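Putting the fluent API together: MongoTemplate implements ExecutableAggregationOperation, so the chain below works directly on a template. Jedi and Human mirror the hypothetical types from the Javadoc example:

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
    import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;

    import org.springframework.data.mongodb.core.MongoTemplate;
    import org.springframework.data.mongodb.core.aggregation.AggregationResults;

    class AggregationSketch {

        // Hypothetical domain types for the sketch.
        static class Human { String name; }
        static class Jedi { String name; }

        AggregationResults<Jedi> aggregate(MongoTemplate template) {
            // The TypedAggregation input type (Human) determines the collection,
            // while Jedi drives the result mapping.
            return template.aggregateAndReturn(Jedi.class)
                    .by(newAggregation(Human.class, project("name")))
                    .all();
        }
    }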
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +class ExecutableAggregationOperationSupport implements ExecutableAggregationOperation { + + private final MongoTemplate template; + + ExecutableAggregationOperationSupport(MongoTemplate template) { + this.template = template; + } + + @Override + public ExecutableAggregation aggregateAndReturn(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ExecutableAggregationSupport<>(template, domainType, null, null); + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + static class ExecutableAggregationSupport + implements AggregationWithAggregation, ExecutableAggregation, TerminatingAggregation { + + private final MongoTemplate template; + private final Class domainType; + private final Aggregation aggregation; + private final String collection; + + public ExecutableAggregationSupport(MongoTemplate template, Class domainType, Aggregation aggregation, + String collection) { + this.template = template; + this.domainType = domainType; + this.aggregation = aggregation; + this.collection = collection; + } + + @Override + public AggregationWithAggregation inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); + } + + @Override + public TerminatingAggregation by(Aggregation aggregation) { + + Assert.notNull(aggregation, "Aggregation must not be null"); + + return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); + } + + @Override + public AggregationResults all() { + return template.aggregate(aggregation, getCollectionName(aggregation), domainType); + } + + @Override + public Stream stream() { + return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType); + } + + private String getCollectionName(Aggregation aggregation) { + + if (StringUtils.hasText(collection)) { + return collection; + } + + if (aggregation instanceof TypedAggregation typedAggregation) { + + if (typedAggregation.getInputType() != null) { + return template.getCollectionName(typedAggregation.getInputType()); + } + } + + return template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java new file mode 100644 index 0000000000..3358ff2b17 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -0,0 +1,373 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.springframework.dao.DataAccessException; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResults; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoCollection; + +/** + * {@link ExecutableFindOperation} allows creation and execution of MongoDB find operations in a fluent API style. + *
+ * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching} into the
+ * MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping back the
+ * result from the {@link org.bson.Document}. However, it is possible to define a different {@literal returnType} via
+ * {@code as} for mapping the result.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
+ * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
+ * collection name for the execution.
+ *
+ * <pre class="code">
+ * query(Human.class)
+ *     .inCollection("star-wars")
+ *     .as(Jedi.class)
+ *     .matching(where("firstname").is("luke"))
+ *     .all();
+ * </pre>
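+ *
+ * A further, hypothetical variant (illustrative only, not part of this change) that streams the matches and closes
+ * the underlying cursor through try-with-resources:
+ * <pre class="code">
+ * try (Stream&lt;Jedi&gt; jedis = query(Human.class)
+ *         .inCollection("star-wars")
+ *         .as(Jedi.class)
+ *         .matching(where("firstname").is("luke"))
+ *         .stream()) {
+ *     jedis.forEach(System.out::println);
+ * }
+ * </pre>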
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public interface ExecutableFindOperation { + + /** + * Start creating a find operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ExecutableFind}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ExecutableFind query(Class domainType); + + /** + * Trigger find execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingFind { + + /** + * Get exactly zero or one result. + * + * @return {@link Optional#empty()} if no match found. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + */ + default Optional one() { + return Optional.ofNullable(oneValue()); + } + + /** + * Get exactly zero or one result. + * + * @return {@literal null} if no match found. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + */ + @Nullable + T oneValue(); + + /** + * Get the first or no result. + * + * @return {@link Optional#empty()} if no match found. + */ + default Optional first() { + return Optional.ofNullable(firstValue()); + } + + /** + * Get the first or no result. + * + * @return {@literal null} if no match found. + */ + @Nullable + T firstValue(); + + /** + * Get all matching elements. + * + * @return never {@literal null}. + */ + List all(); + + /** + * Stream all matching elements. + * + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + */ + Stream stream(); + + /** + * Return a window of elements either starting or resuming at + * {@link org.springframework.data.domain.ScrollPosition}. + *
+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a window of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Window scroll(ScrollPosition scrollPosition); + + /** + * Get the number of matching elements.
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries}, which may have an impact on performance, but
+ * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the application's
+ * needs, use {@link MongoOperations#estimatedCount(String)} for empty queries instead.
+ *
+ * @return total number of matching elements.
+ */
+ long count();
+
+ /**
+ * Check for the presence of matching elements.
+ *
+ * @return {@literal true} if at least one matching element exists.
+ */
+ boolean exists();
+ }
+
+ /**
+ * Trigger {@code geoNear} execution by calling one of the terminating methods.
+ *
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+ interface TerminatingFindNear {
+
+ /**
+ * Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}.
+ *
+ * @return never {@literal null}.
+ */
+ GeoResults all();
+ }
+
+ /**
+ * Terminating operations invoking the actual query execution.
+ *
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+ interface FindWithQuery extends TerminatingFind {
+
+ /**
+ * Set the filter query to be used.
+ *
+ * @param query must not be {@literal null}.
+ * @return new instance of {@link TerminatingFind}.
+ * @throws IllegalArgumentException if query is {@literal null}.
+ */
+ TerminatingFind matching(Query query);
+
+ /**
+ * Set the filter {@link CriteriaDefinition criteria} to be used.
+ *
+ * @param criteria must not be {@literal null}.
+ * @return new instance of {@link TerminatingFind}.
+ * @throws IllegalArgumentException if criteria is {@literal null}.
+ * @since 3.0
+ */
+ default TerminatingFind matching(CriteriaDefinition criteria) {
+ return matching(Query.query(criteria));
+ }
+
+ /**
+ * Set the filter query for the geoNear execution.
+ *
+ * @param nearQuery must not be {@literal null}.
+ * @return new instance of {@link TerminatingFindNear}.
+ * @throws IllegalArgumentException if nearQuery is {@literal null}.
+ */
+ TerminatingFindNear near(NearQuery nearQuery);
+ }
+
+ /**
+ * Collection override (Optional).
+ *
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+ interface FindWithCollection extends FindWithQuery {
+
+ /**
+ * Explicitly set the name of the collection to perform the query on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link FindWithProjection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + FindWithProjection inCollection(String collection); + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.0 + */ + interface FindWithProjection extends FindWithQuery, FindDistinct { + + /** + * Define the target type fields should be mapped to.
+ * Skip this step if you are only interested in the original domain type anyway.
+ *
+ * @param resultType must not be {@literal null}.
+ * @param  result type.
+ * @return new instance of {@link FindWithProjection}.
+ * @throws IllegalArgumentException if resultType is {@literal null}.
+ */
+ FindWithQuery as(Class resultType);
+
+ }
+
+ /**
+ * Distinct Find support.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ interface FindDistinct {
+
+ /**
+ * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view.
+ *
+ * @param field name of the field. Must not be {@literal null}.
+ * @return new instance of {@link TerminatingDistinct}.
+ * @throws IllegalArgumentException if field is {@literal null}.
+ */
+ TerminatingDistinct distinct(String field);
+ }
+
+ /**
+ * Result type override. Optional.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ interface DistinctWithProjection {
+
+ /**
+ * Define the target type the result should be mapped to.
+ * Skip this step if you are fine with the default conversion anyway.
+ * <dl>
+ * <dt>{@link Object} (the default)</dt>
+ * <dd>The result is mapped according to the {@link org.bson.BsonType}, converting e.g. {@link org.bson.BsonString}
+ * into plain {@link String}, {@link org.bson.BsonInt64} to {@link Long}, etc., always picking the most concrete type
+ * with respect to the domain type's property.<br />
+ * Any {@link org.bson.BsonType#DOCUMENT} is run through the {@link org.springframework.data.convert.EntityReader} to
+ * obtain the domain type.<br />
+ * Using {@link Object} also works for non-strictly typed fields, e.g. a mixture of different types such as fields
+ * holding {@link String} in one {@link org.bson.Document} and {@link Long} in another.</dd>
+ * <dt>Any simple type like {@link String} or {@link Long}</dt>
+ * <dd>The result is mapped directly by the MongoDB Java driver and the {@link org.bson.codecs.Codec Codecs} in place.
+ * This works only for results where all documents considered for the operation use the very same type for the
+ * field.</dd>
+ * <dt>Any domain type</dt>
+ * <dd>Domain types can only be mapped if the result of the actual {@code distinct()} operation returns
+ * {@link org.bson.BsonType#DOCUMENT}.</dd>
+ * <dt>{@link org.bson.BsonValue}</dt>
+ * <dd>Using {@link org.bson.BsonValue} allows retrieval of the raw driver-specific format, which returns e.g.
+ * {@link org.bson.BsonString}.</dd>
+ * </dl>
+ *
+ * @param resultType must not be {@literal null}.
+ * @param  result type.
+ * @return new instance of {@link TerminatingDistinct}.
+ * @throws IllegalArgumentException if resultType is {@literal null}.
+ */
+ TerminatingDistinct as(Class resultType);
+ }
+
+ /**
+ * Result restrictions. Optional.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ interface DistinctWithQuery extends DistinctWithProjection {
+
+ /**
+ * Set the filter query to be used.
+ *
+ * @param query must not be {@literal null}.
+ * @return new instance of {@link TerminatingDistinct}.
+ * @throws IllegalArgumentException if query is {@literal null}.
+ */
+ TerminatingDistinct matching(Query query);
+
+ /**
+ * Set the filter {@link CriteriaDefinition criteria} to be used.
+ *
+ * @param criteria must not be {@literal null}.
+ * @return new instance of {@link TerminatingDistinct}.
+ * @throws IllegalArgumentException if criteria is {@literal null}.
+ * @since 3.0
+ */
+ default TerminatingDistinct matching(CriteriaDefinition criteria) {
+ return matching(Query.query(criteria));
+ }
+ }
+
+ /**
+ * Terminating distinct find operations.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ interface TerminatingDistinct extends DistinctWithQuery {
+
+ /**
+ * Get all matching distinct field values.
+ *
+ * @return empty {@link List} if no match found. Never {@literal null}.
+ * @throws DataAccessException if e.g. the result cannot be converted correctly, which may happen if the document
+ * contains {@link String} whereas the result type is specified as {@link Long}.
+ */
+ List all();
+ }
+
+ /**
+ * {@link ExecutableFind} provides methods for constructing lookup operations in a fluent way.
+ *
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+ interface ExecutableFind extends FindWithCollection, FindWithProjection, FindDistinct {}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java
new file mode 100644
index 0000000000..4e6c3547c5
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.bson.Document; + +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.ReadPreference; +import com.mongodb.client.FindIterable; + +/** + * Implementation of {@link ExecutableFindOperation}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +class ExecutableFindOperationSupport implements ExecutableFindOperation { + + private static final Query ALL_QUERY = new Query(); + + private final MongoTemplate template; + + ExecutableFindOperationSupport(MongoTemplate template) { + this.template = template; + } + + @Override + public ExecutableFind query(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY); + } + + /** + * @param + * @author Christoph Strobl + * @since 2.0 + */ + static class ExecutableFindSupport + implements ExecutableFind, FindWithCollection, FindWithProjection, FindWithQuery { + + private final MongoTemplate template; + private final Class domainType; + private final Class returnType; + private final @Nullable String collection; + private final Query query; + + ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, @Nullable String collection, + Query query) { + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.query = query; + } + + @Override + public FindWithProjection inCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + } + + @Override + public FindWithQuery as(Class returnType) { + + Assert.notNull(returnType, "ReturnType must not be null"); + + return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + } + + @Override + public TerminatingFind matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + } + + @Override + public T oneValue() { + + List result = doFind(new DelegatingQueryCursorPreparer(getCursorPreparer(query, null)).limit(2)); + + if (ObjectUtils.isEmpty(result)) { + return null; + } + + if (result.size() > 1) { + throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1); + } + + return result.iterator().next(); + } + + @Override + public T firstValue() { + + List result = doFind(new DelegatingQueryCursorPreparer(getCursorPreparer(query, null)).limit(1)); + + return ObjectUtils.isEmpty(result) ? 
null : result.iterator().next(); + } + + @Override + public List all() { + return doFind(null); + } + + @Override + public Stream stream() { + return doStream(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); + } + + @Override + public TerminatingFindNear near(NearQuery nearQuery) { + return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); + } + + @Override + public long count() { + return template.count(query, domainType, getCollectionName()); + } + + @Override + public boolean exists() { + return template.exists(query, domainType, getCollectionName()); + } + + @SuppressWarnings("unchecked") + @Override + public TerminatingDistinct distinct(String field) { + + Assert.notNull(field, "Field must not be null"); + + return new DistinctOperationSupport(this, field); + } + + private List doFind(@Nullable CursorPreparer preparer) { + + Document queryObject = query.getQueryObject(); + Document fieldsObject = query.getFieldsObject(); + + return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType, + returnType, getCursorPreparer(query, preparer)); + } + + private List doFindDistinct(String field) { + + return template.findDistinct(query, field, getCollectionName(), domainType, + returnType == domainType ? (Class) Object.class : returnType); + } + + private Stream doStream() { + return template.doStream(query, domainType, getCollectionName(), returnType); + } + + private CursorPreparer getCursorPreparer(Query query, @Nullable CursorPreparer preparer) { + return preparer != null ? preparer : template.new QueryCursorPreparer(query, domainType); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + + private String asString() { + return SerializationUtils.serializeToJsonSafely(query); + } + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + static class DelegatingQueryCursorPreparer implements SortingQueryCursorPreparer { + + private final @Nullable CursorPreparer delegate; + private Optional limit = Optional.empty(); + + DelegatingQueryCursorPreparer(@Nullable CursorPreparer delegate) { + this.delegate = delegate; + } + + @Override + public FindIterable prepare(FindIterable iterable) { + + FindIterable target = delegate != null ? delegate.prepare(iterable) : iterable; + return limit.map(target::limit).orElse(target); + } + + CursorPreparer limit(int limit) { + + this.limit = Optional.of(limit); + return this; + } + + @Override + @Nullable + public ReadPreference getReadPreference() { + return delegate.getReadPreference(); + } + + @Override + @Nullable + public Document getSortObject() { + return delegate instanceof SortingQueryCursorPreparer sqcp ? 
sqcp.getSortObject() : null; + } + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + static class DistinctOperationSupport implements TerminatingDistinct { + + private final String field; + private final ExecutableFindSupport delegate; + + public DistinctOperationSupport(ExecutableFindSupport delegate, String field) { + + this.delegate = delegate; + this.field = field; + } + + @Override + @SuppressWarnings("unchecked") + public TerminatingDistinct as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field); + } + + @Override + public TerminatingDistinct matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.matching(query), field); + } + + @Override + public List all() { + return delegate.doFindDistinct(field); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java new file mode 100644 index 0000000000..c2b08c7e59 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java @@ -0,0 +1,139 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Collection; + +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; + +import com.mongodb.bulk.BulkWriteResult; + +/** + * {@link ExecutableInsertOperation} allows creation and execution of MongoDB insert and bulk insert operations in a + * fluent API style.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
+ * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
+ * collection name for the execution.
+ *
+ * <pre class="code">
+ * insert(Jedi.class)
+ *     .inCollection("star-wars")
+ *     .one(luke);
+ * </pre>
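+ *
+ * A hypothetical bulk variant (illustrative only, not part of this change; {@code luke} and {@code leia} are assumed
+ * objects):
+ * <pre class="code">
+ * insert(Jedi.class)
+ *     .inCollection("star-wars")
+ *     .withBulkMode(BulkMode.UNORDERED)
+ *     .bulk(Arrays.asList(luke, leia));
+ * </pre>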
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public interface ExecutableInsertOperation { + + /** + * Start creating an insert operation for given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ExecutableInsert}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ExecutableInsert insert(Class domainType); + + /** + * Trigger insert execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingInsert extends TerminatingBulkInsert { + + /** + * Insert exactly one object. + * + * @param object must not be {@literal null}. + * @return the inserted object. + * @throws IllegalArgumentException if object is {@literal null}. + */ + T one(T object); + + /** + * Insert a collection of objects. + * + * @param objects must not be {@literal null}. + * @return the inserted objects. + * @throws IllegalArgumentException if objects is {@literal null}. + */ + Collection all(Collection objects); + } + + /** + * Trigger bulk insert execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingBulkInsert { + + /** + * Bulk write collection of objects. + * + * @param objects must not be {@literal null}. + * @return resulting {@link BulkWriteResult}. + * @throws IllegalArgumentException if objects is {@literal null}. + */ + BulkWriteResult bulk(Collection objects); + } + + /** + * Collection override (optional). + * + * @author Christoph Strobl + * @since 2.0 + */ + interface InsertWithCollection { + + /** + * Explicitly set the name of the collection.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link InsertWithBulkMode}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + InsertWithBulkMode inCollection(String collection); + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface InsertWithBulkMode extends TerminatingInsert { + + /** + * Define the {@link BulkMode} to use for bulk insert operation. + * + * @param bulkMode must not be {@literal null}. + * @return new instance of {@link TerminatingBulkInsert}. + * @throws IllegalArgumentException if bulkMode is {@literal null}. + */ + TerminatingBulkInsert withBulkMode(BulkMode bulkMode); + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface ExecutableInsert extends TerminatingInsert, InsertWithCollection, InsertWithBulkMode {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java new file mode 100644 index 0000000000..47b7127deb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java @@ -0,0 +1,115 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collection; + +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.bulk.BulkWriteResult; + +/** + * Implementation of {@link ExecutableInsertOperation}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +class ExecutableInsertOperationSupport implements ExecutableInsertOperation { + + private final MongoTemplate template; + + ExecutableInsertOperationSupport(MongoTemplate template) { + this.template = template; + } + + @Override + public ExecutableInsert insert(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ExecutableInsertSupport<>(template, domainType, null, null); + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + static class ExecutableInsertSupport implements ExecutableInsert { + + private final MongoTemplate template; + private final Class domainType; + @Nullable private final String collection; + @Nullable private final BulkMode bulkMode; + + ExecutableInsertSupport(MongoTemplate template, Class domainType, String collection, BulkMode bulkMode) { + + this.template = template; + this.domainType = domainType; + this.collection = collection; + this.bulkMode = bulkMode; + } + + @Override + public T one(T object) { + + Assert.notNull(object, "Object must not be null"); + + return template.insert(object, getCollectionName()); + } + + @Override + public Collection all(Collection objects) { + + Assert.notNull(objects, "Objects must not be null"); + + return template.insert(objects, getCollectionName()); + } + + @Override + public BulkWriteResult bulk(Collection objects) { + + Assert.notNull(objects, "Objects must not be null"); + + return template.bulkOps(bulkMode != null ? bulkMode : BulkMode.ORDERED, domainType, getCollectionName()) + .insert(new ArrayList<>(objects)).execute(); + } + + @Override + public InsertWithBulkMode inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); + } + + @Override + public TerminatingBulkInsert withBulkMode(BulkMode bulkMode) { + + Assert.notNull(bulkMode, "BulkMode must not be null"); + + return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java new file mode 100644 index 0000000000..2d13ad3ea0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java @@ -0,0 +1,215 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.data.mongodb.core;
+
+import java.util.List;
+
+import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
+import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
+import org.springframework.data.mongodb.core.query.Query;
+
+/**
+ * {@link ExecutableMapReduceOperation} allows creation and execution of MongoDB mapReduce operations in a fluent API
+ * style. The starting {@literal domainType} is used for mapping an optional {@link Query} provided via {@code matching}
+ * into the MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping
+ * back the results from the {@link org.bson.Document}. However, it is possible to define a different
+ * {@literal returnType} via {@code as} for mapping the result.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
+ * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
+ * collection name for the execution.
+ *
+ * <pre class="code">
+ * mapReduce(Human.class)
+ *     .map("function() { emit(this.id, this.firstname) }")
+ *     .reduce("function(id, name) { return sum(id, name); }")
+ *     .inCollection("star-wars")
+ *     .as(Jedi.class)
+ *     .matching(query(where("lastname").is("skywalker")))
+ *     .all();
+ * </pre>
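+ *
+ * A hypothetical variant (illustrative only, not part of this change) passing additional
+ * {@link org.springframework.data.mongodb.core.mapreduce.MapReduceOptions}:
+ * <pre class="code">
+ * mapReduce(Human.class)
+ *     .map("function() { emit(this.id, this.firstname) }")
+ *     .reduce("function(id, name) { return sum(id, name); }")
+ *     .with(MapReduceOptions.options().outputTypeInline())
+ *     .all();
+ * </pre>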
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface ExecutableMapReduceOperation { + + /** + * Start creating a mapReduce operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ExecutableFind}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + MapReduceWithMapFunction mapReduce(Class domainType); + + /** + * Trigger mapReduce execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface TerminatingMapReduce { + + /** + * Get the mapReduce results. + * + * @return never {@literal null}. + */ + List all(); + } + + /** + * Provide the Javascript {@code function()} used to map matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithMapFunction { + + /** + * Set the Javascript map {@code function()}. + * + * @param mapFunction must not be {@literal null} nor empty. + * @return new instance of {@link MapReduceWithReduceFunction}. + * @throws IllegalArgumentException if {@literal mapFunction} is {@literal null} or empty. + */ + MapReduceWithReduceFunction map(String mapFunction); + + } + + /** + * Provide the Javascript {@code function()} used to reduce matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithReduceFunction { + + /** + * Set the Javascript map {@code function()}. + * + * @param reduceFunction must not be {@literal null} nor empty. + * @return new instance of {@link ExecutableMapReduce}. + * @throws IllegalArgumentException if {@literal reduceFunction} is {@literal null} or empty. + */ + ExecutableMapReduce reduce(String reduceFunction); + + } + + /** + * Collection override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithCollection extends MapReduceWithQuery { + + /** + * Explicitly set the name of the collection to perform the mapReduce operation on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link MapReduceWithProjection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + MapReduceWithProjection inCollection(String collection); + } + + /** + * Input document filter query (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithQuery extends TerminatingMapReduce { + + /** + * Set the filter query to be used. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingMapReduce matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingMapReduce matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithProjection extends MapReduceWithQuery { + + /** + * Define the target type fields should be mapped to.
+ * Skip this step if you are only interested in the original domain type anyway.
+ *
+ * @param resultType must not be {@literal null}.
+ * @param  result type.
+ * @return new instance of {@link TerminatingMapReduce}.
+ * @throws IllegalArgumentException if resultType is {@literal null}.
+ */
+ MapReduceWithQuery as(Class resultType);
+ }
+
+ /**
+ * Additional mapReduce options (Optional).
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ * @deprecated since 4.0 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
+ */
+ @Deprecated
+ interface MapReduceWithOptions {
+
+ /**
+ * Set additional options to apply to the mapReduce operation.
+ *
+ * @param options must not be {@literal null}.
+ * @return new instance of {@link ExecutableMapReduce}.
+ * @throws IllegalArgumentException if options is {@literal null}.
+ */
+ ExecutableMapReduce with(MapReduceOptions options);
+ }
+
+ /**
+ * {@link ExecutableMapReduce} provides methods for constructing mapReduce operations in a fluent way.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ interface ExecutableMapReduce extends MapReduceWithMapFunction, MapReduceWithReduceFunction,
+ MapReduceWithCollection, MapReduceWithProjection, MapReduceWithOptions {
+
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java
new file mode 100644
index 0000000000..9f78693540
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import java.util.List;
+
+import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
+import org.springframework.data.mongodb.core.query.Query;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Implementation of {@link ExecutableMapReduceOperation}.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperation {
+
+ private static final Query ALL_QUERY = new Query();
+
+ private final MongoTemplate template;
+
+ ExecutableMapReduceOperationSupport(MongoTemplate template) {
+
+ Assert.notNull(template, "Template must not be null");
+ this.template = template;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation#mapReduce(java.lang.Class)
+ */
+ @Override
+ public ExecutableMapReduceSupport mapReduce(Class domainType) {
+
+ Assert.notNull(domainType, "DomainType must not be null");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null);
+ }
+
+ /**
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ static class ExecutableMapReduceSupport
+ implements ExecutableMapReduce, MapReduceWithOptions, MapReduceWithCollection,
+ MapReduceWithProjection, MapReduceWithQuery, MapReduceWithReduceFunction, MapReduceWithMapFunction {
+
+ private final MongoTemplate template;
+ private final Class domainType;
+ private final Class returnType;
+ private final @Nullable String collection;
+ private final Query query;
+ private final @Nullable String mapFunction;
+ private final @Nullable String reduceFunction;
+ private final @Nullable MapReduceOptions options;
+
+ ExecutableMapReduceSupport(MongoTemplate template, Class domainType, Class returnType,
+ @Nullable String collection, Query query, @Nullable String mapFunction, @Nullable String reduceFunction,
+ @Nullable MapReduceOptions options) {
+
+ this.template = template;
+ this.domainType = domainType;
+ this.returnType = returnType;
+ this.collection = collection;
+ this.query = query;
+ this.mapFunction = mapFunction;
+ this.reduceFunction = reduceFunction;
+ this.options = options;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all()
+ */
+ @Override
+ public List all() {
+ return template.mapReduce(query, domainType, getCollectionName(), mapFunction, reduceFunction, options,
+ returnType);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithCollection#inCollection(java.lang.String)
+ */
+ @Override
+ public MapReduceWithProjection inCollection(String collection) {
+
+ Assert.hasText(collection, "Collection name must not be null nor empty");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
+ reduceFunction, options);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
+ */
+ @Override
+ public TerminatingMapReduce matching(Query query) {
+
+ Assert.notNull(query, "Query must not be null");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
+ reduceFunction, options);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithProjection#as(java.lang.Class)
+ */
+ @Override
+ public MapReduceWithQuery as(Class resultType) {
+
+ Assert.notNull(resultType, "ResultType must not be null");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction,
+ reduceFunction, options);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithOptions#with(org.springframework.data.mongodb.core.mapreduce.MapReduceOptions)
+ */
+ @Override
+ public ExecutableMapReduce with(MapReduceOptions options) {
+
+ Assert.notNull(options, "Options must not be null; please consider empty MapReduceOptions#options() instead");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
+ reduceFunction, options);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithMapFunction#map(java.lang.String)
+ */
+ @Override
+ public MapReduceWithReduceFunction map(String mapFunction) {
+
+ Assert.hasText(mapFunction, "MapFunction must not be null nor empty");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
+ reduceFunction, options);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithReduceFunction#reduce(java.lang.String)
+ */
+ @Override
+ public ExecutableMapReduce reduce(String reduceFunction) {
+
+ Assert.hasText(reduceFunction, "ReduceFunction must not be null nor empty");
+
+ return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
+ reduceFunction, options);
+ }
+
+ private String getCollectionName() {
+ return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
+ }
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java
new file mode 100644
index 0000000000..a10cd0317f
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import java.util.List;
+
+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
+import org.springframework.data.mongodb.core.query.Query;
+
+import com.mongodb.client.result.DeleteResult;
+
+/**
+ * {@link ExecutableRemoveOperation} allows creation and execution of MongoDB remove / findAndRemove operations in a
+ * fluent API style.
+ * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching} into the
+ * MongoDB specific representation. The collection to operate on is by default derived from the initial
+ * {@literal domainType} and can be defined there via {@link org.springframework.data.mongodb.core.mapping.Document}.
+ * Using {@code inCollection} allows overriding the collection name for the execution.
+ *
+ * <pre class="code">
+ * remove(Jedi.class)
+ *     .inCollection("star-wars")
+ *     .matching(query(where("firstname").is("luke")))
+ *     .all();
+ * </pre>
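+ *
+ * A hypothetical variant (illustrative only, not part of this change) that fetches and returns the removed documents
+ * instead of a {@link com.mongodb.client.result.DeleteResult}:
+ * <pre class="code">
+ * List&lt;Jedi&gt; removed = remove(Jedi.class)
+ *     .inCollection("star-wars")
+ *     .matching(query(where("firstname").is("luke")))
+ *     .findAndRemove();
+ * </pre>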
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public interface ExecutableRemoveOperation { + + /** + * Start creating a remove operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ExecutableRemove}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ExecutableRemove remove(Class domainType); + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingRemove { + + /** + * Remove all documents matching. + * + * @return the {@link DeleteResult}. Never {@literal null}. + */ + DeleteResult all(); + + /** + * Remove the first matching document. + * + * @return the {@link DeleteResult}. Never {@literal null}. + */ + DeleteResult one(); + + /** + * Remove and return all matching documents.
+ * NOTE: The entire list of documents will be fetched before sending the actual delete commands. + * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete + * operation. + * + * @return empty {@link List} if no match found. Never {@literal null}. + */ + List findAndRemove(); + } + + /** + * Collection override (optional). + * + * @param + * @author Christoph Strobl + * @since 2.0 + */ + interface RemoveWithCollection extends RemoveWithQuery { + + /** + * Explicitly set the name of the collection to perform the query on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link RemoveWithCollection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + RemoveWithQuery inCollection(String collection); + } + + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface RemoveWithQuery extends TerminatingRemove { + + /** + * Define the query filtering elements. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingRemove matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingRemove matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface ExecutableRemove extends RemoveWithCollection {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java new file mode 100644 index 0000000000..8e84aa7dd6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java @@ -0,0 +1,108 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.List; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.result.DeleteResult; + +/** + * Implementation of {@link ExecutableRemoveOperation}. 
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 2.0
+ */
+class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
+
+ private static final Query ALL_QUERY = new Query();
+
+ private final MongoTemplate template;
+
+ public ExecutableRemoveOperationSupport(MongoTemplate template) {
+ this.template = template;
+ }
+
+ @Override
+ public ExecutableRemove remove(Class domainType) {
+
+ Assert.notNull(domainType, "DomainType must not be null");
+
+ return new ExecutableRemoveSupport<>(template, domainType, ALL_QUERY, null);
+ }
+
+ /**
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+ static class ExecutableRemoveSupport implements ExecutableRemove, RemoveWithCollection {
+
+ private final MongoTemplate template;
+ private final Class domainType;
+ private final Query query;
+ @Nullable private final String collection;
+
+ public ExecutableRemoveSupport(MongoTemplate template, Class domainType, Query query, String collection) {
+ this.template = template;
+ this.domainType = domainType;
+ this.query = query;
+ this.collection = collection;
+ }
+
+ @Override
+ public RemoveWithQuery inCollection(String collection) {
+
+ Assert.hasText(collection, "Collection must not be null nor empty");
+
+ return new ExecutableRemoveSupport<>(template, domainType, query, collection);
+ }
+
+ @Override
+ public TerminatingRemove matching(Query query) {
+
+ Assert.notNull(query, "Query must not be null");
+
+ return new ExecutableRemoveSupport<>(template, domainType, query, collection);
+ }
+
+ @Override
+ public DeleteResult all() {
+ return template.doRemove(getCollectionName(), query, domainType, true);
+ }
+
+ @Override
+ public DeleteResult one() {
+ return template.doRemove(getCollectionName(), query, domainType, false);
+ }
+
+ @Override
+ public List findAndRemove() {
+
+ String collectionName = getCollectionName();
+
+ return template.doFindAndDelete(collectionName, query, domainType);
+ }
+
+ private String getCollectionName() {
+ return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
+ }
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java
new file mode 100644
index 0000000000..a5c63e9b67
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Optional; + +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; + +import com.mongodb.client.result.UpdateResult; + +/** + * {@link ExecutableUpdateOperation} allows creation and execution of MongoDB update / findAndModify / findAndReplace + * operations in a fluent API style.
+ * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching}, as well as
+ * the {@link Update} via {@code apply} into the MongoDB specific representations. The collection to operate on is by
+ * default derived from the initial {@literal domainType} and can be defined there via
+ * {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
+ * collection name for the execution.
+ *
+ * <pre class="code">
+ * update(Jedi.class)
+ *     .inCollection("star-wars")
+ *     .matching(query(where("firstname").is("luke")))
+ *     .apply(new Update().set("lastname", "skywalker"))
+ *     .upsert();
+ * </pre>
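+ *
+ * A hypothetical variant (illustrative only, not part of this change) returning the first matching document instead
+ * of an {@link com.mongodb.client.result.UpdateResult}, by default in its state before the update was applied:
+ * <pre class="code">
+ * Optional&lt;Jedi&gt; modified = update(Jedi.class)
+ *     .matching(query(where("firstname").is("luke")))
+ *     .apply(new Update().set("lastname", "skywalker"))
+ *     .findAndModify();
+ * </pre>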
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public interface ExecutableUpdateOperation { + + /** + * Start creating an update operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ExecutableUpdate}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ExecutableUpdate update(Class domainType); + + /** + * Trigger findAndModify execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ + interface TerminatingFindAndModify { + + /** + * Find, modify and return the first matching document. + * + * @return {@link Optional#empty()} if nothing found. + */ + default Optional findAndModify() { + return Optional.ofNullable(findAndModifyValue()); + } + + /** + * Find, modify and return the first matching document. + * + * @return {@literal null} if nothing found. + */ + @Nullable + T findAndModifyValue(); + } + + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + UpdateResult replaceFirst(); + } + + /** + * Trigger + * findOneAndReplace + * execution by calling one of the terminating methods. + * + * @author Mark Paluch + * @since 2.1 + */ + interface TerminatingFindAndReplace extends TerminatingReplace { + + /** + * Find, replace and return the first matching document. + * + * @return {@link Optional#empty()} if nothing found. + */ + default Optional findAndReplace() { + return Optional.ofNullable(findAndReplaceValue()); + } + + /** + * Find, replace and return the first matching document. + * + * @return {@literal null} if nothing found. + */ + @Nullable + T findAndReplaceValue(); + } + + /** + * Trigger update execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModifyWithOptions { + + /** + * Update all matching documents in the collection. + * + * @return never {@literal null}. + */ + UpdateResult all(); + + /** + * Update the first document in the collection. + * + * @return never {@literal null}. + */ + UpdateResult first(); + + /** + * Creates a new document if no documents match the filter query or updates the matching ones. + * + * @return never {@literal null}. + */ + UpdateResult upsert(); + } + + /** + * Declare the {@link Update} to apply. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface UpdateWithUpdate { + + /** + * Set the {@link UpdateDefinition} to be applied. + * + * @param update must not be {@literal null}. + * @return new instance of {@link TerminatingUpdate}. + * @throws IllegalArgumentException if update is {@literal null}. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + TerminatingUpdate apply(UpdateDefinition update); + + /** + * Specify {@code replacement} object. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + * @since 2.1 + */ + FindAndReplaceWithProjection replaceWith(T replacement); + } + + /** + * Explicitly define the name of the collection to perform operation in. 
+ * + * @author Christoph Strobl + * @since 2.0 + */ + interface UpdateWithCollection { + + /** + * Explicitly set the name of the collection to perform the query on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link UpdateWithCollection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + UpdateWithQuery inCollection(String collection); + } + + /** + * Define a filter query for the {@link Update}. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface UpdateWithQuery extends UpdateWithUpdate { + + /** + * Filter documents by given {@literal query}. + * + * @param query must not be {@literal null}. + * @return new instance of {@link UpdateWithQuery}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + UpdateWithUpdate matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link UpdateWithUpdate}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default UpdateWithUpdate matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Define {@link FindAndModifyOptions}. + * + * @author Christoph Strobl + * @since 2.0 + */ + interface FindAndModifyWithOptions { + + /** + * Explicitly define {@link FindAndModifyOptions} for the {@link Update}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndModifyWithOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingFindAndModify withOptions(FindAndModifyOptions options); + } + + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + + /** + * Define {@link FindAndReplaceOptions}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { + + /** + * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options); + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithProjection extends FindAndReplaceWithOptions { + + /** + * Define the target type fields should be mapped to.
+ * Skip this step if you are anyway only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link FindAndReplaceWithProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + FindAndReplaceWithOptions as(Class resultType); + + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + interface ExecutableUpdate extends UpdateWithCollection, UpdateWithQuery, UpdateWithUpdate {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java new file mode 100644 index 0000000000..593d863d39 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java @@ -0,0 +1,209 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.result.UpdateResult; + +/** + * Implementation of {@link ExecutableUpdateOperation}. 
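+ * <p>
+ * Typically obtained via {@code MongoTemplate} rather than created directly; an illustrative
+ * findAndReplace chain (assuming a {@code Jedi} type with a matching constructor) could read:
+ * <pre>
+ *     template.update(Jedi.class)
+ *         .matching(query(where("firstname").is("luke")))
+ *         .replaceWith(new Jedi("luke", "skywalker"))
+ *         .withOptions(FindAndReplaceOptions.options().upsert())
+ *         .findAndReplace();
+ * </pre>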
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation { + + private static final Query ALL_QUERY = new Query(); + + private final MongoTemplate template; + + ExecutableUpdateOperationSupport(MongoTemplate template) { + this.template = template; + } + + @Override + public ExecutableUpdate update(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); + } + + /** + * @author Christoph Strobl + * @since 2.0 + */ + static class ExecutableUpdateSupport + implements ExecutableUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate, + FindAndReplaceWithOptions, TerminatingFindAndReplace, FindAndReplaceWithProjection { + + private final MongoTemplate template; + private final Class domainType; + private final Query query; + @Nullable private final UpdateDefinition update; + @Nullable private final String collection; + @Nullable private final FindAndModifyOptions findAndModifyOptions; + @Nullable private final FindAndReplaceOptions findAndReplaceOptions; + @Nullable private final Object replacement; + private final Class targetType; + + ExecutableUpdateSupport(MongoTemplate template, Class domainType, Query query, UpdateDefinition update, + String collection, FindAndModifyOptions findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions, + Object replacement, Class targetType) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.update = update; + this.collection = collection; + this.findAndModifyOptions = findAndModifyOptions; + this.findAndReplaceOptions = findAndReplaceOptions; + this.replacement = replacement; + this.targetType = targetType; + } + + @Override + public TerminatingUpdate apply(UpdateDefinition update) { + + Assert.notNull(update, "Update must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public UpdateWithQuery inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { + + Assert.notNull(options, "Options must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection replaceWith(T replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { + + Assert.notNull(options, "Options must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + options, replacement, targetType); + } + + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if 
(options.isUpsert()) { + target.upsert(); + } + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType); + } + + @Override + public UpdateWithUpdate matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithOptions as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, resultType); + } + + @Override + public UpdateResult all() { + return doUpdate(true, false); + } + + @Override + public UpdateResult first() { + return doUpdate(false, false); + } + + @Override + public UpdateResult upsert() { + return doUpdate(true, true); + } + + @Override + public @Nullable T findAndModifyValue() { + + return template.findAndModify(query, update, + findAndModifyOptions != null ? findAndModifyOptions : new FindAndModifyOptions(), targetType, + getCollectionName()); + } + + @Override + public @Nullable T findAndReplaceValue() { + + return (T) template.findAndReplace(query, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.empty(), domainType, + getCollectionName(), targetType); + } + + @Override + public UpdateResult replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + private UpdateResult doUpdate(boolean multi, boolean upsert) { + return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java index c5e88c7fd9..51a2c5b86a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,90 @@ */ package org.springframework.data.mongodb.core; +import java.util.Optional; + +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; + +/** + * @author Mark Pollak + * @author Oliver Gierke + * @author Christoph Strobl + */ public class FindAndModifyOptions { - boolean returnNew; + private boolean returnNew; + private boolean upsert; + private boolean remove; + + private @Nullable Collation collation; + + private static final FindAndModifyOptions NONE = new FindAndModifyOptions() { + + private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed; Please use FindAndModifyOptions.options() instead"; + + @Override + public FindAndModifyOptions returnNew(boolean returnNew) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions upsert(boolean upsert) { + throw new UnsupportedOperationException(ERROR_MSG); + } - boolean upsert; + @Override + public FindAndModifyOptions remove(boolean remove) { + throw new UnsupportedOperationException(ERROR_MSG); + } - boolean remove; + @Override + public FindAndModifyOptions collation(@Nullable Collation collation) { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; /** * Static factory method to create a FindAndModifyOptions instance - * - * @return a new instance + * + * @return new instance of {@link FindAndModifyOptions}. */ public static FindAndModifyOptions options() { return new FindAndModifyOptions(); } + /** + * Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance. + * + * @return unmodifiable {@link FindAndModifyOptions} instance. + * @since 2.2 + */ + public static FindAndModifyOptions none() { + return NONE; + } + + /** + * Create new {@link FindAndModifyOptions} based on option of given {@literal source}. + * + * @param source can be {@literal null}. + * @return new instance of {@link FindAndModifyOptions}. + * @since 2.0 + */ + public static FindAndModifyOptions of(@Nullable FindAndModifyOptions source) { + + FindAndModifyOptions options = new FindAndModifyOptions(); + if (source == null) { + return options; + } + + options.returnNew = source.returnNew; + options.upsert = source.upsert; + options.remove = source.remove; + options.collation = source.collation; + + return options; + } + public FindAndModifyOptions returnNew(boolean returnNew) { this.returnNew = returnNew; return this; @@ -47,6 +114,19 @@ public FindAndModifyOptions remove(boolean remove) { return this; } + /** + * Define the {@link Collation} specifying language-specific rules for string comparison. + * + * @param collation can be {@literal null}. + * @return this. + * @since 2.0 + */ + public FindAndModifyOptions collation(@Nullable Collation collation) { + + this.collation = collation; + return this; + } + public boolean isReturnNew() { return returnNew; } @@ -59,4 +139,14 @@ public boolean isRemove() { return remove; } + /** + * Get the {@link Collation} specifying language-specific rules for string comparison. + * + * @return never {@literal null}. 
+ * @since 2.0 + */ + public Optional getCollation() { + return Optional.ofNullable(collation); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java new file mode 100644 index 0000000000..266a0742c2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java @@ -0,0 +1,124 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Options for + * findOneAndReplace. + *
+ * Defaults to
+ * <dl>
+ * <dt>returnNew</dt>
+ * <dd>false</dd>
+ * <dt>upsert</dt>
+ * <dd>false</dd>
+ * </dl>
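+ * <p>
+ * For illustration, enabling both flags could look like:
+ * <pre>
+ *     FindAndReplaceOptions.options().upsert().returnNew();
+ * </pre>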
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ +public class FindAndReplaceOptions extends ReplaceOptions { + + private boolean returnNew; + + private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() { + + private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed; Please use FindAndReplaceOptions.options() instead"; + + @Override + public FindAndReplaceOptions returnNew() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndReplaceOptions upsert() { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + + /** + * Static factory method to create a {@link FindAndReplaceOptions} instance. + *
+ * <dl>
+ * <dt>returnNew</dt>
+ * <dd>false</dd>
+ * <dt>upsert</dt>
+ * <dd>false</dd>
+ * </dl>
+ * + * @return new instance of {@link FindAndReplaceOptions}. + */ + public static FindAndReplaceOptions options() { + return new FindAndReplaceOptions(); + } + + /** + * Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance. + * + * @return unmodifiable {@link FindAndReplaceOptions} instance. + * @since 2.2 + */ + public static FindAndReplaceOptions none() { + return NONE; + } + + /** + * Static factory method to create a {@link FindAndReplaceOptions} instance with + *
+ * <dl>
+ * <dt>returnNew</dt>
+ * <dd>false</dd>
+ * <dt>upsert</dt>
+ * <dd>false</dd>
+ * </dl>
+ * + * @return new instance of {@link FindAndReplaceOptions}. + */ + public static FindAndReplaceOptions empty() { + return new FindAndReplaceOptions(); + } + + /** + * Return the replacement document. + * + * @return this. + */ + public FindAndReplaceOptions returnNew() { + + this.returnNew = true; + return this; + } + + /** + * Insert a new document if not exists. + * + * @return this. + */ + public FindAndReplaceOptions upsert() { + + super.upsert(); + return this; + } + + /** + * Get the bit indicating to return the replacement document. + * + * @return {@literal true} if set. + */ + public boolean isReturnNew() { + return returnNew; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java new file mode 100644 index 0000000000..625a85950e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java @@ -0,0 +1,83 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.function.Function; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ReadPreference; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Simple callback interface to allow customization of a {@link FindPublisher}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Konstantin Volivach + */ +public interface FindPublisherPreparer extends ReadPreferenceAware { + + /** + * Default {@link FindPublisherPreparer} just passing on the given {@link FindPublisher}. + * + * @since 2.2 + */ + FindPublisherPreparer NO_OP_PREPARER = (findPublisher -> findPublisher); + + /** + * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. + * + * @param findPublisher must not be {@literal null}. + */ + FindPublisher prepare(FindPublisher findPublisher); + + /** + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a + * {@link FindPublisher} via the given {@link Function find} function. + * + * @param collection must not be {@literal null}. + * @param find must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. 
+ * @since 2.2 + */ + default FindPublisher initiateFind(MongoCollection collection, + Function, FindPublisher> find) { + + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); + + if (hasReadPreference()) { + collection = collection.withReadPreference(getReadPreference()); + } + + return prepare(find.apply(collection)); + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none defined. + * @since 2.2 + */ + @Override + @Nullable + default ReadPreference getReadPreference() { + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java new file mode 100644 index 0000000000..906afddd4a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java @@ -0,0 +1,25 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Stripped down interface providing access to a fluent API that specifies a basic set of MongoDB operations. + * + * @author Christoph Strobl + * @since 2.0 + */ +public interface FluentMongoOperations extends ExecutableFindOperation, ExecutableInsertOperation, + ExecutableUpdateOperation, ExecutableRemoveOperation, ExecutableAggregationOperation, ExecutableMapReduceOperation {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java index 93cd6db533..654e7d4330 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java @@ -1,11 +1,11 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,54 +15,54 @@ */ package org.springframework.data.mongodb.core; +import org.bson.Document; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Value object to mitigate different representations of geo command execution results in MongoDB. 
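+ * <p>
+ * Illustrative use: {@code GeoCommandStatistics.from(commandResult).getAverageDistance()} falls back to
+ * {@link Double#NaN} when the command result does not report an average distance.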
- * + * * @author Oliver Gierke + * @author Christoph Strobl * @soundtrack Fruitcake - Jeff Coffin (The Inside of the Outside) + * @since 1.9 */ class GeoCommandStatistics { - private static final GeoCommandStatistics NONE = new GeoCommandStatistics(new BasicDBObject()); + private static final GeoCommandStatistics NONE = new GeoCommandStatistics(new Document()); - private final DBObject source; + private final Document source; /** * Creates a new {@link GeoCommandStatistics} instance with the given source document. - * + * * @param source must not be {@literal null}. */ - private GeoCommandStatistics(DBObject source) { + private GeoCommandStatistics(Document source) { - Assert.notNull(source, "Source document must not be null!"); + Assert.notNull(source, "Source document must not be null"); this.source = source; } /** * Creates a new {@link GeoCommandStatistics} from the given command result extracting the statistics. - * + * * @param commandResult must not be {@literal null}. - * @return + * @return never {@literal null}. */ - public static GeoCommandStatistics from(DBObject commandResult) { + public static GeoCommandStatistics from(Document commandResult) { - Assert.notNull(commandResult, "Command result must not be null!"); + Assert.notNull(commandResult, "Command result must not be null"); Object stats = commandResult.get("stats"); - return stats == null ? NONE : new GeoCommandStatistics((DBObject) stats); + return stats == null ? NONE : new GeoCommandStatistics((Document) stats); } /** * Returns the average distance reported by the command result. Mitigating a removal of the field in case the command * didn't return any result introduced in MongoDB 3.2 RC1. - * - * @return - * @see https://jira.mongodb.org/browse/SERVER-21024 + * + * @return never {@literal null}, uses {@link Double#NaN} if {@literal avgDistance} does not exist. + * @see MongoDB Jira SERVER-21024 */ public double getAverageDistance() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java new file mode 100644 index 0000000000..57abe9a529 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java @@ -0,0 +1,129 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.function.Function; + +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Function object to apply a query hint. Can be an index name or a BSON document. 
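+ * <p>
+ * For illustration (index name and key document are hypothetical):
+ * <pre>
+ *     HintFunction.from("lastname_idx");                       // index name hint
+ *     HintFunction.from(new org.bson.Document("lastname", 1)); // document hint
+ * </pre>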
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class HintFunction { + + private static final HintFunction EMPTY = new HintFunction(null); + + private final @Nullable Object hint; + + private HintFunction(@Nullable Object hint) { + this.hint = hint; + } + + /** + * Return an empty hint function. + * + * @return + */ + static HintFunction empty() { + return EMPTY; + } + + /** + * Create a {@link HintFunction} from a {@link Bson document} or {@link String index name}. + * + * @param hint + * @return + */ + static HintFunction from(@Nullable Object hint) { + return new HintFunction(hint); + } + + /** + * Return whether a hint is present. + * + * @return + */ + public boolean isPresent() { + return (hint instanceof String hintString && StringUtils.hasText(hintString)) || hint instanceof Bson; + } + + /** + * If a hint is not present, returns {@code true}, otherwise {@code false}. + * + * @return {@code true} if a hint is not present, otherwise {@code false}. + */ + public boolean isEmpty() { + return !isPresent(); + } + + /** + * Apply the hint to consumers depending on the hint format if {@link #isPresent() present}. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @param + */ + public void ifPresent(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + return; + } + apply(registryProvider, stringConsumer, bsonConsumer); + } + + /** + * Apply the hint to consumers depending on the hint format. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @return + * @param + */ + public R apply(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + throw new IllegalStateException("No hint present"); + } + + if (hint instanceof Bson bson) { + return bsonConsumer.apply(bson); + } + + if (hint instanceof String hintString) { + + if (BsonUtils.isJsonDocument(hintString)) { + return bsonConsumer.apply(BsonUtils.parse(hintString, registryProvider)); + } + return stringConsumer.apply(hintString); + } + + throw new IllegalStateException( + "Unable to read hint of type %s".formatted(hint != null ? hint.getClass() : "null")); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java new file mode 100644 index 0000000000..f5856100d0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -0,0 +1,146 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.concurrent.TimeUnit; + +import org.bson.Document; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.Collation; +import com.mongodb.client.model.IndexOptions; + +/** + * {@link Converter Converters} for index-related MongoDB documents/types. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +abstract class IndexConverters { + + private static final Converter DEFINITION_TO_MONGO_INDEX_OPTIONS; + private static final Converter DOCUMENT_INDEX_INFO; + + static { + + DEFINITION_TO_MONGO_INDEX_OPTIONS = getIndexDefinitionIndexOptionsConverter(); + DOCUMENT_INDEX_INFO = getDocumentIndexInfoConverter(); + } + + private IndexConverters() { + + } + + static Converter indexDefinitionToIndexOptionsConverter() { + return DEFINITION_TO_MONGO_INDEX_OPTIONS; + } + + static Converter documentToIndexInfoConverter() { + return DOCUMENT_INDEX_INFO; + } + + private static Converter getIndexDefinitionIndexOptionsConverter() { + + return indexDefinition -> { + + Document indexOptions = indexDefinition.getIndexOptions(); + IndexOptions ops = new IndexOptions(); + + if (indexOptions.containsKey("name")) { + ops = ops.name(indexOptions.get("name").toString()); + } + if (indexOptions.containsKey("unique")) { + ops = ops.unique((Boolean) indexOptions.get("unique")); + } + if (indexOptions.containsKey("sparse")) { + ops = ops.sparse((Boolean) indexOptions.get("sparse")); + } + if (indexOptions.containsKey("background")) { + ops = ops.background((Boolean) indexOptions.get("background")); + } + if (indexOptions.containsKey("expireAfterSeconds")) { + ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS); + } + if (indexOptions.containsKey("min")) { + ops = ops.min(((Number) indexOptions.get("min")).doubleValue()); + } + if (indexOptions.containsKey("max")) { + ops = ops.max(((Number) indexOptions.get("max")).doubleValue()); + } + if (indexOptions.containsKey("bits")) { + ops = ops.bits((Integer) indexOptions.get("bits")); + } + if (indexOptions.containsKey("bucketSize")) { + MongoCompatibilityAdapter.indexOptionsAdapter(ops).setBucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); + } + if (indexOptions.containsKey("default_language")) { + ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); + } + if (indexOptions.containsKey("language_override")) { + ops = ops.languageOverride(indexOptions.get("language_override").toString()); + } + if (indexOptions.containsKey("weights")) { + ops = ops.weights((org.bson.Document) indexOptions.get("weights")); + } + + for (String key : indexOptions.keySet()) { + if (ObjectUtils.nullSafeEquals("2dsphere", indexOptions.get(key))) { + ops = ops.sphereVersion(2); + } + } + + if (indexOptions.containsKey("partialFilterExpression")) { + ops = ops.partialFilterExpression((org.bson.Document) indexOptions.get("partialFilterExpression")); + } + + if (indexOptions.containsKey("collation")) { + ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); + } + + if (indexOptions.containsKey("wildcardProjection")) { + 
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); + } + + if (indexOptions.containsKey("hidden")) { + ops = ops.hidden((Boolean) indexOptions.get("hidden")); + } + + return ops; + }; + } + + @Nullable + public static Collation fromDocument(@Nullable Document source) { + + if (source == null) { + return null; + } + + return org.springframework.data.mongodb.core.query.Collation.from(source).toMongoCollation(); + } + + private static Converter getDocumentIndexInfoConverter() { + return IndexInfo::indexInfoOf; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java new file mode 100644 index 0000000000..da4766343a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java @@ -0,0 +1,156 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Collection; +import java.util.List; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.util.StreamUtils; + +/** + * A MongoDB document in its mapped state. I.e. after a source document has been mapped using mapping information of the + * entity the source document was supposed to represent. 
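+ * <p>
+ * An illustrative sketch (values are hypothetical):
+ * <pre>
+ *     MappedDocument mapped = MappedDocument.of(new Document("_id", 42).append("lastname", "skywalker"));
+ *     mapped.hasId();           // true
+ *     mapped.getIdFilter();     // {"_id": 42}
+ *     mapped.updateWithoutId(); // update definition omitting the _id key
+ * </pre>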
+ * + * @author Oliver Gierke + * @since 2.1 + */ +public class MappedDocument { + + private static final String ID_FIELD = FieldName.ID.name(); + private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1); + + private final Document document; + + private MappedDocument(Document document) { + this.document = document; + } + + public static MappedDocument of(Document document) { + return new MappedDocument(document); + } + + public static Document getIdOnlyProjection() { + return ID_ONLY_PROJECTION; + } + + public static Document getIdIn(Collection ids) { + return new Document(ID_FIELD, new Document("$in", ids)); + } + + public static List toIds(Collection documents) { + + return documents.stream()// + .map(it -> it.get(ID_FIELD))// + .collect(StreamUtils.toUnmodifiableList()); + } + + public boolean hasId() { + return document.containsKey(ID_FIELD); + } + + public boolean hasNonNullId() { + return hasId() && document.get(ID_FIELD) != null; + } + + public Object getId() { + return document.get(ID_FIELD); + } + + public T getId(Class type) { + return document.get(ID_FIELD, type); + } + + public boolean isIdPresent(Class type) { + return type.isInstance(getId()); + } + + public Bson getIdFilter() { + return new Document(ID_FIELD, document.get(ID_FIELD)); + } + + public Object get(String key) { + return document.get(key); + } + + public UpdateDefinition updateWithoutId() { + return new MappedUpdate(Update.fromDocument(document, ID_FIELD)); + } + + public Document getDocument() { + return this.document; + } + + /** + * Updates the documents {@link #ID_FIELD}. + * + * @param value the {@literal _id} value to set. + * @since 3.4.3 + */ + public void updateId(Object value) { + document.put(ID_FIELD, value); + } + + /** + * An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been + * mapped to the specific domain type. + * + * @author Christoph Strobl + * @since 2.2 + */ + static class MappedUpdate implements UpdateDefinition { + + private final Update delegate; + + MappedUpdate(Update delegate) { + this.delegate = delegate; + } + + @Override + public Document getUpdateObject() { + return delegate.getUpdateObject(); + } + + @Override + public boolean modifies(String key) { + return delegate.modifies(key); + } + + @Override + public void inc(String version) { + delegate.inc(version); + } + + @Override + public Boolean isIsolated() { + return delegate.isIsolated(); + } + + @Override + public List getArrayFilters() { + return delegate.getArrayFilters(); + } + + @Override + public boolean hasArrayFilters() { + return delegate.hasArrayFilters(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java new file mode 100644 index 0000000000..bc26dfb68c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -0,0 +1,452 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.bson.Document; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import org.springframework.data.util.TypeInformation; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain + * domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names} + * and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { + + private final MongoConverter converter; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final Predicate filter; + private final LinkedMultiValueMap> mergeProperties; + + /** + * Create a new instance of {@link MappingMongoJsonSchemaCreator}. + * + * @param converter must not be {@literal null}. 
+ */ + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter) { + + this(converter, (MappingContext, MongoPersistentProperty>) converter.getMappingContext(), + (property) -> true, new LinkedMultiValueMap<>()); + } + + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter, + MappingContext, MongoPersistentProperty> mappingContext, + Predicate filter, LinkedMultiValueMap> mergeProperties) { + + Assert.notNull(converter, "Converter must not be null"); + this.converter = converter; + this.mappingContext = mappingContext; + this.filter = filter; + this.mergeProperties = mergeProperties; + } + + @Override + public MongoJsonSchemaCreator filter(Predicate filter) { + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, mergeProperties); + } + + @Override + public PropertySpecifier property(String path) { + return types -> withTypesFor(path, types); + } + + /** + * Specify additional types to be considered when rendering the schema for the given path. + * + * @param path path the path using {@literal dot '.'} notation. + * @param types must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.4 + */ + public MongoJsonSchemaCreator withTypesFor(String path, Class... types) { + + LinkedMultiValueMap> clone = mergeProperties.clone(); + for (Class type : types) { + clone.add(path, type); + } + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, clone); + } + + @Override + public MongoJsonSchema createSchemaFor(Class type) { + + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); + MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); + + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + schemaBuilder.encryptionMetadata(getEncryptionMetadata(entity, encrypted)); + } + + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); + schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); + + return schemaBuilder.build(); + } + + private static Document getEncryptionMetadata(MongoPersistentEntity entity, Encrypted encrypted) { + + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); + } + + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } + + return encryptionMetadata; + } + + private List computePropertiesForEntity(List path, + MongoPersistentEntity entity) { + + List schemaProperties = new ArrayList<>(); + + for (MongoPersistentProperty nested : entity) { + + List currentPath = new ArrayList<>(path); + + String stringPath = currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")); + stringPath = StringUtils.hasText(stringPath) ? (stringPath + "." 
+ nested.getName()) : nested.getName(); + if (!filter.test(new PropertyContext(stringPath, nested))) { + if (!mergeProperties.containsKey(stringPath)) { + continue; + } + } + + if (path.contains(nested)) { // cycle guard + schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), + Object.class, false)); + break; + } + + currentPath.add(nested); + schemaProperties.add(computeSchemaForProperty(currentPath)); + } + + return schemaProperties; + } + + private JsonSchemaProperty computeSchemaForProperty(List path) { + + String stringPath = path.stream().map(MongoPersistentProperty::getName).collect(Collectors.joining(".")); + MongoPersistentProperty property = CollectionUtils.lastElement(path); + + boolean required = isRequiredProperty(property); + Class rawTargetType = computeTargetType(property); // target type before conversion + Class targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type + + if ((rawTargetType.isPrimitive() || ClassUtils.isPrimitiveArray(rawTargetType)) && targetType == Object.class + || ClassUtils.isAssignable(targetType, rawTargetType)) { + targetType = rawTargetType; + } + + if (!isCollection(property) && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) { + if (property.isEntity() || mergeProperties.containsKey(stringPath)) { + List targetProperties = new ArrayList<>(); + + if (property.isEntity()) { + targetProperties.add(createObjectSchemaPropertyForEntity(path, property, required)); + } + if (mergeProperties.containsKey(stringPath)) { + for (Class theType : mergeProperties.get(stringPath)) { + + ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(theType)); + + targetProperties.add(createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required)); + } + } + JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? 
targetProperties.iterator().next() + : JsonSchemaProperty.merged(targetProperties); + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + } + + String fieldName = computePropertyFieldName(property); + + JsonSchemaProperty schemaProperty; + if (isCollection(property)) { + schemaProperty = createArraySchemaProperty(fieldName, property, required); + } else if (property.isMap()) { + schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required); + } else if (ClassUtils.isAssignable(Enum.class, targetType)) { + schemaProperty = createEnumSchemaProperty(fieldName, targetType, required); + } else { + schemaProperty = createSchemaProperty(fieldName, targetType, required); + } + + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + + private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property, + boolean required) { + + ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName); + + if (isSpecificType(property)) { + schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty); + } + + return createPotentiallyRequiredSchemaProperty(schemaProperty, required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property, + ArrayJsonSchemaProperty schemaProperty) { + + MongoPersistentEntity persistentEntity = mappingContext + .getPersistentEntity(property.getTypeInformation().getRequiredComponentType()); + + if (persistentEntity != null) { + + List nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity); + + if (nestedProperties.isEmpty()) { + return schemaProperty; + } + + return schemaProperty + .items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0]))); + } + + if (ClassUtils.isAssignable(Enum.class, property.getActualType())) { + + List possibleValues = getPossibleEnumValues((Class) property.getActualType()); + + return schemaProperty + .items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues)); + } + + return schemaProperty.items(JsonSchemaObject.of(property.getActualType())); + } + + private boolean isSpecificType(MongoPersistentProperty property) { + return !TypeInformation.OBJECT.equals(property.getTypeInformation().getActualType()); + } + + private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property, + JsonSchemaProperty schemaProperty) { + + Encrypted encrypted = property.findAnnotation(Encrypted.class); + if (encrypted == null) { + return schemaProperty; + } + + EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty); + if (StringUtils.hasText(encrypted.algorithm())) { + enc = enc.algorithm(encrypted.algorithm()); + } + if (!ObjectUtils.isEmpty(encrypted.keyId())) { + enc = enc.keys(property.getEncryptionKeyIds()); + } + + Queryable queryable = property.findAnnotation(Queryable.class); + if (queryable == null || !StringUtils.hasText(queryable.queryType())) { + return enc; + } + + QueryCharacteristic characteristic = new QueryCharacteristic() { + + @Override + public String queryType() { + return queryable.queryType(); + } + + @Override + public Document toDocument() { + + Document options = QueryCharacteristic.super.toDocument(); + + if (queryable.contentionFactor() >= 0) { + options.put("contention", queryable.contentionFactor()); + } + + if (StringUtils.hasText(queryable.queryAttributes())) 
{ + options.putAll(Document.parse(queryable.queryAttributes())); + } + + return options; + } + }; + return new QueryableJsonSchemaProperty(enc, QueryCharacteristics.of(characteristic)); + } + + private JsonSchemaProperty createObjectSchemaPropertyForEntity(List path, + MongoPersistentProperty property, boolean required) { + + ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(property)); + + return createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class targetType, boolean required) { + + List possibleValues = getPossibleEnumValues((Class) targetType); + + targetType = computeTargetType(targetType, possibleValues); + return createSchemaProperty(fieldName, targetType, required, possibleValues); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) { + return createSchemaProperty(fieldName, type, required, Collections.emptyList()); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required, + Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues); + + return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required); + } + + private TypedJsonSchemaObject createSchemaObject(Object type, Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = type instanceof Type typeObject ? JsonSchemaObject.of(typeObject) + : JsonSchemaObject.of(Class.class.cast(type)); + + if (!CollectionUtils.isEmpty(possibleValues)) { + schemaObject = schemaObject.possibleValues(possibleValues); + } + return schemaObject; + } + + private String computePropertyFieldName(PersistentProperty property) { + + return property instanceof MongoPersistentProperty mongoPersistentProperty ? mongoPersistentProperty.getFieldName() + : property.getName(); + } + + private boolean isRequiredProperty(PersistentProperty property) { + return property.getType().isPrimitive(); + } + + private Class computeTargetType(PersistentProperty property) { + + if (!(property instanceof MongoPersistentProperty mongoProperty)) { + return property.getType(); + } + + if (!property.getOwner().isIdProperty(property)) { + return mongoProperty.getFieldType(); + } + + if (mongoProperty.hasExplicitWriteTarget()) { + return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass(); + } + + return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType(); + } + + private static Class computeTargetType(Class fallback, List possibleValues) { + return possibleValues.isEmpty() ? 
fallback : possibleValues.iterator().next().getClass(); + } + + private > List getPossibleEnumValues(Class targetType) { + + EnumSet enumSet = EnumSet.allOf(targetType); + List possibleValues = new ArrayList<>(enumSet.size()); + + for (Object enumValue : enumSet) { + possibleValues.add(converter.convertToMongoType(enumValue)); + } + + return possibleValues; + } + + private static boolean isCollection(MongoPersistentProperty property) { + return property.isCollectionLike() && !property.getType().equals(byte[].class); + } + + static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) { + return required ? JsonSchemaProperty.required(property) : property; + } + + class PropertyContext implements JsonSchemaPropertyContext { + + private final String path; + private final MongoPersistentProperty property; + + public PropertyContext(String path, MongoPersistentProperty property) { + this.path = path; + this.property = property; + } + + @Override + public String getPath() { + return path; + } + + @Override + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + public MongoPersistentEntity resolveEntity(MongoPersistentProperty property) { + return (MongoPersistentEntity) mappingContext.getPersistentEntity(property); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java index d081792215..fdfeaa81ad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2012 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,10 @@ */ package org.springframework.data.mongodb.core; +import org.bson.Document; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.DBObject; import com.mongodb.WriteConcern; /** @@ -28,33 +29,36 @@ *
 * <li>REMOVE has null document</li>
 * <li>INSERT_LIST has null entityType, document, and query</li>
 * </ul>
- * + * * @author Mark Pollack * @author Oliver Gierke + * @author Christoph Strobl */ public class MongoAction { private final String collectionName; - private final WriteConcern defaultWriteConcern; - private final Class entityType; private final MongoActionOperation mongoActionOperation; - private final DBObject query; - private final DBObject document; + + private final @Nullable WriteConcern defaultWriteConcern; + private final @Nullable Class entityType; + private final @Nullable Document query; + private final @Nullable Document document; /** * Create an instance of a {@link MongoAction}. - * - * @param defaultWriteConcern the default write concern. - * @param mongoActionOperation action being taken against the collection + * + * @param defaultWriteConcern the default write concern. Can be {@literal null}. + * @param mongoActionOperation action being taken against the collection. Must not be {@literal null}. * @param collectionName the collection name, must not be {@literal null} or empty. - * @param entityType the POJO that is being operated against - * @param document the converted DBObject from the POJO or Spring Update object - * @param query the converted DBObject from the Spring Query object + * @param entityType the POJO that is being operated against. Can be {@literal null}. + * @param document the converted Document from the POJO or Spring Update object. Can be {@literal null}. + * @param query the converted Document from the Spring Query object. Can be {@literal null}. */ - public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation, - String collectionName, Class entityType, DBObject document, DBObject query) { + public MongoAction(@Nullable WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation, + String collectionName, @Nullable Class entityType, @Nullable Document document, @Nullable Document query) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null"); this.defaultWriteConcern = defaultWriteConcern; this.mongoActionOperation = mongoActionOperation; @@ -68,31 +72,28 @@ public String getCollectionName() { return collectionName; } + @Nullable public WriteConcern getDefaultWriteConcern() { return defaultWriteConcern; } - /** - * @deprecated use {@link #getEntityType()} instead. - */ - @Deprecated - public Class getEntityClass() { - return entityType; - } - + @Nullable public Class getEntityType() { return entityType; } + @Nullable public MongoActionOperation getMongoActionOperation() { return mongoActionOperation; } - public DBObject getQuery() { + @Nullable + public Document getQuery() { return query; } - public DBObject getDocument() { + @Nullable + public Document getDocument() { return document; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java index 2fa6983991..509d10887b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors.
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,12 +18,13 @@ /** * Enumeration for operations on a collection. Used with {@link MongoAction} to help determine the WriteConcern to use * for a given mutating operation - * + * * @author Mark Pollack * @author Oliver Gierke + * @author Christoph Strobl * @see MongoAction */ public enum MongoActionOperation { - REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK; + REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK, REPLACE } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java index 804ad5e593..5fcc6c9599 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java @@ -1,99 +1,75 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import org.springframework.data.authentication.UserCredentials; -import org.springframework.jmx.export.annotation.ManagedOperation; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.util.Assert; - -import com.mongodb.DB; -import com.mongodb.Mongo; - -/** - * Mongo server administration exposed via JMX annotations - * - * @author Mark Pollack +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
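The MongoAction/MongoActionOperation pair above exists so that a WriteConcernResolver can pick a WriteConcern per mutating operation. A minimal sketch, assuming the org.springframework.data.mongodb.core.WriteConcernResolver callback (its `WriteConcern resolve(MongoAction action)` contract is not part of this diff) and a purely illustrative policy:

    import com.mongodb.WriteConcern;

    import org.springframework.data.mongodb.core.MongoAction;
    import org.springframework.data.mongodb.core.MongoActionOperation;
    import org.springframework.data.mongodb.core.WriteConcernResolver;

    // Illustrative policy: acknowledge destructive operations by a replica set majority,
    // otherwise fall back to the (possibly null) default carried by the MongoAction.
    class DestructiveOpsWriteConcernResolver implements WriteConcernResolver {

        @Override
        public WriteConcern resolve(MongoAction action) {

            MongoActionOperation operation = action.getMongoActionOperation();
            if (operation == MongoActionOperation.REMOVE || operation == MongoActionOperation.REPLACE) {
                return WriteConcern.MAJORITY; // REPLACE is the constant newly added above
            }

            return action.getDefaultWriteConcern();
        }
    }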
+ */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedOperation; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.util.Assert; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; + +/** + * Mongo server administration exposed via JMX annotations + * + * @author Mark Pollack * @author Thomas Darimont - */ -@ManagedResource(description = "Mongo Admin Operations") -public class MongoAdmin implements MongoAdminOperations { - - private final Mongo mongo; - private String username; - private String password; - private String authenticationDatabaseName; - - public MongoAdmin(Mongo mongo) { - Assert.notNull(mongo); - this.mongo = mongo; - } - - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#dropDatabase(java.lang.String) - */ - @ManagedOperation - public void dropDatabase(String databaseName) { - getDB(databaseName).dropDatabase(); - } - - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#createDatabase(java.lang.String) - */ - @ManagedOperation - public void createDatabase(String databaseName) { - getDB(databaseName); - } - - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#getDatabaseStats(java.lang.String) - */ - @ManagedOperation - public String getDatabaseStats(String databaseName) { - return getDB(databaseName).getStats().toString(); - } - - /** - * Sets the username to use to connect to the Mongo database - * - * @param username The username to use - */ - public void setUsername(String username) { - this.username = username; - } - - /** - * Sets the password to use to authenticate with the Mongo database. - * - * @param password The password to use - */ - public void setPassword(String password) { - this.password = password; - } - + * @author Mark Paluch + * @author Christoph Strobl + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Mongo Admin Operations") +public class MongoAdmin implements MongoAdminOperations { + + private final MongoClient mongoClient; + /** - * Sets the authenticationDatabaseName to use to authenticate with the Mongo database. - * - * @param authenticationDatabaseName The authenticationDatabaseName to use. + * @param client the underlying {@link com.mongodb.client.MongoClient} used for data access. 
+ * @since 2.2 */ - public void setAuthenticationDatabaseName(String authenticationDatabaseName) { - this.authenticationDatabaseName = authenticationDatabaseName; + public MongoAdmin(MongoClient client) { + + Assert.notNull(client, "Client must not be null"); + this.mongoClient = client; + } + + @ManagedOperation + public void dropDatabase(String databaseName) { + getDB(databaseName).drop(); } - DB getDB(String databaseName) { - return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password), authenticationDatabaseName); - } -} + @ManagedOperation + public void createDatabase(String databaseName) { + getDB(databaseName); + } + + @ManagedOperation + public String getDatabaseStats(String databaseName) { + return getDB(databaseName).runCommand(new Document("dbStats", 1).append("scale", 1024)).toJson(); + } + + @ManagedOperation + public String getServerStatus() { + return getDB("admin").runCommand(new Document("serverStatus", 1).append("rangeDeleter", 1).append("repl", 1)) + .toJson(); + } + + MongoDatabase getDB(String databaseName) { + return mongoClient.getDatabase(databaseName); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java index 293ef14c59..ec03302f7e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java @@ -1,34 +1,35 @@ -/* - * Copyright 2011-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import org.springframework.jmx.export.annotation.ManagedOperation; - -/** - * @author Mark Pollack - * @author Oliver Gierke - */ -public interface MongoAdminOperations { - - @ManagedOperation - void dropDatabase(String databaseName); - - @ManagedOperation - void createDatabase(String databaseName); - - @ManagedOperation - String getDatabaseStats(String databaseName); -} +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
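Taken together, the reworked MongoAdmin now issues plain database commands through the driver's MongoDatabase API instead of the removed DB abstraction. A short usage sketch (connection string and database name are illustrative; note the class is deprecated for removal as of 4.5):

    import com.mongodb.client.MongoClient;
    import com.mongodb.client.MongoClients;

    import org.springframework.data.mongodb.core.MongoAdmin;

    class MongoAdminExample {
        public static void main(String[] args) {
            try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) { // illustrative URI
                MongoAdmin admin = new MongoAdmin(client);

                admin.createDatabase("sample"); // databases materialize lazily on first write
                System.out.println(admin.getDatabaseStats("sample")); // JSON from the dbStats command
                admin.dropDatabase("sample");
            }
        }
    }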
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.jmx.export.annotation.ManagedOperation; + +/** + * @author Mark Pollack + * @author Oliver Gierke + */ +@Deprecated(since = "4.5", forRemoval = true) +public interface MongoAdminOperations { + + @ManagedOperation + void dropDatabase(String databaseName); + + @ManagedOperation + void createDatabase(String databaseName); + + @ManagedOperation + String getDatabaseStats(String databaseName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java index 1b579774fc..c5fee9cf54 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,174 +16,333 @@ package org.springframework.data.mongodb.core; import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collections; +import java.util.Arrays; import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.bson.UuidRepresentation; import org.springframework.beans.factory.config.AbstractFactoryBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.SpringDataMongoDB; +import org.springframework.lang.Nullable; import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.connection.ClusterSettings; +import com.mongodb.connection.ConnectionPoolSettings; +import com.mongodb.connection.ServerSettings; +import com.mongodb.connection.SocketSettings; +import com.mongodb.connection.SslSettings; +import com.mongodb.event.ClusterListener; /** * Convenient factory for configuring MongoDB. 
- * + * * @author Christoph Strobl - * @since 1.7 + * @author Mark Paluch */ -public class MongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); +public class MongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - private MongoClientOptions mongoClientOptions; - private String host; - private Integer port; - private List replicaSetSeeds; - private List credentials; + private @Nullable MongoClientSettings mongoClientSettings; + private @Nullable String host; + private @Nullable Integer port; + private @Nullable List credential = null; + private @Nullable ConnectionString connectionString; + private @Nullable String replicaSet = null; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; /** - * Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}. - * + * Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}. + * * @param mongoClientOptions */ - public void setMongoClientOptions(MongoClientOptions mongoClientOptions) { - this.mongoClientOptions = mongoClientOptions; + public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) { + this.mongoClientSettings = mongoClientOptions; } /** * Set the list of credentials to be used when creating {@link MongoClient}. - * - * @param credentials can be {@literal null}. - */ - public void setCredentials(MongoCredential[] credentials) { - this.credentials = filterNonNullElementsAsList(credentials); - } - - /** - * Set the list of {@link ServerAddress} to build up a replica set for. - * - * @param replicaSetSeeds can be {@literal null}. + * + * @param credential can be {@literal null}. */ - public void setReplicaSetSeeds(ServerAddress[] replicaSetSeeds) { - this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds); + public void setCredential(@Nullable MongoCredential[] credential) { + this.credential = Arrays.asList(credential); } /** * Configures the host to connect to. - * + * * @param host */ - public void setHost(String host) { + public void setHost(@Nullable String host) { this.host = host; } /** * Configures the port to connect to. - * + * * @param port */ public void setPort(int port) { this.port = port; } + public void setConnectionString(@Nullable ConnectionString connectionString) { + this.connectionString = connectionString; + } + + public void setReplicaSet(@Nullable String replicaSet) { + this.replicaSet = replicaSet; + } + /** * Configures the {@link PersistenceExceptionTranslator} to use. - * + * * @param exceptionTranslator */ - public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return Mongo.class; + public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator == null ? 
MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; } - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ + @Override + @Nullable public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return exceptionTranslator.translateExceptionIfPossible(ex); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ @Override - protected Mongo createInstance() throws Exception { + public Class getObjectType() { + return MongoClient.class; + } - if (mongoClientOptions == null) { - mongoClientOptions = MongoClientOptions.builder().build(); - } + @Override + protected MongoClient createInstance() throws Exception { + return createMongoClient(computeClientSetting()); + } - if (credentials == null) { - credentials = Collections.emptyList(); - } + @Override + protected void destroyInstance(@Nullable MongoClient instance) throws Exception { - return createMongoClient(); + if (instance != null) { + instance.close(); + } } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#destroyInstance(java.lang.Object) + /** + * Create {@link MongoClientSettings} based on configuration and priority (lower is better). + *
<ol>
+ * <li>{@link MongoClientFactoryBean#mongoClientSettings}</li>
+ * <li>{@link MongoClientFactoryBean#connectionString}</li>
+ * <li>default {@link MongoClientSettings}</li>
+ * </ol>
    + * + * @since 3.0 */ - @Override - protected void destroyInstance(Mongo instance) throws Exception { - instance.close(); - } + protected MongoClientSettings computeClientSetting() { + + if (connectionString != null && (StringUtils.hasText(host) || port != null)) { + throw new IllegalStateException("ConnectionString and host/port configuration exclude one another"); + } + + ConnectionString connectionString = this.connectionString != null ? this.connectionString + : new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()), + getOrDefault(port, "" + ServerAddress.defaultPort()))); + + Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString); + builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY); + + if (mongoClientSettings != null) { + + MongoClientSettings defaultSettings = MongoClientSettings.builder().build(); - private MongoClient createMongoClient() throws UnknownHostException { + SslSettings sslSettings = mongoClientSettings.getSslSettings(); + ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings(); + ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings(); + SocketSettings socketSettings = mongoClientSettings.getSocketSettings(); + ServerSettings serverSettings = mongoClientSettings.getServerSettings(); - if (!CollectionUtils.isEmpty(replicaSetSeeds)) { - return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions); + builder = builder // + .applicationName(computeSettingsValue(defaultSettings.getApplicationName(), + mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) // + .applyToSslSettings(settings -> { + + applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled, + defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled())); + applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed, + defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed()))); + settings.context(sslSettings.getContext()); + }).applyToClusterSettings(settings -> { + + applySettings(settings::hosts, + computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings, + connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList()))); + + applySettings(settings::requiredReplicaSetName, + computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(), + clusterSettings, connectionString.getRequiredReplicaSetName())); + + applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost, + defaultSettings.getClusterSettings(), clusterSettings, null)); + + applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode, + defaultSettings.getClusterSettings(), clusterSettings, null)); + + applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS), + defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold())); + + applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType, + defaultSettings.getClusterSettings(), clusterSettings, null)); + applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ClusterSettings it) -> 
it.getServerSelectionTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getClusterSettings(), clusterSettings, + connectionString.getServerSelectionTimeout())); + + applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector, + defaultSettings.getClusterSettings(), clusterSettings, null)); + List clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners, + defaultSettings.getClusterSettings(), clusterSettings, null); + if (clusterListeners != null) { + clusterListeners.forEach(settings::addClusterListener); + } + }) // + .applyToConnectionPoolSettings(settings -> { + + applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null)); + + applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxConnectionIdleTime())); + + applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxConnectionLifeTime())); + + applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxWaitTime())); + + applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS), + computeSettingsValue( + (ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null)); + + applySettings(settings::minSize, + computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(), + connectionPoolSettings, connectionString.getMinConnectionPoolSize())); + applySettings(settings::maxSize, + computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(), + connectionPoolSettings, connectionString.getMaxConnectionPoolSize())); + }) // + .applyToSocketSettings(settings -> { + + applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS), + computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout())); + + applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS), + computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout())); + applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize, + defaultSettings.getSocketSettings(), socketSettings, null)); + applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize, + defaultSettings.getSocketSettings(), socketSettings, null)); + }) // + .applyToServerSettings(settings -> { + + applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), 
TimeUnit.MILLISECONDS), + computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getServerSettings(), serverSettings, null)); + + applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency())); + settings.applySettings(serverSettings); + }) // + .autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) // + .codecRegistry(mongoClientSettings.getCodecRegistry()); // + + applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(), + mongoClientSettings.getReadConcern(), connectionString.getReadConcern())); + applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(), + mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern())); + applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(), + mongoClientSettings.getReadPreference(), connectionString.getReadPreference())); + applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(), + mongoClientSettings.getRetryReads(), connectionString.getRetryReads())); + applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(), + mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue())); + applySettings(builder::uuidRepresentation, + computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY)); + } + + if (!CollectionUtils.isEmpty(credential)) { + builder = builder.credential(credential.iterator().next()); + } + + if (StringUtils.hasText(replicaSet)) { + builder.applyToClusterSettings((settings) -> { + settings.requiredReplicaSetName(replicaSet); + }); } - return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions); + return builder.build(); } - private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException { + private void applySettings(Consumer settingsBuilder, @Nullable T value) { - ServerAddress defaultAddress = new ServerAddress(); + if (ObjectUtils.isEmpty(value)) { + return; + } + settingsBuilder.accept(value); + } - return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(), - port != null ? port.intValue() : defaultAddress.getPort()); + private T computeSettingsValue(Function function, S defaultValueHolder, S settingsValueHolder, + @Nullable T connectionStringValue) { + return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder), + connectionStringValue); } - /** - * Returns the given array as {@link List} with all {@literal null} elements removed. - * - * @param elements the elements to filter , can be {@literal null}. - * @return a new unmodifiable {@link List#} from the given elements without {@literal null}s. 
- */ - private static List filterNonNullElementsAsList(T[] elements) { + private T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) { + + boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings); + boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString); - if (elements == null) { - return Collections.emptyList(); + if (!fromSettingsIsDefault) { + return fromSettings; } + return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue; + } + + private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } - List candidateElements = new ArrayList(); + private String getOrDefault(Object value, String defaultValue) { - for (T element : elements) { - if (element != null) { - candidateElements.add(element); - } + if(value == null) { + return defaultValue; } - - return Collections.unmodifiableList(candidateElements); + String sValue = value.toString(); + return StringUtils.hasText(sValue) ? sValue : defaultValue; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java deleted file mode 100644 index fe844df6e3..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java +++ /dev/null @@ -1,295 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import javax.net.SocketFactory; -import javax.net.ssl.SSLSocketFactory; - -import org.springframework.beans.factory.config.AbstractFactoryBean; -import org.springframework.data.mongodb.MongoDbFactory; - -import com.mongodb.DBDecoderFactory; -import com.mongodb.DBEncoderFactory; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; -import com.mongodb.ReadPreference; -import com.mongodb.WriteConcern; - -/** - * A factory bean for construction of a {@link MongoClientOptions} instance. 
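With the factory bean rewritten above, configuration can come from three sources, and computeClientSetting() merges them in the documented priority order: explicit MongoClientSettings win over the ConnectionString, which in turn wins over the driver defaults. A sketch of programmatic use under those rules (URI and replica set name are illustrative):

    import com.mongodb.ConnectionString;
    import com.mongodb.client.MongoClient;

    import org.springframework.data.mongodb.core.MongoClientFactoryBean;

    class ClientFactoryExample {
        public static void main(String[] args) throws Exception {

            MongoClientFactoryBean factory = new MongoClientFactoryBean();
            factory.setConnectionString(new ConnectionString("mongodb://localhost:27017")); // illustrative
            factory.setReplicaSet("rs0"); // applied as requiredReplicaSetName on the cluster settings

            // Combining setConnectionString(..) with setHost(..)/setPort(..) would throw an
            // IllegalStateException: the two configuration styles exclude one another.

            factory.afterPropertiesSet(); // AbstractFactoryBean callback; triggers createInstance()
            MongoClient client = factory.getObject();
            client.close();
        }
    }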
- * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -public class MongoClientOptionsFactoryBean extends AbstractFactoryBean { - - private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build(); - - private String description = DEFAULT_MONGO_OPTIONS.getDescription(); - private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost(); - private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost(); - private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS - .getThreadsAllowedToBlockForConnectionMultiplier(); - private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime(); - private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime(); - private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime(); - private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout(); - private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout(); - private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive(); - private ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference(); - private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory(); - private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory(); - private WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern(); - private SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory(); - private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled(); - private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans(); - private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency(); - private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency(); - private int heartbeatConnectTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout(); - private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout(); - private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName(); - - private boolean ssl; - private SSLSocketFactory sslSocketFactory; - - /** - * Set the {@link MongoClient} description. - * - * @param description - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * Set the minimum number of connections per host. - * - * @param minConnectionsPerHost - */ - public void setMinConnectionsPerHost(int minConnectionsPerHost) { - this.minConnectionsPerHost = minConnectionsPerHost; - } - - /** - * Set the number of connections allowed per host. Will block if run out. Default is 10. System property - * {@code MONGO.POOLSIZE} can override - * - * @param connectionsPerHost - */ - public void setConnectionsPerHost(int connectionsPerHost) { - this.connectionsPerHost = connectionsPerHost; - } - - /** - * Set the multiplier for connectionsPerHost for # of threads that can block. Default is 5. If connectionsPerHost is - * 10, and threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an - * exception will be thrown. - * - * @param threadsAllowedToBlockForConnectionMultiplier - */ - public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) { - this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier; - } - - /** - * Set the max wait time of a blocking thread for a connection. 
Default is 12000 ms (2 minutes) - * - * @param maxWaitTime - */ - public void setMaxWaitTime(int maxWaitTime) { - this.maxWaitTime = maxWaitTime; - } - - /** - * The maximum idle time for a pooled connection. - * - * @param maxConnectionIdleTime - */ - public void setMaxConnectionIdleTime(int maxConnectionIdleTime) { - this.maxConnectionIdleTime = maxConnectionIdleTime; - } - - /** - * Set the maximum life time for a pooled connection. - * - * @param maxConnectionLifeTime - */ - public void setMaxConnectionLifeTime(int maxConnectionLifeTime) { - this.maxConnectionLifeTime = maxConnectionLifeTime; - } - - /** - * Set the connect timeout in milliseconds. 0 is default and infinite. - * - * @param connectTimeout - */ - public void setConnectTimeout(int connectTimeout) { - this.connectTimeout = connectTimeout; - } - - /** - * Set the socket timeout. 0 is default and infinite. - * - * @param socketTimeout - */ - public void setSocketTimeout(int socketTimeout) { - this.socketTimeout = socketTimeout; - } - - /** - * Set the keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. - * - * @param socketKeepAlive - */ - public void setSocketKeepAlive(boolean socketKeepAlive) { - this.socketKeepAlive = socketKeepAlive; - } - - /** - * Set the {@link ReadPreference}. - * - * @param readPreference - */ - public void setReadPreference(ReadPreference readPreference) { - this.readPreference = readPreference; - } - - /** - * Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB - * object. - * - * @param writeConcern - */ - public void setWriteConcern(WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /** - * @param socketFactory - */ - public void setSocketFactory(SocketFactory socketFactory) { - this.socketFactory = socketFactory; - } - - /** - * Set the frequency that the driver will attempt to determine the current state of each server in the cluster. - * - * @param heartbeatFrequency - */ - public void setHeartbeatFrequency(int heartbeatFrequency) { - this.heartbeatFrequency = heartbeatFrequency; - } - - /** - * In the event that the driver has to frequently re-check a server's availability, it will wait at least this long - * since the previous check to avoid wasted effort. - * - * @param minHeartbeatFrequency - */ - public void setMinHeartbeatFrequency(int minHeartbeatFrequency) { - this.minHeartbeatFrequency = minHeartbeatFrequency; - } - - /** - * Set the connect timeout for connections used for the cluster heartbeat. - * - * @param heartbeatConnectTimeout - */ - public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) { - this.heartbeatConnectTimeout = heartbeatConnectTimeout; - } - - /** - * Set the socket timeout for connections used for the cluster heartbeat. - * - * @param heartbeatSocketTimeout - */ - public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) { - this.heartbeatSocketTimeout = heartbeatSocketTimeout; - } - - /** - * Configures the name of the replica set. - * - * @param requiredReplicaSetName - */ - public void setRequiredReplicaSetName(String requiredReplicaSetName) { - this.requiredReplicaSetName = requiredReplicaSetName; - } - - /** - * This controls if the driver should us an SSL connection. Defaults to |@literal false}. - * - * @param ssl - */ - public void setSsl(boolean ssl) { - this.ssl = ssl; - } - - /** - * Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. 
If none is configured here, - * {@link SSLSocketFactory#getDefault()} will be used. - * - * @param sslSocketFactory - */ - public void setSslSocketFactory(SSLSocketFactory sslSocketFactory) { - this.sslSocketFactory = sslSocketFactory; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - @Override - protected MongoClientOptions createInstance() throws Exception { - - SocketFactory socketFactoryToUse = ssl ? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory - .getDefault()) : this.socketFactory; - - return MongoClientOptions.builder() // - .alwaysUseMBeans(this.alwaysUseMBeans) // - .connectionsPerHost(this.connectionsPerHost) // - .connectTimeout(connectTimeout) // - .cursorFinalizerEnabled(cursorFinalizerEnabled) // - .dbDecoderFactory(dbDecoderFactory) // - .dbEncoderFactory(dbEncoderFactory) // - .description(description) // - .heartbeatConnectTimeout(heartbeatConnectTimeout) // - .heartbeatFrequency(heartbeatFrequency) // - .heartbeatSocketTimeout(heartbeatSocketTimeout) // - .maxConnectionIdleTime(maxConnectionIdleTime) // - .maxConnectionLifeTime(maxConnectionLifeTime) // - .maxWaitTime(maxWaitTime) // - .minConnectionsPerHost(minConnectionsPerHost) // - .minHeartbeatFrequency(minHeartbeatFrequency) // - .readPreference(readPreference) // - .requiredReplicaSetName(requiredReplicaSetName) // - .socketFactory(socketFactoryToUse) // - .socketKeepAlive(socketKeepAlive) // - .socketTimeout(socketTimeout) // - .threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) // - .writeConcern(writeConcern).build(); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoClientOptions.class; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java new file mode 100644 index 0000000000..02913b4303 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java @@ -0,0 +1,515 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
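For orientation, the options removed above map roughly onto the settings-style factory bean introduced in the new file that follows. A non-exhaustive sketch of equivalent property names (timeout and pool values are illustrative):

    import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;

    class OptionsToSettingsMigration {

        // Rough equivalents of the removed MongoClientOptionsFactoryBean properties.
        static MongoClientSettingsFactoryBean migrated() {

            MongoClientSettingsFactoryBean settings = new MongoClientSettingsFactoryBean();
            settings.setSocketConnectTimeoutMS(10_000);       // was setConnectTimeout(int)
            settings.setSocketReadTimeoutMS(15_000);          // was setSocketTimeout(int)
            settings.setPoolMaxSize(100);                     // was setConnectionsPerHost(int)
            settings.setPoolMaxWaitTimeMS(120_000);           // was setMaxWaitTime(int)
            settings.setClusterRequiredReplicaSetName("rs0"); // was setRequiredReplicaSetName(String)
            settings.setSslEnabled(true);                     // was setSsl(boolean)
            return settings;
        }
    }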
+ */ +package org.springframework.data.mongodb.core; + +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.net.ssl.SSLContext; + +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.ServerAddress; +import com.mongodb.ServerApi; +import com.mongodb.WriteConcern; +import com.mongodb.connection.ClusterConnectionMode; +import com.mongodb.connection.ClusterType; +import com.mongodb.connection.TransportSettings; + +/** + * A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class MongoClientSettingsFactoryBean extends AbstractFactoryBean { + + private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build(); + + private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry(); + + @Nullable private Object streamFactoryFactory = MongoCompatibilityAdapter + .clientSettingsAdapter(DEFAULT_MONGO_SETTINGS).getStreamFactoryFactory(); + @Nullable private TransportSettings transportSettings; + + private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference(); + private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern(); + private @Nullable Boolean retryReads = null; + + private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern(); + private @Nullable Boolean retryWrites = null; + + private @Nullable String applicationName = null; + + private @Nullable UuidRepresentation uUidRepresentation = null; + + // --> Socket Settings + + private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings() + .getConnectTimeout(TimeUnit.MILLISECONDS); + private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS); + private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize(); + private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize(); + + // --> Cluster Settings + + private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost(); + private List clusterHosts = Collections.emptyList(); + private @Nullable ClusterConnectionMode clusterConnectionMode = null; + private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType(); + private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getRequiredReplicaSetName(); + private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getLocalThreshold(TimeUnit.MILLISECONDS); + private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getServerSelectionTimeout(TimeUnit.MILLISECONDS); + + // --> ConnectionPoolSettings + + private int poolMaxSize = 
DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize(); + private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize(); + private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxWaitTime(TimeUnit.MILLISECONDS); + private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxConnectionLifeTime(TimeUnit.MILLISECONDS); + private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxConnectionIdleTime(TimeUnit.MILLISECONDS); + private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaintenanceInitialDelay(TimeUnit.MILLISECONDS); + private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaintenanceFrequency(TimeUnit.MILLISECONDS); + + // --> SSL Settings + + private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled(); + private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed(); + private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled() + ? DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName() + : ""; + + // encryption and retry + + private @Nullable AutoEncryptionSettings autoEncryptionSettings; + private @Nullable ServerApi serverApi; + + /** + * @param socketConnectTimeoutMS in msec + * @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit) + */ + public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) { + this.socketConnectTimeoutMS = socketConnectTimeoutMS; + } + + /** + * @param socketReadTimeoutMS in msec + * @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit) + */ + public void setSocketReadTimeoutMS(int socketReadTimeoutMS) { + this.socketReadTimeoutMS = socketReadTimeoutMS; + } + + /** + * @param socketReceiveBufferSize + * @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int) + */ + public void setSocketReceiveBufferSize(int socketReceiveBufferSize) { + this.socketReceiveBufferSize = socketReceiveBufferSize; + } + + /** + * @param socketSendBufferSize + * @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int) + */ + public void setSocketSendBufferSize(int socketSendBufferSize) { + this.socketSendBufferSize = socketSendBufferSize; + } + + // --> Server Settings + + private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings() + .getHeartbeatFrequency(TimeUnit.MILLISECONDS); + private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings() + .getMinHeartbeatFrequency(TimeUnit.MILLISECONDS); + + /** + * @param serverHeartbeatFrequencyMS in msec + * @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit) + */ + public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) { + this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS; + } + + /** + * @param serverMinHeartbeatFrequencyMS in msec + * @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit) + */ + public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) { + this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS; + } + + // --> Cluster Settings + + /** + * @param clusterSrvHost + * @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String) + */ + public void setClusterSrvHost(String clusterSrvHost) { + 
this.clusterSrvHost = clusterSrvHost; + } + + /** + * @param clusterHosts + * @see com.mongodb.connection.ClusterSettings.Builder#hosts(List) + */ + public void setClusterHosts(ServerAddress[] clusterHosts) { + this.clusterHosts = Arrays.asList(clusterHosts); + } + + /** + * Set the {@link ClusterConnectionMode} to use. + * + * @param clusterConnectionMode + * @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode) + */ + public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) { + this.clusterConnectionMode = clusterConnectionMode; + } + + /** + * @param custerRequiredClusterType + * @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType) + */ + public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) { + this.custerRequiredClusterType = custerRequiredClusterType; + } + + /** + * @param clusterRequiredReplicaSetName + * @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String) + */ + public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) { + this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName; + } + + /** + * @param clusterLocalThresholdMS in msec + * @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit) + */ + public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) { + this.clusterLocalThresholdMS = clusterLocalThresholdMS; + } + + /** + * @param clusterServerSelectionTimeoutMS in msec + * @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit) + */ + public void setClusterServerSelectionTimeoutMS(long clusterServerSelectionTimeoutMS) { + this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS; + } + + // --> ConnectionPoolSettings + + /** + * @param poolMaxSize + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int) + */ + public void setPoolMaxSize(int poolMaxSize) { + this.poolMaxSize = poolMaxSize; + } + + /** + * @param poolMinSize + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int) + */ + public void setPoolMinSize(int poolMinSize) { + this.poolMinSize = poolMinSize; + } + + /** + * @param poolMaxWaitTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit) + */ + public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) { + this.poolMaxWaitTimeMS = poolMaxWaitTimeMS; + } + + /** + * @param poolMaxConnectionLifeTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit) + */ + public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) { + this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS; + } + + /** + * @param poolMaxConnectionIdleTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit) + */ + public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) { + this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS; + } + + /** + * @param poolMaintenanceInitialDelayMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit) + */ + public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) { + this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS; + } + + /** + * @param poolMaintenanceFrequencyMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit) + */ + public void
setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) { + this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS; + } + + // --> SSL Settings + + /** + * @param sslEnabled + * @see com.mongodb.connection.SslSettings.Builder#enabled(boolean) + */ + public void setSslEnabled(Boolean sslEnabled) { + this.sslEnabled = sslEnabled; + } + + /** + * @param sslInvalidHostNameAllowed + * @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean) + */ + public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) { + this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed; + } + + /** + * @param sslProvider + * @see com.mongodb.connection.SslSettings.Builder#context(SSLContext) + * @see SSLContext#getInstance(String) + */ + public void setSslProvider(String sslProvider) { + this.sslProvider = sslProvider; + } + + // encryption and retry + + /** + * @param applicationName + * @see MongoClientSettings.Builder#applicationName(String) + */ + public void setApplicationName(@Nullable String applicationName) { + this.applicationName = applicationName; + } + + /** + * @param retryReads + * @see MongoClientSettings.Builder#retryReads(boolean) + */ + public void setRetryReads(@Nullable Boolean retryReads) { + this.retryReads = retryReads; + } + + /** + * @param readConcern + * @see MongoClientSettings.Builder#readConcern(ReadConcern) + */ + public void setReadConcern(ReadConcern readConcern) { + this.readConcern = readConcern; + } + + /** + * @param writeConcern + * @see MongoClientSettings.Builder#writeConcern(WriteConcern) + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * @param retryWrites + * @see MongoClientSettings.Builder#retryWrites(boolean) + */ + public void setRetryWrites(@Nullable Boolean retryWrites) { + this.retryWrites = retryWrites; + } + + /** + * @param readPreference + * @see MongoClientSettings.Builder#readPreference(ReadPreference) + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * @param streamFactoryFactory + * @deprecated since 4.3, will be removed in the MongoDB 5.0 driver in favor of + * {@code com.mongodb.connection.TransportSettings}. + */ + @Deprecated(since = "4.3") + public void setStreamFactoryFactory(Object streamFactoryFactory) { + this.streamFactoryFactory = streamFactoryFactory; + } + + public void setTransportSettings(@Nullable TransportSettings transportSettings) { + this.transportSettings = transportSettings; + } + + /** + * @param codecRegistry + * @see MongoClientSettings.Builder#codecRegistry(CodecRegistry) + */ + public void setCodecRegistry(CodecRegistry codecRegistry) { + this.codecRegistry = codecRegistry; + } + + /** + * @param uUidRepresentation + */ + public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) { + this.uUidRepresentation = uUidRepresentation; + } + + /** + * @param autoEncryptionSettings can be {@literal null}. + * @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings) + */ + public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) { + this.autoEncryptionSettings = autoEncryptionSettings; + } + + /** + * @param serverApi can be {@literal null}. 
+ * @see MongoClientSettings.Builder#serverApi(ServerApi) + * @since 3.3 + */ + public void setServerApi(@Nullable ServerApi serverApi) { + this.serverApi = serverApi; + } + + @Override + public Class getObjectType() { + return MongoClientSettings.class; + } + + @Override + protected MongoClientSettings createInstance() { + + Builder builder = MongoClientSettings.builder() // + .readPreference(readPreference) // + .writeConcern(writeConcern) // + .readConcern(readConcern) // + .codecRegistry(codecRegistry) // + .applicationName(applicationName) // + .autoEncryptionSettings(autoEncryptionSettings) // + .applyToClusterSettings((settings) -> { + + settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS); + if (clusterConnectionMode != null) { + settings.mode(clusterConnectionMode); + } + settings.requiredReplicaSetName(clusterRequiredReplicaSetName); + + if (!CollectionUtils.isEmpty(clusterHosts)) { + settings.hosts(clusterHosts); + } + settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS); + settings.requiredClusterType(custerRequiredClusterType); + + if (StringUtils.hasText(clusterSrvHost)) { + settings.srvHost(clusterSrvHost); + } + }) // + .applyToConnectionPoolSettings((settings) -> { + + settings.minSize(poolMinSize); + settings.maxSize(poolMaxSize); + settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS); + settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS); + settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS); + // settings.maxWaitQueueSize(poolMaxWaitQueueSize); + settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS); + settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS); + }) // + .applyToServerSettings((settings) -> { + + settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS); + settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS); + }) // + .applyToSocketSettings((settings) -> { + + settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS); + settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS); + settings.receiveBufferSize(socketReceiveBufferSize); + settings.sendBufferSize(socketSendBufferSize); + }) // + .applyToSslSettings((settings) -> { + + settings.enabled(sslEnabled); + if (sslEnabled) { + + settings.invalidHostNameAllowed(sslInvalidHostNameAllowed); + try { + settings.context( + StringUtils.hasText(sslProvider) ? 
SSLContext.getInstance(sslProvider) : SSLContext.getDefault()); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + }); + + if (transportSettings != null) { + builder.transportSettings(transportSettings); + } + + if (streamFactoryFactory != null) { + MongoCompatibilityAdapter.clientSettingsBuilderAdapter(builder).setStreamFactoryFactory(streamFactoryFactory); + } + + if (retryReads != null) { + builder = builder.retryReads(retryReads); + } + + if (retryWrites != null) { + builder = builder.retryWrites(retryWrites); + } + if (uUidRepresentation != null) { + builder = builder.uuidRepresentation(uUidRepresentation); + } + if (serverApi != null) { + builder = builder.serverApi(serverApi); + } + + return builder.build(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java index b259878d97..df58a36770 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,51 +18,51 @@ import org.springframework.dao.DataIntegrityViolationException; import org.springframework.util.Assert; -import com.mongodb.WriteResult; +import com.mongodb.WriteConcernResult; /** * Mongo-specific {@link DataIntegrityViolationException}. - * + * * @author Oliver Gierke */ public class MongoDataIntegrityViolationException extends DataIntegrityViolationException { private static final long serialVersionUID = -186980521176764046L; - private final WriteResult writeResult; + private final WriteConcernResult writeResult; private final MongoActionOperation actionOperation; /** - * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}. - * + * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}. + * * @param message the exception message - * @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}. + * @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}. * @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}. 
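The setters above are plain JavaBean properties, so the factory bean can be configured in XML or driven programmatically. A minimal sketch, assuming the remaining properties keep their defaults (the application name and pool sizes below are illustrative, not part of this change):

    // wiring sketch for MongoClientSettingsFactoryBean; getObject() declares throws Exception
    MongoClientSettingsFactoryBean factoryBean = new MongoClientSettingsFactoryBean();
    factoryBean.setApplicationName("sample-app");   // hypothetical application name
    factoryBean.setPoolMinSize(2);
    factoryBean.setPoolMaxSize(20);
    factoryBean.setRetryWrites(true);
    factoryBean.afterPropertiesSet();               // triggers createInstance()
    MongoClientSettings settings = factoryBean.getObject();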
*/ - public MongoDataIntegrityViolationException(String message, WriteResult writeResult, + public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult, MongoActionOperation actionOperation) { super(message); - Assert.notNull(writeResult, "WriteResult must not be null!"); - Assert.notNull(actionOperation, "MongoActionOperation must not be null!"); + Assert.notNull(writeResult, "WriteResult must not be null"); + Assert.notNull(actionOperation, "MongoActionOperation must not be null"); this.writeResult = writeResult; this.actionOperation = actionOperation; } /** - * Returns the {@link WriteResult} that caused the exception. - * + * Returns the {@link WriteConcernResult} that caused the exception. + * * @return the writeResult */ - public WriteResult getWriteResult() { + public WriteConcernResult getWriteResult() { return writeResult; } /** - * Returns the {@link MongoActionOperation} in which the current exception occured. - * + * Returns the {@link MongoActionOperation} in which the current exception occurred. + * * @return the actionOperation */ public MongoActionOperation getActionOperation() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java new file mode 100644 index 0000000000..eab6b5d7f4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java @@ -0,0 +1,266 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.WriteConcern; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as + * database name and exception translator.
    + * Not intended to be used directly. + * + * @author Christoph Strobl + * @author Mark Paluch + * @param Client type. + * @since 3.0 + * @see SimpleMongoClientDatabaseFactory + */ +public abstract class MongoDatabaseFactorySupport implements MongoDatabaseFactory { + + private final C mongoClient; + private final String databaseName; + private final boolean mongoInstanceCreated; + + private PersistenceExceptionTranslator exceptionTranslator; + private @Nullable WriteConcern writeConcern; + + /** + * Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName}, + * {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + * @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of + * {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}. + * @param exceptionTranslator must not be {@literal null}. + */ + protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated, + PersistenceExceptionTranslator exceptionTranslator) { + + Assert.notNull(mongoClient, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); + Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); + + this.mongoClient = mongoClient; + this.databaseName = databaseName; + this.mongoInstanceCreated = mongoInstanceCreated; + this.exceptionTranslator = exceptionTranslator; + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. + * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; + } + + /** + * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created. + * + * @param writeConcern the writeConcern to set. + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + return getMongoDatabase(getDefaultDatabaseName()); + } + + @Override + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + + Assert.hasText(dbName, "Database name must not be empty"); + + MongoDatabase db = doGetMongoDatabase(dbName); + + if (writeConcern == null) { + return db; + } + + return db.withWriteConcern(writeConcern); + } + + /** + * Get the actual {@link MongoDatabase} from the client. + * + * @param dbName must not be {@literal null} or empty. + * @return + */ + protected abstract MongoDatabase doGetMongoDatabase(String dbName); + + public void destroy() throws Exception { + if (mongoInstanceCreated) { + closeClient(); + } + } + + @Override + public MongoDatabaseFactory withSession(ClientSession session) { + return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this); + } + + /** + * Close the client instance. + */ + protected abstract void closeClient(); + + /** + * @return the Mongo client object. + */ + protected C getMongoClient() { + return mongoClient; + } + + /** + * @return the database name. 
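For orientation, this support class is typically consumed through the SimpleMongoClientDatabaseFactory subclass referenced in its Javadoc rather than directly. A minimal sketch, where the connection string and database name are illustrative:

    // create a driver client and hand it to the factory together with the default database name
    MongoClient client = MongoClients.create("mongodb://localhost:27017");
    MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, "test");
    MongoDatabase database = factory.getMongoDatabase(); // applies the configured WriteConcern, if any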
+ */ + protected String getDefaultDatabaseName() { + return databaseName; + } + + /** + * {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a + * {@link SessionAwareMethodInterceptor}. + * + * @author Christoph Strobl + * @since 2.1 + */ + static final class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory { + + private final ClientSession session; + private final MongoDatabaseFactory delegate; + + public ClientSessionBoundMongoDbFactory(ClientSession session, MongoDatabaseFactory delegate) { + this.session = session; + this.delegate = delegate; + } + + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + return proxyMongoDatabase(delegate.getMongoDatabase()); + } + + @Override + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + return proxyMongoDatabase(delegate.getMongoDatabase(dbName)); + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return delegate.getExceptionTranslator(); + } + + @Override + public ClientSession getSession(ClientSessionOptions options) { + return delegate.getSession(options); + } + + @Override + public MongoDatabaseFactory withSession(ClientSession session) { + return delegate.withSession(session); + } + + @Override + public boolean isTransactionActive() { + return session != null && session.hasActiveTransaction(); + } + + private MongoDatabase proxyMongoDatabase(MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, + MongoCollection collection) { + return createProxyInstance(session, collection, MongoCollection.class); + } + + private T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class targetType) { + + ProxyFactory factory = new ProxyFactory(); + factory.setTarget(target); + factory.setInterfaces(targetType); + factory.setOpaque(true); + + factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, + this::proxyDatabase, MongoCollection.class, this::proxyCollection)); + + return targetType.cast(factory.getProxy(target.getClass().getClassLoader())); + } + + public ClientSession getSession() { + return this.session; + } + + public MongoDatabaseFactory getDelegate() { + return this.delegate; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o; + + if (!ObjectUtils.nullSafeEquals(this.session, that.session)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(this.session); + result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate); + return result; + } + + public String toString() { + return "MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + ", delegate=" + + this.getDelegate() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbUtils.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbUtils.java deleted file mode 100644 index 27c7594e79..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbUtils.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright 2010-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.util.MongoClientVersion; -import org.springframework.transaction.support.TransactionSynchronizationManager; -import org.springframework.util.Assert; - -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; - -/** - * Helper class featuring helper methods for internal MongoDb classes. Mainly intended for internal use within the - * framework. - * - * @author Thomas Risberg - * @author Graeme Rocher - * @author Oliver Gierke - * @author Randy Watler - * @author Thomas Darimont - * @author Christoph Strobl - * @since 1.0 - */ -public abstract class MongoDbUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbUtils.class); - - /** - * Private constructor to prevent instantiation. - */ - private MongoDbUtils() {} - - /** - * Obtains a {@link DB} connection for the given {@link Mongo} instance and database name - * - * @param mongo the {@link Mongo} instance, must not be {@literal null}. - * @param databaseName the database name, must not be {@literal null} or empty. - * @return the {@link DB} connection - */ - public static DB getDB(Mongo mongo, String databaseName) { - return doGetDB(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true, databaseName); - } - - /** - * Obtains a {@link DB} connection for the given {@link Mongo} instance and database name - * - * @param mongo the {@link Mongo} instance, must not be {@literal null}. - * @param databaseName the database name, must not be {@literal null} or empty. - * @param credentials the credentials to use, must not be {@literal null}. - * @return the {@link DB} connection - * @deprecated since 1.7. The {@link MongoClient} itself should hold credentials within - * {@link MongoClient#getCredentialsList()}. - */ - @Deprecated - public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials) { - return getDB(mongo, databaseName, credentials, databaseName); - } - - /** - * @param mongo - * @param databaseName - * @param credentials - * @param authenticationDatabaseName - * @return - * @deprecated since 1.7. The {@link MongoClient} itself should hold credentials within - * {@link MongoClient#getCredentialsList()}. 
- */ - @Deprecated - public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials, - String authenticationDatabaseName) { - - Assert.notNull(mongo, "No Mongo instance specified!"); - Assert.hasText(databaseName, "Database name must be given!"); - Assert.notNull(credentials, "Credentials must not be null, use UserCredentials.NO_CREDENTIALS!"); - Assert.hasText(authenticationDatabaseName, "Authentication database name must not be null or empty!"); - - return doGetDB(mongo, databaseName, credentials, true, authenticationDatabaseName); - } - - private static DB doGetDB(Mongo mongo, String databaseName, UserCredentials credentials, boolean allowCreate, - String authenticationDatabaseName) { - - DbHolder dbHolder = (DbHolder) TransactionSynchronizationManager.getResource(mongo); - - // Do we have a populated holder and TX sync active? - if (dbHolder != null && !dbHolder.isEmpty() && TransactionSynchronizationManager.isSynchronizationActive()) { - - DB db = dbHolder.getDB(databaseName); - - // DB found but not yet synchronized - if (db != null && !dbHolder.isSynchronizedWithTransaction()) { - - LOGGER.debug("Registering Spring transaction synchronization for existing MongoDB {}.", databaseName); - - TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(dbHolder, mongo)); - dbHolder.setSynchronizedWithTransaction(true); - } - - if (db != null) { - return db; - } - } - - // Lookup fresh database instance - LOGGER.debug("Getting Mongo Database name=[{}]", databaseName); - - DB db = mongo.getDB(databaseName); - - if (!(mongo instanceof MongoClient) && requiresAuthDbAuthentication(credentials)) { - ReflectiveDbInvoker.authenticate(mongo, db, credentials, authenticationDatabaseName); - } - - // TX sync active, bind new database to thread - if (TransactionSynchronizationManager.isSynchronizationActive()) { - - LOGGER.debug("Registering Spring transaction synchronization for MongoDB instance {}.", databaseName); - - DbHolder holderToUse = dbHolder; - - if (holderToUse == null) { - holderToUse = new DbHolder(databaseName, db); - } else { - holderToUse.addDB(databaseName, db); - } - - // synchronize holder only if not yet synchronized - if (!holderToUse.isSynchronizedWithTransaction()) { - TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo)); - holderToUse.setSynchronizedWithTransaction(true); - } - - if (holderToUse != dbHolder) { - TransactionSynchronizationManager.bindResource(mongo, holderToUse); - } - } - - // Check whether we are allowed to return the DB. - if (!allowCreate && !isDBTransactional(db, mongo)) { - throw new IllegalStateException("No Mongo DB bound to thread, " - + "and configuration does not allow creation of non-transactional one here"); - } - - return db; - } - - /** - * Return whether the given DB instance is transactional, that is, bound to the current thread by Spring's transaction - * facilities. - * - * @param db the DB to check - * @param mongo the Mongo instance that the DB was created with (may be null) - * @return whether the DB is transactional - */ - public static boolean isDBTransactional(DB db, Mongo mongo) { - - if (mongo == null) { - return false; - } - DbHolder dbHolder = (DbHolder) TransactionSynchronizationManager.getResource(mongo); - return dbHolder != null && dbHolder.containsDB(db); - } - - /** - * Perform actual closing of the Mongo DB object, catching and logging any cleanup exceptions thrown. 
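With MongoDbUtils removed, the per-database authentication it performed has no equivalent in the 3.x+ driver API; credentials travel with the client instead. A minimal sketch of the idiomatic replacement, with URL and credentials purely illustrative:

    // authentication is configured on the client, not obtained per DB as getDB(...) once allowed
    MongoClient client = MongoClients.create("mongodb://user:secret@localhost:27017/?authSource=admin");
    MongoDatabase database = client.getDatabase("test");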
- * - * @param db the DB to close (may be null) - * @deprecated since 1.7. The main use case for this method is to ensure that applications can read their own - * unacknowledged writes, but this is no longer so prevalent since the MongoDB Java driver version 3 - * started defaulting to acknowledged writes. - */ - @Deprecated - public static void closeDB(DB db) { - - if (db != null) { - LOGGER.debug("Closing Mongo DB object"); - try { - ReflectiveDbInvoker.requestDone(db); - } catch (Throwable ex) { - LOGGER.debug("Unexpected exception on closing Mongo DB object", ex); - } - } - } - - /** - * Check if credentials present. In case we're using a mongo-java-driver version 3 or above we do not have the need - * for authentication as the auth data has to be provided within the MongoClient - * - * @param credentials - * @return - */ - private static boolean requiresAuthDbAuthentication(UserCredentials credentials) { - - if (credentials == null || !credentials.hasUsername()) { - return false; - } - - return !MongoClientVersion.isMongo3Driver(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java new file mode 100644 index 0000000000..7aef5a3a82 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java @@ -0,0 +1,112 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Collections; +import java.util.Map; + +import org.bson.BsonDocument; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.MongoClientSettings; + +/** + * {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ +public class MongoEncryptionSettingsFactoryBean implements FactoryBean { + + private boolean bypassAutoEncryption; + private String keyVaultNamespace; + private Map extraOptions; + private MongoClientSettings keyVaultClientSettings; + private Map> kmsProviders; + private Map schemaMap; + + /** + * @param bypassAutoEncryption + * @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean) + */ + public void setBypassAutoEncryption(boolean bypassAutoEncryption) { + this.bypassAutoEncryption = bypassAutoEncryption; + } + + /** + * @param extraOptions + * @see AutoEncryptionSettings.Builder#extraOptions(Map) + */ + public void setExtraOptions(Map extraOptions) { + this.extraOptions = extraOptions; + } + + /** + * @param keyVaultNamespace + * @see AutoEncryptionSettings.Builder#keyVaultNamespace(String) + */ + public void setKeyVaultNamespace(String keyVaultNamespace) { + this.keyVaultNamespace = keyVaultNamespace; + } + + /** + * @param keyVaultClientSettings + * @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings) + */ + public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) { + this.keyVaultClientSettings = keyVaultClientSettings; + } + + /** + * @param kmsProviders + * @see AutoEncryptionSettings.Builder#kmsProviders(Map) + */ + public void setKmsProviders(Map> kmsProviders) { + this.kmsProviders = kmsProviders; + } + + /** + * @param schemaMap + * @see AutoEncryptionSettings.Builder#schemaMap(Map) + */ + public void setSchemaMap(Map schemaMap) { + this.schemaMap = schemaMap; + } + + @Override + public AutoEncryptionSettings getObject() { + + return AutoEncryptionSettings.builder() // + .bypassAutoEncryption(bypassAutoEncryption) // + .keyVaultNamespace(keyVaultNamespace) // + .keyVaultMongoClientSettings(keyVaultClientSettings) // + .kmsProviders(orEmpty(kmsProviders)) // + .extraOptions(orEmpty(extraOptions)) // + .schemaMap(orEmpty(schemaMap)) // + .build(); + } + + private Map orEmpty(@Nullable Map source) { + return source != null ? source : Collections.emptyMap(); + } + + @Override + public Class getObjectType() { + return AutoEncryptionSettings.class; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java index bf94e9c519..1ec7d3ffc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,10 @@ */ package org.springframework.data.mongodb.core; -import java.util.Arrays; -import java.util.HashSet; import java.util.Set; +import org.bson.BsonInvalidOperationException; + import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.dao.DataIntegrityViolationException; @@ -27,49 +27,69 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException; import org.springframework.dao.PermissionDeniedDataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.BulkOperationException; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.TransientClientSessionException; import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.util.MongoDbErrorCodes; +import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; -import com.mongodb.BulkWriteException; +import com.mongodb.MongoBulkWriteException; import com.mongodb.MongoException; +import com.mongodb.MongoServerException; +import com.mongodb.MongoSocketException; +import com.mongodb.bulk.BulkWriteError; /** * Simple {@link PersistenceExceptionTranslator} for Mongo. Convert the given runtime exception to an appropriate * exception from the {@code org.springframework.dao} hierarchy. Return {@literal null} if no translation is * appropriate: any other exception may have resulted from user code, and should not be translated. 
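To make the translation contract concrete, a minimal sketch (the exception instance and its message are fabricated for illustration):

    MongoExceptionTranslator translator = new MongoExceptionTranslator();
    DataAccessException translated = translator
        .translateExceptionIfPossible(new MongoTimeoutException("Timed out while waiting for a server"));
    // MongoTimeoutException is listed in RESOURCE_FAILURE_EXCEPTIONS, so this yields a
    // DataAccessResourceFailureException; exceptions originating in user code translate to null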
- * + * * @author Oliver Gierke * @author Michal Vich * @author Christoph Strobl + * @author Brice Vandeputte */ public class MongoExceptionTranslator implements PersistenceExceptionTranslator { - private static final Set DULICATE_KEY_EXCEPTIONS = new HashSet( - Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException")); + public static final MongoExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - private static final Set RESOURCE_FAILURE_EXCEPTIONS = new HashSet( - Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound", - "MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException")); + private static final Set DUPLICATE_KEY_EXCEPTIONS = Set.of("MongoException.DuplicateKey", + "DuplicateKeyException"); - private static final Set RESOURCE_USAGE_EXCEPTIONS = new HashSet( - Arrays.asList("MongoInternalException")); + private static final Set RESOURCE_FAILURE_EXCEPTIONS = Set.of("MongoException.Network", + "MongoSocketException", "MongoException.CursorNotFound", "MongoCursorNotFoundException", + "MongoServerSelectionException", "MongoTimeoutException"); - private static final Set DATA_INTEGRETY_EXCEPTIONS = new HashSet( - Arrays.asList("WriteConcernException")); + private static final Set RESOURCE_USAGE_EXCEPTIONS = Set.of("MongoInternalException"); - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ + private static final Set DATA_INTEGRITY_EXCEPTIONS = Set.of("WriteConcernException", "MongoWriteException", + "MongoBulkWriteException"); + + private static final Set SECURITY_EXCEPTIONS = Set.of("MongoCryptException"); + + @Override + @Nullable public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return doTranslateException(ex); + } + + @Nullable + DataAccessException doTranslateException(RuntimeException ex) { // Check for well-known MongoException subclasses. 
+ if (ex instanceof BsonInvalidOperationException) { + throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex); + } + + if (ex instanceof MongoSocketException) { + return new DataAccessResourceFailureException(ex.getMessage(), ex); + } + String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass())); - if (DULICATE_KEY_EXCEPTIONS.contains(exception)) { + if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) { return new DuplicateKeyException(ex.getMessage(), ex); } @@ -81,35 +101,92 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex); } - if (DATA_INTEGRETY_EXCEPTIONS.contains(exception)) { - return new DataIntegrityViolationException(ex.getMessage(), ex); - } + if (DATA_INTEGRITY_EXCEPTIONS.contains(exception)) { + + if (ex instanceof MongoServerException) { + if (MongoDbErrorCodes.isDataDuplicateKeyError(ex)) { + return new DuplicateKeyException(ex.getMessage(), ex); + } + if (ex instanceof MongoBulkWriteException bulkException) { + for (BulkWriteError writeError : bulkException.getWriteErrors()) { + if (MongoDbErrorCodes.isDuplicateKeyCode(writeError.getCode())) { + return new DuplicateKeyException(ex.getMessage(), ex); + } + } + } + } - if (ex instanceof BulkWriteException) { - return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex); + return new DataIntegrityViolationException(ex.getMessage(), ex); } // All other MongoExceptions - if (ex instanceof MongoException) { - - int code = ((MongoException) ex).getCode(); - - if (MongoDbErrorCodes.isDuplicateKeyCode(code)) { - throw new DuplicateKeyException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) { - throw new DataAccessResourceFailureException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001 - || code == 12010 || code == 12011 || code == 12012) { - throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) { - throw new PermissionDeniedDataAccessException(ex.getMessage(), ex); + if (ex instanceof MongoException mongoException) { + + int code = mongoException.getCode(); + + if (MongoDbErrorCodes.isDuplicateKeyError(mongoException)) { + return new DuplicateKeyException(ex.getMessage(), ex); } + if (MongoDbErrorCodes.isDataAccessResourceError(mongoException)) { + return new DataAccessResourceFailureException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isInvalidDataAccessApiUsageError(mongoException) || code == 12001 || code == 12010 + || code == 12011 || code == 12012) { + return new InvalidDataAccessApiUsageException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isPermissionDeniedError(mongoException)) { + return new PermissionDeniedDataAccessException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isDataIntegrityViolationError(mongoException)) { + return new DataIntegrityViolationException(mongoException.getMessage(), mongoException); + } + if (MongoDbErrorCodes.isClientSessionFailure(mongoException)) { + return isTransientFailure(mongoException) ? 
new TransientClientSessionException(ex.getMessage(), ex) + : new ClientSessionException(ex.getMessage(), ex); + } + if (ex.getCause() != null && SECURITY_EXCEPTIONS.contains(ClassUtils.getShortName(ex.getCause().getClass()))) { + return new PermissionDeniedDataAccessException(ex.getMessage(), ex); + } + return new UncategorizedMongoDbException(ex.getMessage(), ex); } + // may interfere with OmitStackTraceInFastThrow (enabled by default). + // see https://jira.spring.io/browse/DATAMONGO-1905 + if (ex instanceof IllegalStateException) { + for (StackTraceElement elm : ex.getStackTrace()) { + if (elm.getClassName().contains("ClientSession")) { + return new ClientSessionException(ex.getMessage(), ex); + } + } + } + // If we get here, we have an exception that resulted from user code, // rather than the persistence provider, so we return null to indicate // that translation should not occur. return null; } + + /** + * Check if a given exception holds an error label indicating a transient failure. + * + * @param e the exception to inspect. + * @return {@literal true} if the given {@link Exception} is a {@link MongoException} holding one of the transient + * exception error labels. + * @see MongoException#hasErrorLabel(String) + * @since 4.4 + */ + public boolean isTransientFailure(Exception e) { + + if (e instanceof MongoException mongoException) { + return mongoException.hasErrorLabel(MongoException.TRANSIENT_TRANSACTION_ERROR_LABEL) + || mongoException.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL); + } + + if (e.getCause() != e && e.getCause() instanceof Exception ex) { + return isTransientFailure(ex); + } + + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoFactoryBean.java deleted file mode 100644 index 03107f611c..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoFactoryBean.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2010-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -import org.springframework.beans.factory.config.AbstractFactoryBean; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.CannotGetMongoDbConnectionException; -import org.springframework.util.StringUtils; - -import com.mongodb.Mongo; -import com.mongodb.MongoOptions; -import com.mongodb.ServerAddress; -import com.mongodb.WriteConcern; - -/** - * Convenient factory for configuring MongoDB. - * - * @author Thomas Risberg - * @author Graeme Rocher - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - * @since 1.0 - * @deprecated since 1.7. 
Please use {@link MongoClientFactoryBean} instead. - */ -@Deprecated -public class MongoFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - - private MongoOptions mongoOptions; - private String host; - private Integer port; - private WriteConcern writeConcern; - private List replicaSetSeeds; - private List replicaPair; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; - - /** - * @param mongoOptions - */ - public void setMongoOptions(MongoOptions mongoOptions) { - this.mongoOptions = mongoOptions; - } - - public void setReplicaSetSeeds(ServerAddress[] replicaSetSeeds) { - this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds); - } - - /** - * @deprecated use {@link #setReplicaSetSeeds(ServerAddress[])} instead - * @param replicaPair - */ - @Deprecated - public void setReplicaPair(ServerAddress[] replicaPair) { - this.replicaPair = filterNonNullElementsAsList(replicaPair); - } - - /** - * Configures the host to connect to. - * - * @param host - */ - public void setHost(String host) { - this.host = host; - } - - /** - * Configures the port to connect to. - * - * @param port - */ - public void setPort(int port) { - this.port = port; - } - - /** - * Sets the {@link WriteConcern} to be configured for the {@link Mongo} instance to be created. - * - * @param writeConcern - */ - public void setWriteConcern(WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /** - * Configures the {@link PersistenceExceptionTranslator} to use. - * - * @param exceptionTranslator can be {@literal null}. - */ - public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return Mongo.class; - } - - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ - public DataAccessException translateExceptionIfPossible(RuntimeException ex) { - return exceptionTranslator.translateExceptionIfPossible(ex); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - @Override - protected Mongo createInstance() throws Exception { - - Mongo mongo; - ServerAddress defaultOptions = new ServerAddress(); - - if (mongoOptions == null) { - mongoOptions = new MongoOptions(); - } - - if (!isNullOrEmpty(replicaPair)) { - if (replicaPair.size() < 2) { - throw new CannotGetMongoDbConnectionException("A replica pair must have two server entries"); - } - mongo = new Mongo(replicaPair.get(0), replicaPair.get(1), mongoOptions); - } else if (!isNullOrEmpty(replicaSetSeeds)) { - mongo = new Mongo(replicaSetSeeds, mongoOptions); - } else { - String mongoHost = StringUtils.hasText(host) ? host : defaultOptions.getHost(); - mongo = port != null ? 
new Mongo(new ServerAddress(mongoHost, port), mongoOptions) : new Mongo(mongoHost, - mongoOptions); - } - - if (writeConcern != null) { - mongo.setWriteConcern(writeConcern); - } - - return mongo; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#destroyInstance(java.lang.Object) - */ - @Override - protected void destroyInstance(Mongo mongo) throws Exception { - mongo.close(); - } - - private static boolean isNullOrEmpty(Collection elements) { - return elements == null || elements.isEmpty(); - } - - /** - * Returns the given array as {@link List} with all {@literal null} elements removed. - * - * @param elements the elements to filter - * @return a new unmodifiable {@link List#} from the given elements without nulls - */ - private static List filterNonNullElementsAsList(T[] elements) { - - if (elements == null) { - return Collections.emptyList(); - } - - List candidateElements = new ArrayList(); - - for (T element : elements) { - if (element != null) { - candidateElements.add(element); - } - } - - return Collections.unmodifiableList(candidateElements); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java new file mode 100644 index 0000000000..66b1cf209e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -0,0 +1,250 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.util.Assert; + +/** + * {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the + * following mapping rules. + *
+ * <p>
+ * <strong>Required Properties</strong>
+ * <ul>
+ * <li>Properties of primitive type</li>
+ * </ul>
+ * <strong>Ignored Properties</strong>
+ * <ul>
+ * <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
+ * </ul>
+ * <strong>Property Type Mapping</strong>
+ * <ul>
+ * <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
+ * <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
+ * <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
+ * <li>{@link java.util.Map} -> {@code type : 'object'}</li>
+ * <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
+ * <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
+ * <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
+ * </ul>
    + *
    + * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into + * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more + * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. + * {@link Encrypted} properties will contain {@literal encrypt} information. + * + * @author Christoph Strobl + * @since 2.2 + */ +public interface MongoJsonSchemaCreator { + + /** + * Create the {@link MongoJsonSchema} for the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + */ + MongoJsonSchema createSchemaFor(Class type); + + /** + * Create a merged {@link MongoJsonSchema} out of the individual schemas of the given types by merging their + * properties into one large {@link MongoJsonSchema schema}. + * + * @param types must not be {@literal null} nor contain {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergedSchemaFor(Class... types) { + + MongoJsonSchema[] schemas = Arrays.stream(types).map(this::createSchemaFor).toArray(MongoJsonSchema[]::new); + return MongoJsonSchema.merge(schemas); + } + + /** + * Filter matching {@link JsonSchemaProperty properties}. + * + * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + MongoJsonSchemaCreator filter(Predicate filter); + + /** + * Entry point to specify additional behavior for a given path. + * + * @param path the path using {@literal dot '.'} notation. + * @return new instance of {@link PropertySpecifier}. + * @since 3.4 + */ + PropertySpecifier property(String path); + + /** + * The context in which a specific {@link #getProperty()} is encountered during schema creation. + * + * @since 3.3 + */ + interface JsonSchemaPropertyContext { + + /** + * The path to a given field/property in dot notation. + * + * @return never {@literal null}. + */ + String getPath(); + + /** + * The current property. + * + * @return never {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Obtain the {@link MongoPersistentEntity} for a given property. + * + * @param property must not be {@literal null}. + * @param + * @return {@literal null} if the property is not an entity. It is nevertheless recommend to check + * {@link PersistentProperty#isEntity()} first. + */ + @Nullable + MongoPersistentEntity resolveEntity(MongoPersistentProperty property); + + } + + /** + * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones. + * + * @return new instance of {@link Predicate}. 
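A minimal usage sketch of the creator, assuming a hypothetical Person domain class:

    // derive a schema for the whole type
    MongoJsonSchema schema = MongoJsonSchemaCreator.create().createSchemaFor(Person.class);

    // narrow the generated schema to encrypted properties only
    MongoJsonSchema encryptedOnlySchema = MongoJsonSchemaCreator.create()
        .filter(MongoJsonSchemaCreator.encryptedOnly())
        .createSchemaFor(Person.class);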
+ * @since 3.3 + */ + static Predicate encryptedOnly() { + + return new Predicate() { + + // cycle guard + private final Set seen = new HashSet<>(); + + @Override + public boolean test(JsonSchemaPropertyContext context) { + return extracted(context.getProperty(), context); + } + + private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) { + if (property.isAnnotationPresent(Encrypted.class)) { + return true; + } + + if (!property.isEntity() || seen.contains(property)) { + return false; + } + + seen.add(property); + + for (MongoPersistentProperty nested : context.resolveEntity(property)) { + if (extracted(nested, context)) { + return true; + } + } + return false; + } + }; + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given + * {@link MongoConverter}. + * + * @param mongoConverter must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + */ + static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { + + Assert.notNull(mongoConverter, "MongoConverter must not be null"); + return new MappingMongoJsonSchemaCreator(mongoConverter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential + * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}. + * + * @param mappingContext must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create(MappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We + * recommend to use {@link #create(MappingContext)}. + * + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * @author Christoph Strobl + * @since 3.4 + */ + interface PropertySpecifier { + + /** + * Set additional type parameters for polymorphic ones. + * + * @param types must not be {@literal null}. + * @return the source + */ + MongoJsonSchemaCreator withTypes(Class... types); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java index b63c9e8b3d..65396bc7fe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. - * + * Copyright 2011-2025 the original author or authors. 
+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,37 +18,56 @@ import java.util.Collection; import java.util.List; import java.util.Set; - +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.util.CloseableIterator; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.util.Lock; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; -import com.mongodb.CommandResult; -import com.mongodb.Cursor; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; +import com.mongodb.ClientSessionOptions; import com.mongodb.ReadPreference; -import com.mongodb.WriteResult; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; /** * Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but * a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK - * proxy). - * + * proxy).
    + * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB + * specific documentation to learn more about Multi + * Document Transactions. + * * @author Thomas Risberg * @author Mark Pollack * @author Oliver Gierke @@ -56,246 +75,357 @@ * @author Chuong Ngo * @author Christoph Strobl * @author Thomas Darimont + * @author Maninder Singh + * @author Mark Paluch + * @author Woojin Shin */ -public interface MongoOperations { +public interface MongoOperations extends FluentMongoOperations { /** * The collection name used for the specified class by this template. - * + * * @param entityClass must not be {@literal null}. - * @return + * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. */ String getCollectionName(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the - * MongoDB driver to convert the JSON string to a DBObject. Any errors that result from executing this command will be + * Execute a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to + * obtain the {@link Document} holding the actual command. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. - * - * @param jsonCommand a MongoDB command expressed as a JSON string. - */ - CommandResult executeCommand(String jsonCommand); - - /** - * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO - * exception hierarchy. - * - * @param command a MongoDB command + * + * @param jsonCommand a MongoDB command expressed as a JSON string. Must not be {@literal null}. + * @return a result object returned by the action. */ - CommandResult executeCommand(DBObject command); + Document executeCommand(String jsonCommand); /** * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO * exception hierarchy. - * - * @param command a MongoDB command - * @param options query options to use - * @deprecated since 1.7. Please use {@link #executeCommand(DBObject, ReadPreference)}, as the MongoDB Java driver - * version 3 no longer supports this operation. + * + * @param command a MongoDB command. + * @return a result object returned by the action. */ - @Deprecated - CommandResult executeCommand(DBObject command, int options); + Document executeCommand(Document command); /** * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's data * access exception hierarchy. - * + * * @param command a MongoDB command, must not be {@literal null}. * @param readPreference read preferences to use, can be {@literal null}. - * @return + * @return a result object returned by the action. * @since 1.7 */ - CommandResult executeCommand(DBObject command, ReadPreference readPreference); + Document executeCommand(Document command, @Nullable ReadPreference readPreference); /** * Execute a MongoDB query and iterate over the query results on a per-document basis with a DocumentCallbackHandler. 
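For illustration, DocumentCallbackHandler is a single-method interface, so the per-document query above can be driven with a lambda. A minimal sketch given a MongoOperations instance named template (collection and field names are made up):

    List<String> names = new ArrayList<>();
    template.executeQuery(new Query(), "people",
        document -> names.add(document.getString("name"))); // processDocument, one Document at a time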
- * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param collectionName name of the collection to retrieve the objects from - * @param dch the handler that will extract results, one document at a time + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param collectionName name of the collection to retrieve the objects from. + * @param dch the handler that will extract results, one document at a time. */ void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch); /** - * Executes a {@link DbCallback} translating any exceptions as necessary. - *
    + * Executes a {@link DbCallback} translating any exceptions as necessary.
* Allows for returning a result object, that is a domain object or a collection of domain objects. - * - * @param <T> return type - * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. - * @return a result object returned by the action or null + * + * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not + * be {@literal null}. + * @param <T> return type. + * @return a result object returned by the action or {@literal null}. */ + @Nullable <T> T execute(DbCallback<T> action); /** - * Executes the given {@link CollectionCallback} on the entity collection of the specified class. - *
    + * Executes the given {@link CollectionCallback} on the entity collection of the specified class.
* Allows for returning a result object, that is a domain object or a collection of domain objects. - * - * @param entityClass class that determines the collection to use - * @param <T> return type - * @param action callback object that specifies the MongoDB action - * @return a result object returned by the action or null + * + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @param action callback object that specifies the MongoDB action. Must not be {@literal null}. + * @param <T> return type. + * @return a result object returned by the action or {@literal null}. */ + @Nullable <T> T execute(Class<?> entityClass, CollectionCallback<T> action); /** - * Executes the given {@link CollectionCallback} on the collection of the given name. - *
    + * Executes the given {@link CollectionCallback} on the collection of the given name.
* Allows for returning a result object, that is a domain object or a collection of domain objects. - * - * @param <T> return type - * @param collectionName the name of the collection that specifies which DBCollection instance will be passed into - * @param action callback object that specifies the MongoDB action the callback action. - * @return a result object returned by the action or null - */ + * + * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be + * passed into. Must not be {@literal null} or empty. + * @param action callback object that specifies the MongoDB action. Must not be {@literal null}. + * @param <T> return type. + * @return a result object returned by the action or {@literal null}. + */ + @Nullable <T> T execute(String collectionName, CollectionCallback<T> action); /** - * Executes the given {@link DbCallback} within the same connection to the database so as to ensure consistency in a - * write heavy environment where you may read the data that you wrote. See the comments on {@see Java Driver Concurrency} - *
    - * Allows for returning a result object, that is a domain object or a collection of domain objects. - * - * @param return type - * @param action callback that specified the MongoDB actions to perform on the DB instance - * @return a result object returned by the action or null - * @deprecated since 1.7 as the MongoDB Java driver version 3 does not longer support request boundaries via - * {@link DB#requestStart()} and {@link DB#requestDone()}. + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} + * with given {@literal sessionOptions} to each and every command issued against MongoDB. + * + * @param sessionOptions must not be {@literal null}. + * @return new instance of {@link SessionScoped}. Never {@literal null}. + * @since 2.1 */ - @Deprecated - T executeInSession(DbCallback action); + SessionScoped withSession(ClientSessionOptions sessionOptions); + + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
+ * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the + * {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}. + * + * @param sessionProvider must not be {@literal null}. + * @since 2.1 + */ + default SessionScoped withSession(Supplier<ClientSession> sessionProvider) { + + Assert.notNull(sessionProvider, "SessionProvider must not be null"); + + return new SessionScoped() { + + private final Lock lock = Lock.of(new ReentrantLock()); + private @Nullable ClientSession session; + + @Override + public <T> T execute(SessionCallback<T> action, Consumer<ClientSession> onComplete) { + + lock.executeWithoutResult(() -> { + + if (session == null) { + session = sessionProvider.get(); + } + }); + + try { + return action.doInSession(MongoOperations.this.withSession(session)); + } finally { + onComplete.accept(session); + } + } + }; + } + + /** + * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
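A hedged usage sketch for the session API above; the {@code Person} type and the {@code template} instance are assumptions for illustration only.

    import com.mongodb.ClientSessionOptions;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;

    class SessionExample {

        record Person(String firstname, String lastname) {}

        Person loadInSession(MongoOperations template) {
            ClientSessionOptions options = ClientSessionOptions.builder().causallyConsistent(true).build();

            // Every operation issued through "ops" is bound to the same ClientSession.
            return template.withSession(options)
                    .execute(ops -> ops.findOne(new Query(Criteria.where("lastname").is("Doe")), Person.class));
        }
    }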
    + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. + * + * @param session must not be {@literal null}. + * @return {@link ClientSession} bound instance of {@link MongoOperations}. + * @since 2.1 + */ + MongoOperations withSession(ClientSession session); /** * Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB - * {@link Cursor}. + * {@link com.mongodb.client.FindIterable}. *
    - * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. - * + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType must not be {@literal null}. * @param element return type - * @param query - * @param entityType - * @return + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.7 */ - CloseableIterator stream(Query query, Class entityType); + Stream stream(Query query, Class entityType); + + /** + * Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed + * by a Mongo DB {@link com.mongodb.client.FindIterable}. + *
    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @param element return type + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + * @since 1.10 + */ + Stream stream(Query query, Class entityType, String collectionName); /** * Create an uncapped collection with a name based on the provided entity class. - * - * @param entityClass class that determines the collection to create - * @return the created collection + * + * @param entityClass class that determines the collection to create. + * @return the created collection. */ - DBCollection createCollection(Class entityClass); + MongoCollection createCollection(Class entityClass); /** * Create a collection with a name based on the provided entity class using the options. - * - * @param entityClass class that determines the collection to create + * + * @param entityClass class that determines the collection to create. Must not be {@literal null}. * @param collectionOptions options to use when creating the collection. - * @return the created collection + * @return the created collection. */ - DBCollection createCollection(Class entityClass, CollectionOptions collectionOptions); + MongoCollection createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions); /** * Create an uncapped collection with the provided name. - * - * @param collectionName name of the collection - * @return the created collection + * + * @param collectionName name of the collection. + * @return the created collection. */ - DBCollection createCollection(String collectionName); + MongoCollection createCollection(String collectionName); /** * Create a collection with the provided name and options. - * - * @param collectionName name of the collection + * + * @param collectionName name of the collection. Must not be {@literal null} nor empty. * @param collectionOptions options to use when creating the collection. - * @return the created collection + * @return the created collection. */ - DBCollection createCollection(String collectionName, CollectionOptions collectionOptions); + MongoCollection createCollection(String collectionName, @Nullable CollectionOptions collectionOptions); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationOperation... stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. 
+ * @param source the type defining the view's source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @since 4.0 + */ + default MongoCollection<Document> createView(String name, Class<?> source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the view's source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection<Document> createView(String name, Class<?> source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view that defines the view's source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection<Document> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); /** * A set of collection names. - * - * @return list of collection names + * + * @return list of collection names. */ Set<String> getCollectionNames(); /** - * Get a collection by name, creating it if it doesn't exist. - *
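As an editorial sketch of the view API introduced above ({@code Person} and the {@code active} field are assumptions):

    import static org.springframework.data.mongodb.core.query.Criteria.where;

    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;

    class ViewSetup {

        record Person(String lastname, boolean active) {}

        void createActivePeopleView(MongoOperations template) {
            // Creates a server-side view over the collection backing Person.
            template.createView("activePeople", Person.class, Aggregation.match(where("active").is(true)));
        }
    }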
+ * Get a {@link MongoCollection} by its name. The returned collection may not exist yet (except in local memory) and + * is created on first interaction with the server. Collections can be explicitly created via + * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) + * exists} first.
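Because {@code getCollection(...)} no longer creates the collection eagerly, a guarded setup might look like this sketch (the collection name and sizing are assumptions):

    import org.springframework.data.mongodb.core.CollectionOptions;
    import org.springframework.data.mongodb.core.MongoOperations;

    class CollectionSetup {

        void ensureCappedEventLog(MongoOperations template) {
            if (!template.collectionExists("event-log")) {
                // Create explicitly instead of relying on implicit server-side creation.
                template.createCollection("event-log", CollectionOptions.empty().capped().size(1024 * 1024));
            }
        }
    }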
* Translate any exceptions as necessary. - * - * @param collectionName name of the collection - * @return an existing collection or a newly created one. + * + * @param collectionName name of the collection. Must not be {@literal null}. + * @return an existing collection or one created on first server interaction. */ - DBCollection getCollection(String collectionName); + MongoCollection<Document> getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *
    + * Check to see if a collection with a name indicated by the entity class exists.
    * Translate any exceptions as necessary. - * - * @param entityClass class that determines the name of the collection + * + * @param entityClass class that determines the name of the collection. Must not be {@literal null}. * @return true if a collection with the given name is found, false otherwise. */ boolean collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *
    + * Check to see if a collection with a given name exists.
    * Translate any exceptions as necessary. - * - * @param collectionName name of the collection + * + * @param collectionName name of the collection. Must not be {@literal null}. * @return true if a collection with the given name is found, false otherwise. */ boolean collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *
    + * Drop the collection with the name indicated by the entity class.
    * Translate any exceptions as necessary. - * - * @param entityClass class that determines the collection to drop/delete. + * + * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. */ void dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *
    + * Drop the collection with the given name.
    * Translate any exceptions as necessary. - * + * * @param collectionName name of the collection to drop/delete. */ void dropCollection(String collectionName); /** * Returns the operations that can be performed on indexes - * + * * @return index operations on the named collection */ IndexOperations indexOps(String collectionName); /** * Returns the operations that can be performed on indexes - * + * * @return index operations on the named collection associated with the given entity class */ IndexOperations indexOps(Class entityClass); /** - * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level. - * - * @return + * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level. + * + * @return never {@literal null}. * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ + @Deprecated ScriptOperations scriptOps(); /** - * Returns a new {@link BulkOperations} for the given collection. - * + * Returns a new {@link BulkOperations} for the given collection.
    + * NOTE: Any additional support for field mapping, etc. is not available for {@literal update} or + * {@literal remove} operations in bulk mode due to the lack of domain type information. Use + * {@link #bulkOps(BulkMode, Class, String)} to get full type specific support. + * * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. * @return {@link BulkOperations} on the named collection @@ -304,7 +434,7 @@ public interface MongoOperations { /** * Returns a new {@link BulkOperations} for the given entity type. - * + * * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. * @param entityType the name of the entity class, must not be {@literal null}. * @return {@link BulkOperations} on the named collection associated of the given entity class. @@ -313,80 +443,47 @@ public interface MongoOperations { /** * Returns a new {@link BulkOperations} for the given entity type and collection name. - * + * * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. - * @param entityClass the name of the entity class, must not be {@literal null}. + * @param entityType the name of the entity class. Can be {@literal null}. * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. * @return {@link BulkOperations} on the named collection associated with the given entity class. */ - BulkOperations bulkOps(BulkMode mode, Class entityType, String collectionName); + BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); /** - * Query for a list of objects of type T from the collection used by the entity class. - *
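To illustrate the typed-versus-untyped distinction called out in the note above, a short sketch ({@code Person} and the field values are assumptions):

    import org.springframework.data.mongodb.core.BulkOperations;
    import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;
    import org.springframework.data.mongodb.core.query.Update;

    class BulkExample {

        record Person(String lastname) {}

        void fixSpelling(MongoOperations template) {
            // Binding the bulk to Person.class keeps field mapping for updates and removes.
            BulkOperations bulk = template.bulkOps(BulkMode.UNORDERED, Person.class);
            bulk.updateMulti(new Query(Criteria.where("lastname").is("Mattews")), new Update().set("lastname", "Matthews"));
            bulk.execute();
        }
    }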
    + * Query for a list of objects of type T from the collection used by the entity class.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *
    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. - * - * @param entityClass the parameterized type of the returned list - * @return the converted collection + * + * @param entityClass the parametrized type of the returned list. + * @return the converted collection. */ List findAll(Class entityClass); /** - * Query for a list of objects of type T from the specified collection. - *
    + * Query for a list of objects of type T from the specified collection.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *
    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. - * - * @param entityClass the parameterized type of the returned list. - * @param collectionName name of the collection to retrieve the objects from - * @return the converted collection + * + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted collection. */ List findAll(Class entityClass, String collectionName); - /** - * Execute a group operation over the entire collection. The group operation entity class should match the 'shape' of - * the returned object that takes int account the initial document structure as well as any finalize functions. - * - * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are - * considered. - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parameterized type of the returned list - * @return The results of the group operation - */ - GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass); - - /** - * Execute a group operation restricting the rows to those which match the provided Criteria. The group operation - * entity class should match the 'shape' of the returned object that takes int account the initial document structure - * as well as any finalize functions. - * - * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are - * considered. - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parameterized type of the returned list - * @return The results of the group operation - */ - GroupByResults group(Criteria criteria, String inputCollectionName, GroupBy groupBy, Class entityClass); - /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the * inputCollection is derived from the inputType of the aggregation. - * + * * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be * {@literal null}. * @param collectionName The name of the input collection to use for the aggreation. - * @param outputType The parameterized type of the returned list, must not be {@literal null}. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. * @return The results of the aggregation operation. * @since 1.3 */ @@ -395,10 +492,10 @@ public interface MongoOperations { /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the * inputCollection is derived from the inputType of the aggregation. - * + * * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be * {@literal null}. - * @param outputType The parameterized type of the returned list, must not be {@literal null}. 
+ * @param outputType The parametrized type of the returned list, must not be {@literal null}. * @return The results of the aggregation operation. * @since 1.3 */ @@ -406,12 +503,12 @@ public interface MongoOperations { /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. - * + * * @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be * {@literal null}. * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or * empty. - * @param outputType The parameterized type of the returned list, must not be {@literal null}. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. * @return The results of the aggregation operation. * @since 1.3 */ @@ -419,617 +516,1393 @@ public interface MongoOperations { /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. - * + * * @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be * {@literal null}. * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or * empty. - * @param outputType The parameterized type of the returned list, must not be {@literal null}. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. * @return The results of the aggregation operation. * @since 1.3 */ AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType); + /** + * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *
    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. The name of the inputCollection is derived from + * the inputType of the aggregation. + *
+ * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling + * explanation mode will throw an {@link IllegalArgumentException}. + * + * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be + * {@literal null}. + * @param collectionName The name of the input collection to use for the aggregation. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + * @since 2.0 + */ + <O> Stream<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType); + + /** + * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *
    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class and are returned as stream. The name of the + * inputCollection is derived from the inputType of the aggregation. + *
    + * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling + * explanation mode will throw an {@link IllegalArgumentException}. + * + * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be + * {@literal null}. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + * @since 2.0 + */ + Stream aggregateStream(TypedAggregation aggregation, Class outputType); + + /** + * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *
    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *
    + * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling + * explanation mode will throw an {@link IllegalArgumentException}. + * + * @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be + * {@literal null}. + * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or + * empty. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + * @since 2.0 + */ + Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType); + + /** + * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *
    + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *
    + * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling + * explanation mode will throw an {@link IllegalArgumentException}. + * + * @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be + * {@literal null}. + * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or + * empty. + * @param outputType The parametrized type of the returned list, must not be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). + * @since 2.0 + */ + Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType); + /** * Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE - * - * @param inputCollectionName the collection where the map-reduce will read from - * @param mapFunction The JavaScript map function + * + * @param inputCollectionName the collection where the map-reduce will read from. Must not be {@literal null}. + * @param mapFunction The JavaScript map function. * @param reduceFunction The JavaScript reduce function - * @param mapReduceOptions Options that specify detailed map-reduce behavior - * @param entityClass The parameterized type of the returned list + * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); /** * Execute a map-reduce operation that takes additional map-reduce options. - * - * @param inputCollectionName the collection where the map-reduce will read from + * + * @param inputCollectionName the collection where the map-reduce will read from. Must not be {@literal null}. * @param mapFunction The JavaScript map function * @param reduceFunction The JavaScript reduce function - * @param mapReduceOptions Options that specify detailed map-reduce behavior - * @param entityClass The parameterized type of the returned list + * @param mapReduceOptions Options that specify detailed map-reduce behavior. + * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, - MapReduceOptions mapReduceOptions, Class entityClass); + @Nullable MapReduceOptions mapReduceOptions, Class entityClass); /** * Execute a map-reduce operation that takes a query. The map-reduce operation will be formed with an output type of * INLINE - * - * @param query The query to use to select the data for the map phase - * @param inputCollectionName the collection where the map-reduce will read from + * + * @param query The query to use to select the data for the map phase. Must not be {@literal null}. + * @param inputCollectionName the collection where the map-reduce will read from. Must not be {@literal null}. 
* @param mapFunction The JavaScript map function * @param reduceFunction The JavaScript reduce function - * @param mapReduceOptions Options that specify detailed map-reduce behavior - * @param entityClass The parameterized type of the returned list + * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class<T> entityClass); /** * Execute a map-reduce operation that takes a query and additional map-reduce options - * - * @param query The query to use to select the data for the map phase - * @param inputCollectionName the collection where the map-reduce will read from + * + * @param query The query to use to select the data for the map phase. Must not be {@literal null}. + * @param inputCollectionName the collection where the map-reduce will read from. Must not be {@literal null}. * @param mapFunction The JavaScript map function * @param reduceFunction The JavaScript reduce function * @param mapReduceOptions Options that specify detailed map-reduce behavior - * @param entityClass The parameterized type of the returned list + * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, - MapReduceOptions mapReduceOptions, Class entityClass); + @Nullable MapReduceOptions mapReduceOptions, Class<T> entityClass); /** * Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Will consider entity mapping * information to determine the collection the query is run against. Note that MongoDB limits the number of results * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of * results.
+ * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the + * {@code $geoNear} aggregation stage to emulate the {@code geoNear} command's functionality. We recommend using + * aggregations directly:
+ *
+ * <pre class="code">
    +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
    +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
    +	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
+ * </pre>
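The snippet above can be executed end to end through the {@code aggregate(...)} methods of this interface; a hedged sketch follows, in which {@code Restaurant}, the coordinates, and the {@code template} instance are assumptions.

    import org.bson.Document;
    import org.springframework.data.geo.Distance;
    import org.springframework.data.geo.Metrics;
    import org.springframework.data.geo.Point;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import org.springframework.data.mongodb.core.aggregation.AggregationResults;
    import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
    import org.springframework.data.mongodb.core.query.NearQuery;

    class GeoNearMigration {

        record Restaurant(String name) {}

        AggregationResults<Document> nearby(MongoOperations template) {
            NearQuery near = NearQuery.near(new Point(-73.99171, 40.738868))
                    .maxDistance(new Distance(1, Metrics.KILOMETERS));

            // $geoNear stage writes the computed distance into the "distance" field.
            TypedAggregation<Restaurant> geoNear = Aggregation.newAggregation(Restaurant.class,
                    Aggregation.geoNear(near, "distance"));

            return template.aggregate(geoNear, Document.class);
        }
    }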
+ * * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @return + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated <T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass); /** * Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note that MongoDB limits the * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a * particular number of results.
+ * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the + * {@code $geoNear} aggregation stage to emulate the {@code geoNear} command's functionality. We recommend using + * aggregations directly:
+ *
+ * <pre class="code">
    +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
    +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
    +	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
+ * </pre>
+ * * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @param collectionName the collection to trigger the query against. If no collection name is given the entity class - * will be inspected. + * will be inspected. Must not be {@literal null} nor empty. * @return + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated <T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName); /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *
    + * specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *
    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. - * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param entityClass the parameterized type of the returned list. - * @return the converted object + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. + * @param entityClass the parametrized type of the returned list. + * @return the converted object. */ + @Nullable T findOne(Query query, Class entityClass); /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *
    + * type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *
    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. - * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param entityClass the parameterized type of the returned list. - * @param collectionName name of the collection to retrieve the objects from - * @return the converted object - */ + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted object. + */ + @Nullable T findOne(Query query, Class entityClass, String collectionName); /** - * Determine result of given {@link Query} contains at least one element. - * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * Determine result of given {@link Query} contains at least one element.
    + * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of + * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. + * + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. - * @return + * @return {@literal true} if the query yields a result. */ boolean exists(Query query, String collectionName); /** * Determine result of given {@link Query} contains at least one element. - * - * @param query the {@link Query} class that specifies the criteria used to find a record. - * @param entityClass the parameterized type. - * @return + * + * @param query the {@link Query} class that specifies the criteria used to find a document. + * @param entityClass the parametrized type. + * @return {@literal true} if the query yields a result. */ boolean exists(Query query, Class entityClass); /** * Determine result of given {@link Query} contains at least one element. - * - * @param query the {@link Query} class that specifies the criteria used to find a record. - * @param entityClass the parameterized type. + * + * @param query the {@link Query} class that specifies the criteria used to find a document. + * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName name of the collection to check for objects. - * @return + * @return {@literal true} if the query yields a result. */ - boolean exists(Query query, Class entityClass, String collectionName); + boolean exists(Query query, @Nullable Class entityClass, String collectionName); /** - * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. - *
    + * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *
    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. - * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param entityClass the parameterized type of the returned list. - * @return the List of converted objects + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. + * @return the List of converted objects. */ List find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a List of the specified type. - *
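A small usage sketch for the finder methods above, using the static factory methods of {@link Query} and {@link Criteria} ({@code Person} and the field values are assumptions):

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import java.util.List;
    import org.springframework.data.mongodb.core.MongoOperations;

    class FindExample {

        record Person(String lastname, List<String> roles) {}

        List<Person> admins(MongoOperations template) {
            // Criteria fields are mapped against the Person metamodel before the query runs.
            return template.find(query(where("roles").in("ADMIN")).limit(50), Person.class);
        }
    }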
    + * Map the results of an ad-hoc query on the specified collection to a List of the specified type.
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *
    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. - * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param entityClass the parameterized type of the returned list. - * @param collectionName name of the collection to retrieve the objects from - * @return the List of converted objects + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. + * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. + * @return the List of converted objects. */ List find(Query query, Class entityClass, String collectionName); + /** + * Query for a window of objects of type T from the specified collection.
    + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
    + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *
    + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
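A hedged sketch of keyset scrolling with the API above, under the assumption that {@code Window#positionAt(int)} yields a keyset position once scrolling started from {@code ScrollPosition.keyset()}; {@code Person} and the sort keys are illustrative.

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import org.springframework.data.domain.KeysetScrollPosition;
    import org.springframework.data.domain.ScrollPosition;
    import org.springframework.data.domain.Sort;
    import org.springframework.data.domain.Window;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.query.Query;

    class ScrollExample {

        record Person(String lastname) {}

        void scrollAll(MongoOperations template) {
            // Non-nullable sort keys, as recommended above.
            Query query = query(where("lastname").regex("^M")).with(Sort.by("lastname", "_id")).limit(100);

            Window<Person> window = template.scroll(query.with(ScrollPosition.keyset()), Person.class);
            while (window.hasNext()) {
                // Continue behind the last key seen in the previous window.
                KeysetScrollPosition next = (KeysetScrollPosition) window.positionAt(window.size() - 1);
                window = template.scroll(query.with(next), Person.class);
            }
        }
    }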
    + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
    + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *
+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + <T> Window<T> scroll(Query query, Class<T> entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is run against will be * derived from the given target class as well. - * - * @param <T> - * @param id the id of the document to return. - * @param entityClass the type the document shall be converted into. + * + * @param id the id of the document to return. Must not be {@literal null}. + * @param entityClass the type the document shall be converted into. Must not be {@literal null}. * @return the document with the given id mapped onto the given target class. */ + @Nullable <T> T findById(Object id, Class<T> entityClass); /** * Returns the document with the given id from the given collection mapped onto the given target class. - * - * @param id the id of the document to return - * @param entityClass the type to convert the document to - * @param collectionName the collection to query for the document - * @param <T> - * @return + * + * @param id the id of the document to return. + * @param entityClass the type to convert the document to. + * @param collectionName the collection to query for the document. + * @return the converted object or {@literal null} if the document does not exist. */ + @Nullable <T> T findById(Object id, Class<T> entityClass, String collectionName); /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. - * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. - * @param entityClass the parameterized type. - * @return - */ - <T> T findAndModify(Query query, Update update, Class<T> entityClass); - - /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. - * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. - * @param entityClass the parameterized type. - * @param collectionName the collection to query.
- * @return - */ - T findAndModify(Query query, Update update, Class entityClass, String collectionName); - - /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and + * returns the results in a {@link List}. + * + * @param field the name of the field to inspect for distinct values. Must not be {@literal null}. + * @param entityClass the domain type used for determining the actual {@link MongoCollection}. Must not be + * {@literal null}. + * @param resultClass the result type. Must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + default List findDistinct(String field, Class entityClass, Class resultClass) { + return findDistinct(new Query(), field, entityClass, resultClass); + } + + /** + * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and + * returns the results in a {@link List}. + * + * @param query filter {@link Query} to restrict search. Must not be {@literal null}. + * @param field the name of the field to inspect for distinct values. Must not be {@literal null}. + * @param entityClass the domain type used for determining the actual {@link MongoCollection} and mapping the + * {@link Query} to the domain type fields. Must not be {@literal null}. + * @param resultClass the result type. Must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + List findDistinct(Query query, String field, Class entityClass, Class resultClass); + + /** + * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and + * returns the results in a {@link List}. + * + * @param query filter {@link Query} to restrict search. Must not be {@literal null}. + * @param field the name of the field to inspect for distinct values. Must not be {@literal null}. + * @param collectionName the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}. + * @param entityClass the domain type used for mapping the {@link Query} to the domain type fields. + * @param resultClass the result type. Must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + List findDistinct(Query query, String field, String collectionName, Class entityClass, + Class resultClass); + + /** + * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and + * returns the results in a {@link List}. + * + * @param query filter {@link Query} to restrict search. Must not be {@literal null}. + * @param field the name of the field to inspect for distinct values. Must not be {@literal null}. + * @param collection the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}. + * @param resultClass the result type. Must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + default List findDistinct(Query query, String field, String collection, Class resultClass) { + return findDistinct(query, field, collection, Object.class, resultClass); + } + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *
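For the distinct API above, a brief usage sketch ({@code Person} and the field names are assumptions):

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import java.util.List;
    import org.springframework.data.mongodb.core.MongoOperations;

    class DistinctExample {

        record Person(String lastname, boolean active) {}

        List<String> activeLastnames(MongoOperations template) {
            // Values are read from the raw documents and converted to the requested result type.
            return template.findDistinct(query(where("active").is(true)), "lastname", Person.class, String.class);
        }
    }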
    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. + * @param entityClass the parametrized type. Must not be {@literal null}. + * @return the converted object that was updated before it was updated or {@literal null}, if not found. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + @Nullable + T findAndModify(Query query, UpdateDefinition update, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *
    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. + * @param entityClass the parametrized type. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated before it was updated or {@literal null}, if not found. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + @Nullable + T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. - * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. + *
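An atomic increment-and-return sketch using the options variant of {@code findAndModify} described here ({@code Person}, the {@code id} value, and the counter field are assumptions):

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import org.springframework.data.mongodb.core.FindAndModifyOptions;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.query.Update;

    class FindAndModifyExample {

        record Person(String id, int visits) {}

        Person recordVisit(MongoOperations template, String id) {
            // returnNew(true) hands back the document state after the update was applied.
            return template.findAndModify(query(where("id").is(id)), new Update().inc("visits", 1),
                    FindAndModifyOptions.options().returnNew(true), Person.class);
        }
    }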
    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. + * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. - * @param entityClass the parameterized type. - * @return + * @param entityClass the parametrized type. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as + * it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + @Nullable + T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. - * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. - * @param options the {@link FindAndModifyOptions} holding additional information. - * @param entityClass the parameterized type. - * @param collectionName the collection to query. - * @return - */ - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + *
    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @param entityClass the parametrized type. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as + * it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + @Nullable + T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName); + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
    + * The collection name is derived from the {@literal replacement} type.
    + * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + @Nullable + default <T> T findAndReplace(Query query, T replacement) { + return findAndReplace(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
    + * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. + * @since 2.1 + */ + @Nullable + default <T> T findAndReplace(Query query, T replacement, String collectionName) { + return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + @Nullable + default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { + return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + @Nullable + default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return findAndReplace(query, replacement, options, (Class<T>) ClassUtils.getUserClass(replacement), collectionName); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @param entityType the parametrized type. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + @Nullable + default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class<T> entityType, + String collectionName) { + + return findAndReplace(query, replacement, options, entityType, collectionName, entityType); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection + * from. Must not be {@literal null}. + * @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type or + * {@code Object.class} instead. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + @Nullable + default <S, T> T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType, + Class<T> resultType) { + + return findAndReplace(query, replacement, options, entityType, + getCollectionName(ClassUtils.getUserClass(entityType)), resultType); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
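A hedged sketch of the findAndReplace variants above, assuming a hypothetical Person domain class and PersonProjection interface; template is a MongoOperations instance:

Person replacement = new Person("Tony", "Stark"); // must not carry an id

// Replace the first match and return the new document mapped to a projection.
PersonProjection result = template.findAndReplace(
    Query.query(Criteria.where("lastname").is("Stark")), replacement,
    FindAndReplaceOptions.options().returnNew(), Person.class, "people",
    PersonProjection.class);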
    + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @param entityType the type used for mapping the {@link Query} to domain type fields. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type or + * {@code Object.class} instead. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + @Nullable + <S, T> T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType, + String collectionName, Class<T> resultType); + /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

    - * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

    + * database.
    + * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. - * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param entityClass the parameterized type of the returned list. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. + * @param entityClass the parametrized type of the returned list. * @return the converted object */ + @Nullable T findAndRemove(Query query, Class entityClass); /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

    + *
    * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. - * - * @param query the query class that specifies the criteria used to find a record and also an optional fields - * specification - * @param entityClass the parameterized type of the returned list. - * @param collectionName name of the collection to retrieve the objects from - * @return the converted object - */ + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted object. + */ + @Nullable T findAndRemove(Query query, Class entityClass, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. - * - * @param query - * @param entityClass must not be {@literal null}. - * @return + *
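A minimal sketch of the findAndRemove contract above (Person, people and template remain illustrative assumptions):

// Atomically returns and deletes the first matching document; null if none matched.
Person removed = template.findAndRemove(
    Query.query(Criteria.where("status").is("inactive")), Person.class, "people");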
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ long count(Query query, Class entityClass); /** * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} * must solely consist of document field references as we lack type information to map potential property references - * onto document fields. TO make sure the query gets mapped, use {@link #count(Query, Class, String)}. - * - * @param query + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. - * @return + * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #exactCount(Query, String) + * @see #estimatedCount(String) */ long count(Query query, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity - * class to map the given {@link Query}. - * - * @param query + * class to map the given {@link Query}.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @see #estimatedCount(String) + */ + long count(Query query, @Nullable Class<?> entityClass, String collectionName); + + /** + * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, + * based on collection statistics.
    + * Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded clusters or inside + * transactions. + * + * @param entityClass must not be {@literal null}. + * @return the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.1 + */ + default long estimatedCount(Class<?> entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + return estimatedCount(getCollectionName(entityClass)); + } + + /** + * Estimate the number of documents in the given collection based on collection statistics.
    + * Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded clusters or inside + * transactions. + * + * @param collectionName must not be {@literal null}. + * @return the estimated number of documents. + * @since 3.1 + */ + long estimatedCount(String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
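To illustrate the distinction drawn above (Person and template are assumed, not part of this change):

// Exact count of the matching documents; empty queries may be answered from
// collection statistics, as documented above. exactCount(...), introduced just
// below, always runs the countDocuments aggregation instead.
long adults = template.count(Query.query(Criteria.where("age").gte(18)), Person.class);

// Metadata-based estimate; fast, but not session- or transaction-aware.
long roughTotal = template.estimatedCount(Person.class);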
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default long exactCount(Query query, Class<?> entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. - * @return + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 */ - long count(Query query, Class entityClass, String collectionName); + default long exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } /** - * Insert the object into the collection for the entity type of the object to save. - *

    - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

    - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * - * Spring's Type Conversion" for more details. - *

    - *

    - * Insert is used to initially store the object into the database. To update an existing object use the save method. - * - * @param objectToSave the object to store in the collection. + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
    + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
    + * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @since 3.4 + */ + long exactCount(Query query, @Nullable Class<?> entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save.
    + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
    + * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See + * Spring's + * Type Conversion for more details.
    + * Insert is used to initially store the object into the database. To update an existing object use the + * {@link #save(Object)} method. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

    + * The {@code objectToSave} must not be collection-like. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ - void insert(Object objectToSave); + <T> T insert(T objectToSave); /** - * Insert the object into the specified collection. - *
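A short sketch of the changed insert contract, which now returns the entity instead of void (Person is illustrative; getId() is an assumed accessor):

Person saved = template.insert(new Person("Tony", "Stark"));
// The returned instance carries the MongoDB-generated id.
Object id = saved.getId();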

    + * Insert the object into the specified collection.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *

    + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    * Insert is used to initially store the object into the database. To update an existing object use the save method. - * - * @param objectToSave the object to store in the collection - * @param collectionName name of the collection to store the object in + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

    + * The {@code objectToSave} must not be collection-like. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. */ - void insert(Object objectToSave, String collectionName); + <T> T insert(T objectToSave, String collectionName); /** * Insert a Collection of objects into a collection in a single batch write to the database. - * - * @param batchToSave the list of objects to save. - * @param entityClass class that determines the collection to use + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param batchToSave the batch of objects to save. Must not be {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ - void insert(Collection batchToSave, Class entityClass); + <T> Collection<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass); /** - * Insert a list of objects into the specified collection in a single batch write to the database. - * - * @param batchToSave the list of objects to save. - * @param collectionName name of the collection to store the object in + * Insert a batch of objects into the specified collection in a single batch write to the database. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param batchToSave the list of objects to save. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the inserted objects. */ - void insert(Collection batchToSave, String collectionName); + <T> Collection<T> insert(Collection<? extends T> batchToSave, String collectionName); /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. - * - * @param collectionToSave the list of objects to save. + *

    + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

    + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param objectsToSave the list of objects to save. Must not be {@literal null}. + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ - void insertAll(Collection objectsToSave); + <T> Collection<T> insertAll(Collection<? extends T> objectsToSave); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *
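A sketch of the batch variants above, under the same illustrative assumptions (List.of requires Java 9+):

List<Person> people = List.of(new Person("Tony", "Stark"), new Person("Peter", "Parker"));

// One batch write; the returned collection carries the generated ids.
Collection<Person> inserted = template.insert(people, Person.class);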

    + * object is not already present, that is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *

    - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your object has an {@literal Id} property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * - * Spring's Type Conversion" for more details. - * - * @param objectToSave the object to store in the collection + * Spring's + * Type Conversion for more details. + *

    + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + *

    + * The {@code objectToSave} must not be collection-like. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of a version mismatch if a + * {@link org.springframework.data.annotation.Version} property is defined. */ - void save(Object objectToSave); + <T> T save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

    + * is an 'upsert'.
    * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of MappingMongoConverter will be used. - *

    - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
    + * If your object has an {@literal Id} property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Cobnversion API. See Spring's - * Type Conversion" for more details. - * - * @param objectToSave the object to store in the collection - * @param collectionName name of the collection to store the object in + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

    + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + *

    + * The {@code objectToSave} must not be collection-like. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of a version mismatch if a + * {@link org.springframework.data.annotation.Version} property is defined. */ - void save(Object objectToSave, String collectionName); + <T> T save(T objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. - * - * @param query the query document that specifies the criteria used to select a record to be upserted - * @param update the update document that contains the updated object or $ operators to manipulate the existing object - * @param entityClass class that determines the collection to use - * @return the WriteResult which lets you access the results of the previous write. + *
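A hedged sketch of save(...) together with the optimistic locking behavior described above (Person with a @Version property, personId and the setter are assumptions):

Person person = template.findById(personId, Person.class);
person.setLastname("Stark-Potts");

// Increments the @Version property; a concurrent modification surfaces as an
// org.springframework.dao.OptimisticLockingFailureException.
Person updated = template.save(person);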

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

    + * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. + * + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @see Update + * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ - WriteResult upsert(Query query, Update update, Class entityClass); + UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by - * combining the query document and the update document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param collectionName name of the collection to update the object in - * @return the WriteResult which lets you access the results of the previous write. + * combining the query document and the update document.
    + * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of + * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific + * support.
    + * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. + * + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - WriteResult upsert(Query query, Update update, String collectionName); + UpdateResult upsert(Query query, UpdateDefinition update, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. - * - * @param query the query document that specifies the criteria used to select a record to be upserted - * @param update the update document that contains the updated object or $ operators to manipulate the existing object - * @param entityClass class of the pojo to be operated on - * @param collectionName name of the collection to update the object in - * @return the WriteResult which lets you access the results of the previous write. + *
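A minimal upsert sketch under the same illustrative assumptions:

UpdateResult result = template.upsert(
    Query.query(Criteria.where("email").is("tony@stark.io")),
    new Update().setOnInsert("firstname", "Tony").set("lastname", "Stark"),
    Person.class);

// getUpsertedId() is non-null when a new document was inserted.
BsonValue upsertedId = result.getUpsertedId();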

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - WriteResult upsert(Query query, Update update, Class entityClass, String collectionName); + UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName); /** * Updates the first object that is found in the collection of the entity class that matches the query document with * the provided update document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param entityClass class that determines the collection to use - * @return the WriteResult which lets you access the results of the previous write. - */ - WriteResult updateFirst(Query query, Update update, Class entityClass); + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class that determines the collection to use. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @see Update + * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + */ + UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass); /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param collectionName name of the collection to update the object in - * @return the WriteResult which lets you access the results of the previous write. - */ - WriteResult updateFirst(Query query, Update update, String collectionName); + * the provided updated document.
    + * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of + * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName); /** * Updates the first object that is found in the specified collection that matches the query document criteria with * the provided updated document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param entityClass class of the pojo to be operated on - * @param collectionName name of the collection to update the object in - * @return the WriteResult which lets you access the results of the previous write. - */ - WriteResult updateFirst(Query query, Update update, Class entityClass, String collectionName); + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param entityClass class that determines the collection to use - * @return the WriteResult which lets you access the results of the previous write. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see Update + * @see AggregationUpdate + * @since 3.0 */ - WriteResult updateMulti(Query query, Update update, Class entityClass); + UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass); /** * Updates all objects that are found in the specified collection that matches the query document criteria with the - * provided updated document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param collectionName name of the collection to update the object in - * @return the WriteResult which lets you access the results of the previous write. + * provided updated document.
    + * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of + * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - WriteResult updateMulti(Query query, Update update, String collectionName); + UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. - * - * @param query the query document that specifies the criteria used to select a record to be updated - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. - * @param entityClass class of the pojo to be operated on - * @param collectionName name of the collection to update the object in - * @return the WriteResult which lets you access the results of the previous write. + *

    + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - WriteResult updateMulti(final Query query, final Update update, Class entityClass, String collectionName); + UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName); /** - * Remove the given object from the collection by id. - * - * @param object + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}.
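A sketch contrasting the updateFirst and updateMulti contracts above (illustrative names again):

// updateFirst honors the query's sort order when several documents match.
UpdateResult first = template.updateFirst(
    Query.query(Criteria.where("lastname").is("Stark")).with(Sort.by("firstname")),
    new Update().set("retired", true), Person.class);

// updateMulti applies the update to every match.
UpdateResult all = template.updateMulti(
    Query.query(Criteria.where("retired").is(true)),
    new Update().unset("assignments"), Person.class);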
    + * Use {@link DeleteResult#getDeletedCount()} for insight into whether an {@link DeleteResult#wasAcknowledged() + * acknowledged} remove operation was successful or not. + * + * @param object must not be {@literal null}. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ - WriteResult remove(Object object); + DeleteResult remove(Object object); /** - * Removes the given object from the given collection. - * - * @param object - * @param collection must not be {@literal null} or empty. + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}.
    + * Use {@link DeleteResult#getDeletedCount()} for insight into whether an {@link DeleteResult#wasAcknowledged() + * acknowledged} remove operation was successful or not. + * + * @param object must not be {@literal null}. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ - WriteResult remove(Object object, String collection); + DeleteResult remove(Object object, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. - * - * @param query - * @param entityClass + * + * @param query the query document that specifies the criteria used to remove a document. + * @param entityClass class that determines the collection to use. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws IllegalArgumentException when {@literal query} or {@literal entityClass} is {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ - WriteResult remove(Query query, Class entityClass); + DeleteResult remove(Query query, Class<?> entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. - * - * @param query - * @param entityClass - * @param collectionName + * + * @param query the query document that specifies the criteria used to remove a document. + * @param entityClass class of the pojo to be operated on. Can be {@literal null}. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws IllegalArgumentException when {@literal query}, {@literal entityClass} or {@literal collectionName} is + * {@literal null}. */ - WriteResult remove(Query query, Class entityClass, String collectionName); + DeleteResult remove(Query query, Class<?> entityClass, String collectionName); /** * Remove all documents from the specified collection that match the provided query document criteria. There is no - * conversion/mapping done for any criteria using the id field. - * - * @param query the query document that specifies the criteria used to remove a record - * @param collectionName name of the collection where the objects will removed - */ - WriteResult remove(Query query, String collectionName); - - /** - * Returns and removes all documents form the specified collection that match the provided query. - * - * @param query - * @param collectionName - * @return + * conversion/mapping done for any criteria using the id field.
    + * NOTE: Any additional support for field mapping is not available due to the lack of domain type + * information. Use {@link #remove(Query, Class, String)} to get full type specific support. + * + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws IllegalArgumentException when {@literal query} or {@literal collectionName} is {@literal null}. + */ + DeleteResult remove(Query query, String collectionName); + + /** + * Returns and removes all documents from the specified collection that match the provided query.
    + * NOTE: Any additional support for field mapping is not available due to the lack of domain type + * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. + * + * @param query the query document that specifies the criteria used to find and remove documents. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link List} of converted objects deleted by this operation. * @since 1.5 */ <T> List<T> findAllAndRemove(Query query, String collectionName); /** * Returns and removes all documents matching the given query from the collection used to store the entityClass. - * - * @param query - * @param entityClass - * @return + * + * @param query the query document that specifies the criteria used to find and remove documents. + * @param entityClass class of the pojo to be operated on. + * @return the {@link List} of converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 1.5 */ <T> List<T> findAllAndRemove(Query query, Class<T> entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in - * the query. - * - * @param query - * @param entityClass - * @param collectionName - * @return + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. + * + * @param query the query document that specifies the criteria used to find and remove documents. + * @param entityClass class of the pojo to be operated on. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link List} of converted objects deleted by this operation. * @since 1.5 */ <T> List<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
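A sketch of the reworked remove contract (illustrative names as before):

// remove(...) now reports a DeleteResult instead of the legacy WriteResult.
DeleteResult result = template.remove(
    Query.query(Criteria.where("status").is("archived")), Person.class);
long deleted = result.getDeletedCount();

// findAllAndRemove additionally returns the deleted documents.
List<Person> removed = template.findAllAndRemove(
    Query.query(Criteria.where("status").is("archived")), Person.class);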
    + * The collection name is derived from the {@literal replacement} type.
    + * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default <T> UpdateResult replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @since 4.2 + */ + default <T> UpdateResult replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default <T> UpdateResult replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account.
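A minimal sketch of the new replace contract (Person and template remain illustrative):

// Replaces the first match; options default to ReplaceOptions.none() in the
// shortcut overloads above.
UpdateResult result = template.replace(
    Query.query(Criteria.where("ssn").is(1234)),
    new Person("Tony", "Stark"), ReplaceOptions.none());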
+ *
+ * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
+ *          contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
+ *          to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to
+ *          replace when potentially matching multiple candidates. Must not be {@literal null}.
+ * @param replacement the replacement document. Must not be {@literal null}.
+ * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}.
+ * @return the {@link UpdateResult} which lets you access the results of the previous replacement.
+ * @since 4.2
+ */
+ <T> UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName);
+
  /**
   * Returns the underlying {@link MongoConverter}.
- *
- * @return
+ *
+ * @return never {@literal null}.
   */
  MongoConverter getConverter();
-
 }
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBean.java
deleted file mode 100644
index c10335a037..0000000000
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBean.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Copyright 2010-2015 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import javax.net.ssl.SSLSocketFactory;
-
-import org.springframework.beans.factory.config.AbstractFactoryBean;
-import org.springframework.data.mongodb.util.MongoClientVersion;
-
-import com.mongodb.MongoOptions;
-
-/**
- * A factory bean for construction of a {@link MongoOptions} instance. In case used with MongoDB Java driver version 3
- * porperties not suppprted by the driver will be ignored.
- *
- * @author Graeme Rocher
- * @author Mark Pollack
- * @author Mike Saavedra
- * @author Thomas Darimont
- * @author Christoph Strobl
- * @deprecated since 1.7. Please use {@link MongoClientOptionsFactoryBean} instead.
- */ -@Deprecated -public class MongoOptionsFactoryBean extends AbstractFactoryBean { - - private static final MongoOptions DEFAULT_MONGO_OPTIONS = new MongoOptions(); - - private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost(); - private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS - .getThreadsAllowedToBlockForConnectionMultiplier(); - private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime(); - private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout(); - private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout(); - private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive(); - private int writeNumber = DEFAULT_MONGO_OPTIONS.getW(); - private int writeTimeout = DEFAULT_MONGO_OPTIONS.getWtimeout(); - private boolean writeFsync = DEFAULT_MONGO_OPTIONS.isFsync(); - - private boolean autoConnectRetry = !MongoClientVersion.isMongo3Driver() ? ReflectiveMongoOptionsInvoker - .getAutoConnectRetry(DEFAULT_MONGO_OPTIONS) : false; - private long maxAutoConnectRetryTime = !MongoClientVersion.isMongo3Driver() ? ReflectiveMongoOptionsInvoker - .getMaxAutoConnectRetryTime(DEFAULT_MONGO_OPTIONS) : -1; - private boolean slaveOk = !MongoClientVersion.isMongo3Driver() ? ReflectiveMongoOptionsInvoker - .getSlaveOk(DEFAULT_MONGO_OPTIONS) : false; - - private boolean ssl; - private SSLSocketFactory sslSocketFactory; - - /** - * Configures the maximum number of connections allowed per host until we will block. - * - * @param connectionsPerHost - */ - public void setConnectionsPerHost(int connectionsPerHost) { - this.connectionsPerHost = connectionsPerHost; - } - - /** - * A multiplier for connectionsPerHost for # of threads that can block a connection. If connectionsPerHost is 10, and - * threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block. If more threads try to block an - * exception will be thrown. - * - * @param threadsAllowedToBlockForConnectionMultiplier - */ - public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) { - this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier; - } - - /** - * Max wait time of a blocking thread for a connection. - * - * @param maxWaitTime - */ - public void setMaxWaitTime(int maxWaitTime) { - this.maxWaitTime = maxWaitTime; - } - - /** - * Configures the connect timeout in milliseconds. Defaults to 0 (infinite time). - * - * @param connectTimeout - */ - public void setConnectTimeout(int connectTimeout) { - this.connectTimeout = connectTimeout; - } - - /** - * Configures the socket timeout. Defaults to 0 (infinite time). - * - * @param socketTimeout - */ - public void setSocketTimeout(int socketTimeout) { - this.socketTimeout = socketTimeout; - } - - /** - * Configures whether or not to have socket keep alive turned on (SO_KEEPALIVE). Defaults to {@literal false}. - * - * @param socketKeepAlive - */ - public void setSocketKeepAlive(boolean socketKeepAlive) { - this.socketKeepAlive = socketKeepAlive; - } - - /** - * This specifies the number of servers to wait for on the write operation, and exception raising behavior. The 'w' - * option to the getlasterror command. Defaults to 0. - *

- * <ul>
- * <li>-1 = don't even report network errors</li>
- * <li>0 = default, don't call getLastError by default</li>
- * <li>1 = basic, call getLastError, but don't wait for slaves</li>
- * <li>2+ = wait for slaves</li>
- * </ul>
    - * - * @param writeNumber the number of servers to wait for on the write operation, and exception raising behavior. - */ - public void setWriteNumber(int writeNumber) { - this.writeNumber = writeNumber; - } - - /** - * Configures the timeout for write operations in milliseconds. This defaults to {@literal 0} (indefinite). - * - * @param writeTimeout - */ - public void setWriteTimeout(int writeTimeout) { - this.writeTimeout = writeTimeout; - } - - /** - * Configures whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to {@literal false}. - * - * @param writeFsync to fsync on write (true), otherwise {@literal false}. - */ - public void setWriteFsync(boolean writeFsync) { - this.writeFsync = writeFsync; - } - - /** - * Configures whether or not the system retries automatically on a failed connect. This defaults to {@literal false}. - * - * @deprecated since 1.7. - */ - @Deprecated - public void setAutoConnectRetry(boolean autoConnectRetry) { - this.autoConnectRetry = autoConnectRetry; - } - - /** - * Configures the maximum amount of time in millisecons to spend retrying to open connection to the same server. This - * defaults to {@literal 0}, which means to use the default {@literal 15s} if {@link #autoConnectRetry} is on. - * - * @param maxAutoConnectRetryTime the maxAutoConnectRetryTime to set - * @deprecated since 1.7 - */ - @Deprecated - public void setMaxAutoConnectRetryTime(long maxAutoConnectRetryTime) { - this.maxAutoConnectRetryTime = maxAutoConnectRetryTime; - } - - /** - * Specifies if the driver is allowed to read from secondaries or slaves. Defaults to {@literal false}. - * - * @param slaveOk true if the driver should read from secondaries or slaves. - * @deprecated since 1.7 - */ - @Deprecated - public void setSlaveOk(boolean slaveOk) { - this.slaveOk = slaveOk; - } - - /** - * Specifies if the driver should use an SSL connection to Mongo. This defaults to {@literal false}. By default - * {@link SSLSocketFactory#getDefault()} will be used. See {@link #setSslSocketFactory(SSLSocketFactory)} if you want - * to configure a custom factory. - * - * @param ssl true if the driver should use an SSL connection. - * @see #setSslSocketFactory(SSLSocketFactory) - */ - public void setSsl(boolean ssl) { - this.ssl = ssl; - } - - /** - * Specifies the {@link SSLSocketFactory} to use for creating SSL connections to Mongo. Defaults to - * {@link SSLSocketFactory#getDefault()}. Implicitly activates {@link #setSsl(boolean)} if a non-{@literal null} value - * is given. - * - * @param sslSocketFactory the sslSocketFactory to use. - * @see #setSsl(boolean) - */ - public void setSslSocketFactory(SSLSocketFactory sslSocketFactory) { - - setSsl(sslSocketFactory != null); - this.sslSocketFactory = sslSocketFactory; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - @Override - protected MongoOptions createInstance() throws Exception { - - if (MongoClientVersion.isMongo3Driver()) { - throw new IllegalArgumentException( - String - .format("Usage of 'mongo-options' is no longer supported for MongoDB Java driver version 3 and above. 
Please use 'mongo-client-options' and refer to chapter 'MongoDB 3.0 Support' for details.")); - } - - MongoOptions options = new MongoOptions(); - - options.setConnectionsPerHost(connectionsPerHost); - options.setThreadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier); - options.setMaxWaitTime(maxWaitTime); - options.setConnectTimeout(connectTimeout); - options.setSocketTimeout(socketTimeout); - options.setSocketKeepAlive(socketKeepAlive); - - options.setW(writeNumber); - options.setWtimeout(writeTimeout); - options.setFsync(writeFsync); - - if (ssl) { - options.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault()); - } - - ReflectiveMongoOptionsInvoker.setAutoConnectRetry(options, autoConnectRetry); - ReflectiveMongoOptionsInvoker.setMaxAutoConnectRetryTime(options, maxAutoConnectRetryTime); - ReflectiveMongoOptionsInvoker.setSlaveOk(options, slaveOk); - - return options; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoOptions.class; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java new file mode 100644 index 0000000000..37001faa4e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApi.Builder; +import com.mongodb.ServerApiVersion; + +/** + * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoServerApiFactoryBean implements FactoryBean { + + private String version; + private @Nullable Boolean deprecationErrors; + private @Nullable Boolean strict; + + /** + * @param version the version string either as the enum name or the server version value. 
+ * @see ServerApiVersion + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * @param deprecationErrors + * @see ServerApi.Builder#deprecationErrors(boolean) + */ + public void setDeprecationErrors(@Nullable Boolean deprecationErrors) { + this.deprecationErrors = deprecationErrors; + } + + /** + * @param strict + * @see ServerApi.Builder#strict(boolean) + */ + public void setStrict(@Nullable Boolean strict) { + this.strict = strict; + } + + @Nullable + @Override + public ServerApi getObject() throws Exception { + + Builder builder = ServerApi.builder().version(version()); + + if (deprecationErrors != null) { + builder = builder.deprecationErrors(deprecationErrors); + } + if (strict != null) { + builder = builder.strict(strict); + } + return builder.build(); + } + + @Nullable + @Override + public Class getObjectType() { + return ServerApi.class; + } + + private ServerApiVersion version() { + try { + // lookup by name eg. 'V1' + return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version); + } catch (IllegalArgumentException e) { + // or just the version number, eg. just '1' + return ServerApiVersion.findByValue(version); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index e7419a06ff..fd547c61a0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,22 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.SerializationUtils.*; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Scanner; -import java.util.Set; - -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.*; +import java.util.concurrent.TimeUnit; +import java.util.function.BiPredicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; +import org.bson.conversions.Bson; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -41,91 +38,125 @@ import org.springframework.context.ApplicationEventPublisherAware; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.core.convert.ConversionService; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.annotation.Id; -import org.springframework.data.authentication.UserCredentials; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Metric; -import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mapping.model.MappingException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.SessionSynchronization; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.CollectionPreparerDelegate; +import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; +import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; +import 
org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.QueryOperations.CountContext; +import org.springframework.data.mongodb.core.QueryOperations.DeleteContext; +import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; +import org.springframework.data.mongodb.core.QueryOperations.QueryContext; +import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.mongodb.core.aggregation.Fields; -import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.JsonSchemaMapper; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.DefaultSearchIndexOperations; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.index.IndexOperationsProvider; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.index.SearchIndexOperations; +import org.springframework.data.mongodb.core.index.SearchIndexOperationsProvider; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; -import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import 
org.springframework.data.mongodb.core.mapreduce.GroupByResults; +import org.springframework.data.mongodb.core.mapping.event.*; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; -import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.CloseableIterator; -import org.springframework.jca.cci.core.ConnectionCallback; +import org.springframework.data.util.Optionals; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.Bytes; -import com.mongodb.CommandResult; -import com.mongodb.Cursor; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; -import com.mongodb.MapReduceCommand; -import com.mongodb.MapReduceOutput; -import com.mongodb.Mongo; +import com.mongodb.ClientSessionOptions; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; -import com.mongodb.WriteResult; -import com.mongodb.util.JSON; -import com.mongodb.util.JSONParseException; +import com.mongodb.client.AggregateIterable; +import com.mongodb.client.ClientSession; +import com.mongodb.client.DistinctIterable; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoIterable; +import com.mongodb.client.model.*; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; /** - * Primary implementation of {@link MongoOperations}. - * + * Primary implementation of {@link MongoOperations}. It simplifies the use of imperative MongoDB usage and helps to + * avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. This class executes BSON queries or updates, initiating iteration over {@link FindIterable} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. 
Can be used within a service implementation via direct instantiation with a + * {@link MongoDatabaseFactory} reference, or get prepared in an application context and given to services as bean + * reference. + *

+ * <p>
+ * Note: The {@link MongoDatabaseFactory} should always be configured as a bean in the application context, in the first
+ * case given to the service directly, in the second case to the prepared template.
+ * <h3>{@link ReadPreference} and {@link com.mongodb.ReadConcern}</h3>
+ * <p>
+ * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and
+ * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}.
+ * <p>
    + * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. + * * @author Thomas Risberg * @author Graeme Rocher * @author Mark Pollack @@ -138,160 +169,278 @@ * @author Chuong Ngo * @author Christoph Strobl * @author Doménique Tilleuil + * @author Niko Schmuck + * @author Mark Paluch + * @author Laszlo Csontos + * @author Maninder Singh + * @author Borislav Rangelov + * @author duozhilin + * @author Andreas Zink + * @author Cimon Lucas + * @author Michael J. Simons + * @author Roman Puchkovskiy + * @author Yadhukrishna S Pai + * @author Anton Barkan + * @author Bartłomiej Mazur + * @author Michael Krog + * @author Jakub Zurawa + * @author Florian Lüdiger */ -@SuppressWarnings("deprecation") -public class MongoTemplate implements MongoOperations, ApplicationContextAware { +public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider, + SearchIndexOperationsProvider, ReadPreferenceAware { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class); - private static final String ID_FIELD = "_id"; + private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; - private static final Collection ITERABLE_CLASSES; - - static { - - Set iterableClasses = new HashSet(); - iterableClasses.add(List.class.getName()); - iterableClasses.add(Collection.class.getName()); - iterableClasses.add(Iterator.class.getName()); - - ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); - } private final MongoConverter mongoConverter; private final MappingContext, MongoPersistentProperty> mappingContext; - private final MongoDbFactory mongoDbFactory; + private final MongoDatabaseFactory mongoDbFactory; private final PersistenceExceptionTranslator exceptionTranslator; private final QueryMapper queryMapper; private final UpdateMapper updateMapper; + private final JsonSchemaMapper schemaMapper; + private final EntityOperations operations; + private final PropertyOperations propertyOperations; + private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; - private WriteConcern writeConcern; + private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; - private ReadPreference readPreference; - private ApplicationEventPublisher eventPublisher; - private ResourceLoader resourceLoader; - private MongoPersistentEntityIndexCreator indexCreator; + private @Nullable ReadPreference readPreference; + private @Nullable ApplicationEventPublisher eventPublisher; + private @Nullable EntityCallbacks entityCallbacks; + private @Nullable ResourceLoader resourceLoader; + private @Nullable MongoPersistentEntityIndexCreator indexCreator; - /** - * Constructor used for a basic template configuration - * - * @param mongo must not be {@literal null}. - * @param databaseName must not be {@literal null} or empty. 
- */ - public MongoTemplate(Mongo mongo, String databaseName) { - this(new SimpleMongoDbFactory(mongo, databaseName), null); - } + private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + + private CountExecution countExecution = this::doExactCount; /** - * Constructor used for a template configuration with user credentials in the form of - * {@link org.springframework.data.authentication.UserCredentials} - * - * @param mongo must not be {@literal null}. + * Constructor used for a basic template configuration. + * + * @param mongoClient must not be {@literal null}. * @param databaseName must not be {@literal null} or empty. - * @param userCredentials + * @since 2.1 */ - public MongoTemplate(Mongo mongo, String databaseName, UserCredentials userCredentials) { - this(new SimpleMongoDbFactory(mongo, databaseName, userCredentials)); + public MongoTemplate(MongoClient mongoClient, String databaseName) { + this(new SimpleMongoClientDatabaseFactory(mongoClient, databaseName), (MongoConverter) null); } /** * Constructor used for a basic template configuration. - * + * * @param mongoDbFactory must not be {@literal null}. */ - public MongoTemplate(MongoDbFactory mongoDbFactory) { - this(mongoDbFactory, null); + public MongoTemplate(MongoDatabaseFactory mongoDbFactory) { + this(mongoDbFactory, (MongoConverter) null); } /** * Constructor used for a basic template configuration. - * + * * @param mongoDbFactory must not be {@literal null}. * @param mongoConverter */ - public MongoTemplate(MongoDbFactory mongoDbFactory, MongoConverter mongoConverter) { + public MongoTemplate(MongoDatabaseFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) { - Assert.notNull(mongoDbFactory); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); this.mongoDbFactory = mongoDbFactory; this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); this.mongoConverter = mongoConverter == null ? 
getDefaultMongoConverter(mongoDbFactory) : mongoConverter; this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); + this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); + this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, + mongoDbFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); // We always have a mapping context in the converter, whether it's a simple one or not mappingContext = this.mongoConverter.getMappingContext(); // We create indexes based on mapping events - if (null != mappingContext && mappingContext instanceof MongoMappingContext) { - indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, mongoDbFactory); - eventPublisher = new MongoMappingEventPublisher(indexCreator); - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mappingContext instanceof MongoMappingContext mappingContext) { + + if (mappingContext.isAutoIndexCreation()) { + + indexCreator = new MongoPersistentEntityIndexCreator(mappingContext, this); + eventPublisher = new MongoMappingEventPublisher(indexCreator); + mappingContext.setApplicationEventPublisher(eventPublisher); } } } + private MongoTemplate(MongoDatabaseFactory dbFactory, MongoTemplate that) { + + this.mongoDbFactory = dbFactory; + this.exceptionTranslator = that.exceptionTranslator; + this.sessionSynchronization = that.sessionSynchronization; + + // we need to (re)create the MappingMongoConverter as we need to have it use a DbRefResolver that operates within + // the sames session. Otherwise loading referenced objects would happen outside of it. + if (that.mongoConverter instanceof MappingMongoConverter mappingMongoConverter) { + this.mongoConverter = mappingMongoConverter.with(dbFactory); + } else { + this.mongoConverter = that.mongoConverter; + } + + this.queryMapper = that.queryMapper; + this.updateMapper = that.updateMapper; + this.schemaMapper = that.schemaMapper; + this.mappingContext = that.mappingContext; + this.operations = that.operations; + this.propertyOperations = that.propertyOperations; + this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; + } + /** * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the - * default of {@value #DEFAULT_WRITE_RESULT_CHECKING}. - * + * default of {@link #DEFAULT_WRITE_RESULT_CHECKING}. + * * @param resultChecking */ - public void setWriteResultChecking(WriteResultChecking resultChecking) { + public void setWriteResultChecking(@Nullable WriteResultChecking resultChecking) { this.writeResultChecking = resultChecking == null ? DEFAULT_WRITE_RESULT_CHECKING : resultChecking; } /** * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} - * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no - * {@link WriteConcern} will be used. - * + * configured on the {@link MongoDatabaseFactory} will apply. 
+ * * @param writeConcern */ - public void setWriteConcern(WriteConcern writeConcern) { + public void setWriteConcern(@Nullable WriteConcern writeConcern) { this.writeConcern = writeConcern; } /** * Configures the {@link WriteConcernResolver} to be used with the template. - * + * * @param writeConcernResolver */ - public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) { - this.writeConcernResolver = writeConcernResolver; + public void setWriteConcernResolver(@Nullable WriteConcernResolver writeConcernResolver) { + this.writeConcernResolver = writeConcernResolver == null ? DefaultWriteConcernResolver.INSTANCE + : writeConcernResolver; } /** - * Used by @{link {@link #prepareCollection(DBCollection)} to set the {@link ReadPreference} before any operations are - * performed. - * + * Used by @{link {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations + * are performed. + * * @param readPreference */ - public void setReadPreference(ReadPreference readPreference) { + public void setReadPreference(@Nullable ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + @Override + public boolean hasReadPreference() { + return this.readPreference != null; + } + + @Override + public ReadPreference getReadPreference() { + return this.readPreference; + } + + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. + * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + eventDelegate.setPublisher(eventPublisher); + + if (entityCallbacks == null) { + setEntityCallbacks(EntityCallbacks.create(applicationContext)); + } + + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); } + resourceLoader = applicationContext; } + /** + * Set the {@link EntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}. + *
    + * Overrides potentially existing {@link EntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 2.2 + */ + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. + * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiPredicate estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionPreparer, collectionName, filter, options) -> { + + if (!estimationFilter.test(filter, options)) { + return doExactCount(collectionPreparer, collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionPreparer, collectionName, estimatedDocumentCountOptions); + }; + } else { + this.countExecution = this::doExactCount; + } + } + /** * Inspects the given {@link ApplicationContext} for {@link MongoPersistentEntityIndexCreator} and those in turn if * they were registered for the current {@link MappingContext}. If no creator for the current {@link MappingContext} * can be found we manually add the internally created one as {@link ApplicationListener} to make sure indexes get * created appropriately for entity types persisted through this {@link MongoTemplate} instance. - * + * * @param context must not be {@literal null}. */ private void prepareIndexCreator(ApplicationContext context) { @@ -305,106 +454,93 @@ private void prepareIndexCreator(ApplicationContext context) { } } - if (context instanceof ConfigurableApplicationContext) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext && indexCreator != null) { + configurableApplicationContext.addApplicationListener(indexCreator); } } /** - * Returns the default {@link org.springframework.data.mongodb.core.core.convert.MongoConverter}. - * + * Returns the default {@link org.springframework.data.mongodb.core.convert.MongoConverter}. 
+ * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeAsStream(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override - public CloseableIterator stream(final Query query, final Class entityType) { + public Stream stream(Query query, Class entityType) { + return stream(query, entityType, getCollectionName(entityType)); + } + + @Override + public Stream stream(Query query, Class entityType, String collectionName) { + return doStream(query, entityType, collectionName, entityType); + } - return execute(entityType, new CollectionCallback>() { + @SuppressWarnings("ConstantConditions") + protected Stream doStream(Query query, Class entityType, String collectionName, Class returnType) { - @Override - public CloseableIterator doInCollection(DBCollection collection) throws MongoException, DataAccessException { + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityType, "Entity type must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(returnType, "ReturnType must not be null"); - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); + return execute(collectionName, (CollectionCallback>) collection -> { - DBObject mappedFields = queryMapper.getMappedFields(query.getFieldsObject(), persistentEntity); - DBObject mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), persistentEntity); + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); - DBCursor cursor = collection.find(mappedQuery, mappedFields); - QueryCursorPreparer cursorPreparer = new QueryCursorPreparer(query, entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(returnType, entityType); - ReadDbObjectCallback readCallback = new ReadDbObjectCallback(mongoConverter, entityType, - collection.getName()); + Document mappedQuery = queryContext.getMappedQuery(persistentEntity); + Document mappedFields = queryContext.getMappedFields(persistentEntity, projection); - return new CloseableIterableCursorAdapter(cursorPreparer.prepare(cursor), exceptionTranslator, readCallback); - } + CollectionPreparerDelegate readPreference = createDelegate(query); + FindIterable cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection, + col -> readPreference.prepare(col).find(mappedQuery, Document.class).projection(mappedFields)); + + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName)).stream(); }); } + @Override public String getCollectionName(Class entityClass) { - return this.determineCollectionName(entityClass); - } - - public CommandResult executeCommand(String jsonCommand) { - return executeCommand((DBObject) JSON.parse(jsonCommand)); + return this.operations.determineCollectionName(entityClass); } - public CommandResult executeCommand(final DBObject command) { - - CommandResult result = execute(new DbCallback() { - public CommandResult doInDB(DB db) throws MongoException, DataAccessException { - return db.command(command); - } - }); + @Override + @SuppressWarnings("ConstantConditions") + public Document executeCommand(String jsonCommand) { - logCommandExecutionError(command, result); - return result; - } + 
Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty"); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(com.mongodb.DBObject, int) - */ - @Deprecated - public CommandResult executeCommand(final DBObject command, final int options) { - return executeCommand(command, - (options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred() : ReadPreference.primary()); + return execute(db -> db.runCommand(Document.parse(jsonCommand), Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(com.mongodb.DBObject, com.mongodb.ReadPreference) - */ - public CommandResult executeCommand(final DBObject command, final ReadPreference readPreference) { - - Assert.notNull(command, "Command must not be null!"); - - CommandResult result = execute(new DbCallback() { - public CommandResult doInDB(DB db) throws MongoException, DataAccessException { - return readPreference != null ? db.command(command, readPreference) : db.command(command); - } - }); + @Override + @SuppressWarnings("ConstantConditions") + public Document executeCommand(Document command) { - logCommandExecutionError(command, result); + Assert.notNull(command, "Command must not be null"); - return result; + return execute(db -> db.runCommand(command, Document.class)); } - protected void logCommandExecutionError(final DBObject command, CommandResult result) { + @Override + @SuppressWarnings("ConstantConditions") + public Document executeCommand(Document command, @Nullable ReadPreference readPreference) { - String error = result.getErrorMessage(); + Assert.notNull(command, "Command must not be null"); - if (error != null) { - LOGGER.warn("Command execution of {} failed: {}", command.toString(), error); - } + return execute(db -> readPreference != null // + ? db.runCommand(command, readPreference, Document.class) // + : db.runCommand(command, Document.class)); } + @Override public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) { executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null)); } @@ -412,164 +548,270 @@ public void executeQuery(Query query, String collectionName, DocumentCallbackHan /** * Execute a MongoDB query and iterate over the query results on a per-document basis with a * {@link DocumentCallbackHandler} using the provided CursorPreparer. - * + * * @param query the query class that specifies the criteria used to find a record and also an optional fields * specification, must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from - * @param dch the handler that will extract results, one document at a time - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param documentCallbackHandler the handler that will extract results, one document at a time + * @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set, + * (apply limits, skips and so on). 
*/ - protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch, - CursorPreparer preparer) { + protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler documentCallbackHandler, + @Nullable CursorPreparer preparer) { - Assert.notNull(query); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null"); - DBObject queryObject = queryMapper.getMappedObject(query.getQueryObject(), null); - DBObject sortObject = query.getSortObject(); - DBObject fieldsObject = query.getFieldsObject(); + Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), Optional.empty()); + Document sortObject = query.getSortObject(); + Document fieldsObject = query.getFieldsObject(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing query: {} sort: {} fields: {} in collection: {}", serializeToJsonSafely(queryObject), - sortObject, fieldsObject, collectionName); + LOGGER.debug(String.format("Executing query: %s fields: %s sort: %s in collection: %s", + serializeToJsonSafely(queryObject), fieldsObject, serializeToJsonSafely(sortObject), collectionName)); } - this.executeQueryInternal(new FindCallback(queryObject, fieldsObject), preparer, dch, collectionName); + this.executeQueryInternal(new FindCallback(createDelegate(query), queryObject, fieldsObject, null), + preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, documentCallbackHandler, collectionName); } + @Override public T execute(DbCallback action) { - Assert.notNull(action); + Assert.notNull(action, "DbCallback must not be null"); try { - DB db = this.getDb(); + MongoDatabase db = prepareDatabase(this.doGetDatabase()); return action.doInDB(db); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } + @Override public T execute(Class entityClass, CollectionCallback callback) { - return execute(determineCollectionName(entityClass), callback); + + Assert.notNull(entityClass, "EntityClass must not be null"); + return execute(getCollectionName(entityClass), callback); } + @Override public T execute(String collectionName, CollectionCallback callback) { - Assert.notNull(callback); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(callback, "CollectionCallback must not be null"); try { - DBCollection collection = getAndPrepareCollection(getDb(), collectionName); + MongoCollection collection = getAndPrepareCollection(doGetDatabase(), collectionName); return callback.doInCollection(collection); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeInSession(org.springframework.data.mongodb.core.DbCallback) + @Override + public SessionScoped withSession(ClientSessionOptions options) { + + Assert.notNull(options, "ClientSessionOptions must not be null"); + + return withSession(() -> mongoDbFactory.getSession(options)); + } + + @Override + public MongoTemplate withSession(ClientSession session) { + + Assert.notNull(session, "ClientSession must not be null"); + + return new SessionBoundMongoTemplate(session, MongoTemplate.this); + } + + /** + * Define if {@link MongoTemplate} should participate in transactions. Default is set to + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION}.
    + * NOTE: MongoDB transactions require at least MongoDB 4.0. + * + * @since 2.1 */ - @Deprecated - public T executeInSession(final DbCallback action) { - - return execute(new DbCallback() { - public T doInDB(DB db) throws MongoException, DataAccessException { - try { - ReflectiveDbInvoker.requestStart(db); - return action.doInDB(db); - } finally { - ReflectiveDbInvoker.requestDone(db); - } - } - }); + public void setSessionSynchronization(SessionSynchronization sessionSynchronization) { + this.sessionSynchronization = sessionSynchronization; + } + + @Override + public MongoCollection createCollection(Class entityClass) { + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - public DBCollection createCollection(Class entityClass) { - return createCollection(determineCollectionName(entityClass)); + @Override + public MongoCollection createCollection(Class entityClass, + @Nullable CollectionOptions collectionOptions) { + + Assert.notNull(entityClass, "EntityClass must not be null"); + + return doCreateCollection(getCollectionName(entityClass), + operations.convertToCreateCollectionOptions(collectionOptions, entityClass)); } - public DBCollection createCollection(Class entityClass, CollectionOptions collectionOptions) { - return createCollection(determineCollectionName(entityClass), collectionOptions); + @Override + public MongoCollection createCollection(String collectionName) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + + return doCreateCollection(collectionName, new Document()); } - public DBCollection createCollection(final String collectionName) { - return doCreateCollection(collectionName, new BasicDBObject()); + @Override + public MongoCollection createCollection(String collectionName, + @Nullable CollectionOptions collectionOptions) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + return doCreateCollection(collectionName, + operations.convertToCreateCollectionOptions(collectionOptions, Object.class)); } - public DBCollection createCollection(final String collectionName, final CollectionOptions collectionOptions) { - return doCreateCollection(collectionName, convertToDbObject(collectionOptions)); + @Override + public MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); } - public DBCollection getCollection(final String collectionName) { - return execute(new DbCallback() { - public DBCollection doInDB(DB db) throws MongoException, DataAccessException { - return db.getCollection(collectionName); - } + @Override + public MongoCollection createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private MongoCollection createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected MongoCollection doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + 
options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + db.createView(name, source, pipeline, viewOptions); + return db.getCollection(name); }); } + @Override + @SuppressWarnings("ConstantConditions") + public MongoCollection getCollection(String collectionName) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + + return execute(db -> db.getCollection(collectionName, Document.class)); + } + + @Override public boolean collectionExists(Class entityClass) { - return collectionExists(determineCollectionName(entityClass)); + return collectionExists(getCollectionName(entityClass)); } - public boolean collectionExists(final String collectionName) { - return execute(new DbCallback() { - public Boolean doInDB(DB db) throws MongoException, DataAccessException { - return db.collectionExists(collectionName); + @Override + @SuppressWarnings("ConstantConditions") + public boolean collectionExists(String collectionName) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + + return execute(db -> { + + for (String name : MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db).listCollectionNames()) { + if (name.equals(collectionName)) { + return true; + } } + return false; }); } + @Override public void dropCollection(Class entityClass) { - dropCollection(determineCollectionName(entityClass)); + dropCollection(getCollectionName(entityClass)); } + @Override public void dropCollection(String collectionName) { - execute(collectionName, new CollectionCallback() { - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { - collection.drop(); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Dropped collection [{}]", collection.getFullName()); - } - return null; + + Assert.notNull(collectionName, "CollectionName must not be null"); + + execute(collectionName, (CollectionCallback) collection -> { + collection.drop(); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropped collection [%s]", + collection.getNamespace() != null ? 
collection.getNamespace().getCollectionName() : collectionName)); } + return null; }); } + @Override public IndexOperations indexOps(String collectionName) { - return new DefaultIndexOperations(this, collectionName); + return indexOps(collectionName, null); + } + + @Override + public IndexOperations indexOps(String collectionName, @Nullable Class type) { + return new DefaultIndexOperations(this, collectionName, type); } + @Override public IndexOperations indexOps(Class entityClass) { - return new DefaultIndexOperations(this, determineCollectionName(entityClass)); + return indexOps(getCollectionName(entityClass), entityClass); + } + + @Override + public SearchIndexOperations searchIndexOps(String collectionName) { + return searchIndexOps(null, collectionName); + } + + @Override + public SearchIndexOperations searchIndexOps(Class type) { + return new DefaultSearchIndexOperations(this, type); } - public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) { - return bulkOps(bulkMode, null, collectionName); + @Override + public SearchIndexOperations searchIndexOps(@Nullable Class type, String collectionName) { + return new DefaultSearchIndexOperations(this, collectionName, type); + } + + @Override + public BulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); } + @Override public BulkOperations bulkOps(BulkMode bulkMode, Class entityClass) { - return bulkOps(bulkMode, entityClass, determineCollectionName(entityClass)); + return bulkOps(bulkMode, entityClass, getCollectionName(entityClass)); } - public BulkOperations bulkOps(BulkMode mode, Class entityType, String collectionName) { + @Override + public BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { - Assert.notNull(mode, "BulkMode must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - DefaultBulkOperations operations = new DefaultBulkOperations(this, mode, collectionName, entityType); + DefaultBulkOperations operations = new DefaultBulkOperations(this, collectionName, + new BulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper, + eventPublisher, entityCallbacks)); - operations.setExceptionTranslator(exceptionTranslator); - operations.setWriteConcernResolver(writeConcernResolver); operations.setDefaultWriteConcern(writeConcern); return operations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#scriptOps() - */ @Override public ScriptOperations scriptOps() { return new DefaultScriptOperations(this); @@ -577,13 +819,24 @@ public ScriptOperations scriptOps() { // Find methods that take a Query to express the query and that return a single object. 
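A minimal usage sketch for the single-result find methods declared below. This is illustrative only: Person is a hypothetical mapped document class, template stands for any configured MongoOperations, and only findOne and exists are the API actually touched by this diff.

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class FindOneSketch {

	// Hypothetical mapped document type, for illustration only.
	static class Person {
		String id;
		String lastname;
	}

	// Fetches a single Person by lastname; findOne returns the first match or null.
	static Person findByLastname(MongoOperations template, String lastname) {

		Query query = new Query(Criteria.where("lastname").is(lastname));

		// exists(...) answers the match question without materializing a document.
		if (!template.exists(query, Person.class)) {
			return null;
		}

		return template.findOne(query, Person.class);
	}
}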
+ @Nullable + @Override public T findOne(Query query, Class entityClass) { - return findOne(query, entityClass, determineCollectionName(entityClass)); + return findOne(query, entityClass, getCollectionName(entityClass)); } + @Nullable + @Override public T findOne(Query query, Class entityClass, String collectionName) { - if (query.getSortObject() == null) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass); + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + if (ObjectUtils.isEmpty(query.getSortObject())) { + + return doFindOne(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + new QueryCursorPreparer(query, entityClass), entityClass); } else { query.limit(1); List results = find(query, entityClass, collectionName); @@ -591,210 +844,471 @@ public T findOne(Query query, Class entityClass, String collectionName) { } } + @Override public boolean exists(Query query, Class entityClass) { - return exists(query, entityClass, determineCollectionName(entityClass)); + return exists(query, entityClass, getCollectionName(entityClass)); } + @Override public boolean exists(Query query, String collectionName) { return exists(query, null, collectionName); } - public boolean exists(Query query, Class entityClass, String collectionName) { + @Override + @SuppressWarnings("ConstantConditions") + public boolean exists(Query query, @Nullable Class entityClass, String collectionName) { if (query == null) { throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); } + Assert.notNull(collectionName, "CollectionName must not be null"); + + QueryContext queryContext = queryOperations.createQueryContext(query); + Document mappedQuery = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); - DBObject mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass)); - return execute(collectionName, new FindCallback(mappedQuery)).hasNext(); + return execute(collectionName, + new ExistsCallback(createDelegate(query), mappedQuery, queryContext.getCollation(entityClass).orElse(null))); } // Find methods that take a Query to express the query and that return a List of objects. 
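As reworked above, findOne(...) takes the direct doFindOne(...) path only for unsorted queries and rewrites sorted ones as limit(1) plus find(...), while exists(...) runs a dedicated ExistsCallback instead of materializing documents. A usage sketch under the same hypothetical Person class and template instance:

    // First match or null; with a sort this becomes a limited find under the hood.
    Person alice = template.findOne(query(where("name").is("alice")), Person.class);

    // Server-side existence check; no document is mapped back.
    boolean present = template.exists(query(where("name").is("alice")), Person.class);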
+ @Override public List find(Query query, Class entityClass) { - return find(query, entityClass, determineCollectionName(entityClass)); + return find(query, entityClass, getCollectionName(entityClass)); } - public List find(final Query query, Class entityClass, String collectionName) { + @Override + public List find(Query query, Class entityClass, String collectionName) { - if (query == null) { - return findAll(entityClass, collectionName); - } + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + return doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), entityClass, new QueryCursorPreparer(query, entityClass)); } - public T findById(Object id, Class entityClass) { - return findById(id, entityClass, determineCollectionName(entityClass)); - } + @Override + public Window scroll(Query query, Class entityType) { - public T findById(Object id, Class entityClass, String collectionName) { - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityClass); - MongoPersistentProperty idProperty = persistentEntity == null ? null : persistentEntity.getIdProperty(); - String idKey = idProperty == null ? ID_FIELD : idProperty.getName(); - return doFindOne(collectionName, new BasicDBObject(idKey, id), null, entityClass); + Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); } - public GeoResults geoNear(NearQuery near, Class entityClass) { - return geoNear(near, entityClass, determineCollectionName(entityClass)); + @Override + public Window scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, collectionName); } - @SuppressWarnings("unchecked") - public GeoResults geoNear(NearQuery near, Class entityClass, String collectionName) { + Window doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { - if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); - } + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); - if (entityClass == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); - } + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; - String collection = StringUtils.hasText(collectionName) ? 
collectionName : determineCollectionName(entityClass); - DBObject nearDbObject = near.toDBObject(); + if (query.hasKeyset()) { - BasicDBObject command = new BasicDBObject("geoNear", collection); - command.putAll(nearDbObject); + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); - if (nearDbObject.containsField("query")) { - DBObject query = (DBObject) nearDbObject.get("query"); - command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(entityClass))); - } + List result = doFind(collectionName, createDelegate(query), keysetPaginationQuery.query(), + keysetPaginationQuery.fields(), sourceClass, + new QueryCursorPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command), - entityClass, collectionName); + return ScrollUtils.createWindow(query, result, sourceClass, operations); } - CommandResult commandResult = executeCommand(command, this.readPreference); - List results = (List) commandResult.get("results"); - results = results == null ? Collections.emptyList() : results; + List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + sourceClass, new QueryCursorPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), + callback); - DbObjectCallback callback = new GeoNearResultDbObjectCallback( - new ReadDbObjectCallback(mongoConverter, entityClass, collectionName), near.getMetric()); - List result = new ArrayList(results.size()); + return ScrollUtils.createWindow(result, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip())); + } - int index = 0; - int elementsToSkip = near.getSkip() != null ? near.getSkip() : 0; + @Nullable + @Override + public T findById(Object id, Class entityClass) { + return findById(id, entityClass, getCollectionName(entityClass)); + } - for (Object element : results) { + @Nullable + @Override + public T findById(Object id, Class entityClass, String collectionName) { - /* - * As MongoDB currently (2.4.4) doesn't support the skipping of elements in near queries - * we skip the elements ourselves to avoid at least the document 2 object mapping overhead.
- * - * @see https://jira.mongodb.org/browse/SERVER-3925 - */ - if (index >= elementsToSkip) { - result.add(callback.doWith((DBObject) element)); - } - index++; - } + Assert.notNull(id, "Id must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - if (elementsToSkip > 0) { - // as we skipped some elements we have to calculate the averageDistance ourselves: - return new GeoResults(result, near.getMetric()); - } + String idKey = operations.getIdPropertyName(entityClass); - GeoCommandStatistics stats = GeoCommandStatistics.from(commandResult); - return new GeoResults(result, new Distance(stats.getAverageDistance(), near.getMetric())); + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), new Document(), + entityClass); } - public T findAndModify(Query query, Update update, Class entityClass) { - return findAndModify(query, update, new FindAndModifyOptions(), entityClass, determineCollectionName(entityClass)); + @Override + public List findDistinct(Query query, String field, Class entityClass, Class resultClass) { + return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); } - public T findAndModify(Query query, Update update, Class entityClass, String collectionName) { - return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); - } + @Override + @SuppressWarnings("unchecked") + public List findDistinct(Query query, String field, String collectionName, Class entityClass, + Class resultClass) { - public T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass) { - return findAndModify(query, update, options, entityClass, determineCollectionName(entityClass)); - } + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); - public T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, - String collectionName) { - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), entityClass, update, options); - } + MongoPersistentEntity entity = entityClass != Object.class ? getPersistentEntity(entityClass) : null; + DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); - // Find methods that take a Query to express the query and that return a single object that is also removed from the - // collection in the database. 
+ Document mappedQuery = distinctQueryContext.getMappedQuery(entity); + String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); + Class mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass); - public T findAndRemove(Query query, Class entityClass) { - return findAndRemove(query, entityClass, determineCollectionName(entityClass)); - } + MongoIterable result = execute(collectionName, (collection) -> { - public T findAndRemove(Query query, Class entityClass, String collectionName) { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); + } - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), entityClass); - } + collection = createDelegate(query).prepare(collection); - public long count(Query query, Class entityClass) { - Assert.notNull(entityClass); - return count(query, entityClass, determineCollectionName(entityClass)); - } + DistinctIterable iterable = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); + distinctQueryContext.applyCollation(entityClass, iterable::collation); - public long count(final Query query, String collectionName) { - return count(query, null, collectionName); - } + return iterable; + }); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - public long count(Query query, Class entityClass, String collectionName) { + if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { - Assert.hasText(collectionName); - final DBObject dbObject = query == null ? null - : queryMapper.getMappedObject(query.getQueryObject(), - entityClass == null ? 
null : mappingContext.getPersistentEntity(entityClass)); + MongoConverter converter = getConverter(); + DefaultDbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); - return execute(collectionName, new CollectionCallback() { - public Long doInCollection(DBCollection collection) throws MongoException, DataAccessException { - return collection.count(dbObject); - } - }); - } + result = result.map((source) -> converter.mapValueToTargetType(source, + distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass), dbRefResolver)); + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object) - */ - public void insert(Object objectToSave) { - ensureNotIterable(objectToSave); - insert(objectToSave, determineEntityCollectionName(objectToSave)); + try { + return (List) result.into(new ArrayList<>()); + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, exceptionTranslator); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object, java.lang.String) - */ - public void insert(Object objectToSave, String collectionName) { - ensureNotIterable(objectToSave); - doInsert(collectionName, objectToSave, this.mongoConverter); + @Override + public GeoResults geoNear(NearQuery near, Class entityClass) { + return geoNear(near, entityClass, getCollectionName(entityClass)); } - protected void ensureNotIterable(Object o) { - if (null != o) { - if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) { - throw new IllegalArgumentException("Cannot use a collection here."); - } - } + @Override + public GeoResults geoNear(NearQuery near, Class domainType, String collectionName) { + return geoNear(near, domainType, collectionName, domainType); + } + + public GeoResults geoNear(NearQuery near, Class domainType, String collectionName, Class returnType) { + + if (near == null) { + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); + } + + if (domainType == null) { + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); + } + + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(returnType, "ReturnType must not be null"); + + String collection = StringUtils.hasText(collectionName) ? 
collectionName : getCollectionName(domainType); + String distanceField = operations.nearQueryDistanceFieldName(domainType); + + Builder optionsBuilder = AggregationOptions.builder().collation(near.getCollation()); + + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); + } + + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); + } + + Aggregation $geoNear = TypedAggregation.newAggregation(domainType, Aggregation.geoNear(near, distanceField)) + .withOptions(optionsBuilder.build()); + + AggregationResults results = aggregate($geoNear, collection, Document.class); + EntityProjection projection = operations.introspectProjection(returnType, domainType); + + DocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, + new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); + + List result = new ArrayList<>(results.getMappedResults().size()); + + BigDecimal aggregate = BigDecimal.ZERO; + for (Document element : results) { + + GeoResult geoResult = callback.doWith(element); + aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue())); + result.add(geoResult); + } + + Distance avgDistance = new Distance( + result.size() == 0 ? 0 : aggregate.divide(new BigDecimal(result.size()), RoundingMode.HALF_UP).doubleValue(), + near.getMetric()); + + return new GeoResults<>(result, avgDistance); + } + + @Nullable + @Override + public T findAndModify(Query query, UpdateDefinition update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); + } + + @Nullable + @Override + public T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); + } + + @Nullable + @Override + public T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass) { + return findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); + } + + @Nullable + @Override + public T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, + String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + Assert.notNull(options, "Options must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); + + Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { + throw new IllegalArgumentException( + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); + }); + + if (!options.getCollation().isPresent()) { + operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); + } + + return doFindAndModify(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); + } + + @Override + public T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + String collectionName, Class resultType) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); +
Assert.notNull(options, "Options must not be null; Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null; Use Object.class instead"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 or none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + CollectionPreparerDelegate collectionPreparer = createDelegate(query); + Document mappedQuery = queryContext.getMappedQuery(entity); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedSort = queryContext.getMappedSort(entity); + + replacement = maybeCallBeforeConvert(replacement, collectionName); + Document mappedReplacement = operations.forEntity(replacement).toMappedDocument(this.mongoConverter).getDocument(); + + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + T saved = doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, + queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, projection); + + if (saved != null) { + maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName)); + return maybeCallAfterSave(saved, mappedReplacement, collectionName); + } + + return saved; + } + + // Find methods that take a Query to express the query and that return a single object that is also removed from the + // collection in the database.
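Both methods above validate their inputs eagerly: findAndModify(...) rejects a collation defined on both the Query and the FindAndModifyOptions, and findAndReplace(...) refuses a limit above 1 or any skip. A sketch of the two call styles, again with the hypothetical Person class and template instance:

    // Atomically apply the update and return the document as it looks afterwards.
    Person bumped = template.findAndModify(
        query(where("name").is("alice")),
        new Update().inc("visits", 1),
        FindAndModifyOptions.options().returnNew(true),
        Person.class);

    // Replace the first match wholesale, inserting the replacement when nothing matches.
    Person swapped = template.findAndReplace(
        query(where("name").is("alice")),
        new Person("alice"),
        FindAndReplaceOptions.options().upsert().returnNew());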
+ + @Nullable + @Override + public T findAndRemove(Query query, Class entityClass) { + return findAndRemove(query, entityClass, getCollectionName(entityClass)); + } + + @Nullable + @Override + public T findAndRemove(Query query, Class entityClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + return doFindAndRemove(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null), + entityClass); + } + + @Override + public long count(Query query, Class entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + return count(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public long count(Query query, String collectionName) { + return count(query, null, collectionName); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + @Override + public long count(Query query, @Nullable Class entityClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + CollectionPreparerDelegate readPreference = createDelegate(query); + return doCount(readPreference, collectionName, mappedQuery, options); + } + + protected long doCount(CollectionPreparer collectionPreparer, String collectionName, Document filter, + CountOptions options) { + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return countExecution.countDocuments(collectionPreparer, collectionName, filter, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.MongoOperations#estimatedCount(java.lang.String) + */ + @Override + public long estimatedCount(String collectionName) { + return doEstimatedCount(CollectionPreparerDelegate.of(this), collectionName, new EstimatedDocumentCountOptions()); + } + + protected long doEstimatedCount(CollectionPreparer collectionPreparer, + String collectionName, EstimatedDocumentCountOptions options) { + return execute(collectionName, + collection -> collectionPreparer.prepare(collection).estimatedDocumentCount(options)); + } + + @Override + public long exactCount(Query query, @Nullable Class entityClass, String collectionName) { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(createDelegate(query), collectionName, mappedQuery, options); + } + + protected long doExactCount(CollectionPreparer collectionPreparer, String collectionName, + Document filter, CountOptions options) { + return execute(collectionName, collection -> collectionPreparer.prepare(collection) + .countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + }
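count(...) above always issues a filter-aware countDocuments(...) through the mapped CountContext, whereas estimatedCount(...) answers from collection metadata, which is cheap but only meaningful for an empty filter outside a transaction (the countCanBeEstimated(...) guard that follows encodes exactly that). A usage sketch, assuming the hypothetical Person documents live in a collection named "person":

    // Exact count: the query is mapped and countDocuments runs on the server.
    long admins = template.count(query(where("role").is("admin")), Person.class);

    // Metadata-based estimate: fast, but ignores filters and transactions.
    long roughTotal = template.estimatedCount("person");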
+ + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + + return + // only empty filter for estimatedCount + filter.isEmpty() && + // no skip, no limit,... + isEmptyOptions(options) && + // transaction active? + !MongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()); + } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; + } + + @Override + public T insert(T objectToSave) { + + Assert.notNull(objectToSave, "ObjectToSave must not be null"); + + ensureNotCollectionLike(objectToSave); + return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); + } + + @Override + @SuppressWarnings("unchecked") + public T insert(T objectToSave, String collectionName) { + + Assert.notNull(objectToSave, "ObjectToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + ensureNotCollectionLike(objectToSave); + return (T) doInsert(collectionName, objectToSave, this.mongoConverter); + } + + /** + * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or + * {@link Iterator}. + * + * @param source can be {@literal null}. + * @since 3.2. + */ + protected void ensureNotCollectionLike(@Nullable Object source) { + + if (EntityOperations.isCollectionLike(source)) { + throw new IllegalArgumentException("Cannot use a collection here"); + } } /** * Prepare the collection before any processing is done using it. This allows a convenient way to apply settings like - * slaveOk() etc. Can be overridden in sub-classes. - * + * withCodecRegistry() etc. Can be overridden in sub-classes. + * * @param collection */ - protected void prepareCollection(DBCollection collection) { - if (this.readPreference != null) { - collection.setReadPreference(readPreference); + protected MongoCollection prepareCollection(MongoCollection collection) { + + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { + return collection.withReadPreference(readPreference); } + + return collection; } /** @@ -802,91 +1316,82 @@ protected void prepareCollection(DBCollection collection) { * settings in sub-classes.
    * In case of using MongoDB Java driver version 3 the returned {@link WriteConcern} will be defaulted to * {@link WriteConcern#ACKNOWLEDGED} when {@link WriteResultChecking} is set to {@link WriteResultChecking#EXCEPTION}. - * - * @param writeConcern any WriteConcern already configured or null + * + * @param mongoAction any MongoAction already configured or null * @return The prepared WriteConcern or null */ + @Nullable protected WriteConcern prepareWriteConcern(MongoAction mongoAction) { WriteConcern wc = writeConcernResolver.resolve(mongoAction); return potentiallyForceAcknowledgedWrite(wc); } - private WriteConcern potentiallyForceAcknowledgedWrite(WriteConcern wc) { + @Nullable + private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) { - if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking) - && MongoClientVersion.isMongo3Driver()) { + if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject() instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } return wc; } - protected void doInsert(String collectionName, T objectToSave, MongoWriter writer) { - - assertUpdateableIdIfNotSet(objectToSave); + protected T doInsert(String collectionName, T objectToSave, MongoWriter writer) { - initializeVersionProperty(objectToSave); + BeforeConvertEvent event = new BeforeConvertEvent<>(objectToSave, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + toConvert = maybeCallBeforeConvert(toConvert, collectionName); - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - DBObject dbDoc = toDbObject(objectToSave, writer); + T initialized = entity.initializeVersionProperty(); + Document dbDoc = entity.toMappedDocument(writer).getDocument(); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); - Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass()); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, dbDoc, collectionName)); + initialized = maybeCallBeforeSave(initialized, dbDoc, collectionName); + Object id = insertDocument(collectionName, dbDoc, initialized.getClass()); - populateIdIfNecessary(objectToSave, id); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); + T saved = populateIdIfNecessary(initialized, id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); + return maybeCallAfterSave(saved, dbDoc, collectionName); } - /** - * @param objectToSave - * @param writer - * @return - */ - private DBObject toDbObject(T objectToSave, MongoWriter writer) { + @Override + @SuppressWarnings("unchecked") + public Collection insert(Collection batchToSave, Class entityClass) { - if (!(objectToSave instanceof String)) { - DBObject dbDoc = new BasicDBObject(); - writer.write(objectToSave, dbDoc); - return dbDoc; - } else { - try { - return (DBObject) JSON.parse((String) objectToSave); - } catch (JSONParseException e) { - throw new MappingException("Could not parse given String to save into a JSON document!", e); - } - } - } + Assert.notNull(batchToSave, "BatchToSave must not be null"); - private void initializeVersionProperty(Object entity) { + return (Collection) 
doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); + } - MongoPersistentEntity mongoPersistentEntity = getPersistentEntity(entity.getClass()); + @Override + @SuppressWarnings("unchecked") + public Collection insert(Collection batchToSave, String collectionName) { - if (mongoPersistentEntity != null && mongoPersistentEntity.hasVersionProperty()) { - ConvertingPropertyAccessor accessor = new ConvertingPropertyAccessor( - mongoPersistentEntity.getPropertyAccessor(entity), mongoConverter.getConversionService()); - accessor.setProperty(mongoPersistentEntity.getVersionProperty(), 0); - } - } + Assert.notNull(batchToSave, "BatchToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - public void insert(Collection batchToSave, Class entityClass) { - doInsertBatch(determineCollectionName(entityClass), batchToSave, this.mongoConverter); + return (Collection) doInsertBatch(collectionName, batchToSave, this.mongoConverter); } - public void insert(Collection batchToSave, String collectionName) { - doInsertBatch(collectionName, batchToSave, this.mongoConverter); - } + @Override + @SuppressWarnings("unchecked") + public Collection insertAll(Collection objectsToSave) { - public void insertAll(Collection objectsToSave) { - doInsertAll(objectsToSave, this.mongoConverter); + Assert.notNull(objectsToSave, "ObjectsToSave must not be null"); + return (Collection) doInsertAll(objectsToSave, this.mongoConverter); } - protected void doInsertAll(Collection listToSave, MongoWriter writer) { + @SuppressWarnings("unchecked") + protected Collection doInsertAll(Collection listToSave, MongoWriter writer) { - Map elementsByCollection = new HashMap(); + Map elementsByCollection = new HashMap<>(); + List savedObjects = new ArrayList<>(listToSave.size()); for (T element : listToSave) { @@ -894,592 +1399,638 @@ protected void doInsertAll(Collection listToSave, MongoWriter entity = mappingContext.getPersistentEntity(element.getClass()); - - if (entity == null) { - throw new InvalidDataAccessApiUsageException("No PersistentEntity information found for " + element.getClass()); - } - - String collection = entity.getCollection(); - List collectionElements = elementsByCollection.get(collection); - - if (null == collectionElements) { - collectionElements = new ArrayList(); - elementsByCollection.put(collection, collectionElements); - } + String collection = getCollectionName(ClassUtils.getUserClass(element)); + List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); collectionElements.add(element); } for (Map.Entry entry : elementsByCollection.entrySet()) { - doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter); + savedObjects.addAll((Collection) doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter)); } + + return savedObjects; } - protected void doInsertBatch(String collectionName, Collection batchToSave, MongoWriter writer) { + protected Collection doInsertBatch(String collectionName, Collection batchToSave, + MongoWriter writer) { - Assert.notNull(writer); + Assert.notNull(writer, "MongoWriter must not be null"); - List dbObjectList = new ArrayList(); - for (T o : batchToSave) { + List documentList = new ArrayList<>(batchToSave.size()); + List initializedBatchToSave = new ArrayList<>(batchToSave.size()); + for (T uninitialized : batchToSave) { - initializeVersionProperty(o); - BasicDBObject dbDoc = new BasicDBObject(); + BeforeConvertEvent event = new
BeforeConvertEvent<>(uninitialized, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + toConvert = maybeCallBeforeConvert(toConvert, collectionName); - maybeEmitEvent(new BeforeConvertEvent(o, collectionName)); - writer.write(o, dbDoc); + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - maybeEmitEvent(new BeforeSaveEvent(o, dbDoc, collectionName)); - dbObjectList.add(dbDoc); + T initialized = entity.initializeVersionProperty(); + Document document = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, document, collectionName)); + initialized = maybeCallBeforeSave(initialized, document, collectionName); + + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(uninitialized.getClass()); + + documentList.add(mappedDocument.getDocument()); + initializedBatchToSave.add(initialized); } - List ids = insertDBObjectList(collectionName, dbObjectList); + + List ids = insertDocumentList(collectionName, documentList); + List savedObjects = new ArrayList<>(documentList.size()); + int i = 0; - for (T obj : batchToSave) { + for (T obj : initializedBatchToSave) { + if (i < ids.size()) { - populateIdIfNecessary(obj, ids.get(i)); - maybeEmitEvent(new AfterSaveEvent(obj, dbObjectList.get(i), collectionName)); + T saved = populateIdIfNecessary(obj, ids.get(i)); + Document doc = documentList.get(i); + maybeEmitEvent(new AfterSaveEvent<>(saved, doc, collectionName)); + savedObjects.add(maybeCallAfterSave(saved, doc, collectionName)); + } else { + savedObjects.add(obj); } i++; } + + return savedObjects; } - public void save(Object objectToSave) { + @Override + public T save(T objectToSave) { - Assert.notNull(objectToSave); - save(objectToSave, determineEntityCollectionName(objectToSave)); + Assert.notNull(objectToSave, "Object to save must not be null"); + return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - public void save(Object objectToSave, String collectionName) { - - Assert.notNull(objectToSave); - Assert.hasText(collectionName); + @Override + @SuppressWarnings("unchecked") + public T save(T objectToSave, String collectionName) { - MongoPersistentEntity mongoPersistentEntity = getPersistentEntity(objectToSave.getClass()); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + ensureNotCollectionLike(objectToSave); - // No optimistic locking -> simple save - if (mongoPersistentEntity == null || !mongoPersistentEntity.hasVersionProperty()) { - doSave(collectionName, objectToSave, this.mongoConverter); - return; - } + AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); - doSaveVersioned(objectToSave, mongoPersistentEntity, collectionName); + return source.isVersionedEntity() // + ? 
doSaveVersioned(source, collectionName) // + : (T) doSave(collectionName, objectToSave, this.mongoConverter); } - private void doSaveVersioned(T objectToSave, MongoPersistentEntity entity, String collectionName) { + @SuppressWarnings("unchecked") + private T doSaveVersioned(AdaptibleEntity source, String collectionName) { - ConvertingPropertyAccessor convertingAccessor = new ConvertingPropertyAccessor( - entity.getPropertyAccessor(objectToSave), mongoConverter.getConversionService()); + if (source.isNew()) { + return (T) doInsert(collectionName, source.getBean(), this.mongoConverter); + } - MongoPersistentProperty idProperty = entity.getIdProperty(); - MongoPersistentProperty versionProperty = entity.getVersionProperty(); + // Create query for entity with the id and old version + Query query = source.getQueryForVersion(); - Object version = convertingAccessor.getProperty(versionProperty); - Number versionNumber = convertingAccessor.getProperty(versionProperty, Number.class); + // Bump version number + T toSave = source.incrementVersion(); - // Fresh instance -> initialize version property - if (version == null) { - doInsert(collectionName, objectToSave, this.mongoConverter); - } else { + toSave = maybeEmitEvent(new BeforeConvertEvent(toSave, collectionName)).getSource(); + toSave = maybeCallBeforeConvert(toSave, collectionName); - assertUpdateableIdIfNotSet(objectToSave); + if (source.getBean() != toSave) { + source = operations.forEntity(toSave, mongoConverter.getConversionService()); + } - // Create query for entity with the id and old version - Object id = convertingAccessor.getProperty(idProperty); - Query query = new Query(Criteria.where(idProperty.getName()).is(id).and(versionProperty.getName()).is(version)); + source.assertUpdateableIdIfNotSet(); - // Bump version number - convertingAccessor.setProperty(versionProperty, versionNumber.longValue() + 1); + MappedDocument mapped = source.toMappedDocument(mongoConverter); - BasicDBObject dbObject = new BasicDBObject(); + maybeEmitEvent(new BeforeSaveEvent<>(toSave, mapped.getDocument(), collectionName)); + toSave = maybeCallBeforeSave(toSave, mapped.getDocument(), collectionName); + UpdateDefinition update = mapped.updateWithoutId(); - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); - this.mongoConverter.write(objectToSave, dbObject); + UpdateResult result = doUpdate(collectionName, query, update, toSave.getClass(), false, false); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbObject, collectionName)); - Update update = Update.fromDBObject(dbObject, ID_FIELD); + if (result.getModifiedCount() == 0) { - doUpdate(collectionName, query, update, objectToSave.getClass(), false, false); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbObject, collectionName)); + throw new OptimisticLockingFailureException( + String.format("Cannot save entity %s with version %s to collection %s; Has it been modified meanwhile", + source.getId(), source.getVersion(), collectionName)); } + maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName)); + + return maybeCallAfterSave(toSave, mapped.getDocument(), collectionName); } - protected void doSave(String collectionName, T objectToSave, MongoWriter writer) { + protected T doSave(String collectionName, T objectToSave, MongoWriter writer) { + + objectToSave = maybeEmitEvent(new BeforeConvertEvent<>(objectToSave, collectionName)).getSource(); + objectToSave = maybeCallBeforeConvert(objectToSave, collectionName); - assertUpdateableIdIfNotSet(objectToSave); + 
AdaptibleEntity entity = operations.forEntity(objectToSave, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + MappedDocument mapped = entity.toMappedDocument(writer); + Document dbDoc = mapped.getDocument(); - DBObject dbDoc = toDbObject(objectToSave, writer); + maybeEmitEvent(new BeforeSaveEvent<>(objectToSave, dbDoc, collectionName)); + objectToSave = maybeCallBeforeSave(objectToSave, dbDoc, collectionName); + Object id = saveDocument(collectionName, dbDoc, objectToSave.getClass()); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); - Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass()); + T saved = populateIdIfNecessary(objectToSave, id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); - populateIdIfNecessary(objectToSave, id); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); + return maybeCallAfterSave(saved, dbDoc, collectionName); } - protected Object insertDBObject(final String collectionName, final DBObject dbDoc, final Class entityClass) { + @SuppressWarnings("ConstantConditions") + protected Object insertDocument(String collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting DBObject containing fields: {} in collection: {}", dbDoc.keySet(), collectionName); + LOGGER.debug(String.format("Inserting Document containing fields: %s in collection: %s", document.keySet(), + collectionName)); } - return execute(collectionName, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, - entityClass, dbDoc, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - WriteResult writeResult = writeConcernToUse == null ? 
collection.insert(dbDoc) - : collection.insert(dbDoc, writeConcernToUse); - handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.INSERT); - return dbDoc.get(ID_FIELD); + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(entityClass); + + return execute(collectionName, collection -> { + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, + mappedDocument.getDocument(), null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + if (writeConcernToUse == null) { + collection.insertOne(mappedDocument.getDocument()); + } else { + collection.withWriteConcern(writeConcernToUse).insertOne(mappedDocument.getDocument()); } + + return operations.forEntity(mappedDocument.getDocument()).getId(); }); } - protected List insertDBObjectList(final String collectionName, final List dbDocList) { - if (dbDocList.isEmpty()) { + protected List insertDocumentList(String collectionName, List documents) { + + if (documents.isEmpty()) { return Collections.emptyList(); } if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting list of DBObjects containing {} items", dbDocList.size()); + LOGGER.debug(String.format("Inserting list of Documents containing %s items", documents.size())); } - execute(collectionName, new CollectionCallback() { - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null, - null, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList) - : collection.insert(dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse); - handleAnyWriteResultErrors(writeResult, null, MongoActionOperation.INSERT_LIST); - return null; - } - }); + execute(collectionName, collection -> { - List ids = new ArrayList(); - for (DBObject dbo : dbDocList) { - Object id = dbo.get(ID_FIELD); - if (id instanceof ObjectId) { - ids.add((ObjectId) id); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null, + null, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + if (writeConcernToUse == null) { + collection.insertMany(documents); } else { - // no id was generated - ids.add(null); + collection.withWriteConcern(writeConcernToUse).insertMany(documents); } - } - return ids; + + return null; + }); + + return MappedDocument.toIds(documents); } - protected Object saveDBObject(final String collectionName, final DBObject dbDoc, final Class entityClass) { + protected Object saveDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving DBObject containing fields: {}", dbDoc.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", dbDoc.keySet())); } - return execute(collectionName, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, - dbDoc, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - WriteResult writeResult = writeConcernToUse == null ? 
collection.save(dbDoc) - : collection.save(dbDoc, writeConcernToUse); - handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.SAVE); - return dbDoc.get(ID_FIELD); + return execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, + dbDoc, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + MappedDocument mapped = MappedDocument.of(dbDoc); + + MongoCollection collectionToUse = writeConcernToUse == null // + ? collection // + : collection.withWriteConcern(writeConcernToUse); + + if (!mapped.hasId()) { + + mapped = queryOperations.createInsertContext(mapped).prepareId(mappingContext.getPersistentEntity(entityClass)); + collectionToUse.insertOne(mapped.getDocument()); + } else { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); + Document replacement = updateContext.getMappedUpdate(entity); + Document filter = updateContext.getReplacementQuery(); + if (updateContext.requiresShardKey(filter, entity)) { + + if (entity.getShardKey().isImmutable()) { + filter = updateContext.applyShardKey(entity, filter, null); + } else { + filter = updateContext.applyShardKey(entity, filter, + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()); + } + } + + collectionToUse.replaceOne(filter, replacement, new com.mongodb.client.model.ReplaceOptions().upsert(true)); } + return mapped.getId(); }); } - public WriteResult upsert(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, true, false); + @Override + public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); } - public WriteResult upsert(Query query, Update update, String collectionName) { + @Override + public UpdateResult upsert(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, true, false); } - public WriteResult upsert(Query query, Update update, Class entityClass, String collectionName) { + @Override + public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { + + Assert.notNull(entityClass, "EntityClass must not be null"); + return doUpdate(collectionName, query, update, entityClass, true, false); } - public WriteResult updateFirst(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false); + @Override + public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); } - public WriteResult updateFirst(final Query query, final Update update, final String collectionName) { + @Override + public UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, false); } - public WriteResult updateFirst(Query query, Update update, Class entityClass, String collectionName) { + @Override + public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName) { + + Assert.notNull(entityClass, "EntityClass must not be null"); + 
return doUpdate(collectionName, query, update, entityClass, false, false); } - public WriteResult updateMulti(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true); + @Override + public UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); } - public WriteResult updateMulti(final Query query, final Update update, String collectionName) { + @Override + public UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, true); } - public WriteResult updateMulti(final Query query, final Update update, Class entityClass, String collectionName) { + @Override + public UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { + + Assert.notNull(entityClass, "EntityClass must not be null"); + return doUpdate(collectionName, query, update, entityClass, false, true); } - protected WriteResult doUpdate(final String collectionName, final Query query, final Update update, - final Class entityClass, final boolean upsert, final boolean multi) { + @SuppressWarnings("ConstantConditions") + protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefinition update, + @Nullable Class entityClass, boolean upsert, boolean multi) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); + + UpdateContext updateContext = multi ? queryOperations.updateContext(update, query, upsert) + : queryOperations.updateSingleContext(update, query, upsert); + updateContext.increaseVersionForUpdateIfNecessary(entity); - return execute(collectionName, new CollectionCallback() { - public WriteResult doInCollection(DBCollection collection) throws MongoException, DataAccessException { + Document queryObj = updateContext.getMappedQuery(entity); + UpdateOptions opts = updateContext.getUpdateOptions(entityClass, query); - MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); + if (updateContext.isAggregationUpdate()) { - increaseVersionForUpdateIfNecessary(entity, update); + List pipeline = updateContext.getUpdatePipeline(entityClass); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + update.getUpdateObject(), queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - DBObject queryObj = query == null ? new BasicDBObject() - : queryMapper.getMappedObject(query.getQueryObject(), entity); - DBObject updateObj = update == null ? 
new BasicDBObject() - : updateMapper.getMappedObject(update.getUpdateObject(), entity); + return execute(collectionName, collection -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", - serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName); + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); } - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, - entityClass, updateObj, queryObj); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - WriteResult writeResult = writeConcernToUse == null ? collection.update(queryObj, updateObj, upsert, multi) - : collection.update(queryObj, updateObj, upsert, multi, writeConcernToUse); - - if (entity != null && entity.hasVersionProperty() && !multi) { - if (ReflectiveWriteResultInvoker.wasAcknowledged(writeResult) && writeResult.getN() == 0 - && dbObjectContainsVersionProperty(queryObj, entity)) { - throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: " - + updateObj.toMap().toString() + " to collection " + collectionName); + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + + return multi ? collection.updateMany(queryObj, pipeline, opts) : collection.updateOne(queryObj, pipeline, opts); + }); + } + + Document updateObj = updateContext.getMappedUpdate(entity); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + updateObj, queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + return execute(collectionName, collection -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } + + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + + if (!UpdateMapper.isUpdateObject(updateObj)) { + + Document filter = new Document(queryObj); + + if (updateContext.requiresShardKey(filter, entity)) { + + if (entity.getShardKey().isImmutable()) { + filter = updateContext.applyShardKey(entity, filter, null); + } else { + filter = updateContext.applyShardKey(entity, filter, + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()); } } - handleAnyWriteResultErrors(writeResult, queryObj, MongoActionOperation.UPDATE); - return writeResult; + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + return collection.replaceOne(filter, updateObj, replaceOptions); + } else { + return multi ? 
collection.updateMany(queryObj, updateObj, opts) + : collection.updateOne(queryObj, updateObj, opts); } }); } - private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity persistentEntity, Update update) { + @Override + public DeleteResult remove(Object object) { - if (persistentEntity != null && persistentEntity.hasVersionProperty()) { - String versionFieldName = persistentEntity.getVersionProperty().getFieldName(); - if (!update.modifies(versionFieldName)) { - update.inc(versionFieldName, 1L); - } - } + Assert.notNull(object, "Object must not be null"); + + return remove(object, getCollectionName(object.getClass())); } - private boolean dbObjectContainsVersionProperty(DBObject dbObject, MongoPersistentEntity persistentEntity) { + @Override + public DeleteResult remove(Object object, String collectionName) { - if (persistentEntity == null || !persistentEntity.hasVersionProperty()) { - return false; - } + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + Query query = operations.forEntity(object).getRemoveByQuery(); - return dbObject.containsField(persistentEntity.getVersionProperty().getFieldName()); + return doRemove(collectionName, query, object.getClass(), false); } - public WriteResult remove(Object object) { + @Override + public DeleteResult remove(Query query, String collectionName) { + return doRemove(collectionName, query, null, true); + } - if (object == null) { - return null; - } + @Override + public DeleteResult remove(Query query, Class entityClass) { + return remove(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public DeleteResult remove(Query query, Class entityClass, String collectionName) { - return remove(getIdQueryFor(object), object.getClass()); + Assert.notNull(entityClass, "EntityClass must not be null"); + return doRemove(collectionName, query, entityClass, true); } - public WriteResult remove(Object object, String collection) { + @SuppressWarnings("ConstantConditions") + protected DeleteResult doRemove(String collectionName, Query query, @Nullable Class entityClass, + boolean multi) { - Assert.hasText(collection); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - if (object == null) { - return null; - } + MongoPersistentEntity entity = getPersistentEntity(entityClass); - return doRemove(collection, getIdQueryFor(object), object.getClass()); - } + DeleteContext deleteContext = multi ? queryOperations.deleteQueryContext(query) + : queryOperations.deleteSingleContext(query); + Document queryObject = deleteContext.getMappedQuery(entity); + DeleteOptions options = deleteContext.getDeleteOptions(entityClass); - /** - * Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s - * property value as its {@link Entry#getValue()}. - * - * @param object - * @return - */ - private Entry extractIdPropertyAndValue(Object object) { - - Assert.notNull(object, "Id cannot be extracted from 'null'."); - - Class objectType = object.getClass(); - - if (object instanceof DBObject) { - return Collections.singletonMap(ID_FIELD, ((DBObject) object).get(ID_FIELD)).entrySet().iterator().next(); - } - - MongoPersistentEntity entity = mappingContext.getPersistentEntity(objectType); - MongoPersistentProperty idProp = entity == null ? 
null : entity.getIdProperty(); - - if (idProp == null || entity == null) { - throw new MappingException("No id property found for object of type " + objectType); - } - - Object idValue = entity.getPropertyAccessor(object).getProperty(idProp); - return Collections.singletonMap(idProp.getFieldName(), idValue).entrySet().iterator().next(); - } - - /** - * Returns a {@link Query} for the given entity by its id. - * - * @param object must not be {@literal null}. - * @return - */ - private Query getIdQueryFor(Object object) { - - Entry id = extractIdPropertyAndValue(object); - return new Query(where(id.getKey()).is(id.getValue())); - } - - /** - * Returns a {@link Query} for the given entities by their ids. - * - * @param objects must not be {@literal null} or {@literal empty}. - * @return - */ - private Query getIdInQueryFor(Collection objects) { - - Assert.notEmpty(objects, "Cannot create Query for empty collection."); - - Iterator it = objects.iterator(); - Entry firstEntry = extractIdPropertyAndValue(it.next()); - - ArrayList ids = new ArrayList(objects.size()); - ids.add(firstEntry.getValue()); - - while (it.hasNext()) { - ids.add(extractIdPropertyAndValue(it.next()).getValue()); - } - - return new Query(where(firstEntry.getKey()).in(ids)); - } - - private void assertUpdateableIdIfNotSet(Object entity) { - - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entity.getClass()); - MongoPersistentProperty idProperty = persistentEntity == null ? null : persistentEntity.getIdProperty(); - - if (idProperty == null || persistentEntity == null) { - return; - } - - Object idValue = persistentEntity.getPropertyAccessor(entity).getProperty(idProperty); - - if (idValue == null && !MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) { - throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(), - entity.getClass().getName())); - } - } - - public WriteResult remove(Query query, String collectionName) { - return remove(query, null, collectionName); - } - - public WriteResult remove(Query query, Class entityClass) { - return remove(query, entityClass, determineCollectionName(entityClass)); - } - - public WriteResult remove(Query query, Class entityClass, String collectionName) { - return doRemove(collectionName, query, entityClass); - } + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, + null, queryObject); - protected WriteResult doRemove(final String collectionName, final Query query, final Class entityClass) { + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - if (query == null) { - throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!"); - } - - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + return execute(collectionName, collection -> { - final DBObject queryObject = query.getQueryObject(); - final MongoPersistentEntity entity = getPersistentEntity(entityClass); + maybeEmitEvent(new BeforeDeleteEvent<>(queryObject, entityClass, collectionName)); - return execute(collectionName, new CollectionCallback() { - public WriteResult doInCollection(DBCollection collection) throws MongoException, DataAccessException { + Document removeQuery = queryObject; - maybeEmitEvent(new BeforeDeleteEvent(queryObject, entityClass, collectionName)); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Remove using query: %s in 
collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); + } - DBObject dboq = queryMapper.getMappedObject(queryObject, entity); + if (query.getLimit() > 0 || query.getSkip() > 0) { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, - entityClass, null, queryObject); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MongoCursor cursor = new QueryCursorPreparer(query, entityClass) + .prepare(collection.find(removeQuery).projection(MappedDocument.getIdOnlyProjection())) // + .iterator(); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { serializeToJsonSafely(dboq), collection.getName() }); + Set ids = new LinkedHashSet<>(); + while (cursor.hasNext()) { + ids.add(MappedDocument.of(cursor.next()).getId()); } - WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) - : collection.remove(dboq, writeConcernToUse); + removeQuery = MappedDocument.getIdIn(ids); + } - handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE); + MongoCollection collectionToUse = writeConcernToUse != null + ? collection.withWriteConcern(writeConcernToUse) + : collection; - maybeEmitEvent(new AfterDeleteEvent(queryObject, entityClass, collectionName)); + DeleteResult result = multi ? collectionToUse.deleteMany(removeQuery, options) + : collectionToUse.deleteOne(removeQuery, options); - return wr; - } + maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName)); + + return result; }); } + @Override public List findAll(Class entityClass) { - return findAll(entityClass, determineCollectionName(entityClass)); + return findAll(entityClass, getCollectionName(entityClass)); } + @Override public List findAll(Class entityClass, String collectionName) { - return executeFindMultiInternal(new FindCallback(null), null, - new ReadDbObjectCallback(mongoConverter, entityClass, collectionName), collectionName); + return executeFindMultiInternal( + new FindCallback(CollectionPreparer.identity(), new Document(), new Document(), + operations.forType(entityClass).getCollation().map(Collation::toMongoCollation).orElse(null)), + CursorPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } + @Override public MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(null, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(), + return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), entityClass); } + @Override public MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, - MapReduceOptions mapReduceOptions, Class entityClass) { - return mapReduce(null, inputCollectionName, mapFunction, reduceFunction, mapReduceOptions, entityClass); + @Nullable MapReduceOptions mapReduceOptions, Class entityClass) { + return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, mapReduceOptions, entityClass); } + @Override public MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(), - entityClass); + return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new 
MapReduceOptions(), entityClass); } + @Override public MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, - String reduceFunction, MapReduceOptions mapReduceOptions, Class entityClass) { + String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass) { + + return new MapReduceResults<>( + mapReduce(query, entityClass, inputCollectionName, mapFunction, reduceFunction, mapReduceOptions, entityClass), + new Document()); + } + + /** + * @param query + * @param domainType + * @param inputCollectionName + * @param mapFunction + * @param reduceFunction + * @param mapReduceOptions + * @param resultType + * @return + * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. + */ + @Deprecated + public List mapReduce(Query query, Class domainType, String inputCollectionName, String mapFunction, + String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class resultType) { + + Assert.notNull(domainType, "Domain type must not be null"); + Assert.notNull(inputCollectionName, "Input collection name must not be null"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); String mapFunc = replaceWithResourceIfNecessary(mapFunction); String reduceFunc = replaceWithResourceIfNecessary(reduceFunction); - DBCollection inputCollection = getCollection(inputCollectionName); + CollectionPreparerDelegate readPreference = createDelegate(query); + MongoCollection inputCollection = readPreference + .prepare(getAndPrepareCollection(doGetDatabase(), inputCollectionName)); - MapReduceCommand command = new MapReduceCommand(inputCollection, mapFunc, reduceFunc, - mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(), - query == null || query.getQueryObject() == null ? 
null - : queryMapper.getMappedObject(query.getQueryObject(), null)); + // MapReduceOp + MapReduceIterable mapReduce = inputCollection.mapReduce(mapFunc, reduceFunc, Document.class); - copyMapReduceOptionsToCommand(query, mapReduceOptions, command); + if (query.getLimit() > 0 && mapReduceOptions != null && mapReduceOptions.getLimit() == null) { + mapReduce = mapReduce.limit(query.getLimit()); + } + if (query.getMeta().hasMaxTime()) { + mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS); + } - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing MapReduce on collection [{}], mapFunction [{}], reduceFunction [{}]", command.getInput(), - mapFunc, reduceFunc); + Document mappedSort = getMappedSortObject(query, domainType); + if (mappedSort != null && !mappedSort.isEmpty()) { + mapReduce = mapReduce.sort(mappedSort); } - MapReduceOutput mapReduceOutput = inputCollection.mapReduce(command); + mapReduce = mapReduce + .filter(queryMapper.getMappedObject(query.getQueryObject(), mappingContext.getPersistentEntity(domainType))); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("MapReduce command result = [{}]", serializeToJsonSafely(mapReduceOutput.results())); - } + Optional collation = query.getCollation(); - List mappedResults = new ArrayList(); - DbObjectCallback callback = new ReadDbObjectCallback(mongoConverter, entityClass, inputCollectionName); + if (mapReduceOptions != null) { - for (DBObject dbObject : mapReduceOutput.results()) { - mappedResults.add(callback.doWith(dbObject)); - } + Optionals.ifAllPresent(collation, mapReduceOptions.getCollation(), (l, r) -> { + throw new IllegalArgumentException( + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); + }); - return new MapReduceResults(mappedResults, mapReduceOutput); - } + if (mapReduceOptions.getCollation().isPresent()) { + collation = mapReduceOptions.getCollation(); + } - public GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass) { - return group(null, inputCollectionName, groupBy, entityClass); - } + if (!CollectionUtils.isEmpty(mapReduceOptions.getScopeVariables())) { + mapReduce = mapReduce.scope(new Document(mapReduceOptions.getScopeVariables())); + } - public GroupByResults group(Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass) { + if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit() > 0) { + mapReduce = mapReduce.limit(mapReduceOptions.getLimit()); + } - DBObject dbo = groupBy.getGroupByObject(); - dbo.put("ns", inputCollectionName); + if (mapReduceOptions.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) { + mapReduce = mapReduce.finalizeFunction(mapReduceOptions.getFinalizeFunction().get()); + } - if (criteria == null) { - dbo.put("cond", null); - } else { - dbo.put("cond", queryMapper.getMappedObject(criteria.getCriteriaObject(), null)); - } - // If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and - // convert to DBObject + if (mapReduceOptions.getJavaScriptMode() != null) { + mapReduce = mapReduce.jsMode(mapReduceOptions.getJavaScriptMode()); + } - if (dbo.containsField("initial")) { - Object initialObj = dbo.get("initial"); - if (initialObj instanceof String) { - String initialAsString = replaceWithResourceIfNecessary((String) initialObj); - dbo.put("initial", JSON.parse(initialAsString)); + if (mapReduceOptions.getOutputSharded().isPresent()) { + 
MongoCompatibilityAdapter.mapReduceIterableAdapter(mapReduce) + .sharded(mapReduceOptions.getOutputSharded().get()); } - } - if (dbo.containsField("$reduce")) { - dbo.put("$reduce", replaceWithResourceIfNecessary(dbo.get("$reduce").toString())); - } - if (dbo.containsField("$keyf")) { - dbo.put("$keyf", replaceWithResourceIfNecessary(dbo.get("$keyf").toString())); - } - if (dbo.containsField("finalize")) { - dbo.put("finalize", replaceWithResourceIfNecessary(dbo.get("finalize").toString())); - } + if (StringUtils.hasText(mapReduceOptions.getOutputCollection()) && !mapReduceOptions.usesInlineOutput()) { - DBObject commandObject = new BasicDBObject("group", dbo); + mapReduce = mapReduce.collectionName(mapReduceOptions.getOutputCollection()) + .action(mapReduceOptions.getMapReduceAction()); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing Group with DBObject [{}]", serializeToJsonSafely(commandObject)); + if (mapReduceOptions.getOutputDatabase().isPresent()) { + mapReduce = mapReduce.databaseName(mapReduceOptions.getOutputDatabase().get()); + } + } } - CommandResult commandResult = executeCommand(commandObject, getDb().getOptions()); - handleCommandError(commandResult, commandObject); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Group command result = [{}]", commandResult); + if (!collation.isPresent()) { + collation = operations.forType(domainType).getCollation(); } - @SuppressWarnings("unchecked") - Iterable resultSet = (Iterable) commandResult.get("retval"); - List mappedResults = new ArrayList(); - DbObjectCallback callback = new ReadDbObjectCallback(mongoConverter, entityClass, inputCollectionName); + mapReduce = collation.map(Collation::toMongoCollation).map(mapReduce::collation).orElse(mapReduce); - for (DBObject dbObject : resultSet) { - mappedResults.add(callback.doWith(dbObject)); + List mappedResults = new ArrayList<>(); + DocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName); + + for (Document document : mapReduce) { + mappedResults.add(callback.doWith(document)); } - return new GroupByResults(mappedResults, commandResult); + return mappedResults; } @Override public AggregationResults aggregate(TypedAggregation aggregation, Class outputType) { - return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType); + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); } @Override public AggregationResults aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregate(aggregation, inputCollectionName, outputType, context); + return aggregate(aggregation, inputCollectionName, outputType, null); } @Override public AggregationResults aggregate(Aggregation aggregation, Class inputType, Class outputType) { - return aggregate(aggregation, determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(inputType), outputType, + queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); } @Override @@ -1487,38 
+2038,95 @@ public AggregationResults aggregate(Aggregation aggregation, String colle return aggregate(aggregation, collectionName, outputType, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override + public Stream aggregateStream(TypedAggregation aggregation, String inputCollectionName, + Class outputType) { + return aggregateStream(aggregation, inputCollectionName, outputType, null); + } + + @Override + public Stream aggregateStream(TypedAggregation aggregation, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregateStream(aggregation, getCollectionName(aggregation.getInputType()), outputType); + } + + @Override + public Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregateStream(aggregation, getCollectionName(inputType), outputType, + queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); + } + + @Override + public Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { + return aggregateStream(aggregation, collectionName, outputType, null); + } + + @Override + @SuppressWarnings("unchecked") public List findAllAndRemove(Query query, String collectionName) { - return findAndRemove(query, null, collectionName); + return (List) findAllAndRemove(query, Object.class, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List findAllAndRemove(Query query, Class entityClass) { - return findAllAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } + @Override + public UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected UpdateResult replace(Query query, Class entityType, T replacement, ReplaceOptions options, + String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null; use ReplaceOptions#none() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 or none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + replacement = maybeCallBeforeConvert(replacement, collectionName); + Document
mappedReplacement = updateContext.getMappedUpdate(mappingContext.getPersistentEntity(entityType)); + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + replacement = maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedReplacement, updateContext.getQueryObject()); + + UpdateResult result = doReplace(options, entityType, collectionName, updateContext, + createCollectionPreparer(query, action), mappedReplacement); + + if (result.wasAcknowledged()) { + + maybeEmitEvent(new AfterSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallAfterSave(replacement, mappedReplacement, collectionName); + } + + return result; + } + /** * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} * and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is * constructed out of the find result. - * + * * @param collectionName * @param query * @param entityClass @@ -1529,69 +2137,212 @@ protected List doFindAndDelete(String collectionName, Query query, Class< List result = find(query, entityClass, collectionName); if (!CollectionUtils.isEmpty(result)) { - remove(getIdInQueryFor(result), entityClass, collectionName); + + Query byIdInQuery = operations.getByIdInQuery(result); + if (query.hasReadPreference()) { + byIdInQuery.withReadPreference(query.getReadPreference()); + } + + remove(byIdInQuery, entityClass, collectionName); } return result; } protected AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType, + @Nullable AggregationOperationContext context) { + + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.notNull(outputType, "Output type must not be null"); + + return doAggregate(aggregation, collectionName, outputType, + queryOperations.createAggregation(aggregation, context)); + } + + private AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + AggregationDefinition context) { + return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext()); + } + + @SuppressWarnings("ConstantConditions") + protected AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, AggregationOperationContext context) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); + ReadDocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); + + AggregationOptions options = aggregation.getOptions(); + AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext); - AggregationOperationContext rootContext = context == null ? 
Aggregation.DEFAULT_CONTEXT : context; - DBObject command = aggregation.toDbObject(collectionName, rootContext); + if (options.isExplain()) { + + Document command = aggregationUtil.createCommand(collectionName, aggregation, context); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing aggregation: %s", serializeToJsonSafely(command))); + } + + Document commandResult = executeCommand(command); + return new AggregationResults<>(commandResult.get("results", new ArrayList(0)).stream() + .map(callback::doWith).collect(Collectors.toList()), commandResult); + } + + List pipeline = aggregationUtil.createPipeline(aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command)); + LOGGER.debug( + String.format("Executing aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } - CommandResult commandResult = executeCommand(command, this.readPreference); - handleCommandError(commandResult, command); + return execute(collectionName, collection -> { + + List rawResult = new ArrayList<>(); + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); + Class domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType() + : null; + + Optional collation = Optionals.firstNonEmpty(options::getCollation, + () -> operations.forType(domainType) // + .getCollation()); + + AggregateIterable aggregateIterable = delegate.prepare(collection).aggregate(pipeline, Document.class) // + .collation(collation.map(Collation::toMongoCollation).orElse(null)); - return new AggregationResults(returnPotentiallyMappedResults(outputType, commandResult, collectionName), - commandResult); + if (options.isAllowDiskUseSet()) { + aggregateIterable = aggregateIterable.allowDiskUse(options.isAllowDiskUse()); + } + + if (options.getCursorBatchSize() != null) { + aggregateIterable = aggregateIterable.batchSize(options.getCursorBatchSize()); + } + + options.getComment().ifPresent(aggregateIterable::comment); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + aggregateIterable = hintFunction.apply(mongoDbFactory, aggregateIterable::hintString, aggregateIterable::hint); + } + + if (options.hasExecutionTimeLimit()) { + aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + if (options.isSkipResults()) { + + // toCollection only allowed for $out and $merge if those are the last stages + if (aggregation.getPipeline().isOutOrMerge()) { + aggregateIterable.toCollection(); + } else { + aggregateIterable.first(); + } + return new AggregationResults<>(Collections.emptyList(), new Document()); + } + + MongoIterable iterable = aggregateIterable.map(val -> { + + rawResult.add(val); + return callback.doWith(val); + }); + + return new AggregationResults<>(iterable.into(new ArrayList<>()), + new Document("results", rawResult).append("ok", 1.0D)); + }); } - /** - * Returns the potentially mapped results of the given {@commandResult} contained some. 
- * - * @param outputType - * @param commandResult - * @return - */ - private List returnPotentiallyMappedResults(Class outputType, CommandResult commandResult, - String collectionName) { + @SuppressWarnings("ConstantConditions") + protected Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType, + @Nullable AggregationOperationContext context) { - @SuppressWarnings("unchecked") - Iterable resultSet = (Iterable) commandResult.get("result"); - if (resultSet == null) { - return Collections.emptyList(); - } + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); + Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming"); + + AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context); - DbObjectCallback callback = new UnwrapAndReadDbObjectCallback(mongoConverter, outputType, collectionName); + AggregationOptions options = aggregation.getOptions(); + List pipeline = aggregationDefinition.getAggregationPipeline(); - List mappedResults = new ArrayList(); - for (DBObject dbObject : resultSet) { - mappedResults.add(callback.doWith(dbObject)); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Streaming aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } - return mappedResults; + ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); + + return execute(collectionName, (CollectionCallback>) collection -> { + + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); + + AggregateIterable cursor = delegate.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } + + if (options.getCursorBatchSize() != null) { + cursor = cursor.batchSize(options.getCursorBatchSize()); + } + + options.getComment().ifPresent(cursor::comment); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (options.getHintObject().isPresent()) { + cursor = hintFunction.apply(mongoDbFactory, cursor::hintString, cursor::hint); + } + + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + Class domainType = aggregation instanceof TypedAggregation typedAggregation ? 
typedAggregation.getInputType() + : null; + + Optionals.firstNonEmpty(options::getCollation, // + () -> operations.forType(domainType).getCollation()) // + .map(Collation::toMongoCollation) // + .ifPresent(cursor::collation); + + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, readCallback).stream(); + }); } - protected String replaceWithResourceIfNecessary(String function) { + @Override + public ExecutableFind query(Class domainType) { + return new ExecutableFindOperationSupport(this).query(domainType); + } + + @Override + public ExecutableUpdate update(Class domainType) { + return new ExecutableUpdateOperationSupport(this).update(domainType); + } - String func = function; + @Override + public ExecutableRemove remove(Class domainType) { + return new ExecutableRemoveOperationSupport(this).remove(domainType); + } + + @Override + public ExecutableAggregation aggregateAndReturn(Class domainType) { + return new ExecutableAggregationOperationSupport(this).aggregateAndReturn(domainType); + } + + @Override + public ExecutableMapReduce mapReduce(Class domainType) { + return new ExecutableMapReduceOperationSupport(this).mapReduce(domainType); + } + + @Override + public ExecutableInsert insert(Class domainType) { + return new ExecutableInsertOperationSupport(this).insert(domainType); + } + + protected String replaceWithResourceIfNecessary(String function) { if (this.resourceLoader != null && ResourceUtils.isUrl(function)) { - Resource functionResource = resourceLoader.getResource(func); + Resource functionResource = resourceLoader.getResource(function); if (!functionResource.exists()) { - throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found!", function)); + throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found", function)); } Scanner scanner = null; @@ -1600,7 +2351,7 @@ protected String replaceWithResourceIfNecessary(String function) { scanner = new Scanner(functionResource.getInputStream()); return scanner.useDelimiter("\\A").next(); } catch (IOException e) { - throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s!", function), e); + throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s", function), e); } finally { if (scanner != null) { scanner.close(); @@ -1608,209 +2359,389 @@ protected String replaceWithResourceIfNecessary(String function) { } } - return func; + return function; } - private void copyMapReduceOptionsToCommand(Query query, MapReduceOptions mapReduceOptions, - MapReduceCommand mapReduceCommand) { - - if (query != null) { - if (query.getSkip() != 0 || query.getFieldsObject() != null) { - throw new InvalidDataAccessApiUsageException( - "Can not use skip or field specification with map reduce operations"); - } - if (query.getLimit() > 0 && mapReduceOptions.getLimit() == null) { - mapReduceCommand.setLimit(query.getLimit()); - } - if (query.getSortObject() != null) { - mapReduceCommand.setSort(queryMapper.getMappedObject(query.getSortObject(), null)); + @Override + @SuppressWarnings("ConstantConditions") + public Set getCollectionNames() { + return execute(db -> { + Set result = new LinkedHashSet<>(); + for (String name : MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db).listCollectionNames()) { + result.add(name); } - } + return result; + }); + } - if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit().intValue() > 0) { - mapReduceCommand.setLimit(mapReduceOptions.getLimit()); - } + public MongoDatabase 
getDb() { + return doGetDatabase(); + } - if (mapReduceOptions.getJavaScriptMode() != null) { - mapReduceCommand.setJsMode(true); - } - if (!mapReduceOptions.getExtraOptions().isEmpty()) { - for (Map.Entry entry : mapReduceOptions.getExtraOptions().entrySet()) { - ReflectiveMapReduceInvoker.addExtraOption(mapReduceCommand, entry.getKey(), entry.getValue()); - } - } - if (mapReduceOptions.getFinalizeFunction() != null) { - mapReduceCommand.setFinalize(this.replaceWithResourceIfNecessary(mapReduceOptions.getFinalizeFunction())); - } - if (mapReduceOptions.getOutputDatabase() != null) { - mapReduceCommand.setOutputDB(mapReduceOptions.getOutputDatabase()); - } - if (!mapReduceOptions.getScopeVariables().isEmpty()) { - mapReduceCommand.setScope(mapReduceOptions.getScopeVariables()); + protected MongoDatabase doGetDatabase() { + return MongoDatabaseUtils.getDatabase(mongoDbFactory, sessionSynchronization); + } + + protected MongoDatabase prepareDatabase(MongoDatabase database) { + return database; + } + + protected , T> E maybeEmitEvent(E event) { + eventDelegate.publishEvent(event); + return event; + } + + protected T maybeCallBeforeConvert(T object, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(BeforeConvertCallback.class, object, collection); } + + return object; } - public Set getCollectionNames() { - return execute(new DbCallback>() { - public Set doInDB(DB db) throws MongoException, DataAccessException { - return db.getCollectionNames(); - } - }); + protected T maybeCallBeforeSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(BeforeSaveCallback.class, object, document, collection); + } + + return object; } - public DB getDb() { - return mongoDbFactory.getDb(); + protected T maybeCallAfterSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(AfterSaveCallback.class, object, document, collection); + } + + return object; } - protected void maybeEmitEvent(MongoMappingEvent event) { - if (null != eventPublisher) { - eventPublisher.publishEvent(event); + protected T maybeCallAfterConvert(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection); } + + return object; } /** * Create the specified collection using the provided options - * + * * @param collectionName * @param collectionOptions * @return the collection that was created */ - protected DBCollection doCreateCollection(final String collectionName, final DBObject collectionOptions) { - return execute(new DbCallback() { - public DBCollection doInDB(DB db) throws MongoException, DataAccessException { - DBCollection coll = db.createCollection(collectionName, collectionOptions); - // TODO: Emit a collection created event - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", coll.getFullName()); - } - return coll; + @SuppressWarnings("ConstantConditions") + protected MongoCollection doCreateCollection(String collectionName, Document collectionOptions) { + return doCreateCollection(collectionName, getCreateCollectionOptions(collectionOptions)); + } + + /** + * Create the specified collection using the provided options + * + * @param collectionName + * @param collectionOptions + * @return the collection that was created + * @since 3.3.3 + */ + @SuppressWarnings("ConstantConditions") + protected MongoCollection 
doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { + + return execute(db -> { + + db.createCollection(collectionName, collectionOptions); + + MongoCollection coll = db.getCollection(collectionName, Document.class); + + // TODO: Emit a collection created event + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Created collection [%s]", + coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName)); } + return coll; }); } + private CreateCollectionOptions getCreateCollectionOptions(Document document) { + + CreateCollectionOptions options = new CreateCollectionOptions(); + + if (document.containsKey("capped")) { + options.capped((Boolean) document.get("capped")); + } + if (document.containsKey("size")) { + options.sizeInBytes(((Number) document.get("size")).longValue()); + } + if (document.containsKey("max")) { + options.maxDocuments(((Number) document.get("max")).longValue()); + } + + if (document.containsKey("collation")) { + options.collation(IndexConverters.fromDocument(document.get("collation", Document.class))); + } + + if (document.containsKey("validator")) { + + ValidationOptions validation = new ValidationOptions(); + + if (document.containsKey("validationLevel")) { + validation.validationLevel(ValidationLevel.fromString(document.getString("validationLevel"))); + } + if (document.containsKey("validationAction")) { + validation.validationAction(ValidationAction.fromString(document.getString("validationAction"))); + } + + validation.validator(document.get("validator", Document.class)); + options.validationOptions(validation); + } + + if (document.containsKey("timeseries")) { + + Document timeSeries = document.get("timeseries", Document.class); + TimeSeriesOptions timeseries = new TimeSeriesOptions(timeSeries.getString("timeField")); + if (timeSeries.containsKey("metaField")) { + timeseries.metaField(timeSeries.getString("metaField")); + } + if (timeSeries.containsKey("granularity")) { + timeseries.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); + } + options.timeSeriesOptions(timeseries); + } + return options; + } + /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The query document is specified as a standard {@link DBObject} and so is the fields specification. - * + * The query document is specified as a standard {@link Document} and so is the fields specification. + * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @return the {@link List} of converted objects. + * @return the converted object or {@literal null} if none exists. */ - protected T doFindOne(String collectionName, DBObject query, DBObject fields, Class entityClass) { + @Nullable + protected T doFindOne(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFindOne(collectionName, collectionPreparer, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. 
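+	 * 

A hedged sketch of an internal invocation (the {@code template} reference and the {@code Person} type are + * assumptions for illustration, not part of this change): + *

+	 * Document query = new Document("firstName", "Walter");
+	 * Document fields = new Document("firstName", 1);
+	 * Person person = template.doFindOne("person", CollectionPreparer.identity(), query, fields,
+	 * 		CursorPreparer.NO_OP_PREPARER, Person.class);
+	 * 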
+ * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param preparer the preparer used to modify the cursor on execution. + * @param entityClass the parameterized type of the returned list. + * @return the converted object or {@literal null} if none exists. + * @since 2.2 + */ + @Nullable + @SuppressWarnings("ConstantConditions") + protected T doFindOne(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, CursorPreparer preparer, Class entityClass) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - DBObject mappedQuery = queryMapper.getMappedObject(query, entity); - DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity); + + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query), - mappedFields, entityClass, collectionName); + LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), - new ReadDbObjectCallback(this.mongoConverter, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } /** * Map the results of an ad-hoc query on the default MongoDB collection to a List using the template's converter. The - * query document is specified as a standard DBObject and so is the fields specification. - * + * query document is specified as a standard Document and so is the fields specification. + * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. 
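 + *

A minimal sketch, assuming a {@code template} instance and a mapped {@code Person} type (illustrative names + * only): {@code List persons = template.doFind("person", CollectionPreparer.identity(), new Document(), + * new Document(), Person.class);} 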
*/ - protected List doFind(String collectionName, DBObject query, DBObject fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, - new ReadDbObjectCallback(this.mongoConverter, entityClass, collectionName)); + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } /** * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is * converted from the MongoDB native representation using an instance of {@see MongoConverter}. The query document is - * specified as a standard DBObject and so is the fields specification. - * + * specified as a standard Document and so is the fields specification. + * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set, + * (apply limits, skips and so on). * @return the {@link List} of converted objects. */ - protected List doFind(String collectionName, DBObject query, DBObject fields, Class entityClass, - CursorPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, - new ReadDbObjectCallback(mongoConverter, entityClass, collectionName)); + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, CursorPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, + new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - protected List doFind(String collectionName, DBObject query, DBObject fields, Class entityClass, - CursorPreparer preparer, DbObjectCallback objectCallback) { + protected List doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - DBObject mappedFields = queryMapper.getMappedFields(fields, entity); - DBObject mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp ? 
getMappedSortObject(sqcp.getSortObject(), entity) : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, - collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), + preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, objectCallback, collectionName); } - protected DBObject convertToDbObject(CollectionOptions collectionOptions) { - DBObject dbo = new BasicDBObject(); - if (collectionOptions != null) { - if (collectionOptions.getCapped() != null) { - dbo.put("capped", collectionOptions.getCapped().booleanValue()); - } - if (collectionOptions.getSize() != null) { - dbo.put("size", collectionOptions.getSize().intValue()); - } - if (collectionOptions.getMaxDocuments() != null) { - dbo.put("max", collectionOptions.getMaxDocuments().intValue()); - } + /** + * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while + * using sourceClass for mapping the query. + * + * @since 2.0 + */ + List doFind(CollectionPreparer> collectionPreparer, String collectionName, + Document query, Document fields, Class sourceClass, Class targetClass, CursorPreparer preparer) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedQuery = queryContext.getMappedQuery(entity); + + if (LOGGER.isDebugEnabled()) { + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp + ? getMappedSortObject(sqcp.getSortObject(), entity) + : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), sourceClass, + collectionName)); + } + + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); + } + + /** + * Convert given {@link CollectionOptions} to a document and take the domain type information into account when + * creating a mapped schema for validation.
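 + *

For example, options for a capped collection would map along these lines (illustrative values only): + *

+	 * CollectionOptions options = CollectionOptions.empty().capped().size(1024).maxDocuments(100);
+	 * Document doc = convertToDocument(options, Person.class);
+	 * // doc: { "capped" : true, "size" : 1024, "max" : 100 }
+	 * 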
    + * + * @param collectionOptions can be {@literal null}. + * @param targetType must not be {@literal null}. Use {@link Object} type instead. + * @return never {@literal null}. + * @since 2.1 + */ + protected Document convertToDocument(@Nullable CollectionOptions collectionOptions, Class targetType) { + + if (collectionOptions == null) { + return new Document(); + } + + Document doc = new Document(); + collectionOptions.getCapped().ifPresent(val -> doc.put("capped", val)); + collectionOptions.getSize().ifPresent(val -> doc.put("size", val)); + collectionOptions.getMaxDocuments().ifPresent(val -> doc.put("max", val)); + collectionOptions.getCollation().ifPresent(val -> doc.append("collation", val.toDocument())); + + collectionOptions.getValidationOptions().ifPresent(it -> { + + it.getValidationLevel().ifPresent(val -> doc.append("validationLevel", val.getValue())); + it.getValidationAction().ifPresent(val -> doc.append("validationAction", val.getValue())); + it.getValidator().ifPresent(val -> doc.append("validator", getMappedValidator(val, targetType))); + }); + + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { + + Document timeseries = new Document("timeField", it.getTimeField()); + if (StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); + + collectionOptions.getChangeStreamOptions().map(it -> new Document("enabled", it.getPreAndPostImages())) + .ifPresent(it -> { + doc.put("changeStreamPreAndPostImages", it); + }); + + return doc; + } + + Document getMappedValidator(Validator validator, Class domainType) { + + Document validationRules = validator.toDocument(); + + if (validationRules.containsKey("$jsonSchema")) { + return schemaMapper.mapSchema(validationRules, domainType); } - return dbo; + + return queryMapper.getMappedObject(validationRules, mappingContext.getPersistentEntity(domainType)); } /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

    - * The query document is specified as a standard DBObject and so is the fields specification. - * + * The first document that matches the query is returned and also removed from the collection in the database.
    + * The query document is specified as a standard Document and so is the fields specification. + * * @param collectionName name of the collection to retrieve the objects from * @param query the query document that specifies the criteria used to find a record * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected T doFindAndRemove(String collectionName, DBObject query, DBObject fields, DBObject sort, - Class entityClass) { - - EntityReader readerToUse = this.mongoConverter; + @SuppressWarnings("ConstantConditions") + protected T doFindAndRemove(CollectionPreparer collectionPreparer, String collectionName, Document query, + Document fields, Document sort, @Nullable Collation collation, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findAndRemove using query: {} fields: {} sort: {} for class: {} in collection: {}", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName); + LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort), - new ReadDbObjectCallback(readerToUse, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } - protected T doFindAndModify(String collectionName, DBObject query, DBObject fields, DBObject sort, - Class entityClass, Update update, FindAndModifyOptions options) { - - EntityReader readerToUse = this.mongoConverter; + @SuppressWarnings("ConstantConditions") + protected T doFindAndModify(CollectionPreparer collectionPreparer, String collectionName, Document query, + Document fields, Document sort, Class entityClass, UpdateDefinition update, + @Nullable FindAndModifyOptions options) { if (options == null) { options = new FindAndModifyOptions(); @@ -1818,61 +2749,136 @@ protected T doFindAndModify(String collectionName, DBObject query, DBObject MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - increaseVersionForUpdateIfNecessary(entity, update); + UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false); + updateContext.increaseVersionForUpdateIfNecessary(entity); - DBObject mappedQuery = queryMapper.getMappedObject(query, entity); - DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity); + Document mappedQuery = updateContext.getMappedQuery(entity); + Object mappedUpdate = updateContext.isAggregationUpdate() ? 
updateContext.getUpdatePipeline(entityClass) + : updateContext.getMappedUpdate(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndModify using query: {} fields: {} sort: {} for class: {} and update: {} " + "in collection: {}", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), - collectionName); + LOGGER.debug(String.format( + "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s in collection: %s", + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } - return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options), - new ReadDbObjectCallback(readerToUse, entityClass, collectionName), collectionName); + return executeFindOneInternal( + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, + update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } /** - * Populates the id property of the saved object, if it's not set already. - * - * @param savedObject - * @param id + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param resultType the target domain type. + * @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. */ - protected void populateIdIfNecessary(Object savedObject, Object id) { + @Nullable + protected T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery, + Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, + Class entityType, Document replacement, FindAndReplaceOptions options, Class resultType) { - if (id == null) { - return; - } + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection); + } + + CollectionPreparerDelegate createDelegate(Query query) { + return CollectionPreparerDelegate.of(query); + } - if (savedObject instanceof BasicDBObject) { - DBObject dbObject = (DBObject) savedObject; - dbObject.put(ID_FIELD, id); - return; + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createDelegate(query); + if (action == null) { + return collectionPreparer; } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); + } - MongoPersistentProperty idProp = getIdPropertyFor(savedObject.getClass()); + /** + * Customize this part for findAndReplace. 
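 + *

A caller-level sketch of how this method is typically reached (hypothetical {@code template}, {@code Person} + * type and statically imported {@code query(where(...))} helpers): + *

+	 * Person replacement = new Person("Walter", "White");
+	 * Person previous = template.findAndReplace(query(where("firstName").is("Walter")), replacement);
+	 * 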
+ * + * @param collectionName The name of the collection to perform the operation in. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param projection the projection descriptor. + * @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + @Nullable + private T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery, + Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, + Class entityType, Document replacement, FindAndReplaceOptions options, EntityProjection projection) { - if (idProp == null) { - return; + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), + serializeToJsonSafely(mappedSort), entityType, serializeToJsonSafely(replacement), collectionName)); } - ConversionService conversionService = mongoConverter.getConversionService(); - MongoPersistentEntity entity = mappingContext.getPersistentEntity(savedObject.getClass()); - PersistentPropertyAccessor accessor = entity.getPropertyAccessor(savedObject); + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, mappedSort, + replacement, collation, options), new ProjectingReadCallback<>(mongoConverter, projection, collectionName), + collectionName); + } + + private UpdateResult doReplace(ReplaceOptions options, Class entityType, String collectionName, + UpdateContext updateContext, CollectionPreparer> collectionPreparer, + Document replacement) { + + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); - if (accessor.getProperty(idProp) != null) { - return; + ReplaceCallback replaceCallback = new ReplaceCallback(collectionPreparer, + updateContext.getMappedQuery(persistentEntity), replacement, updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("replace one using query: %s for class: %s in collection: %s", + serializeToJsonSafely(updateContext.getMappedQuery(persistentEntity)), entityType, collectionName)); } - new ConvertingPropertyAccessor(accessor, conversionService).setProperty(idProp, id); + return execute(collectionName, replaceCallback); + } + + /** + * Populates the id property of the saved object, if it's not set already. 
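The `doFindAndReplace`/`doReplace` pair introduced above backs the public `findAndReplace(...)` API. A usage sketch with an illustrative `Person` type:

    import org.springframework.data.mongodb.core.FindAndReplaceOptions;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;

    Person replacement = new Person("Dave", "Matthews");

    // Without returnNew() this yields the pre-replacement document, or null
    // when nothing matched and upsert() is not set.
    Person previous = template.findAndReplace(
        Query.query(Criteria.where("lastname").is("Matthews")),
        replacement,
        FindAndReplaceOptions.options().upsert());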
+ * + * @param savedObject + * @param id + */ + protected T populateIdIfNecessary(T savedObject, Object id) { + + return operations.forEntity(savedObject, mongoConverter.getConversionService()) // + .populateIdIfNecessary(id); } - private DBCollection getAndPrepareCollection(DB db, String collectionName) { + private MongoCollection getAndPrepareCollection(MongoDatabase db, String collectionName) { try { - DBCollection collection = db.getCollection(collectionName); - prepareCollection(collection); + MongoCollection collection = db.getCollection(collectionName, Document.class); + collection = prepareCollection(collection); return collection; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); @@ -1883,23 +2889,24 @@ private DBCollection getAndPrepareCollection(DB db, String collectionName) { * Internal method using callbacks to do queries against the datastore that requires reading a single object from a * collection of objects. It will take the following steps *
 	 * <ol>
-	 * <li>Execute the given {@link ConnectionCallback} for a {@link DBObject}.</li>
-	 * <li>Apply the given {@link DbObjectCallback} to each of the {@link DBObject}s to obtain the result.</li>
+	 * <li>Execute the given {@link CollectionCallback} for a {@link Document}.</li>
+	 * <li>Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.</li>
 	 * </ol>
        - * + * * @param - * @param collectionCallback the callback to retrieve the {@link DBObject} with - * @param objectCallback the {@link DbObjectCallback} to transform {@link DBObject}s into the actual domain type + * @param collectionCallback the callback to retrieve the {@link Document} with + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ - private T executeFindOneInternal(CollectionCallback collectionCallback, - DbObjectCallback objectCallback, String collectionName) { + @Nullable + private T executeFindOneInternal(CollectionCallback collectionCallback, + DocumentCallback documentCallback, String collectionName) { try { - T result = objectCallback - .doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName))); - return result; + + Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)); + return document != null ? documentCallback.doWith(document) : null; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -1909,207 +2916,119 @@ private T executeFindOneInternal(CollectionCallback collectionCall * Internal method using callback to do queries against the datastore that requires reading a collection of objects. * It will take the following steps *
 	 * <ol>
-	 * <li>Execute the given {@link ConnectionCallback} for a {@link DBCursor}.</li>
-	 * <li>Prepare that {@link DBCursor} with the given {@link CursorPreparer} (will be skipped if {@link CursorPreparer}
-	 * is {@literal null}</li>
-	 * <li>Iterate over the {@link DBCursor} and applies the given {@link DbObjectCallback} to each of the
-	 * {@link DBObject}s collecting the actual result {@link List}.</li>
+	 * <li>Execute the given {@link CollectionCallback} for a {@link FindIterable}.</li>
+	 * <li>Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if
+	 * {@link CursorPreparer} is {@literal null}</li>
+	 * <li>Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the
+	 * {@link Document}s collecting the actual result {@link List}.</li>
 	 * </ol>
            - * + * * @param - * @param collectionCallback the callback to retrieve the {@link DBCursor} with - * @param preparer the {@link CursorPreparer} to potentially modify the {@link DBCursor} before ireating over it - * @param objectCallback the {@link DbObjectCallback} to transform {@link DBObject}s into the actual domain type + * @param collectionCallback the callback to retrieve the {@link FindIterable} with + * @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ - private List executeFindMultiInternal(CollectionCallback collectionCallback, CursorPreparer preparer, - DbObjectCallback objectCallback, String collectionName) { + private List executeFindMultiInternal(CollectionCallback> collectionCallback, + CursorPreparer preparer, DocumentCallback documentCallback, String collectionName) { try { - DBCursor cursor = null; - - try { - - cursor = collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName)); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { - if (preparer != null) { - cursor = preparer.prepare(cursor); - } - - List result = new ArrayList(); + int available = cursor.available(); + List result = available > 0 ? new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { - DBObject object = cursor.next(); - result.add(objectCallback.doWith(object)); + Document object = cursor.next(); + result.add(documentCallback.doWith(object)); } return result; - - } finally { - - if (cursor != null) { - cursor.close(); - } } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } - private void executeQueryInternal(CollectionCallback collectionCallback, CursorPreparer preparer, - DocumentCallbackHandler callbackHandler, String collectionName) { - - try { - - DBCursor cursor = null; - - try { - cursor = collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName)); - - if (preparer != null) { - cursor = preparer.prepare(cursor); - } + private void executeQueryInternal(CollectionCallback> collectionCallback, + CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) { - while (cursor.hasNext()) { - DBObject dbobject = cursor.next(); - callbackHandler.processDocument(dbobject); - } + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { - } finally { - if (cursor != null) { - cursor.close(); - } + while (cursor.hasNext()) { + callbackHandler.processDocument(cursor.next()); } - } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } - private MongoPersistentEntity getPersistentEntity(Class type) { - return type == null ? null : mappingContext.getPersistentEntity(type); + public PersistenceExceptionTranslator getExceptionTranslator() { + return exceptionTranslator; } - private MongoPersistentProperty getIdPropertyFor(Class type) { - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(type); - return persistentEntity == null ? 
null : persistentEntity.getIdProperty(); + @Nullable + private MongoPersistentEntity getPersistentEntity(@Nullable Class type) { + return type != null ? mappingContext.getPersistentEntity(type) : null; } - private String determineEntityCollectionName(T obj) { - if (null != obj) { - return determineCollectionName(obj.getClass()); - } + private static MongoConverter getDefaultMongoConverter(MongoDatabaseFactory factory) { - return null; - } + DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); + MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); - String determineCollectionName(Class entityClass) { + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); - if (entityClass == null) { - throw new InvalidDataAccessApiUsageException( - "No class parameter provided, entity collection can't be determined!"); - } + MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(factory); + converter.afterPropertiesSet(); - MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - if (entity == null) { - throw new InvalidDataAccessApiUsageException( - "No Persistent Entity information found for the class " + entityClass.getName()); - } - return entity.getCollection(); + return converter; } - /** - * Handles {@link WriteResult} errors based on the configured {@link WriteResultChecking}. - * - * @param writeResult - * @param query - * @param operation - */ - protected void handleAnyWriteResultErrors(WriteResult writeResult, DBObject query, MongoActionOperation operation) { - - if (writeResultChecking == WriteResultChecking.NONE) { - return; - } - - String error = ReflectiveWriteResultInvoker.getError(writeResult); + @Nullable + private Document getMappedSortObject(@Nullable Query query, Class type) { - if (error == null) { - return; - } - - String message; - - switch (operation) { - - case INSERT: - case SAVE: - message = String.format("Insert/Save for %s failed: %s", query, error); - break; - case INSERT_LIST: - message = String.format("Insert list failed: %s", error); - break; - default: - message = String.format("Execution of %s%s failed: %s", operation, - query == null ? "" : " using query " + query.toString(), error); - } - - if (writeResultChecking == WriteResultChecking.EXCEPTION) { - throw new MongoDataIntegrityViolationException(message, writeResult, operation); - } else { - LOGGER.error(message); - return; + if (query == null) { + return null; } - } - - /** - * Inspects the given {@link CommandResult} for erros and potentially throws an - * {@link InvalidDataAccessApiUsageException} for that error. - * - * @param result must not be {@literal null}. - * @param source must not be {@literal null}. - */ - private void handleCommandError(CommandResult result, DBObject source) { - - try { - result.throwOnError(); - } catch (MongoException ex) { - - String error = result.getErrorMessage(); - error = error == null ? 
"NO MESSAGE" : error; - throw new InvalidDataAccessApiUsageException( - "Command execution failed: Error [" + error + "], Command = " + source, ex); - } + return getMappedSortObject(query.getSortObject(), type); } - private static final MongoConverter getDefaultMongoConverter(MongoDbFactory factory) { - - DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); - MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); - converter.afterPropertiesSet(); - return converter; + @Nullable + private Document getMappedSortObject(Document sortObject, Class type) { + return getMappedSortObject(sortObject, mappingContext.getPersistentEntity(type)); } - private DBObject getMappedSortObject(Query query, Class type) { + @Nullable + private Document getMappedSortObject(Document sortObject, @Nullable MongoPersistentEntity entity) { - if (query == null || query.getSortObject() == null) { + if (ObjectUtils.isEmpty(sortObject)) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return queryMapper.getMappedSort(sortObject, entity); } /** * Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original * exception if the conversation failed. Thus allows safe re-throwing of the return value. - * + * * @param ex the exception to translate * @param exceptionTranslator the {@link PersistenceExceptionTranslator} to be used for translation * @return */ - private static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, + static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, PersistenceExceptionTranslator exceptionTranslator) { RuntimeException resolved = exceptionTranslator.translateExceptionIfPossible(ex); return resolved == null ? ex : resolved; @@ -2118,237 +3037,424 @@ private static RuntimeException potentiallyConvertRuntimeException(RuntimeExcept // Callback implementations /** - * Simple {@link CollectionCallback} that takes a query {@link DBObject} plus an optional fields specification - * {@link DBObject} and executes that against the {@link DBCollection}. - * + * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link MongoCollection}. 
+ * * @author Oliver Gierke * @author Thomas Risberg + * @author Christoph Strobl */ - private static class FindOneCallback implements CollectionCallback { + private static class FindOneCallback implements CollectionCallback { - private final DBObject query; - private final DBObject fields; + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Optional fields; + private final CursorPreparer cursorPreparer; - public FindOneCallback(DBObject query, DBObject fields) { + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, Document fields, + CursorPreparer preparer) { + + this.collectionPreparer = collectionPreparer; this.query = query; - this.fields = fields; + this.fields = Optional.of(fields).filter(it -> !ObjectUtils.isEmpty(fields)); + this.cursorPreparer = preparer; } - public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException { - if (fields == null) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findOne using query: {} in db.collection: {}", serializeToJsonSafely(query), - collection.getFullName()); - } - return collection.findOne(query); - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), fields, - collection.getFullName()); - } - return collection.findOne(query, fields); + @Override + public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + + FindIterable iterable = cursorPreparer.initiateFind(collection, + col -> collectionPreparer.prepare(col).find(query, Document.class)); + + if (fields.isPresent()) { + iterable = iterable.projection(fields.get()); } + + return iterable.first(); } } /** - * Simple {@link CollectionCallback} that takes a query {@link DBObject} plus an optional fields specification - * {@link DBObject} and executes that against the {@link DBCollection}. - * + * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link MongoCollection}. 
+ * * @author Oliver Gierke * @author Thomas Risberg + * @author Christoph Strobl */ - private static class FindCallback implements CollectionCallback { + private static class FindCallback implements CollectionCallback> { - private final DBObject query; - private final DBObject fields; + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final @Nullable com.mongodb.client.model.Collation collation; - public FindCallback(DBObject query) { - this(query, null); - } + public FindCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, @Nullable com.mongodb.client.model.Collation collation) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(fields, "Fields must not be null"); - public FindCallback(DBObject query, DBObject fields) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; + this.collation = collation; } - public DBCursor doInCollection(DBCollection collection) throws MongoException, DataAccessException { + @Override + public FindIterable doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { - if (fields == null || fields.toMap().isEmpty()) { - return collection.find(query); - } else { - return collection.find(query, fields); + FindIterable findIterable = collectionPreparer.prepare(collection).find(query, Document.class) + .projection(fields); + + if (collation != null) { + findIterable = findIterable.collation(collation); } + return findIterable; + } + } + + /** + * Optimized {@link CollectionCallback} that takes an already mapped query and a nullable + * {@link com.mongodb.client.model.Collation} to execute a count query limited to one element. + * + * @author Christoph Strobl + * @since 2.0 + */ + private class ExistsCallback implements CollectionCallback { + + private final CollectionPreparer collectionPreparer; + private final Document mappedQuery; + private final com.mongodb.client.model.Collation collation; + + ExistsCallback(CollectionPreparer collectionPreparer, Document mappedQuery, + com.mongodb.client.model.Collation collation) { + + this.collectionPreparer = collectionPreparer; + this.mappedQuery = mappedQuery; + this.collation = collation; + } + + @Override + public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + + return doCount(collectionPreparer, collection.getNamespace().getCollectionName(), mappedQuery, + new CountOptions().limit(1).collation(collation)) > 0; } } /** - * Simple {@link CollectionCallback} that takes a query {@link DBObject} plus an optional fields specification - * {@link DBObject} and executes that against the {@link DBCollection}. - * + * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link MongoCollection}. 
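The new `ExistsCallback` above reduces `exists(...)` to a count query capped at one document. From the caller's perspective (a sketch; `Person` is illustrative):

    boolean userExists(MongoOperations template, String email) {
        // Backed by ExistsCallback: a countDocuments(...) call with limit(1).
        return template.exists(Query.query(Criteria.where("email").is(email)), Person.class);
    }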
+ * * @author Thomas Risberg */ - private static class FindAndRemoveCallback implements CollectionCallback { + private static class FindAndRemoveCallback implements CollectionCallback { - private final DBObject query; - private final DBObject fields; - private final DBObject sort; + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Optional collation; + + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; - public FindAndRemoveCallback(DBObject query, DBObject fields, DBObject sort) { this.query = query; this.fields = fields; this.sort = sort; + this.collation = Optional.ofNullable(collation); } - public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException { - return collection.findAndModify(query, fields, sort, true, null, false, false); + @Override + public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + + FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions().sort(sort).projection(fields); + collation.map(Collation::toMongoCollation).ifPresent(opts::collation); + + return collectionPreparer.prepare(collection).findOneAndDelete(query, opts); } } - private static class FindAndModifyCallback implements CollectionCallback { + private static class FindAndModifyCallback implements CollectionCallback { - private final DBObject query; - private final DBObject fields; - private final DBObject sort; - private final DBObject update; + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Object update; + private final List arrayFilters; private final FindAndModifyOptions options; - public FindAndModifyCallback(DBObject query, DBObject fields, DBObject sort, DBObject update, - FindAndModifyOptions options) { + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.arrayFilters = arrayFilters; + this.options = options; + } + + @Override + public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + + FindOneAndUpdateOptions opts = new FindOneAndUpdateOptions(); + opts.sort(sort); + if (options.isUpsert()) { + opts.upsert(true); + } + opts.projection(fields); + if (options.isReturnNew()) { + opts.returnDocument(ReturnDocument.AFTER); + } + + options.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation); + + if (!arrayFilters.isEmpty()) { + opts.arrayFilters(arrayFilters); + } + + if (update instanceof Document document) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, document, opts); + } else if (update instanceof List) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, (List) update, opts); + } + + throw new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update)); + } + } + + /** + * {@link CollectionCallback} specific for find and remove operation. 
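The `update instanceof List` branch in `FindAndModifyCallback` is what lets aggregation-pipeline updates flow through `findOneAndUpdate`. A sketch using `AggregationUpdate` (field names illustrative):

    import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;

    // AggregationUpdate renders to a List<Document> pipeline, hitting the
    // findOneAndUpdate(query, List, opts) overload above.
    AggregationUpdate update = AggregationUpdate.update()
        .set("status").toValue("active");

    Person person = template.findAndModify(
        Query.query(Criteria.where("_id").is(id)), update,
        FindAndModifyOptions.options().returnNew(true), Person.class);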
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + private static class FindAndReplaceCallback implements CollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Document update; + private final @Nullable com.mongodb.client.model.Collation collation; + private final FindAndReplaceOptions options; + + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, @Nullable com.mongodb.client.model.Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; this.update = update; this.options = options; + this.collation = collation; } - public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException { - return collection.findAndModify(query, fields, sort, options.isRemove(), update, options.isReturnNew(), - options.isUpsert()); + @Override + public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + + FindOneAndReplaceOptions opts = new FindOneAndReplaceOptions(); + opts.sort(sort); + opts.collation(collation); + opts.projection(fields); + + if (options.isUpsert()) { + opts.upsert(true); + } + + if (options.isReturnNew()) { + opts.returnDocument(ReturnDocument.AFTER); + } + + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, opts); } } /** - * Simple internal callback to allow operations on a {@link DBObject}. - * + * Simple internal callback to allow operations on a {@link Document}. + * * @author Oliver Gierke * @author Thomas Darimont */ - static interface DbObjectCallback { + protected interface DocumentCallback { - T doWith(DBObject object); + T doWith(Document object); } /** - * Simple {@link DbObjectCallback} that will transform {@link DBObject} into the given target type using the given - * {@link MongoReader}. - * + * Simple {@link DocumentCallback} that will transform {@link Document} into the given target type using the given + * {@link EntityReader}. 
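Since the reworked `ReadDocumentCallback` (below) routes every materialized entity through `maybeCallAfterConvert`, an `AfterConvertCallback` bean can post-process reads. A sketch of such a hypothetical bean:

    import org.bson.Document;
    import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;

    // Invoked for every Person materialized by ReadDocumentCallback.
    class PersonReadAuditor implements AfterConvertCallback<Person> {

        @Override
        public Person onAfterConvert(Person entity, Document document, String collection) {
            // Inspect or replace the converted entity before it reaches the caller.
            return entity;
        }
    }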
+ * * @author Oliver Gierke * @author Christoph Strobl + * @author Roman Puchkovskiy */ - private class ReadDbObjectCallback implements DbObjectCallback { + private class ReadDocumentCallback implements DocumentCallback { - private final EntityReader reader; + private final EntityReader reader; private final Class type; private final String collectionName; - public ReadDbObjectCallback(EntityReader reader, Class type, String collectionName) { + ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - Assert.notNull(reader); - Assert.notNull(type); this.reader = reader; this.type = type; this.collectionName = collectionName; } - public T doWith(DBObject object) { - if (null != object) { - maybeEmitEvent(new AfterLoadEvent(object, type, collectionName)); - } - T source = reader.read(type, object); - if (null != source) { - maybeEmitEvent(new AfterConvertEvent(object, source, collectionName)); + @Override + public T doWith(Document document) { + + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + T entity = reader.read(type, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return source; + + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); + + return entity; } } - class UnwrapAndReadDbObjectCallback extends ReadDbObjectCallback { + /** + * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the + * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. + * + * @param + * @param + * @since 2.0 + */ + private class ProjectingReadCallback implements DocumentCallback { + + private final MongoConverter mongoConverter; + private final EntityProjection projection; + private final String collectionName; + + ProjectingReadCallback(MongoConverter mongoConverter, EntityProjection projection, String collectionName) { - public UnwrapAndReadDbObjectCallback(EntityReader reader, Class type, - String collectionName) { - super(reader, type, collectionName); + this.mongoConverter = mongoConverter; + this.projection = projection; + this.collectionName = collectionName; } @Override - public T doWith(DBObject object) { - - Object idField = object.get(Fields.UNDERSCORE_ID); + @SuppressWarnings("unchecked") + public T doWith(Document document) { - if (!(idField instanceof DBObject)) { - return super.doWith(object); + if (document == null) { + return null; } - DBObject toMap = new BasicDBObject(); - DBObject nested = (DBObject) idField; - toMap.putAll(nested); + maybeEmitEvent(new AfterLoadEvent<>(document, projection.getMappedType().getType(), collectionName)); - for (String key : object.keySet()) { - if (!Fields.UNDERSCORE_ID.equals(key)) { - toMap.put(key, object.get(key)); - } + Object entity = mongoConverter.project(projection, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", mongoConverter)); } - return super.doWith(toMap); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return (T) maybeCallAfterConvert(entity, document, collectionName); } } - class QueryCursorPreparer implements CursorPreparer { + class QueryCursorPreparer implements SortingQueryCursorPreparer { private final Query query; - private final Class type; + private final Document sortObject; + private final int limit; + private final long 
skip; + private final @Nullable Class type; - public QueryCursorPreparer(Query query, Class type) { + QueryCursorPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + QueryCursorPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.DBCursor) - */ - public DBCursor prepare(DBCursor cursor) { + @Override + public FindIterable prepare(FindIterable iterable) { - if (query == null) { - return cursor; - } + FindIterable cursorToUse = iterable; - if (query.getSkip() <= 0 && query.getLimit() <= 0 && query.getSortObject() == null - && !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues()) { - return cursor; - } + operations.forType(type).getCollation(query) // + .map(Collation::toMongoCollation) // + .ifPresent(cursorToUse::collation); - DBCursor cursorToUse = cursor.copy(); + Meta meta = query.getMeta(); + HintFunction hintFunction = HintFunction.from(query.getHint()); + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues() + && query.getCollation().isEmpty()) { + return cursorToUse; + } try { - if (query.getSkip() > 0) { - cursorToUse = cursorToUse.skip(query.getSkip()); + if (skip > 0) { + cursorToUse = cursorToUse.skip((int) skip); } - if (query.getLimit() > 0) { - cursorToUse = cursorToUse.limit(query.getLimit()); + if (limit > 0) { + cursorToUse = cursorToUse.limit(limit); } - if (query.getSortObject() != null) { - DBObject sortDbo = type != null ? getMappedSortObject(query, type) : query.getSortObject(); - cursorToUse = cursorToUse.sort(sortDbo); + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? 
getMappedSortObject(sortObject, type) : sortObject; + cursorToUse = cursorToUse.sort(sort); } - if (StringUtils.hasText(query.getHint())) { - cursorToUse = cursorToUse.hint(query.getHint()); + + if (hintFunction.isPresent()) { + cursorToUse = hintFunction.apply(mongoDbFactory, cursorToUse::hintString, cursorToUse::hint); } - if (query.getMeta().hasValues()) { - for (Entry entry : query.getMeta().values()) { - cursorToUse = cursorToUse.addSpecial(entry.getKey(), entry.getValue()); + + if (meta.hasValues()) { + + if (meta.hasComment()) { + cursorToUse = cursorToUse.comment(meta.getRequiredComment()); + } + + if (meta.hasMaxTime()) { + cursorToUse = cursorToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.getCursorBatchSize() != null) { + cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize()); + } + + if (meta.getAllowDiskUse() != null) { + cursorToUse = cursorToUse.allowDiskUse(meta.getAllowDiskUse()); + } + + for (Meta.CursorOption option : meta.getFlags()) { + + switch (option) { + + case NO_TIMEOUT: + cursorToUse = cursorToUse.noCursorTimeout(true); + break; + case PARTIAL: + cursorToUse = cursorToUse.partial(true); + break; + case SECONDARY_READS: + break; + default: + throw new IllegalArgumentException(String.format("%s is no supported flag.", option)); + } } } @@ -2358,63 +3464,91 @@ public DBCursor prepare(DBCursor cursor) { return cursorToUse; } + + @Nullable + @Override + public Document getSortObject() { + return sortObject; + } } /** - * {@link DbObjectCallback} that assumes a {@link GeoResult} to be created, delegates actual content unmarshalling to + * {@link DocumentCallback} that assumes a {@link GeoResult} to be created, delegates actual content unmarshalling to * a delegate and creates a {@link GeoResult} from the result. - * + * * @author Oliver Gierke + * @author Christoph Strobl */ - static class GeoNearResultDbObjectCallback implements DbObjectCallback> { + static class GeoNearResultDocumentCallback implements DocumentCallback> { - private final DbObjectCallback delegate; + private final String distanceField; + private final DocumentCallback delegate; private final Metric metric; /** - * Creates a new {@link GeoNearResultDbObjectCallback} using the given {@link DbObjectCallback} delegate for + * Creates a new {@link GeoNearResultDocumentCallback} using the given {@link DocumentCallback} delegate for * {@link GeoResult} content unmarshalling. - * + * + * @param distanceField the field to read the distance from. * @param delegate must not be {@literal null}. + * @param metric the {@link Metric} to apply to the result distance. 
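The `Meta` handling in `QueryCursorPreparer` above maps query meta values onto driver cursor options. Caller-side, these come straight from the `Query` (a sketch; values illustrative):

    // Meta values set on the Query surface as cursor options in QueryCursorPreparer.
    Query query = Query.query(Criteria.where("state").is("active"))
        .comment("dashboard-load")   // FindIterable#comment
        .maxTimeMsec(5_000)          // FindIterable#maxTime
        .cursorBatchSize(128)        // FindIterable#batchSize
        .allowDiskUse(true);         // FindIterable#allowDiskUse

    List<Person> people = template.find(query, Person.class);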
*/ - public GeoNearResultDbObjectCallback(DbObjectCallback delegate, Metric metric) { - Assert.notNull(delegate); + GeoNearResultDocumentCallback(String distanceField, DocumentCallback delegate, Metric metric) { + + Assert.notNull(delegate, "DocumentCallback must not be null"); + + this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } - public GeoResult doWith(DBObject object) { + @Override + public GeoResult doWith(Document object) { - double distance = ((Double) object.get("dis")).doubleValue(); - DBObject content = (DBObject) object.get("obj"); + double distance = Double.NaN; + if (object.containsKey(distanceField)) { + distance = NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class); + } - T doWith = delegate.doWith(content); + T doWith = delegate.doWith(object); - return new GeoResult(doWith, new Distance(distance, metric)); + return new GeoResult<>(doWith, new Distance(distance, metric)); } } /** - * A {@link CloseableIterator} that is backed by a MongoDB {@link Cursor}. - * - * @since 1.7 + * @return the {@link MongoDatabaseFactory} in use. + * @since 3.1.4 + */ + public MongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDbFactory; + } + + /** + * A {@link CloseableIterator} that is backed by a MongoDB {@link MongoCollection}. + * * @author Thomas Darimont + * @since 1.7 */ static class CloseableIterableCursorAdapter implements CloseableIterator { - private volatile Cursor cursor; + private volatile @Nullable MongoCursor cursor; private PersistenceExceptionTranslator exceptionTranslator; - private DbObjectCallback objectReadCallback; + private DocumentCallback objectReadCallback; /** - * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link Cursor}. - * - * @param cursor - * @param exceptionTranslator - * @param objectReadCallback + * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}. 
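`GeoNearResultDocumentCallback` now reads the computed distance from a configurable field instead of the fixed `dis` key. The caller-facing flow is unchanged; a sketch with illustrative coordinates:

    import org.springframework.data.geo.Distance;
    import org.springframework.data.geo.GeoResults;
    import org.springframework.data.geo.Metrics;
    import org.springframework.data.geo.Point;
    import org.springframework.data.mongodb.core.query.NearQuery;

    NearQuery nearQuery = NearQuery.near(new Point(-73.99171, 40.738868), Metrics.MILES)
        .maxDistance(new Distance(2, Metrics.MILES));

    // Each GeoResult carries the distance parsed by GeoNearResultDocumentCallback.
    GeoResults<Person> nearby = template.geoNear(nearQuery, Person.class);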
*/ - public CloseableIterableCursorAdapter(Cursor cursor, PersistenceExceptionTranslator exceptionTranslator, - DbObjectCallback objectReadCallback) { + CloseableIterableCursorAdapter(MongoIterable cursor, PersistenceExceptionTranslator exceptionTranslator, + DocumentCallback objectReadCallback) { + + this.cursor = cursor.iterator(); + this.exceptionTranslator = exceptionTranslator; + this.objectReadCallback = objectReadCallback; + } + + CloseableIterableCursorAdapter(MongoCursor cursor, PersistenceExceptionTranslator exceptionTranslator, + DocumentCallback objectReadCallback) { this.cursor = cursor; this.exceptionTranslator = exceptionTranslator; @@ -2424,6 +3558,8 @@ public CloseableIterableCursorAdapter(Cursor cursor, PersistenceExceptionTransla @Override public boolean hasNext() { + MongoCursor cursor = this.cursor; + if (cursor == null) { return false; } @@ -2435,6 +3571,7 @@ public boolean hasNext() { } } + @Nullable @Override public T next() { @@ -2443,9 +3580,8 @@ public T next() { } try { - DBObject item = cursor.next(); - T converted = objectReadCallback.doWith(item); - return converted; + Document item = cursor.next(); + return objectReadCallback.doWith(item); } catch (RuntimeException ex) { throw potentiallyConvertRuntimeException(ex, exceptionTranslator); } @@ -2454,9 +3590,13 @@ public T next() { @Override public void close() { - Cursor c = cursor; + MongoCursor c = cursor; + try { - c.close(); + + if (c != null) { + c.close(); + } } catch (RuntimeException ex) { throw potentiallyConvertRuntimeException(ex, exceptionTranslator); } finally { @@ -2466,4 +3606,78 @@ public void close() { } } } + + /** + * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the + * server through the driver API.
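`SessionBoundMongoTemplate` (below) is what `withSession(...)` hands back; every operation inside the callback then runs over the bound `ClientSession`. A usage sketch:

    import com.mongodb.ClientSessionOptions;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;

    ClientSessionOptions options = ClientSessionOptions.builder()
        .causallyConsistent(true)
        .build();

    // Both operations execute on the same ClientSession-bound template.
    long count = template.withSession(options).execute(operations -> {
        operations.findOne(Query.query(Criteria.where("lastname").is("Matthews")), Person.class);
        return operations.count(new Query(), Person.class);
    });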
            + * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired + * target method matching the actual arguments plus a {@link ClientSession}. + * + * @author Christoph Strobl + * @since 2.1 + */ + static class SessionBoundMongoTemplate extends MongoTemplate { + + private final MongoTemplate delegate; + private final ClientSession session; + + /** + * @param session must not be {@literal null}. + * @param that must not be {@literal null}. + */ + SessionBoundMongoTemplate(ClientSession session, MongoTemplate that) { + + super(that.getMongoDatabaseFactory().withSession(session), that); + + this.delegate = that; + this.session = session; + } + + @Override + public MongoCollection getCollection(String collectionName) { + + // native MongoDB objects that offer methods with ClientSession must not be proxied. + return delegate.getCollection(collectionName); + } + + @Override + public MongoDatabase getDb() { + + // native MongoDB objects that offer methods with ClientSession must not be proxied. + return delegate.getDb(); + } + + @Override + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + return false; + } + } + + @FunctionalInterface + interface CountExecution { + long countDocuments(CollectionPreparer collectionPreparer, String collection, Document filter, + CountOptions options); + } + + private static class ReplaceCallback implements CollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document update; + private final com.mongodb.client.model.ReplaceOptions options; + + ReplaceCallback(CollectionPreparer> collectionPreparer, Document query, Document update, + com.mongodb.client.model.ReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.update = update; + this.options = options; + } + + @Override + public UpdateResult doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + return collectionPreparer.prepare(collection).replaceOne(query, update, options); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java new file mode 100644 index 0000000000..583b243aa8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java @@ -0,0 +1,80 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Predicates; + +/** + * Common operations performed on properties of an entity like extracting fields information for projection creation. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class PropertyOperations { + + private final MappingContext, MongoPersistentProperty> mappingContext; + + PropertyOperations(MappingContext, MongoPersistentProperty> mappingContext) { + this.mappingContext = mappingContext; + } + + /** + * For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for + * creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a + * {@literal closed interface projection}. + * + * @param projection must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return {@link Document} with fields to be included. + */ + Document computeMappedFieldsForProjection(EntityProjection projection, + Document fields) { + + if (!projection.isClosedProjection()) { + return fields; + } + + Document projectedFields = new Document(); + + if (projection.getMappedType().getType().isInterface()) { + projection.forEach(it -> { + projectedFields.put(it.getPropertyPath().getSegment(), 1); + }); + } else { + + // DTO projections use merged metadata between domain type and result type + PersistentPropertyTranslator translator = PersistentPropertyTranslator.create( + mappingContext.getRequiredPersistentEntity(projection.getDomainType()), + Predicates.negate(MongoPersistentProperty::hasExplicitFieldName)); + + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(projection.getMappedType()); + for (MongoPersistentProperty property : persistentEntity) { + projectedFields.put(translator.translate(property).getFieldName(), 1); + } + } + + return projectedFields; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java new file mode 100644 index 0000000000..28ca85fbd7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java @@ -0,0 +1,1045 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.ShardKey; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateOptions; + +/** + * {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed. + * This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options + * for {@literal count}, {@literal remove}, and other methods. 
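To make the division of labor concrete, a sketch of how template code is expected to consume these contexts (names mirror the methods declared below):

    // Map the query once, then reuse the mapped pieces for the driver call.
    QueryContext queryContext = queryOperations.createQueryContext(query);

    Document mappedQuery = queryContext.getMappedQuery(entity);
    Document mappedFields = queryContext.getMappedFields(entity, projection);
    Document mappedSort = queryContext.getMappedSort(entity);

    // Collation falls back to the domain type's default collation if the Query has none.
    queryContext.applyCollation(entityType, findIterable::collation);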
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Florian Lüdiger + * @since 3.0 + */ +class QueryOperations { + + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + private final EntityOperations entityOperations; + private final PropertyOperations propertyOperations; + private final CodecRegistryProvider codecRegistryProvider; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final AggregationUtil aggregationUtil; + private final Map, Document> mappedShardKey = new ConcurrentHashMap<>(1); + + /** + * Create a new instance of {@link QueryOperations}. + * + * @param queryMapper must not be {@literal null}. + * @param updateMapper must not be {@literal null}. + * @param entityOperations must not be {@literal null}. + * @param propertyOperations must not be {@literal null}. + * @param codecRegistryProvider must not be {@literal null}. + */ + QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations, + PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) { + + this.queryMapper = queryMapper; + this.updateMapper = updateMapper; + this.entityOperations = entityOperations; + this.propertyOperations = propertyOperations; + this.codecRegistryProvider = codecRegistryProvider; + this.mappingContext = queryMapper.getMappingContext(); + this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext); + } + + InsertContext createInsertContext(Document source) { + return createInsertContext(MappedDocument.of(source)); + } + + InsertContext createInsertContext(MappedDocument mappedDocument) { + return new InsertContext(mappedDocument); + } + + /** + * Create a new {@link QueryContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + QueryContext createQueryContext(Query query) { + return new QueryContext(query); + } + + /** + * Create a new {@link DistinctQueryContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link DistinctQueryContext}. + */ + DistinctQueryContext distinctQueryContext(Query query, String fieldName) { + return new DistinctQueryContext(query, fieldName); + } + + /** + * Create a new {@link CountContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link CountContext}. + */ + CountContext countQueryContext(Query query) { + return new CountContext(query); + } + + /** + * Create a new {@link UpdateContext} instance affecting multiple documents. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) { + return new UpdateContext(updateDefinition, query, true, upsert); + } + + /** + * Create a new {@link UpdateContext} instance affecting a single document. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. 
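As a caller-side reference for the multi/single split encoded in these factory methods (a sketch; `Person` is illustrative):

    import com.mongodb.client.result.UpdateResult;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;
    import org.springframework.data.mongodb.core.query.Update;

    // updateMulti(...) goes through updateContext(...); updateFirst(...) and
    // upsert(...) go through updateSingleContext(...).
    UpdateResult result = template.updateMulti(
        Query.query(Criteria.where("active").is(false)),
        new Update().set("active", true),
        Person.class);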
+ */ + UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) { + return new UpdateContext(updateDefinition, query, false, upsert); + } + + /** + * Create a new {@link UpdateContext} instance affecting a single document. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) { + return new UpdateContext(updateDefinition, query, false, upsert); + } + + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) { + return new UpdateContext(replacement, upsert); + } + + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(Query query, MappedDocument replacement, boolean upsert) { + return new UpdateContext(query, replacement, upsert); + } + + /** + * Create a new {@link DeleteContext} instance removing all matching documents. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + DeleteContext deleteQueryContext(Query query) { + return new DeleteContext(query, true); + } + + /** + * Create a new {@link DeleteContext} instance only the first matching document. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + DeleteContext deleteSingleContext(Query query) { + return new DeleteContext(query, false); + } + + /** + * Create a new {@link AggregationDefinition} for the given {@link Aggregation}. + * + * @param aggregation must not be {@literal null}. + * @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}. + * @return new instance of {@link AggregationDefinition}. + * @since 3.2 + */ + AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class inputType) { + return new AggregationDefinition(aggregation, inputType); + } + + /** + * Create a new {@link AggregationDefinition} for the given {@link Aggregation}. + * + * @param aggregation must not be {@literal null}. + * @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}. + * @return new instance of {@link AggregationDefinition}. + * @since 3.2 + */ + AggregationDefinition createAggregation(Aggregation aggregation, + @Nullable AggregationOperationContext aggregationOperationContext) { + return new AggregationDefinition(aggregation, aggregationOperationContext); + } + + /** + * {@link InsertContext} encapsulates common tasks required to interact with {@link Document} to be inserted. + * + * @since 3.4.3 + */ + class InsertContext { + + private final MappedDocument source; + + private InsertContext(MappedDocument source) { + this.source = source; + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. 
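The id preparation performed by `prepareId` (below) matters for explicitly typed ids. A hypothetical document type that triggers the conversion path:

    import org.springframework.data.mongodb.core.mapping.FieldType;
    import org.springframework.data.mongodb.core.mapping.MongoId;

    class Invoice {

        // Explicit String write target: prepareId generates a new ObjectId and
        // stores its hex form, since String is not ObjectId-assignable.
        @MongoId(FieldType.STRING) String id;
    }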
+ * + * @param type must not be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. + * @see #prepareId(MongoPersistentEntity) + */ + MappedDocument prepareId(Class type) { + return prepareId(mappingContext.getPersistentEntity(type)); + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param entity can be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. + */ + MappedDocument prepareId(@Nullable MongoPersistentEntity entity) { + + if (entity == null || source.hasId()) { + return source; + } + + MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty != null + && (idProperty.hasExplicitWriteTarget() || idProperty.isAnnotationPresent(MongoId.class))) { + if (!ClassUtils.isAssignable(ObjectId.class, idProperty.getFieldType())) { + source.updateId(queryMapper.convertId(new ObjectId(), idProperty.getFieldType())); + } + } + return source; + } + } + + /** + * {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document + * representation, mapping field names, as well as determining and applying {@link Collation collations}. + * + * @author Christoph Strobl + */ + class QueryContext { + + private final Query query; + + /** + * Create new a {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a + * plain {@link Document}. + * + * @param query can be {@literal null}. + */ + private QueryContext(@Nullable Query query) { + this.query = query != null ? query : new Query(); + } + + /** + * @return never {@literal null}. + */ + Query getQuery() { + return query; + } + + /** + * Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}. + * + * @return + */ + Document getQueryObject() { + return query.getQueryObject(); + } + + /** + * Get the already mapped MongoDB query representation. + * + * @param domainType can be {@literal null}. + * @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the + * given{@literal domainType} + * @param + * @return never {@literal null}. + */ + Document getMappedQuery(@Nullable Class domainType, + Function, MongoPersistentEntity> entityLookup) { + return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType)); + } + + /** + * Get the already mapped MongoDB query representation. + * + * @param entity the Entity to map field names to. Can be {@literal null}. + * @param + * @return never {@literal null}. 
+ */
+ Document getMappedQuery(@Nullable MongoPersistentEntity<?> entity) {
+ return queryMapper.getMappedObject(getQueryObject(), entity);
+ }
+
+ Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, EntityProjection<?, ?> projection) {
+
+ Document fields = evaluateFields(entity);
+
+ if (entity == null) {
+ return fields;
+ }
+
+ Document mappedFields;
+ if (!fields.isEmpty()) {
+ mappedFields = queryMapper.getMappedFields(fields, entity);
+ } else {
+ mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields);
+ mappedFields = queryMapper.addMetaAttributes(mappedFields, entity);
+ }
+
+ if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName())
+ && !query.getQueryObject().containsKey("$text")) {
+ mappedFields.remove(entity.getTextScoreProperty().getFieldName());
+ }
+
+ if (mappedFields.isEmpty()) {
+ return BsonUtils.EMPTY_DOCUMENT;
+ }
+
+ return mappedFields;
+ }
+
+ private Document evaluateFields(@Nullable MongoPersistentEntity<?> entity) {
+
+ Document fields = query.getFieldsObject();
+
+ if (fields.isEmpty()) {
+ return BsonUtils.EMPTY_DOCUMENT;
+ }
+
+ Document evaluated = new Document();
+
+ for (Entry<String, Object> entry : fields.entrySet()) {
+
+ if (entry.getValue() instanceof MongoExpression mongoExpression) {
+
+ AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
+ : new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
+
+ evaluated.put(entry.getKey(), AggregationExpression.from(mongoExpression).toDocument(ctx));
+ } else {
+ evaluated.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ return evaluated;
+ }
+
+ /**
+ * Get the already mapped {@link Query#getSortObject() sort} option.
+ *
+ * @param entity the entity to map field names to. Can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
+ return queryMapper.getMappedSort(query.getSortObject(), entity);
+ }
+
+ /**
+ * Apply the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present, or fall back
+ * to the domain type's default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
+ * collation}.
+ *
+ * @param domainType can be {@literal null}.
+ * @param consumer must not be {@literal null}.
+ */
+ void applyCollation(@Nullable Class<?> domainType, Consumer<com.mongodb.client.model.Collation> consumer) {
+ getCollation(domainType).ifPresent(consumer);
+ }
+
+ /**
+ * Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present, or fall back to
+ * the domain type's default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
+ * collation}.
+ *
+ * @param domainType can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ Optional<com.mongodb.client.model.Collation> getCollation(@Nullable Class<?> domainType) {
+
+ return entityOperations.forType(domainType).getCollation(query) //
+ .map(Collation::toMongoCollation);
+ }
+
+ /**
+ * Get the {@link HintFunction} reading the actual hint from the {@link Query}.
+ *
+ * @return new instance of {@link HintFunction}.
+ * @since 4.2
+ */
+ HintFunction getHintFunction() {
+ return HintFunction.from(query.getHint());
+ }
+
+ /**
+ * Read and apply the hint from the {@link Query}.
+ *
+ * @since 4.2
+ */
+ <R> void applyHint(Function<String, R> stringConsumer, Function<Bson, R> bsonConsumer) {
+ getHintFunction().ifPresent(codecRegistryProvider, stringConsumer, bsonConsumer);
+ }
+ }
+
+ /**
+ * A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries.
+ *
+ * @author Christoph Strobl
+ */
+ class DistinctQueryContext extends QueryContext {
+
+ private final String fieldName;
+
+ /**
+ * Create a new {@link DistinctQueryContext} instance.
+ *
+ * @param query can be {@literal null}.
+ * @param fieldName must not be {@literal null}.
+ */
+ private DistinctQueryContext(@Nullable Object query, String fieldName) {
+
+ super(query instanceof Document document ? new BasicQuery(document) : (Query) query);
+ this.fieldName = fieldName;
+ }
+
+ @Override
+ Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, EntityProjection<?, ?> projection) {
+ return getMappedFields(entity);
+ }
+
+ Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
+ return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
+ }
+
+ /**
+ * Get the mapped field name to project to.
+ *
+ * @param entity can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ String getMappedFieldName(@Nullable MongoPersistentEntity<?> entity) {
+ return getMappedFields(entity).keySet().iterator().next();
+ }
+
+ /**
+ * Get the MongoDB native representation of the given {@literal type}.
+ *
+ * @param type must not be {@literal null}.
+ * @return never {@literal null}.
+ */
+ @SuppressWarnings("unchecked")
+ <T> Class<T> getDriverCompatibleClass(Class<T> type) {
+
+ return codecRegistryProvider.getCodecFor(type) //
+ .map(Codec::getEncoderClass) //
+ .orElse((Class<T>) BsonValue.class);
+ }
+
+ /**
+ * Get the most specific read target type based on the user's {@literal requestedTargetType} and the property type
+ * derived from meta information extracted from the {@literal domainType}.
+ *
+ * @param requestedTargetType must not be {@literal null}.
+ * @param domainType must not be {@literal null}.
+ * @return never {@literal null}.
+ */
+ Class<?> getMostSpecificConversionTargetType(Class<?> requestedTargetType, Class<?> domainType) {
+
+ Class<?> conversionTargetType = requestedTargetType;
+ try {
+
+ Class<?> propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType();
+
+ // use the more specific type but favor UserType over property one
+ if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
+ conversionTargetType = propertyType;
+ }
+ } catch (PropertyReferenceException e) {
+ // just don't care about it as we default to Object.class anyway.
+ }
+
+ return conversionTargetType;
+ }
+ }
+
+ /**
+ * A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries.
+ *
+ * @author Christoph Strobl
+ */
+ class CountContext extends QueryContext {
+
+ /**
+ * Creates a new {@link CountContext} instance.
+ *
+ * @param query can be {@literal null}.
+ */
+ CountContext(@Nullable Query query) {
+ super(query);
+ }
+
+ /**
+ * Get the {@link CountOptions} applicable for the {@link Query}.
+ *
+ * @param domainType can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ CountOptions getCountOptions(@Nullable Class<?> domainType) {
+ return getCountOptions(domainType, null);
+ }
+
+ /**
+ * Get the {@link CountOptions} applicable for the {@link Query}.
+ *
+ * @param domainType can be {@literal null}.
+ * @param callback a callback to modify the generated options. Can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ CountOptions getCountOptions(@Nullable Class<?> domainType, @Nullable Consumer<CountOptions> callback) {
+
+ CountOptions options = new CountOptions();
+ Query query = getQuery();
+
+ applyCollation(domainType, options::collation);
+
+ if (query.getLimit() > 0) {
+ options.limit(query.getLimit());
+ }
+
+ if (query.getSkip() > 0) {
+ options.skip((int) query.getSkip());
+ }
+
+ Meta meta = query.getMeta();
+ if (meta.hasValues()) {
+
+ if (meta.hasMaxTime()) {
+ options.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS);
+ }
+
+ if (meta.hasComment()) {
+ options.comment(meta.getComment());
+ }
+ }
+
+ HintFunction hintFunction = HintFunction.from(query.getHint());
+
+ if (hintFunction.isPresent()) {
+ options = hintFunction.apply(codecRegistryProvider, options::hintString, options::hint);
+ }
+
+ if (callback != null) {
+ callback.accept(options);
+ }
+
+ return options;
+ }
+ }
+
+ /**
+ * A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries.
+ *
+ * @author Christoph Strobl
+ */
+ class DeleteContext extends QueryContext {
+
+ private final boolean multi;
+
+ /**
+ * Create a new {@link DeleteContext} instance.
+ *
+ * @param query can be {@literal null}.
+ * @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one.
+ */
+ DeleteContext(@Nullable Query query, boolean multi) {
+
+ super(query);
+ this.multi = multi;
+ }
+
+ /**
+ * Get the {@link DeleteOptions} applicable for the {@link Query}.
+ *
+ * @param domainType can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ DeleteOptions getDeleteOptions(@Nullable Class<?> domainType) {
+ return getDeleteOptions(domainType, null);
+ }
+
+ /**
+ * Get the {@link DeleteOptions} applicable for the {@link Query}.
+ *
+ * @param domainType can be {@literal null}.
+ * @param callback a callback to modify the generated options. Can be {@literal null}.
+ * @return never {@literal null}.
+ */
+ DeleteOptions getDeleteOptions(@Nullable Class<?> domainType, @Nullable Consumer<DeleteOptions> callback) {
+
+ DeleteOptions options = new DeleteOptions();
+ applyCollation(domainType, options::collation);
+
+ if (callback != null) {
+ callback.accept(options);
+ }
+
+ return options;
+ }
+
+ /**
+ * @return {@literal true} if all matching documents shall be deleted.
+ */
+ boolean isMulti() {
+ return multi;
+ }
+ }
+
+ /**
+ * A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}.
+ */
+ class UpdateContext extends QueryContext {
+
+ private final boolean multi;
+ private final boolean upsert;
+ private final @Nullable UpdateDefinition update;
+ private final @Nullable MappedDocument mappedDocument;
+
+ /**
+ * Create a new {@link UpdateContext} instance.
+ *
+ * @param update must not be {@literal null}.
+ * @param query must not be {@literal null}.
+ * @param multi use {@literal true} to update all matching documents.
+ * @param upsert use {@literal true} to insert a new document if none match.
+ */
+ UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) {
+ this(update, new BasicQuery(query), multi, upsert);
+ }
+
+ /**
+ * Create a new {@link UpdateContext} instance.
+ *
+ * @param update must not be {@literal null}.
+ * @param query can be {@literal null}.
+ * @param multi use {@literal true} to update all matching documents.
+ * @param upsert use {@literal true} to insert a new document if none match.
+ */ + UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) { + + super(query); + + this.multi = multi; + this.upsert = upsert; + this.update = update; + this.mappedDocument = null; + } + + UpdateContext(MappedDocument update, boolean upsert) { + this(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())), update, upsert); + } + + UpdateContext(Query query, MappedDocument update, boolean upsert) { + + super(query); + this.multi = false; + this.upsert = upsert; + this.mappedDocument = update; + this.update = null; + } + + /** + * Get the {@link UpdateOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @return never {@literal null}. + */ + UpdateOptions getUpdateOptions(@Nullable Class domainType) { + return getUpdateOptions(domainType, null); + } + + /** + * Get the {@link UpdateOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param query can be {@literal null} + * @return never {@literal null}. + */ + UpdateOptions getUpdateOptions(@Nullable Class domainType, @Nullable Query query) { + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update != null && update.hasArrayFilters()) { + options + .arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList())); + } + + if (query != null && query.isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } + + HintFunction.from(getQuery().getHint()).ifPresent(codecRegistryProvider, options::hintString, options::hint); + applyCollation(domainType, options::collation); + + return options; + } + + /** + * Get the {@link ReplaceOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + ReplaceOptions getReplaceOptions(@Nullable Class domainType) { + return getReplaceOptions(domainType, null); + } + + /** + * Get the {@link ReplaceOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. + * @return + */ + ReplaceOptions getReplaceOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + UpdateOptions updateOptions = getUpdateOptions(domainType); + + ReplaceOptions options = new ReplaceOptions(); + options.collation(updateOptions.getCollation()); + options.upsert(updateOptions.isUpsert()); + applyHint(options::hintString, options::hint); + if (!isMulti() && getQuery().isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } + + if (callback != null) { + callback.accept(options); + } + + return options; + } + + @Override + Document getMappedQuery(@Nullable MongoPersistentEntity domainType) { + return applyIsolation(super.getMappedQuery(domainType)); + } + + /** + * A replacement query that is derived from the already {@link MappedDocument}. 
+ * + * @return + */ + Document getReplacementQuery() { + return applyIsolation(getQueryObject()); + } + + private Document applyIsolation(Document mappedQuery) { + if (multi && update != null && update.isIsolated() && !mappedQuery.containsKey("$isolated")) { + mappedQuery = new Document(mappedQuery); + mappedQuery.put("$isolated", 1); + } + return mappedQuery; + } + + Document applyShardKey(MongoPersistentEntity domainType, Document filter, @Nullable Document existing) { + + Document shardKeySource = existing != null ? existing + : mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType); + + Document filterWithShardKey = new Document(filter); + getMappedShardKeyFields(domainType) + .forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue((Bson) shardKeySource, key))); + + return filterWithShardKey; + } + + boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity domainType) { + + return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType) + && !filter.keySet().containsAll(getMappedShardKeyFields(domainType)); + } + + /** + * @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entities + * {@literal id} property. + * @since 3.0 + */ + private boolean shardedById(MongoPersistentEntity domainType) { + + ShardKey shardKey = domainType.getShardKey(); + if (shardKey.size() != 1) { + return false; + } + + String key = shardKey.getPropertyNames().iterator().next(); + if (FieldName.ID.name().equals(key)) { + return true; + } + + MongoPersistentProperty idProperty = domainType.getIdProperty(); + return idProperty != null && idProperty.getName().equals(key); + } + + Set getMappedShardKeyFields(MongoPersistentEntity entity) { + return getMappedShardKey(entity).keySet(); + } + + Document getMappedShardKey(MongoPersistentEntity entity) { + return mappedShardKey.computeIfAbsent(entity.getType(), + key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity)); + } + + /** + * Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + List getUpdatePipeline(@Nullable Class domainType) { + + Class type = domainType != null ? domainType : Object.class; + + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, + queryMapper); + return aggregationUtil.createPipeline((AggregationUpdate) update, context); + } + + /** + * Get the already mapped update {@link Document}. + * + * @param entity + * @return + */ + Document getMappedUpdate(@Nullable MongoPersistentEntity entity) { + + if (update != null) { + return update instanceof MappedUpdate ? update.getUpdateObject() + : updateMapper.getMappedObject(update.getUpdateObject(), entity); + } + return mappedDocument.getDocument(); + } + + /** + * Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not + * already done in the actual {@link UpdateDefinition} + * + * @param persistentEntity can be {@literal null}. 
+ */
+ void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity) {
+
+ if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
+
+ String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
+ if (update != null && !update.modifies(versionFieldName)) {
+ update.inc(versionFieldName);
+ }
+ }
+ }
+
+ /**
+ * @return {@literal true} if the update holds an aggregation pipeline.
+ */
+ boolean isAggregationUpdate() {
+ return update instanceof AggregationUpdate;
+ }
+
+ /**
+ * @return {@literal true} if all matching documents should be updated.
+ */
+ boolean isMulti() {
+ return multi;
+ }
+ }
+
+ /**
+ * A value object that encapsulates common tasks required when running {@literal aggregations}.
+ *
+ * @since 3.2
+ */
+ class AggregationDefinition {
+
+ private final Aggregation aggregation;
+ private final Lazy<AggregationOperationContext> aggregationOperationContext;
+ private final Lazy<List<Document>> pipeline;
+ private final @Nullable Class<?> inputType;
+
+ /**
+ * Creates a new instance of {@link AggregationDefinition} extracting the input type from either the
+ * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
+ * the given {@literal aggregationOperationContext} if present.
            + * Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and + * the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
+ * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
+ *
+ * @param aggregation the source aggregation.
+ * @param aggregationOperationContext can be {@literal null}.
+ */
+ AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) {
+
+ this.aggregation = aggregation;
+
+ if (aggregation instanceof TypedAggregation<?> typedAggregation) {
+ this.inputType = typedAggregation.getInputType();
+ } else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext typeBasedAggregationOperationContext) {
+ this.inputType = typeBasedAggregationOperationContext.getType();
+ } else {
+ this.inputType = null;
+ }
+
+ this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext
+ : aggregationUtil.createAggregationContext(aggregation, getInputType()));
+ this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
+ }
+
+ /**
+ * Creates a new instance of {@link AggregationDefinition} extracting the input type from either the
+ * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
+ * the given {@literal inputType} if present.
            + * Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired + * {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
            + * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse. + * + * @param aggregation the source aggregation. + * @param inputType can be {@literal null}. + */ + AggregationDefinition(Aggregation aggregation, @Nullable Class inputType) { + + this.aggregation = aggregation; + + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); + } else { + this.inputType = inputType; + } + + this.aggregationOperationContext = Lazy + .of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType())); + this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext())); + } + + /** + * Obtain the already mapped pipeline. + * + * @return never {@literal null}. + */ + List getAggregationPipeline() { + return pipeline.get(); + } + + /** + * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}. + * @see AggregationPipeline#isOutOrMerge() + */ + boolean isOutOrMerge() { + return aggregation.getPipeline().isOutOrMerge(); + } + + /** + * Obtain the {@link AggregationOperationContext} used for mapping the pipeline. + * + * @return never {@literal null}. + */ + AggregationOperationContext getAggregationOperationContext() { + return aggregationOperationContext.get(); + } + + /** + * @return the input type to map the pipeline against. Can be {@literal null}. + */ + @Nullable + Class getInputType() { + return inputType; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java new file mode 100644 index 0000000000..54129e6b5d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java @@ -0,0 +1,101 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; + +/** + * {@link ReactiveAggregationOperation} allows creation and execution of reactive MongoDB aggregation operations in a + * fluent API style.
            + * The starting {@literal domainType} is used for mapping the {@link Aggregation} provided via {@code by} into the + * MongoDB specific representation, as well as mapping back the resulting {@link org.bson.Document}. An alternative + * input type for mapping the {@link Aggregation} can be provided by using + * {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation}. + * + *
+ * <pre class="code">
            + *         aggregateAndReturn(Jedi.class)
            + *             .by(newAggregation(Human.class, project("These are not the droids you are looking for")))
            + *             .all();
+ * </pre>
            + * 
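To make the snippet above concrete, here is a hedged, self-contained usage sketch. The `Human`/`Jedi` types and the `lightsaber` field are made-up stand-ins, and the entry point is assumed to be a configured `ReactiveMongoTemplate` (which exposes this fluent API via `ReactiveFluentMongoOperations`):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Flux;

// Sketch only: Human and Jedi mirror the made-up types from the Javadoc example.
class AggregationUsageSketch {

	record Human(String name) {}
	record Jedi(String name) {}

	Flux<Jedi> findJedi(ReactiveMongoTemplate template) {
		// The pipeline is mapped against Human while results are read back as Jedi.
		return template.aggregateAndReturn(Jedi.class)
				.inCollection("star-wars") // optional: overrides the collection derived from the type
				.by(newAggregation(Human.class, match(where("lightsaber").exists(true))))
				.all();
	}
}
```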
            + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public interface ReactiveAggregationOperation { + + /** + * Start creating an aggregation operation that returns results mapped to the given domain type.
+ * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different
+ * input type for the aggregation.
+ *
+ * @param domainType must not be {@literal null}.
+ * @return new instance of {@link ReactiveAggregation}. Never {@literal null}.
+ * @throws IllegalArgumentException if domainType is {@literal null}.
+ */
+ <T> ReactiveAggregation<T> aggregateAndReturn(Class<T> domainType);
+
+ /**
+ * Collection override (optional).
+ */
+ interface AggregationOperationWithCollection<T> {
+
+ /**
+ * Explicitly set the name of the collection to perform the query on.
            + * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link AggregationOperationWithAggregation}. Never {@literal null}. + * @throws IllegalArgumentException if collection is {@literal null} or empty. + */ + AggregationOperationWithAggregation inCollection(String collection); + } + + /** + * Trigger execution by calling one of the terminating methods. + */ + interface TerminatingAggregationOperation { + + /** + * Apply pipeline operations as specified and stream all matching elements.
+ *
+ * @return a {@link Flux} streaming all matching elements. Never {@literal null}.
+ */
+ Flux<T> all();
+ }
+
+ /**
+ * Define the aggregation with pipeline stages.
+ */
+ interface AggregationOperationWithAggregation<T> {
+
+ /**
+ * Set the aggregation to be used.
+ *
+ * @param aggregation must not be {@literal null}.
+ * @return new instance of {@link TerminatingAggregationOperation}. Never {@literal null}.
+ * @throws IllegalArgumentException if aggregation is {@literal null}.
+ */
+ TerminatingAggregationOperation<T> by(Aggregation aggregation);
+ }
+
+ interface ReactiveAggregation<T>
+ extends AggregationOperationWithCollection<T>, AggregationOperationWithAggregation<T> {}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java
new file mode 100644
index 0000000000..954fd61716
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import reactor.core.publisher.Flux;
+
+import org.springframework.data.mongodb.core.aggregation.Aggregation;
+import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Implementation of {@link ReactiveAggregationOperation} operating directly on {@link ReactiveMongoTemplate}.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+class ReactiveAggregationOperationSupport implements ReactiveAggregationOperation {
+
+ private final ReactiveMongoTemplate template;
+
+ /**
+ * Create a new instance of {@link ReactiveAggregationOperationSupport}.
+ *
+ * @param template must not be {@literal null}.
+ * @throws IllegalArgumentException if template is {@literal null}.
+ */ + ReactiveAggregationOperationSupport(ReactiveMongoTemplate template) { + + Assert.notNull(template, "Template must not be null"); + + this.template = template; + } + + @Override + public ReactiveAggregation aggregateAndReturn(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveAggregationSupport<>(template, domainType, null, null); + } + + static class ReactiveAggregationSupport + implements AggregationOperationWithAggregation, ReactiveAggregation, TerminatingAggregationOperation { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Aggregation aggregation; + private final String collection; + + ReactiveAggregationSupport(ReactiveMongoTemplate template, Class domainType, Aggregation aggregation, + String collection) { + + this.template = template; + this.domainType = domainType; + this.aggregation = aggregation; + this.collection = collection; + } + + @Override + public AggregationOperationWithAggregation inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); + } + + @Override + public TerminatingAggregationOperation by(Aggregation aggregation) { + + Assert.notNull(aggregation, "Aggregation must not be null"); + + return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); + } + + @Override + public Flux all() { + return template.aggregate(aggregation, getCollectionName(aggregation), domainType); + } + + private String getCollectionName(Aggregation aggregation) { + + if (StringUtils.hasText(collection)) { + return collection; + } + + if (aggregation instanceof TypedAggregation typedAggregation) { + + if (typedAggregation.getInputType() != null) { + return template.getCollectionName(typedAggregation.getInputType()); + } + } + + return template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java new file mode 100644 index 0000000000..7f88b63f28 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java @@ -0,0 +1,144 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; + +import com.mongodb.bulk.BulkWriteResult; + +/** + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. 
This interface defines a fluent API to add multiple single
+ * operations or lists of similar operations in sequence, which can then eventually be executed by calling
+ * {@link #execute()}.
+ *
+ * <pre class="code">
            + * ReactiveMongoOperations ops = …;
            + *
            + * ops.bulkOps(BulkMode.UNORDERED, Person.class)
            + * 				.insert(newPerson)
            + * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
            + * 				.execute();
+ * </pre>
            + *
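A self-contained counterpart to the example above, showing that nothing is sent to the server until the returned `Mono` is subscribed; the `Person` type is a stand-in and the method names beyond this interface are assumed from `ReactiveMongoOperations`:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

import com.mongodb.bulk.BulkWriteResult;

// Sketch only: Person is a stand-in domain type.
class BulkUsageSketch {

	record Person(String firstname, String lastname) {}

	Mono<Integer> insertAndRename(ReactiveMongoTemplate template) {
		return template.bulkOps(BulkMode.UNORDERED, Person.class)
				.insert(new Person("Luke", "Skywalker"))
				.updateOne(Query.query(where("firstname").is("Joe")), Update.update("lastname", "Doe"))
				.execute() // the batch is only issued on subscription
				.map(BulkWriteResult::getModifiedCount);
	}
}
```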
+ * <p>
            + * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface ReactiveBulkOperations { + + /** + * Add a single insert to the bulk operation. + * + * @param documents the document to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(Object documents); + + /** + * Add a list of inserts to the bulk operation. + * + * @param documents List of documents to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(List documents); + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations updateOne(Query query, UpdateDefinition update); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update); + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations upsert(Query query, UpdateDefinition update); + + /** + * Add a single remove operation to the bulk operation. + * + * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(Query remove); + + /** + * Add a list of remove operations to the bulk operation. + * + * @param removes the remove operations to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(List removes); + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. 
+ * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
+ */
+ default ReactiveBulkOperations replaceOne(Query query, Object replacement) {
+ return replaceOne(query, replacement, FindAndReplaceOptions.empty());
+ }
+
+ /**
+ * Add a single replace operation to the bulk operation.
+ *
+ * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence
+ * which document to replace when potentially matching multiple candidates.
+ * @param replacement the replacement document. Must not be {@literal null}.
+ * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
+ * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
+ */
+ ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
+
+ /**
+ * Execute all bulk operations using the default write concern.
+ *
+ * @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc.
+ */
+ Mono<BulkWriteResult> execute();
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java
new file mode 100644
index 0000000000..4f936e0ffa
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import reactor.core.publisher.Flux;
+
+import java.time.Instant;
+import java.util.function.Consumer;
+
+import org.bson.BsonTimestamp;
+import org.bson.BsonValue;
+import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
+import org.springframework.data.mongodb.core.aggregation.Aggregation;
+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
+
+/**
+ * {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB
+ * Change Stream operations in a fluent API style.
+ * The starting {@literal domainType} is used for mapping a potentially given
+ * {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the
+ * originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}.
+ * However, it is possible to define a different {@literal returnType} via {@code as}.
+ * The collection to operate on is optional, in which case all collections within the actual database are watched;
+ * use {@literal watchCollection} to define a fixed collection.
+ *

+ * <pre class="code">
            + *         changeStream(Jedi.class)
            + *             .watchCollection("star-wars")
            + *             .filter(where("operationType").is("insert"))
            + *             .resumeAt(Instant.now())
            + *             .listen();
+ * </pre>
            + * 
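As a concrete counterpart to the snippet above, a hedged consumption sketch; `Jedi` is a stand-in type, and keeping the `Disposable` is what allows the otherwise never-completing `Flux` to be canceled:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.Disposable;

// Sketch only: Jedi is a stand-in domain type.
class ChangeStreamUsageSketch {

	record Jedi(String name) {}

	Disposable watchInserts(ReactiveMongoTemplate template) {
		return template.changeStream(Jedi.class)
				.watchCollection("star-wars")
				.filter(where("operationType").is("insert"))
				.listen()
				// the stream never completes on its own; dispose() cancels the subscription
				.subscribe(event -> System.out.println(event.getBody()));
	}
}
```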
            + * + * @author Christoph Strobl + * @since 2.2 + */ +public interface ReactiveChangeStreamOperation { + + /** + * Start creating a change stream operation for the given {@literal domainType} watching all collections within the + * database.
+ * Consider limiting events by defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or
+ * {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}.
+ *
+ * @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements.
+ * @return new instance of {@link ReactiveChangeStream}. Never {@literal null}.
+ * @throws IllegalArgumentException if domainType is {@literal null}.
+ */
+ <T> ReactiveChangeStream<T> changeStream(Class<T> domainType);
+
+ /**
+ * Compose change stream execution by calling one of the terminating methods.
+ */
+ interface TerminatingChangeStream<T> {
+
+ /**
+ * Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
+ * is {@link org.reactivestreams.Subscription#cancel() canceled}.
+ *
+ * However, the stream may become dead, or invalid, if the watched collections or databases are dropped.
+ */
+ Flux<ChangeStreamEvent<T>> listen();
+ }
+
+ /**
+ * Collection override (optional).
+ */
+ interface ChangeStreamWithCollection<T> {
+
+ /**
+ * Explicitly set the name of the collection to watch.
+ * Skip this step to watch all collections within the database.
+ *
+ * @param collection must not be {@literal null} nor {@literal empty}.
+ * @return new instance of {@link ChangeStreamWithFilterAndProjection}.
+ * @throws IllegalArgumentException if {@code collection} is {@literal null}.
+ */
+ ChangeStreamWithFilterAndProjection<T> watchCollection(String collection);
+
+ /**
+ * Set the collection to watch. The collection name is derived from the {@link Class entityClass}.
+ * Skip this step to watch all collections within the database.
+ *
+ * @param entityClass must not be {@literal null}.
+ * @return new instance of {@link ChangeStreamWithFilterAndProjection}.
+ * @throws IllegalArgumentException if {@code entityClass} is {@literal null}.
+ */
+ ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass);
+ }
+
+ /**
+ * Provide a filter for limiting results (optional).
+ */
+ interface ChangeStreamWithFilterAndProjection<T> extends ResumingChangeStream<T>, TerminatingChangeStream<T> {
+
+ /**
+ * Use an {@link Aggregation} to filter matching events.
+ *
+ * @param by must not be {@literal null}.
+ * @return new instance of {@link ChangeStreamWithFilterAndProjection}.
+ * @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}.
+ */
+ ChangeStreamWithFilterAndProjection<T> filter(Aggregation by);
+
+ /**
+ * Use a {@link CriteriaDefinition criteria} to filter matching events via an
+ * {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}.
+ *
+ * @param by must not be {@literal null}.
+ * @return new instance of {@link ChangeStreamWithFilterAndProjection}.
+ * @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}.
+ */
+ ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by);
+
+ /**
+ * Define the target type fields should be mapped to.
+ *
+ * @param resultType must not be {@literal null}.
+ * @param <R> result type.
+ * @return new instance of {@link ChangeStreamWithFilterAndProjection}.
+ * @throws IllegalArgumentException if resultType is {@literal null}.
+ */
+ <R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType);
+ }
+
+ /**
+ * Resume a change stream (optional).
+ */
+ interface ResumingChangeStream<T> extends TerminatingChangeStream<T> {
+
+ /**
+ * Resume the change stream at a given point.
+ *
+ * @param token an {@link Instant} or {@link BsonTimestamp}.
+ * @return new instance of {@link TerminatingChangeStream}.
+ * @see ChangeStreamOptionsBuilder#resumeAt(Instant)
+ * @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp)
+ * @throws IllegalArgumentException if the given token is neither {@link Instant} nor {@link BsonTimestamp}.
+ */
+ TerminatingChangeStream<T> resumeAt(Object token);
+
+ /**
+ * Resume the change stream after a given point.
+ *
+ * @param token an {@link Instant} or {@link BsonTimestamp}.
+ * @return new instance of {@link TerminatingChangeStream}.
+ * @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue)
+ * @see ChangeStreamOptionsBuilder#resumeToken(BsonValue)
+ * @throws IllegalArgumentException if the given token is not a {@link BsonValue}.
+ */
+ TerminatingChangeStream<T> resumeAfter(Object token);
+
+ /**
+ * Start the change stream after a given point.
+ *
+ * @param token an {@link Instant} or {@link BsonTimestamp}.
+ * @return new instance of {@link TerminatingChangeStream}.
+ * @see ChangeStreamOptionsBuilder#startAfter(BsonValue)
+ * @throws IllegalArgumentException if the given token is not a {@link BsonValue}.
+ */
+ TerminatingChangeStream<T> startAfter(Object token);
+ }
+
+ /**
+ * Provide some options.
+ */
+ interface ChangeStreamWithOptions<T> {
+
+ /**
+ * Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously
+ * defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the
+ * builder and can be overwritten via e.g. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}.
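A small usage sketch of the carry-over semantics just described; the `Jedi` type and the one-day offset are illustrative:

```java
import java.time.Duration;
import java.time.Instant;

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Flux;

// Sketch only: resume a change stream roughly one day in the past by mutating
// the carried-over ChangeStreamOptionsBuilder.
class ChangeStreamOptionsSketch {

	record Jedi(String name) {}

	Flux<ChangeStreamEvent<Jedi>> resumeFromYesterday(ReactiveMongoTemplate template) {
		return template.changeStream(Jedi.class)
				.withOptions(options -> options.resumeAt(Instant.now().minus(Duration.ofDays(1))))
				.listen();
	}
}
```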
+ * + * @param optionsConsumer never {@literal null}. + * @return new instance of {@link ReactiveChangeStream}. + */ + ReactiveChangeStream withOptions(Consumer optionsConsumer); + } + + /** + * {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way. + */ + interface ReactiveChangeStream extends ChangeStreamWithOptions, ChangeStreamWithCollection, + TerminatingChangeStream, ResumingChangeStream, ChangeStreamWithFilterAndProjection {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java new file mode 100644 index 0000000000..afeb6c5e0e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java @@ -0,0 +1,187 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.bson.Document; +import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.MatchOperation; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * @author Christoph Strobl + * @since 2.2 + */ +class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation { + + private final ReactiveMongoTemplate template; + + /** + * @param template must not be {@literal null}. 
+ */ + ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveChangeStream changeStream(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null); + } + + static class ReactiveChangeStreamSupport + implements ReactiveChangeStream, ChangeStreamWithFilterAndProjection { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final @Nullable String collection; + private final @Nullable ChangeStreamOptions options; + + private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + @Nullable String collection, @Nullable ChangeStreamOptions options) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.options = options; + } + + @Override + public ChangeStreamWithFilterAndProjection watchCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options); + } + + @Override + public ChangeStreamWithFilterAndProjection watchCollection(Class entityClass) { + + Assert.notNull(entityClass, "Collection type not be null"); + + return watchCollection(template.getCollectionName(entityClass)); + } + + @Override + public TerminatingChangeStream resumeAt(Object token) { + + return withOptions(builder -> { + + if (token instanceof Instant instant) { + builder.resumeAt(instant); + } else if (token instanceof BsonTimestamp bsonTimestamp) { + builder.resumeAt(bsonTimestamp); + } + }); + } + + @Override + public TerminatingChangeStream resumeAfter(Object token) { + + Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue"); + + return withOptions(builder -> builder.resumeAfter((BsonValue) token)); + } + + @Override + public TerminatingChangeStream startAfter(Object token) { + + Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue"); + + return withOptions(builder -> builder.startAfter((BsonValue) token)); + } + + @Override + public ReactiveChangeStreamSupport withOptions(Consumer optionsConsumer) { + + ChangeStreamOptionsBuilder builder = initOptionsBuilder(); + optionsConsumer.accept(builder); + + return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build()); + } + + @Override + public ChangeStreamWithFilterAndProjection as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options); + } + + @Override + public ChangeStreamWithFilterAndProjection filter(Aggregation filter) { + return withOptions(builder -> builder.filter(filter)); + } + + @Override + public ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by) { + + MatchOperation $match = Aggregation.match(by); + Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match) + : Aggregation.newAggregation($match); + return filter(aggregation); + } + + @Override + public Flux> listen() { + return template.changeStream(collection, options != null ? 
options : ChangeStreamOptions.empty(), returnType); + } + + private ChangeStreamOptionsBuilder initOptionsBuilder() { + + ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder(); + if (options == null) { + return builder; + } + + options.getFilter().ifPresent(it -> { + if (it instanceof Aggregation aggregation) { + builder.filter(aggregation); + } else { + builder.filter(((List) it).toArray(new Document[0])); + } + }); + options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup); + options.getFullDocumentBeforeChangeLookup().ifPresent(builder::fullDocumentBeforeChangeLookup); + options.getCollation().ifPresent(builder::collation); + + if (options.isResumeAfter()) { + options.getResumeToken().ifPresent(builder::resumeAfter); + options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter); + } else if (options.isStartAfter()) { + options.getResumeToken().ifPresent(builder::startAfter); + } else { + options.getResumeTimestamp().ifPresent(builder::resumeAt); + options.getResumeBsonTimestamp().ifPresent(builder::resumeAt); + } + + return builder; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java new file mode 100644 index 0000000000..dda6bf1b96 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java @@ -0,0 +1,33 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.reactivestreams.Publisher; +import org.springframework.dao.DataAccessException; + +import com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * @author Mark Paluch + * @param + * @since 2.0 + */ +public interface ReactiveCollectionCallback { + + Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java similarity index 54% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoSynchronization.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java index 02e48989f4..470fd05ef7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoSynchronization.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,18 @@ */ package org.springframework.data.mongodb.core; -import org.springframework.transaction.support.ResourceHolder; -import org.springframework.transaction.support.ResourceHolderSynchronization; +import org.springframework.dao.DataAccessException; + +import com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.reactivestreams.Publisher; /** - * @author Oliver Gierke + * @author Mark Paluch + * @param + * @since 2.0 */ -class MongoSynchronization extends ResourceHolderSynchronization { +public interface ReactiveDatabaseCallback { - public MongoSynchronization(ResourceHolder resourceHolder, Object resourceKey) { - super(resourceHolder, resourceKey); - } + Publisher doInDB(MongoDatabase db) throws MongoException, DataAccessException; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java new file mode 100644 index 0000000000..cba827ffed --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -0,0 +1,334 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * {@link ReactiveFindOperation} allows creation and execution of reactive MongoDB find operations in a fluent API + * style.
+ * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching} into the
+ * MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping back the
+ * result from the {@link org.bson.Document}. However, it is possible to define a different {@literal returnType}
+ * via {@code as} to map the result.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there + * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the + * collection name for the execution. + * + *
            + *     
            + *         query(Human.class)
            + *             .inCollection("star-wars")
            + *             .as(Jedi.class)
            + *             .matching(where("firstname").is("luke"))
            + *             .all();
            + *     
            + * 
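
For orientation while reviewing: a minimal caller-side sketch of the fluent find API introduced above. It assumes a configured `ReactiveMongoTemplate` (which implements this interface) and the stand-in `Human`/`Jedi` types from the Javadoc example; it is an illustration, not part of the change.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class FluentFindSketch {

	// Human/Jedi are the stand-in domain types from the Javadoc example above.
	Flux<Jedi> lukes(ReactiveMongoTemplate template) {

		return template.query(Human.class) // the domain type drives query and field mapping
				.inCollection("star-wars") // optional: override the derived collection name
				.as(Jedi.class) // optional: map results to a different return type
				.matching(where("firstname").is("luke")) // filter criteria
				.all(); // terminating method, emits Flux<Jedi>
	}
}
```
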
            + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Juergen Zimmermann + * @since 2.0 + */ +public interface ReactiveFindOperation { + + /** + * Start creating a find operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ReactiveFind}. Never {@literal null}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ReactiveFind query(Class domainType); + + /** + * Compose find execution by calling one of the terminating methods. + */ + interface TerminatingFind { + + /** + * Get exactly zero or one result. + * + * @return {@link Mono#empty()} if no match found. Never {@literal null}. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + */ + Mono one(); + + /** + * Get the first or no result. + * + * @return {@link Mono#empty()} if no match found. Never {@literal null}. + */ + Mono first(); + + /** + * Get all matching elements. + * + * @return never {@literal null}. + */ + Flux all(); + + /** + * Return a scroll of elements either starting or resuming at {@link ScrollPosition}. + *

            + * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a scroll of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Mono> scroll(ScrollPosition scrollPosition); + + /** + * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will + * not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link org.reactivestreams.Subscription#cancel() canceled}.
            + * However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the + * document at the "end" of the collection and then the application deletes that document.
+ * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose() disposed}, otherwise the + * stream will linger and exhaust resources.
            + * NOTE: Requires a capped collection. + * + * @return the {@link Flux} emitting converted objects. + * @since 2.1 + */ + Flux tail(); + + /** + * Get the number of matching elements.
+ * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but + * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the application's + * needs, use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead. + * + * @return {@link Mono} emitting total number of matching elements. Never {@literal null}. + */ + Mono count(); + + /** + * Check for the presence of matching elements. + * + * @return {@link Mono} emitting {@literal true} if at least one matching element exists. Never {@literal null}. + */ + Mono exists(); + } + + /** + * Compose geoNear execution by calling one of the terminating methods. + */ + interface TerminatingFindNear { + + /** + * Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}. + * + * @return never {@literal null}. + */ + Flux<GeoResult<T>> all(); + } + + /** + * Provide a {@link Query} override (optional). + */ + interface FindWithQuery extends TerminatingFind { + + /** + * Set the filter query to be used. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingFind}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingFind matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingFind}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingFind matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + + /** + * Set the filter query for the geoNear execution. + * + * @param nearQuery must not be {@literal null}. + * @return new instance of {@link TerminatingFindNear}. + * @throws IllegalArgumentException if nearQuery is {@literal null}. + */ + TerminatingFindNear near(NearQuery nearQuery); + } + + /** + * Collection override (optional). + */ + interface FindWithCollection extends FindWithQuery { + + /** + * Explicitly set the name of the collection to perform the query on.
            + * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link FindWithProjection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + FindWithProjection inCollection(String collection); + } + + /** + * Result type override (optional). + */ + interface FindWithProjection extends FindWithQuery, FindDistinct { + + /** + * Define the target type fields should be mapped to.
+ * Skip this step if you are only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param <T> result type. + * @return new instance of {@link FindWithProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + FindWithQuery as(Class resultType); + } + + /** + * Distinct Find support. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface FindDistinct { + + /** + * Finds the distinct values for a specified {@literal field} across a single + * {@link com.mongodb.reactivestreams.client.MongoCollection} or view. + * + * @param field name of the field. Must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if field is {@literal null}. + */ + TerminatingDistinct distinct(String field); + } + + /** + * Result type override. Optional. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface DistinctWithProjection { + + /** + * Define the target type the result should be mapped to.
+ * Skip this step if you are fine with the default conversion. + *
            + *
            {@link Object} (the default)
            + *
Result is mapped according to the {@link org.bson.BsonType}, converting e.g. {@link org.bson.BsonString} into + * plain {@link String}, {@link org.bson.BsonInt64} to {@link Long}, etc., always picking the most concrete type with + * respect to the domain type's property.
            + * Any {@link org.bson.BsonType#DOCUMENT} is run through the {@link org.springframework.data.convert.EntityReader} + * to obtain the domain type.
+ * Using {@link Object} also works for non-strictly typed fields, e.g. a mixture of different types such as fields using + * {@link String} in one {@link org.bson.Document} and {@link Long} in another.
            + *
Any simple type like {@link String}, {@link Long}, ...
            + *
The result is mapped directly by the MongoDB Java driver and the {@link org.bson.codecs.Codec Codecs} in + * place. This works only for results where all documents considered for the operation use the very same type for + * the field.
            + *
            Any Domain type
            + *
Domain types can only be mapped if the result of the actual {@code distinct()} operation returns + * {@link org.bson.BsonType#DOCUMENT}.
            + *
            {@link org.bson.BsonValue}
            + *
Using {@link org.bson.BsonValue} allows retrieval of the raw driver-specific format, which returns e.g. + * {@link org.bson.BsonString}.
            + *
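
A short sketch of how the mapping options listed above play out when calling the fluent {@code distinct(...)} API; it assumes a configured `ReactiveMongoTemplate` and a stand-in `Jedi` type with a `name` field, purely for illustration.

```java
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class DistinctMappingSketch {

	void sketch(ReactiveMongoTemplate template) {

		// Object (the default): BsonType-aware mapping, tolerates mixed field types.
		Flux<Object> lenient = template.query(Jedi.class).distinct("name").all();

		// Driver Codec mapping: every "name" value must actually be a string.
		Flux<String> strict = template.query(Jedi.class).distinct("name").as(String.class).all();
	}
}
```
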
+ * + * @param resultType must not be {@literal null}. + * @param <T> result type. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + TerminatingDistinct as(Class resultType); + } + + /** + * Result restrictions. Optional. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface DistinctWithQuery extends DistinctWithProjection { + + /** + * Set the filter query to be used. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingDistinct matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingDistinct matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Terminating distinct find operations. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface TerminatingDistinct extends DistinctWithQuery { + + /** + * Get all matching distinct field values. + * + * @return empty {@link Flux} if no match found. Never {@literal null}. + */ + Flux all(); + } + + /** + * {@link ReactiveFind} provides methods for constructing lookup operations in a fluent way. + */ + interface ReactiveFind extends FindWithCollection, FindWithProjection, FindDistinct {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java new file mode 100644 index 0000000000..d1aec8af36 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java @@ -0,0 +1,241 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.Window; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link ReactiveFindOperation}.
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +class ReactiveFindOperationSupport implements ReactiveFindOperation { + + private static final Query ALL_QUERY = new Query(); + + private final ReactiveMongoTemplate template; + + ReactiveFindOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveFind query(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveFindSupport<>(template, domainType, domainType, null, ALL_QUERY); + } + + /** + * @param + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ + static class ReactiveFindSupport + implements ReactiveFind, FindWithCollection, FindWithProjection, FindWithQuery { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final String collection; + private final Query query; + + ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, String collection, + Query query) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.query = query; + } + + @Override + public FindWithProjection inCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + } + + @Override + public FindWithQuery as(Class returnType) { + + Assert.notNull(returnType, "ReturnType must not be null"); + + return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + } + + @Override + public TerminatingFind matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + } + + @Override + public Mono first() { + + FindPublisherPreparer preparer = getCursorPreparer(query); + Flux result = doFind(publisher -> preparer.prepare(publisher).limit(1)); + + return result.next(); + } + + @Override + public Mono one() { + + FindPublisherPreparer preparer = getCursorPreparer(query); + Flux result = doFind(publisher -> preparer.prepare(publisher).limit(2)); + + return result.collectList().flatMap(it -> { + + if (it.isEmpty()) { + return Mono.empty(); + } + + if (it.size() > 1) { + return Mono.error( + new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1)); + } + + return Mono.just(it.get(0)); + }); + } + + @Override + public Flux all() { + return doFind(null); + } + + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); + } + + @Override + public Flux tail() { + return doFind(template.new TailingQueryFindPublisherPreparer(query, domainType)); + } + + @Override + public TerminatingFindNear near(NearQuery nearQuery) { + return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); + } + + @Override + public Mono count() { + return template.count(query, domainType, getCollectionName()); + } + + @Override + public Mono exists() { + return template.exists(query, domainType, getCollectionName()); + } + + @Override + public TerminatingDistinct distinct(String field) { + + Assert.notNull(field, "Field must not be null"); + + return new DistinctOperationSupport<>(this, field); + } + + private Flux doFind(@Nullable 
FindPublisherPreparer preparer) { + + Document queryObject = query.getQueryObject(); + Document fieldsObject = query.getFieldsObject(); + + return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject, + fieldsObject, domainType, returnType, preparer != null ? preparer : getCursorPreparer(query)); + } + + @SuppressWarnings("unchecked") + private Flux doFindDistinct(String field) { + + return template.findDistinct(query, field, getCollectionName(), domainType, + returnType == domainType ? (Class) Object.class : returnType); + } + + private FindPublisherPreparer getCursorPreparer(Query query) { + return template.new QueryFindPublisherPreparer(query, domainType); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + + private String asString() { + return SerializationUtils.serializeToJsonSafely(query); + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + static class DistinctOperationSupport implements TerminatingDistinct { + + private final String field; + private final ReactiveFindSupport delegate; + + public DistinctOperationSupport(ReactiveFindSupport delegate, String field) { + + this.delegate = delegate; + this.field = field; + } + + @Override + public TerminatingDistinct as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.as(resultType), field); + } + + @Override + @SuppressWarnings("unchecked") + public TerminatingDistinct matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); + } + + @Override + public Flux all() { + return delegate.doFindDistinct(field); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java new file mode 100644 index 0000000000..30d61771df --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java @@ -0,0 +1,26 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Stripped down interface providing access to a fluent API that specifies a basic set of reactive MongoDB operations. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation, + ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java new file mode 100644 index 0000000000..ff3b690639 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java @@ -0,0 +1,94 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; + +/** + * {@link ReactiveInsertOperation} allows creation and execution of reactive MongoDB insert and bulk insert operations + * in a fluent API style.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there + * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the + * collection name for the execution. + * + *
            + *     
            + *         insert(Jedi.class)
            + *             .inCollection("star-wars")
            + *             .one(luke);
            + *     
            + * 
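
As with the find API, a minimal caller-side sketch of the fluent insert API above; it assumes a configured `ReactiveMongoTemplate` and the stand-in `Jedi` type from the Javadoc example, for illustration only.

```java
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Mono;

class FluentInsertSketch {

	// Jedi is a stand-in domain type; "luke" a transient instance to persist.
	Mono<Jedi> save(ReactiveMongoTemplate template, Jedi luke) {

		return template.insert(Jedi.class)
				.inCollection("star-wars") // optional: override the derived collection name
				.one(luke); // emits the inserted object once the write has completed
	}
}
```
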
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public interface ReactiveInsertOperation { + + /** + * Start creating an insert operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ReactiveInsert}. Never {@literal null}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ReactiveInsert insert(Class domainType); + + /** + * Compose insert execution by calling one of the terminating methods. + */ + interface TerminatingInsert { + + /** + * Insert exactly one object. + * + * @param object must not be {@literal null}. + * @return {@link Mono} emitting the inserted {@code object} when the operation has completed. Never {@literal null}. + * @throws IllegalArgumentException if object is {@literal null}. + */ + Mono one(T object); + + /** + * Insert a collection of objects. + * + * @param objects must not be {@literal null}. + * @return {@literal Flux} emitting the inserted {@code objects} one by one. Never {@literal null}. + * @throws IllegalArgumentException if objects is {@literal null}. + */ + Flux all(Collection objects); + } + + /** + * Collection override (optional). + */ + interface InsertWithCollection { + + /** + * Explicitly set the name of the collection.
            + * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link TerminatingInsert}. Never {@literal null}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + TerminatingInsert inCollection(String collection); + } + + interface ReactiveInsert extends TerminatingInsert, InsertWithCollection {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java new file mode 100644 index 0000000000..06d3c6eae7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java @@ -0,0 +1,90 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link ReactiveInsertOperation}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +class ReactiveInsertOperationSupport implements ReactiveInsertOperation { + + private final ReactiveMongoTemplate template; + + ReactiveInsertOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveInsert insert(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveInsertSupport<>(template, domainType, null); + } + + static class ReactiveInsertSupport implements ReactiveInsert { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final String collection; + + ReactiveInsertSupport(ReactiveMongoTemplate template, Class domainType, String collection) { + + this.template = template; + this.domainType = domainType; + this.collection = collection; + } + + @Override + public Mono one(T object) { + + Assert.notNull(object, "Object must not be null"); + + return template.insert(object, getCollectionName()); + } + + @Override + public Flux all(Collection objects) { + + Assert.notNull(objects, "Objects must not be null"); + + return template.insert(objects, getCollectionName()); + } + + @Override + public ReactiveInsert inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ReactiveInsertSupport<>(template, domainType, collection); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? 
collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java new file mode 100644 index 0000000000..798b1ca7dd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java @@ -0,0 +1,212 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Query; + +/** + * {@link ReactiveMapReduceOperation} allows creation and execution of MongoDB mapReduce operations in a fluent API + * style. The starting {@literal domainType} is used for mapping an optional {@link Query} provided via {@code matching} + * into the MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping + * back the results from the {@link org.bson.Document}. However, it is possible to define a different + * {@literal returnType} via {@code as} to map the result.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there + * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the + * collection name for the execution. + * + *
            + *     
            + *         mapReduce(Human.class)
            + *             .map("function() { emit(this.id, this.firstname) }")
            + *             .reduce("function(id, name) { return sum(id, name); }")
            + *             .inCollection("star-wars")
            + *             .as(Jedi.class)
            + *             .matching(query(where("lastname").is("skywalker")))
            + *             .all();
            + *     
            + * 
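
For illustration, the chain above extended with additional options applied via `with(...)`; a sketch assuming a configured `ReactiveMongoTemplate` and the stand-in `Human`/`Jedi` types from the Javadoc example. `MapReduceOptions.options()` is the empty-options factory referenced by this API.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;

import reactor.core.publisher.Flux;

class FluentMapReduceSketch {

	Flux<Jedi> sketch(ReactiveMongoTemplate template) {

		return template.mapReduce(Human.class)
				.map("function() { emit(this.id, this.firstname) }")
				.reduce("function(id, name) { return sum(id, name); }")
				.with(MapReduceOptions.options().outputTypeInline()) // emit results inline, no output collection
				.inCollection("star-wars")
				.as(Jedi.class)
				.matching(query(where("lastname").is("skywalker")))
				.all();
	}
}
```
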
+ * + * @author Christoph Strobl + * @since 2.1 + */ +public interface ReactiveMapReduceOperation { + + /** + * Start creating a mapReduce operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link MapReduceWithMapFunction}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + MapReduceWithMapFunction mapReduce(Class domainType); + + /** + * Trigger mapReduce execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface TerminatingMapReduce { + + /** + * Get the {@link Flux} emitting mapReduce results. + * + * @return a {@link Flux} emitting the already mapped operation results. + */ + Flux all(); + } + + /** + * Provide the Javascript {@code function()} used to map matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithMapFunction { + + /** + * Set the Javascript map {@code function()}. + * + * @param mapFunction must not be {@literal null} nor empty. + * @return new instance of {@link MapReduceWithReduceFunction}. + * @throws IllegalArgumentException if {@literal mapFunction} is {@literal null} or empty. + */ + MapReduceWithReduceFunction map(String mapFunction); + + } + + /** + * Provide the Javascript {@code function()} used to reduce matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithReduceFunction { + + /** + * Set the Javascript reduce {@code function()}. + * + * @param reduceFunction must not be {@literal null} nor empty. + * @return new instance of {@link ReactiveMapReduce}. + * @throws IllegalArgumentException if {@literal reduceFunction} is {@literal null} or empty. + */ + ReactiveMapReduce reduce(String reduceFunction); + + } + + /** + * Collection override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithCollection extends MapReduceWithQuery { + + /** + * Explicitly set the name of the collection to perform the mapReduce operation on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link MapReduceWithProjection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + MapReduceWithProjection inCollection(String collection); + } + + /** + * Input document filter query (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithQuery extends TerminatingMapReduce { + + /** + * Set the filter query to be used. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingMapReduce matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingMapReduce matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithProjection extends MapReduceWithQuery { + + /** + * Define the target type fields should be mapped to.
+ * Skip this step if you are only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param <T> result type. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + MapReduceWithQuery as(Class resultType); + } + + /** + * Additional mapReduce options (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithOptions { + + /** + * Set additional options to apply to the mapReduce operation. + * + * @param options must not be {@literal null}. + * @return new instance of {@link ReactiveMapReduce}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + ReactiveMapReduce with(MapReduceOptions options); + } + + /** + * {@link ReactiveMapReduce} provides methods for constructing reactive mapReduce operations in a fluent way. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface ReactiveMapReduce extends MapReduceWithMapFunction, MapReduceWithReduceFunction, + MapReduceWithCollection, MapReduceWithProjection, MapReduceWithOptions { + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java new file mode 100644 index 0000000000..4f0d395950 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java @@ -0,0 +1,178 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link ReactiveMapReduceOperation}.
+ * + * @author Christoph Strobl + * @since 2.1 + */ +class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation { + + private static final Query ALL_QUERY = new Query(); + + private final ReactiveMongoTemplate template; + + ReactiveMapReduceOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation#mapReduce(java.lang.Class) + */ + @Override + public ReactiveMapReduceSupport mapReduce(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null); + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + static class ReactiveMapReduceSupport + implements ReactiveMapReduce, MapReduceWithOptions, MapReduceWithCollection, MapReduceWithProjection, + MapReduceWithQuery, MapReduceWithReduceFunction, MapReduceWithMapFunction { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final @Nullable String collection; + private final Query query; + private final @Nullable String mapFunction; + private final @Nullable String reduceFunction; + private final @Nullable MapReduceOptions options; + + ReactiveMapReduceSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + @Nullable String collection, Query query, @Nullable String mapFunction, @Nullable String reduceFunction, + @Nullable MapReduceOptions options) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.query = query; + this.mapFunction = mapFunction; + this.reduceFunction = reduceFunction; + this.options = options; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all() + */ + @Override + public Flux all() { + + return template.mapReduce(query, domainType, getCollectionName(), returnType, mapFunction, reduceFunction, + options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithCollection#inCollection(java.lang.String) + */ + @Override + public MapReduceWithProjection inCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithQuery#matching(org.springframework.data.mongodb.core.query.Query) + */ + @Override + public TerminatingMapReduce matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithProjection#as(java.lang.Class) + */ + @Override + public MapReduceWithQuery as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithOptions#with(org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) + */ + @Override + public ReactiveMapReduce with(MapReduceOptions options) { + + Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithMapFunction#map(java.lang.String) + */ + @Override + public MapReduceWithReduceFunction map(String mapFunction) { + + Assert.hasText(mapFunction, "MapFunction name must not be null nor empty"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithReduceFunction#reduce(java.lang.String) + */ + @Override + public ReactiveMapReduce reduce(String reduceFunction) { + + Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java new file mode 100644 index 0000000000..89d1cd78ac --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java @@ -0,0 +1,130 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Convenient factory for configuring a reactive streams {@link MongoClient}.
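 
For illustration, a minimal Java-config sketch wiring this factory bean; the host and port values are placeholders, and a connection string or `MongoClientSettings` could be set instead, per the setters this class exposes.

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.ReactiveMongoClientFactoryBean;

@Configuration
class ReactiveMongoClientConfig {

	@Bean
	ReactiveMongoClientFactoryBean reactiveMongoClient() {

		ReactiveMongoClientFactoryBean factory = new ReactiveMongoClientFactoryBean();
		factory.setHost("localhost"); // placeholder; setConnectionString/setMongoClientSettings work as well
		factory.setPort(27017);
		return factory;
	}
}
```
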
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean + implements PersistenceExceptionTranslator { + + private @Nullable String connectionString; + private @Nullable String host; + private @Nullable Integer port; + private @Nullable MongoClientSettings mongoClientSettings; + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; + + /** + * Configures the host to connect to. + * + * @param host + */ + public void setHost(@Nullable String host) { + this.host = host; + } + + /** + * Configures the port to connect to. + * + * @param port + */ + public void setPort(int port) { + this.port = port; + } + + /** + * Configures the connection string. + * + * @param connectionString + */ + public void setConnectionString(@Nullable String connectionString) { + this.connectionString = connectionString; + } + + /** + * Configures the mongo client settings. + * + * @param mongoClientSettings + */ + public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientSettings) { + this.mongoClientSettings = mongoClientSettings; + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to use. + * + * @param exceptionTranslator + */ + public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator == null ? MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; + } + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return exceptionTranslator.translateExceptionIfPossible(ex); + } + + @Override + public Class getObjectType() { + return MongoClient.class; + } + + @Override + protected MongoClient createInstance() throws Exception { + + if (mongoClientSettings != null) { + return MongoClients.create(mongoClientSettings); + } + + if (StringUtils.hasText(connectionString)) { + return MongoClients.create(connectionString); + } + + if (StringUtils.hasText(host)) { + + if (port != null) { + return MongoClients.create(String.format("mongodb://%s:%d", host, port)); + } + + return MongoClients.create(String.format("mongodb://%s", host)); + } + + throw new IllegalStateException( + "Cannot create MongoClients; One of the following is required: mongoClientSettings, connectionString or host/port"); + } + + @Override + protected void destroyInstance(@Nullable MongoClient instance) throws Exception { + instance.close(); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java new file mode 100644 index 0000000000..8697ce4dcd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java @@ -0,0 +1,78 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; +import reactor.util.context.Context; + +import java.util.function.Function; + +import org.reactivestreams.Publisher; + +import org.springframework.util.Assert; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * {@link ReactiveMongoContext} utilizes and enriches the Reactor {@link Context} with information potentially required + * for e.g. {@link ClientSession} handling and transactions. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see Mono#deferContextual(Function) + * @see Context + */ +public class ReactiveMongoContext { + + private static final Class SESSION_KEY = ClientSession.class; + + /** + * Gets the {@code Mono} from Reactor {@link reactor.util.context.Context}. The resulting {@link Mono} + * emits the {@link ClientSession} if a session is associated with the current {@link reactor.util.context.Context + * subscriber context}. If the context does not contain a session, the resulting {@link Mono} terminates empty (i.e. + * without emitting a value). + * + * @return the {@link Mono} emitting the client session if present; otherwise the {@link Mono} terminates empty. + */ + public static Mono getSession() { + + return Mono.deferContextual(ctx -> { + + if (ctx.hasKey(SESSION_KEY)) { + return ctx.> get(SESSION_KEY); + } + + return Mono.empty(); + }); + } + + /** + * Sets the {@link ClientSession} into the Reactor {@link reactor.util.context.Context}. + * + * @param context must not be {@literal null}. + * @param session must not be {@literal null}. + * @return a new {@link Context}. + * @see Context#put(Object, Object) + */ + public static Context setSession(Context context, Publisher session) { + + Assert.notNull(context, "Context must not be null"); + Assert.notNull(session, "Session publisher must not be null"); + + return context.put(SESSION_KEY, Mono.from(session)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java new file mode 100644 index 0000000000..90f2d2345d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -0,0 +1,1974 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import org.bson.Document; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscription; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.ReadPreference; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Interface that specifies a basic set of MongoDB operations executed in a reactive way. + *

+ * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability + * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using + * {@link ReactiveMongoOperations} is deferred until a subscriber subscribes to the {@link Publisher}.
            + * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB + * specific documentation to learn more about Multi + * Document Transactions. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.0 + * @see Flux + * @see Mono + * @see Project Reactor + */ +public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { + + /** + * Returns the reactive operations that can be performed on indexes + * + * @param collectionName must not be {@literal null}. + * @return index operations on the named collection + */ + ReactiveIndexOperations indexOps(String collectionName); + + /** + * Returns the reactive operations that can be performed on indexes + * + * @param entityClass must not be {@literal null}. + * @return index operations on the named collection associated with the given entity class + */ + ReactiveIndexOperations indexOps(Class entityClass); + + /** + * Execute a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the + * MongoDB driver to convert the JSON string to a Document. Any errors that result from executing this command will be + * converted into Spring's DAO exception hierarchy. + * + * @param jsonCommand a MongoDB command expressed as a JSON string. + * @return a result object returned by the action + */ + Mono executeCommand(String jsonCommand); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO + * exception hierarchy. + * + * @param command a MongoDB command. + * @return a result object returned by the action + */ + Mono executeCommand(Document command); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's data + * access exception hierarchy. + * + * @param command a MongoDB command, must not be {@literal null}. + * @param readPreference read preferences to use, can be {@literal null}. + * @return a result object returned by the action. + */ + Mono executeCommand(Document command, @Nullable ReadPreference readPreference); + + /** + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
+ * Allows for returning a result object, that is, a domain object or a collection of domain objects. + * + * @param action callback object that specifies the MongoDB actions to perform on the passed-in DB instance. Must not + * be {@literal null}. + * @param <T> return type. + * @return a result object returned by the action + */ + Flux execute(ReactiveDatabaseCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
+ * Allows for returning a result object, that is, a domain object or a collection of domain objects. + * + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @param action callback object that specifies the MongoDB action. Must not be {@literal null}. + * @param <T> return type. + * @return a result object returned by the action or {@literal null}. + */ + Flux execute(Class entityClass, ReactiveCollectionCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
+ * Allows for returning a result object, that is, a domain object or a collection of domain objects. + * + * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be + * passed in. Must not be {@literal null} or empty. + * @param action callback object that specifies the MongoDB action. Must not be {@literal null}. + * @param <T> return type. + * @return a result object returned by the action or {@literal null}. + */ + Flux execute(String collectionName, ReactiveCollectionCallback action); + + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
            + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use + * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the + * {@link ClientSession} when done. + * + * @param sessionProvider must not be {@literal null}. + * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. + * @since 2.1 + */ + default ReactiveSessionScoped withSession(Supplier sessionProvider) { + + Assert.notNull(sessionProvider, "SessionProvider must not be null"); + + return withSession(Mono.fromSupplier(sessionProvider)); + } + + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} + * with given {@literal sessionOptions} to each and every command issued against MongoDB.
            + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use + * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the + * {@link ClientSession} when done. + * + * @param sessionOptions must not be {@literal null}. + * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. + * @since 2.1 + */ + ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions); + + /** + * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the + * {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB. + *
            + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use + * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the + * {@link ClientSession} when done. + * + * @param sessionProvider must not be {@literal null}. + * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. + * @since 2.1 + */ + ReactiveSessionScoped withSession(Publisher sessionProvider); + + /** + * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
+	 * Note: It is up to the caller to manage the {@link ClientSession} lifecycle.
+	 *
+	 * @return {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
+	 * @since 2.1
+	 */
+	ReactiveMongoOperations withSession(ClientSession session);
+
+	/**
+	 * Create an uncapped collection with a name based on the provided entity class.
+	 *
+	 * @param entityClass class that determines the collection to create.
+	 * @return the created collection.
+	 */
+	<T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass);
+
+	/**
+	 * Create a collection with a name based on the provided entity class using the options.
+	 *
+	 * @param entityClass class that determines the collection to create. Must not be {@literal null}.
+	 * @param collectionOptions options to use when creating the collection.
+	 * @return the created collection.
+	 */
+	<T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass,
+			@Nullable CollectionOptions collectionOptions);
+
+	/**
+	 * Create an uncapped collection with the provided name.
+	 *
+	 * @param collectionName name of the collection.
+	 * @return the created collection.
+	 */
+	Mono<MongoCollection<Document>> createCollection(String collectionName);
+
+	/**
+	 * Create a collection with the provided name and options.
+	 *
+	 * @param collectionName name of the collection. Must not be {@literal null} nor empty.
+	 * @param collectionOptions options to use when creating the collection.
+	 * @return the created collection.
+	 */
+	Mono<MongoCollection<Document>> createCollection(String collectionName, CollectionOptions collectionOptions);
+
+	/**
+	 * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline
+	 * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}.
+	 *
+	 * @param name the name of the view to create.
+	 * @param source the type defining the view's source collection.
+	 * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content.
+	 * @since 4.0
+	 */
+	default Mono<MongoCollection<Document>> createView(String name, Class<?> source, AggregationOperation... stages) {
+		return createView(name, source, AggregationPipeline.of(stages));
+	}
+
+	/**
+	 * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
+	 * another collection or view identified by the given {@link #getCollectionName(Class) source type}.
+	 *
+	 * @param name the name of the view to create.
+	 * @param source the type defining the view's source collection.
+	 * @param pipeline the {@link AggregationPipeline} defining the view content.
+	 * @since 4.0
+	 */
+	default Mono<MongoCollection<Document>> createView(String name, Class<?> source, AggregationPipeline pipeline) {
+		return createView(name, source, pipeline, null);
+	}
+
+	/**
+	 * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
+	 * another collection or view identified by the given {@link #getCollectionName(Class) source type}.
+	 *
+	 * @param name the name of the view to create.
+	 * @param source the type defining the view's source collection.
+	 * @param pipeline the {@link AggregationPipeline} defining the view content.
+	 * @param options additional settings to apply when creating the view. Can be {@literal null}.
+	 * @since 4.0
+	 */
+	Mono<MongoCollection<Document>> createView(String name, Class<?> source, AggregationPipeline pipeline,
+			@Nullable ViewOptions options);
+
+	/**
+	 * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
+	 * another collection or view identified by the given source.
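+	 *
+	 * A usage sketch (the {@code Order} source type and field names are illustrative):
+	 *
+	 * <pre class="code">
+	 * Mono<MongoCollection<Document>> view = template.createView("openOrders", Order.class,
+	 * 		Aggregation.match(Criteria.where("state").is("open")));
+	 * </pre>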
+	 *
+	 * @param name the name of the view to create.
+	 * @param source the name of the collection or view that defines the source of the view to be created.
+	 * @param pipeline the {@link AggregationPipeline} defining the view content.
+	 * @param options additional settings to apply when creating the view. Can be {@literal null}.
+	 * @since 4.0
+	 */
+	Mono<MongoCollection<Document>> createView(String name, String source, AggregationPipeline pipeline,
+			@Nullable ViewOptions options);
+
+	/**
+	 * A set of collection names.
+	 *
+	 * @return Flux of collection names.
+	 */
+	Flux<String> getCollectionNames();
+
+	/**
+	 * Get a {@link MongoCollection} by name. The returned collection may not exist yet (except in local memory) and is
+	 * created on first interaction with the server. Collections can be explicitly created via
+	 * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
+	 * exists} first.
+	 * Translate any exceptions as necessary.
+	 *
+	 * @param collectionName name of the collection.
+	 * @return an existing collection or one created on first server interaction.
+	 */
+	Mono<MongoCollection<Document>> getCollection(String collectionName);
+
+	/**
+	 * Check to see if a collection with a name indicated by the entity class exists.
+	 * Translate any exceptions as necessary.
+	 *
+	 * @param entityClass class that determines the name of the collection. Must not be {@literal null}.
+	 * @return true if a collection with the given name is found, false otherwise.
+	 */
+	Mono<Boolean> collectionExists(Class<?> entityClass);
+
+	/**
+	 * Check to see if a collection with a given name exists.
+	 * Translate any exceptions as necessary.
+	 *
+	 * @param collectionName name of the collection. Must not be {@literal null}.
+	 * @return true if a collection with the given name is found, false otherwise.
+	 */
+	Mono<Boolean> collectionExists(String collectionName);
+
+	/**
+	 * Drop the collection with the name indicated by the entity class.
+	 * Translate any exceptions as necessary.
+	 *
+	 * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
+	 */
+	Mono<Void> dropCollection(Class<?> entityClass);
+
+	/**
+	 * Drop the collection with the given name.
+	 * Translate any exceptions as necessary.
+	 *
+	 * @param collectionName name of the collection to drop/delete.
+	 */
+	Mono<Void> dropCollection(String collectionName);
+
+	/**
+	 * Returns a new {@link ReactiveBulkOperations} for the given collection.
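+	 *
+	 * A usage sketch (assuming a {@code ReactiveMongoTemplate} named {@code template}; the collection name is
+	 * illustrative):
+	 *
+	 * <pre class="code">
+	 * template.bulkOps(BulkMode.UNORDERED, "person")
+	 * 		.insert(new Document("firstname", "luke"))
+	 * 		.execute()
+	 * 		.subscribe();
+	 * </pre>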
+	 * NOTE: Any additional support for field mapping, etc. is not available for {@literal update} or
+	 * {@literal remove} operations in bulk mode due to the lack of domain type information. Use
+	 * {@link #bulkOps(BulkMode, Class, String)} to get full type specific support.
+	 *
+	 * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
+	 * @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
+	 * @return {@link ReactiveBulkOperations} on the named collection.
+	 * @since 4.1
+	 */
+	ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName);
+
+	/**
+	 * Returns a new {@link ReactiveBulkOperations} for the given entity type.
+	 *
+	 * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
+	 * @param entityClass the entity class, must not be {@literal null}.
+	 * @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class.
+	 * @since 4.1
+	 */
+	ReactiveBulkOperations bulkOps(BulkMode mode, Class<?> entityClass);
+
+	/**
+	 * Returns a new {@link ReactiveBulkOperations} for the given entity type and collection name.
+	 *
+	 * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
+	 * @param entityType the entity class. Can be {@literal null}.
+	 * @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
+	 * @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class.
+	 * @since 4.1
+	 */
+	ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class<?> entityType, String collectionName);
+
+	/**
+	 * Query for a {@link Flux} of objects of type T from the collection used by the entity class.
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
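+	 *
+	 * For example ({@code Person} being a mapped domain type and {@code template} a {@code ReactiveMongoTemplate}):
+	 *
+	 * <pre class="code">
+	 * Flux<Person> people = template.findAll(Person.class);
+	 * </pre>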
+	 * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
+	 * to map objects since the test for class type is done in the client and not on the server.
+	 *
+	 * @param entityClass the parametrized type of the returned {@link Flux}.
+	 * @return the converted collection.
+	 */
+	<T> Flux<T> findAll(Class<T> entityClass);
+
+	/**
+	 * Query for a {@link Flux} of objects of type T from the specified collection.
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+	 * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
+	 * to map objects since the test for class type is done in the client and not on the server.
+	 *
+	 * @param entityClass the parametrized type of the returned {@link Flux}.
+	 * @param collectionName name of the collection to retrieve the objects from.
+	 * @return the converted collection.
+	 */
+	<T> Flux<T> findAll(Class<T> entityClass, String collectionName);
+
+	/**
+	 * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
+	 * specified type.
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+	 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
+	 * feature-rich {@link Query}.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification.
+	 * @param entityClass the parametrized type of the returned {@link Mono}.
+	 * @return the converted object.
+	 */
+	<T> Mono<T> findOne(Query query, Class<T> entityClass);
+
+	/**
+	 * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
+	 * type.
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+	 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
+	 * feature-rich {@link Query}.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification.
+	 * @param entityClass the parametrized type of the returned {@link Mono}.
+	 * @param collectionName name of the collection to retrieve the objects from.
+	 * @return the converted object.
+	 */
+	<T> Mono<T> findOne(Query query, Class<T> entityClass, String collectionName);
+
+	/**
+	 * Determine whether the result of a given {@link Query} contains at least one element.
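+	 *
+	 * Example (using statically imported {@code Query.query} and {@code Criteria.where}; names are illustrative):
+	 *
+	 * <pre class="code">
+	 * Mono<Boolean> exists = template.exists(query(where("firstname").is("luke")), "person");
+	 * </pre>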
+	 * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of
+	 * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find a document.
+	 * @param collectionName name of the collection to check for objects.
+	 * @return {@literal true} if the query yields a result.
+	 */
+	Mono<Boolean> exists(Query query, String collectionName);
+
+	/**
+	 * Determine whether the result of a given {@link Query} contains at least one element.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find a document.
+	 * @param entityClass the parametrized type.
+	 * @return {@literal true} if the query yields a result.
+	 */
+	Mono<Boolean> exists(Query query, Class<?> entityClass);
+
+	/**
+	 * Determine whether the result of a given {@link Query} contains at least one element.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find a document.
+	 * @param entityClass the parametrized type. Can be {@literal null}.
+	 * @param collectionName name of the collection to check for objects.
+	 * @return {@literal true} if the query yields a result.
+	 */
+	Mono<Boolean> exists(Query query, @Nullable Class<?> entityClass, String collectionName);
+
+	/**
+	 * Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type.
+	 *
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+	 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
+	 * feature-rich {@link Query}.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification. Must not be {@literal null}.
+	 * @param entityClass the parametrized type of the returned {@link Flux}. Must not be {@literal null}.
+	 * @return the {@link Flux} of converted objects.
+	 */
+	<T> Flux<T> find(Query query, Class<T> entityClass);
+
+	/**
+	 * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type.
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+	 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
+	 * feature-rich {@link Query}.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification. Must not be {@literal null}.
+	 * @param entityClass the parametrized type of the returned {@link Flux}.
+	 * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}.
+	 * @return the {@link Flux} of converted objects.
+	 */
+	<T> Flux<T> find(Query query, Class<T> entityClass, String collectionName);
+
+	/**
+	 * Query for a scroll of objects of type T from the specified collection.
            + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
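+	 *
+	 * A usage sketch (assuming a mapped {@code Person} type; {@code ScrollPosition} comes from Spring Data Commons):
+	 *
+	 * <pre class="code">
+	 * Query query = query(where("firstname").is("luke")).with(Sort.by("lastname")).limit(10)
+	 * 		.with(ScrollPosition.keyset());
+	 * Mono<Window<Person>> window = template.scroll(query, Person.class);
+	 * </pre>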
+	 * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}.
+	 * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

+	 * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
+	 * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
+	 * {@code null} values through {@code $gt/$lt} operators.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification. Must not be {@literal null}.
+	 * @param entityType the parametrized type of the returned list.
+	 * @return {@link Mono} emitting the converted window.
+	 * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
+	 *           position.
+	 * @since 4.1
+	 * @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
+	 * @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
+	 */
+	<T> Mono<Window<T>> scroll(Query query, Class<T> entityType);
+
+	/**
+	 * Query for a window of objects of type T from the specified collection.
            + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
+	 * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}.
+	 * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

+	 * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
+	 * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
+	 * {@code null} values through {@code $gt/$lt} operators.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification. Must not be {@literal null}.
+	 * @param entityType the parametrized type of the returned list.
+	 * @param collectionName name of the collection to retrieve the objects from.
+	 * @return {@link Mono} emitting the converted window.
+	 * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
+	 *           position.
+	 * @since 4.1
+	 * @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
+	 * @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
+	 */
+	<T> Mono<Window<T>> scroll(Query query, Class<T> entityType, String collectionName);
+
+	/**
+	 * Returns a document with the given id mapped onto the given class. The collection the query is run against will be
+	 * derived from the given target class as well.
+	 *
+	 * @param id the id of the document to return. Must not be {@literal null}.
+	 * @param entityClass the type the document shall be converted into. Must not be {@literal null}.
+	 * @return the document with the given id mapped onto the given target class.
+	 */
+	<T> Mono<T> findById(Object id, Class<T> entityClass);
+
+	/**
+	 * Returns the document with the given id from the given collection mapped onto the given target class.
+	 *
+	 * @param id the id of the document to return.
+	 * @param entityClass the type to convert the document to.
+	 * @param collectionName the collection to query for the document.
+	 * @return the converted object.
+	 */
+	<T> Mono<T> findById(Object id, Class<T> entityClass, String collectionName);
+
+	/**
+	 * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
+	 * returns the results in a {@link Flux}.
+	 *
+	 * @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
+	 * @param entityClass the domain type used for determining the actual {@link MongoCollection}. Must not be
+	 *          {@literal null}.
+	 * @param resultClass the result type. Must not be {@literal null}.
+	 * @return never {@literal null}.
+	 * @since 2.1
+	 */
+	default <T> Flux<T> findDistinct(String field, Class<?> entityClass, Class<T> resultClass) {
+		return findDistinct(new Query(), field, entityClass, resultClass);
+	}
+
+	/**
+	 * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
+	 * returns the results in a {@link Flux}.
+	 *
+	 * @param query filter {@link Query} to restrict search. Must not be {@literal null}.
+	 * @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
+	 * @param entityClass the domain type used for determining the actual {@link MongoCollection} and mapping the
+	 *          {@link Query} to the domain type fields. Must not be {@literal null}.
+	 * @param resultClass the result type. Must not be {@literal null}.
+	 * @return never {@literal null}.
+	 * @since 2.1
+	 */
+	<T> Flux<T> findDistinct(Query query, String field, Class<?> entityClass, Class<T> resultClass);
+
+	/**
+	 * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
+	 * returns the results in a {@link Flux}.
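+	 *
+	 * Example (collection and field names are illustrative):
+	 *
+	 * <pre class="code">
+	 * Flux<String> lastnames = template.findDistinct(new Query(), "lastname", "person", String.class);
+	 * </pre>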
+	 *
+	 * @param query filter {@link Query} to restrict search. Must not be {@literal null}.
+	 * @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
+	 * @param collectionName the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}.
+	 * @param entityClass the domain type used for mapping the {@link Query} to the domain type fields.
+	 * @param resultClass the result type. Must not be {@literal null}.
+	 * @return never {@literal null}.
+	 * @since 2.1
+	 */
+	<T> Flux<T> findDistinct(Query query, String field, String collectionName, Class<?> entityClass,
+			Class<T> resultClass);
+
+	/**
+	 * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
+	 * returns the results in a {@link Flux}.
+	 *
+	 * @param query filter {@link Query} to restrict search. Must not be {@literal null}.
+	 * @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
+	 * @param collection the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}.
+	 * @param resultClass the result type. Must not be {@literal null}.
+	 * @return never {@literal null}.
+	 * @since 2.1
+	 */
+	default <T> Flux<T> findDistinct(Query query, String field, String collection, Class<T> resultClass) {
+		return findDistinct(query, field, collection, Object.class, resultClass);
+	}
+
+	/**
+	 * Execute an aggregation operation.
+	 *

            + * The raw results will be mapped to the given entity class. + *
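+	 *
+	 * A usage sketch (the {@code Person} source type and {@code PersonAggregate} result type are illustrative):
+	 *
+	 * <pre class="code">
+	 * TypedAggregation<Person> agg = Aggregation.newAggregation(Person.class,
+	 * 		Aggregation.group("lastname").count().as("count"));
+	 * Flux<PersonAggregate> results = template.aggregate(agg, "person", PersonAggregate.class);
+	 * </pre>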

+	 * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
+	 * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting a batch size causes an
+	 * {@link IllegalArgumentException}.
+	 *
+	 * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations. Must not be
+	 *          {@literal null}.
+	 * @param collectionName The name of the input collection to use for the aggregation. Must not be {@literal null}.
+	 * @param outputType The parametrized type of the returned {@link Flux}. Must not be {@literal null}.
+	 * @return The results of the aggregation operation.
+	 * @throws IllegalArgumentException if {@code aggregation}, {@code collectionName} or {@code outputType} is
+	 *           {@literal null}.
+	 */
+	<O> Flux<O> aggregate(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
+
+	/**
+	 * Execute an aggregation operation.
+	 * The raw results will be mapped to the given entity class and are returned as a stream. The name of the
+	 * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}.
+	 * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
+	 * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting a batch size causes an
+	 * {@link IllegalArgumentException}.
+	 *
+	 * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations. Must not be
+	 *          {@literal null}.
+	 * @param outputType The parametrized type of the returned {@link Flux}. Must not be {@literal null}.
+	 * @return The results of the aggregation operation.
+	 * @throws IllegalArgumentException if {@code aggregation} or {@code outputType} is {@literal null}.
+	 */
+	<O> Flux<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType);
+
+	/**
+	 * Execute an aggregation operation.
+	 * The raw results will be mapped to the given {@code outputType}. The name of the inputCollection is derived from
+	 * the {@code inputType}.
+	 * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
+	 * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting a batch size causes an
+	 * {@link IllegalArgumentException}.
+	 *
+	 * @param aggregation The {@link Aggregation} specification holding the aggregation operations. Must not be
+	 *          {@literal null}.
+	 * @param inputType the input type where the aggregation operation will read from. Must not be {@literal null}.
+	 * @param outputType The parametrized type of the returned {@link Flux}. Must not be {@literal null}.
+	 * @return The results of the aggregation operation.
+	 * @throws IllegalArgumentException if {@code aggregation}, {@code inputType} or {@code outputType} is
+	 *           {@literal null}.
+	 */
+	<O> Flux<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
+
+	/**
+	 * Execute an aggregation operation.
            + * The raw results will be mapped to the given entity class.
+	 * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
+	 * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting a batch size causes an
+	 * {@link IllegalArgumentException}.
+	 *
+	 * @param aggregation The {@link Aggregation} specification holding the aggregation operations. Must not be
+	 *          {@literal null}.
+	 * @param collectionName the collection where the aggregation operation will read from. Must not be {@literal null}
+	 *          or empty.
+	 * @param outputType The parametrized type of the returned {@link Flux}. Must not be {@literal null}.
+	 * @return The results of the aggregation operation.
+	 * @throws IllegalArgumentException if {@code aggregation}, {@code collectionName} or {@code outputType} is
+	 *           {@literal null}.
+	 */
+	<O> Flux<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);
+
+	/**
+	 * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Will consider
+	 * entity mapping information to determine the collection the query is run against. Note that MongoDB limits the
+	 * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
+	 * particular number of results.
+	 *

+	 * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the
+	 * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
+	 * aggregations directly:
+	 *

+	 *
+	 * <pre class="code">
            +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
            +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
            +	 * Flux<Document> results = aggregate(geoNear, Document.class);
+	 * </pre>
+	 *
+	 * @param near must not be {@literal null}.
+	 * @param entityClass must not be {@literal null}.
+	 * @return the converted {@link GeoResult}s.
+	 * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations
+	 *             with {@link Aggregation#geoNear(NearQuery, String)} instead.
+	 */
+	@Deprecated
+	<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
+
+	/**
+	 * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note that
+	 * MongoDB limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if
+	 * you expect a particular number of results.
+	 *

+	 * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the
+	 * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
+	 * aggregations directly:
+	 *

+	 *
+	 * <pre class="code">
            +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
            +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
            +	 * Flux<Document> results = aggregate(geoNear, Document.class);
+	 * </pre>
+	 *
+	 * @param near must not be {@literal null}.
+	 * @param entityClass must not be {@literal null}.
+	 * @param collectionName the collection to trigger the query against. If no collection name is given the entity
+	 *          class will be inspected.
+	 * @return the converted {@link GeoResult}s.
+	 * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations
+	 *             with {@link Aggregation#geoNear(NearQuery, String)} instead.
+	 */
+	@Deprecated
+	<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
+
+	/**
+	 * Triggers findAndModify
+	 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
+	 *
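+	 *
+	 * A usage sketch (using statically imported {@code query}, {@code where} and {@code Update.update}; names are
+	 * illustrative):
+	 *
+	 * <pre class="code">
+	 * Mono<Person> previous = template.findAndModify(query(where("firstname").is("luke")),
+	 * 		update("lastname", "skywalker"), Person.class);
+	 * </pre>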

+	 * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be
+	 * auto-incremented if not explicitly specified in the update.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
+	 * @param entityClass the parametrized type. Must not be {@literal null}.
+	 * @return the converted object as it was before the update.
+	 * @since 3.0
+	 * @see Update
+	 * @see AggregationUpdate
+	 */
+	<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
+
+	/**
+	 * Triggers findAndModify
+	 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
+	 *

+	 * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be
+	 * auto-incremented if not explicitly specified in the update.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
+	 * @param entityClass the parametrized type. Must not be {@literal null}.
+	 * @param collectionName the collection to query. Must not be {@literal null}.
+	 * @return the converted object as it was before the update.
+	 * @since 3.0
+	 * @see Update
+	 * @see AggregationUpdate
+	 */
+	<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
+
+	/**
+	 * Triggers findAndModify
+	 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
+	 * {@link FindAndModifyOptions} into account.
+	 *

+	 * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be
+	 * auto-incremented if not explicitly specified in the update.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification.
+	 * @param update the {@link UpdateDefinition} to apply on matching documents.
+	 * @param options the {@link FindAndModifyOptions} holding additional information.
+	 * @param entityClass the parametrized type.
+	 * @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
+	 *         this will either be the object as it was before the update or as it is after the update.
+	 * @since 3.0
+	 * @see Update
+	 * @see AggregationUpdate
+	 */
+	<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
+
+	/**
+	 * Triggers findAndModify
+	 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
+	 * {@link FindAndModifyOptions} into account.
+	 *

+	 * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be
+	 * auto-incremented if not explicitly specified in the update.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
+	 * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
+	 * @param entityClass the parametrized type. Must not be {@literal null}.
+	 * @param collectionName the collection to query. Must not be {@literal null}.
+	 * @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
+	 *         this will either be the object as it was before the update or as it is after the update.
+	 * @since 3.0
+	 * @see Update
+	 * @see AggregationUpdate
+	 */
+	<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
+			String collectionName);
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
+	 * document.
            + * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
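+	 *
+	 * Example ({@code luke} is an illustrative replacement object without an {@literal id}):
+	 *
+	 * <pre class="code">
+	 * Mono<Person> previous = template.findAndReplace(query(where("firstname").is("luke")), luke);
+	 * </pre>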
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found.
+	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given replacement value.
+	 * @since 2.1
+	 */
+	default <T> Mono<T> findAndReplace(Query query, T replacement) {
+		return findAndReplace(query, replacement, FindAndReplaceOptions.empty());
+	}
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
+	 * document.
            + * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @param collectionName the collection to query. Must not be {@literal null}.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found.
+	 * @since 2.1
+	 */
+	default <T> Mono<T> findAndReplace(Query query, T replacement, String collectionName) {
+		return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName);
+	}
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
+	 * taking {@link FindAndReplaceOptions} into account.
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
+	 *         {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
+	 *         as it is after the update.
+	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given replacement value.
+	 * @since 2.1
+	 */
+	default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options) {
+		return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement)));
+	}
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
+	 * taking {@link FindAndReplaceOptions} into account.
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
+	 * @param collectionName the collection to query. Must not be {@literal null}.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
+	 *         {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
+	 *         as it is after the update.
+	 * @since 2.1
+	 */
+	default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) {
+
+		Assert.notNull(replacement, "Replacement must not be null");
+		return findAndReplace(query, replacement, options, (Class<T>) ClassUtils.getUserClass(replacement), collectionName);
+	}
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
+	 * taking {@link FindAndReplaceOptions} into account.
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
+	 * @param entityType the parametrized type. Must not be {@literal null}.
+	 * @param collectionName the collection to query. Must not be {@literal null}.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
+	 *         {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
+	 *         as it is after the update.
+	 * @since 2.1
+	 */
+	default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class<T> entityType,
+			String collectionName) {
+
+		return findAndReplace(query, replacement, options, entityType, collectionName, entityType);
+	}
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
+	 * taking {@link FindAndReplaceOptions} into account.
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
+	 * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection
+	 *          from. Must not be {@literal null}.
+	 * @param resultType the parametrized type of the projection return type. Must not be {@literal null}, use the domain
+	 *          type or {@code Object.class} instead.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
+	 *         {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
+	 *         as it is after the update.
+	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given replacement value.
+	 * @since 2.1
+	 */
+	default <S, T> Mono<T> findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
+			Class<T> resultType) {
+
+		return findAndReplace(query, replacement, options, entityType,
+				getCollectionName(ClassUtils.getUserClass(entityType)), resultType);
+	}
+
+	/**
+	 * Triggers
+	 * findOneAndReplace
+	 * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
+	 * taking {@link FindAndReplaceOptions} into account.
+	 * NOTE: The replacement entity must not hold an {@literal id}.
+	 *
+	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an
+	 *          optional fields specification. Must not be {@literal null}.
+	 * @param replacement the replacement document. Must not be {@literal null}.
+	 * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
+	 * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection
+	 *          from. Must not be {@literal null}.
+	 * @param collectionName the collection to query. Must not be {@literal null}.
+	 * @param resultType the parametrized type of the projection return type. Must not be {@literal null}, use the domain
+	 *          type or {@code Object.class} instead.
+	 * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
+	 *         {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
+	 *         as it is after the update.
+	 * @since 2.1
+	 */
+	<S, T> Mono<T> findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
+			String collectionName, Class<T> resultType);
+
+	/**
+	 * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
+	 * specified type. The first document that matches the query is returned and also removed from the collection in the
+	 * database.
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}.
+	 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
+	 * feature-rich {@link Query}.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification.
+	 * @param entityClass the parametrized type of the returned {@link Mono}.
+	 * @return the converted object.
+	 */
+	<T> Mono<T> findAndRemove(Query query, Class<T> entityClass);
+
+	/**
+	 * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
+	 * type. The first document that matches the query is returned and also removed from the collection in the database.
+	 *
+	 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+	 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
+	 * feature-rich {@link Query}.
+	 *
+	 * @param query the query class that specifies the criteria used to find a document and also an optional fields
+	 *          specification.
+	 * @param entityClass the parametrized type of the returned {@link Mono}.
+	 * @param collectionName name of the collection to retrieve the objects from.
+	 * @return the converted object.
+	 */
+	<T> Mono<T> findAndRemove(Query query, Class<T> entityClass, String collectionName);
+
+	/**
+	 * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
+	 *
            + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
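+	 *
+	 * Example (using statically imported {@code query} and {@code where}; {@code Person} is illustrative):
+	 *
+	 * <pre class="code">
+	 * Mono<Long> count = template.count(query(where("lastname").is("skywalker")), Person.class);
+	 * </pre>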
+	 * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
+	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+	 * aggregation execution} which may have an impact on performance.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
+	 *          {@literal null}.
+	 * @param entityClass class that determines the collection to use. Must not be {@literal null}.
+	 * @return the count of matching documents.
+	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given type.
+	 * @see #exactCount(Query, Class)
+	 * @see #estimatedCount(Class)
+	 */
+	Mono<Long> count(Query query, Class<?> entityClass);
+
+	/**
+	 * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
+	 * must solely consist of document field references as we lack type information to map potential property references
+	 * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
            + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+	 * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
+	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+	 * aggregation execution} which may have an impact on performance.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find documents.
+	 * @param collectionName must not be {@literal null} or empty.
+	 * @return the count of matching documents.
+	 * @see #count(Query, Class, String)
+	 * @see #estimatedCount(String)
+	 * @see #exactCount(Query, String)
+	 */
+	Mono<Long> count(Query query, String collectionName);
+
+	/**
+	 * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
+	 * class to map the given {@link Query}.
            + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+	 * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
+	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+	 * aggregation execution} which may have an impact on performance.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
+	 *          {@literal null}.
+	 * @param entityClass the parametrized type. Can be {@literal null}.
+	 * @param collectionName must not be {@literal null} or empty.
+	 * @return the count of matching documents.
+	 * @see #estimatedCount(String)
+	 * @see #exactCount(Query, Class, String)
+	 */
+	Mono<Long> count(Query query, @Nullable Class<?> entityClass, String collectionName);
+
+	/**
+	 * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
+	 * based on collection statistics.
+	 * Please make sure to read the MongoDB reference documentation about limitations, e.g. on sharded clusters or
+	 * inside transactions.
+	 *
+	 * @param entityClass must not be {@literal null}.
+	 * @return a {@link Mono} emitting the estimated number of documents.
+	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given type.
+	 * @since 3.1
+	 */
+	default Mono<Long> estimatedCount(Class<?> entityClass) {
+
+		Assert.notNull(entityClass, "Entity class must not be null");
+		return estimatedCount(getCollectionName(entityClass));
+	}
+
+	/**
+	 * Estimate the number of documents in the given collection based on collection statistics.
+	 * Please make sure to read the MongoDB reference documentation about limitations, e.g. on sharded clusters or
+	 * inside transactions.
+	 *
+	 * @param collectionName must not be {@literal null}.
+	 * @return a {@link Mono} emitting the estimated number of documents.
+	 * @since 3.1
+	 */
+	Mono<Long> estimatedCount(String collectionName);
+
+	/**
+	 * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
+	 *
            + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+	 * This method uses an
+	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+	 * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
+	 * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the application's
+	 * needs use {@link #estimatedCount(Class)} for empty queries instead.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
+	 *          {@literal null}.
+	 * @param entityClass class that determines the collection to use. Must not be {@literal null}.
+	 * @return the count of matching documents.
+	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given type.
+	 * @since 3.4
+	 */
+	default Mono<Long> exactCount(Query query, Class<?> entityClass) {
+		return exactCount(query, entityClass, getCollectionName(entityClass));
+	}
+
+	/**
+	 * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
+	 * must solely consist of document field references as we lack type information to map potential property references
+	 * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
            + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+	 * This method uses an
+	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+	 * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
+	 * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the application's
+	 * needs use {@link #estimatedCount(String)} for empty queries instead.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find documents.
+	 * @param collectionName must not be {@literal null} or empty.
+	 * @return the count of matching documents.
+	 * @see #count(Query, Class, String)
+	 * @since 3.4
+	 */
+	default Mono<Long> exactCount(Query query, String collectionName) {
+		return exactCount(query, null, collectionName);
+	}
+
+	/**
+	 * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
+	 * class to map the given {@link Query}.
            + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+	 * This method uses an
+	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+	 * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
+	 * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the application's
+	 * needs use {@link #estimatedCount(String)} for empty queries instead.
+	 *
+	 * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
+	 *          {@literal null}.
+	 * @param entityClass the parametrized type. Can be {@literal null}.
+	 * @param collectionName must not be {@literal null} or empty.
+	 * @return the count of matching documents.
+	 * @since 3.4
+	 */
+	Mono<Long> exactCount(Query query, @Nullable Class<?> entityClass, String collectionName);
+
+	/**
+	 * Insert the object into the collection for the entity type of the object to save.
+	 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
+	 * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated
+	 * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string.
+	 * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that
+	 * leverages Type Conversion API. See
+	 * Spring's
+	 * Type Conversion for more details.
            + * Insert is used to initially store the object into the database. To update an existing object use the save method. + *
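+	 *
+	 * Example ({@code Person} is an illustrative mapped domain type):
+	 *
+	 * <pre class="code">
+	 * Mono<Person> saved = template.insert(new Person("luke"));
+	 * </pre>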

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

+	 * The {@code objectToSave} must not be collection-like.
+	 *
+	 * @param objectToSave the object to store in the collection. Must not be {@literal null}.
+	 * @return the inserted object.
+	 * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
+	 * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given object type.
+	 */
+	<T> Mono<T> insert(T objectToSave);
+
+	/**
+	 * Insert the object into the specified collection.
+	 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
+	 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

+	 * The {@code objectToSave} must not be collection-like.
+	 *
+	 * @param objectToSave the object to store in the collection. Must not be {@literal null}.
+	 * @param collectionName name of the collection to store the object in. Must not be {@literal null}.
+	 * @return the inserted object.
+	 * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
+	 */
+	<T> Mono<T> insert(T objectToSave, String collectionName);
+
+	/**
+	 * Insert a Collection of objects into a collection in a single batch write to the database.
+	 *

            + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+	 * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization.
+	 *
+	 * @param batchToSave the batch of objects to save. Must not be {@literal null}.
+	 * @param entityClass class that determines the collection to use. Must not be {@literal null}.
+	 * @return the inserted objects.
+	 * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
+	 *           {@link #getCollectionName(Class) derived} from the given type.
+	 */
+	<T> Flux<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass);
+
+	/**
+	 * Insert a batch of objects into the specified collection in a single batch write to the database.
+	 *

            + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param batchToSave the list of objects to save. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the inserted objects. + */ + <T> Flux<T> insert(Collection<? extends T> batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + *

            + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param objectsToSave the list of objects to save. Must not be {@literal null}. + * @return the saved objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. + */ + <T> Flux<T> insertAll(Collection<? extends T> objectsToSave); + + /** + * Insert the object into the collection for the entity type of the object to save.
            + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
            + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's + * Type Conversion" for more details.
            + * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

            + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the inserted object. + */ + <T> Mono<T> insert(Mono<? extends T> objectToSave); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + *

            + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the type. + */ + <T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> batchToSave, Class<?> entityClass); + + /** + * Insert objects into the specified collection in a single batch write to the database. + *
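The Mono-accepting insertAll variants defer materializing the batch until subscription, which is useful when the collection itself is produced asynchronously. A sketch under the same assumptions as before, with loadPeopleToImport() as a hypothetical helper returning Mono<List<Person>>:

Mono<List<Person>> deferredBatch = loadPeopleToImport();

// insertAll subscribes to the Mono and writes the emitted collection in one
// bulk operation; the inserted objects are re-emitted with populated ids.
Flux<Person> inserted = template.insertAll(deferredBatch, Person.class);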

            + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the inserted objects. + */ + <T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + *

            + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

            + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + * + * @param objectsToSave the publisher which provides objects to save. Must not be {@literal null}. + * @return the inserted objects. + */ + <T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> objectsToSave); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'.
            + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's + * Type Conversion" for more details. + *

            + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + *

            + * The {@code objectToSave} must not be collection-like. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of a version mismatch when a + * {@link org.springframework.data.annotation.Version} is defined. + */ + <T> Mono<T> save(T objectToSave); + + /** + * Save the object to the specified collection. This will perform an insert if the object is not already present, that + * is an 'upsert'.
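In combination with a versioned entity, the save contract above gives optimistic locking. A sketch under stated assumptions: Person carries a @Version property, personId and setLastname are hypothetical, and the error handling merely illustrates where a retry would go:

template.findById(personId, Person.class)
        .map(person -> { person.setLastname("Miller"); return person; })
        .flatMap(template::save)                     // bumps the version on success
        .onErrorResume(OptimisticLockingFailureException.class,
                e -> Mono.empty())                   // concurrent modification detected
        .subscribe();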
            + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

            + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of a version mismatch when a + * {@link org.springframework.data.annotation.Version} is defined. + */ + <T> Mono<T> save(T objectToSave, String collectionName); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'.
            + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

            + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the saved object. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of a version mismatch when a + * {@link org.springframework.data.annotation.Version} is defined. + */ + <T> Mono<T> save(Mono<? extends T> objectToSave); + + /** + * Save the object to the specified collection. This will perform an insert if the object is not already present, that + * is an 'upsert'.
            + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

            + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + * + * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the saved object. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of a version mismatch when a + * {@link org.springframework.data.annotation.Version} is defined. + */ + <T> Mono<T> save(Mono<? extends T> objectToSave, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + *

            + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *
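A minimal upsert sketch under the same illustrative assumptions as before; if no person named Alice exists, query and update are combined into the newly inserted document:

Mono<UpdateResult> result = template.upsert(
        Query.query(Criteria.where("firstname").is("Alice")),
        Update.update("lastname", "Miller"),         // applied on match or insert
        Person.class);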

            + * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead. + * + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document.
            + * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of + * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> upsert(Query query, UpdateDefinition update, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + *

            + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName); + + /** + * Updates the first object that is found in the collection of the entity class that matches the query document with + * the provided update document. + *

            + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. + * @param entityClass class that determines the collection to use. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document.
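Since updateFirst accepts a sorted Query, a sketch of updating the single best match; field names and the Person entity remain illustrative, with Sort from org.springframework.data.domain:

// The sort decides which of several matching people receives the update.
Mono<UpdateResult> result = template.updateFirst(
        Query.query(Criteria.where("lastname").is("Miller"))
                .with(Sort.by(Sort.Direction.DESC, "age")),
        new Update().inc("visits", 1),
        Person.class);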
            + * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of + * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, String collectionName); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + *

            + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + *

            + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass); + + /** + * Updates all objects that are found in the specified collection that matches the query document criteria with the + * provided updated document.
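And the multi-document counterpart, again as an illustrative sketch using the assumed Person entity and "people" collection:

// updateMulti applies the update to every document matching the query.
Mono<UpdateResult> renamed = template.updateMulti(
        Query.query(Criteria.where("lastname").is("Mueller")),
        Update.update("lastname", "Miller"),
        Person.class, "people");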
            + * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of + * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + *

            + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be + * {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. + * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. + * @param collectionName name of the collection to update the object in. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName); + + /** + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. + * + * @param object must not be {@literal null}. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + */ + Mono<DeleteResult> remove(Object object); + + /** + * Removes the given object from the given collection. + * + * @param object must not be {@literal null}. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + */ + Mono<DeleteResult> remove(Object object, String collectionName); + + /** + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. + * + * @param objectToRemove must not be {@literal null}. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + */ + Mono<DeleteResult> remove(Mono<? extends Object> objectToRemove); + + /** + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. + * + * @param objectToRemove must not be {@literal null}. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + */ + Mono<DeleteResult> remove(Mono<? extends Object> objectToRemove, String collectionName); + + /** + * Remove all documents that match the provided query document criteria from the collection used to store the + * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. + * + * @param query the query document that specifies the criteria used to remove a document. + * @param entityClass class that determines the collection to use. + * @return the {@link DeleteResult} which lets you access the results of the previous delete.
+ * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + */ + Mono<DeleteResult> remove(Query query, Class<?> entityClass); + + /** + * Remove all documents that match the provided query document criteria from the collection used to store the + * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. + * + * @param query the query document that specifies the criteria used to remove a document. + * @param entityClass class of the pojo to be operated on. Can be {@literal null}. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + */ + Mono<DeleteResult> remove(Query query, @Nullable Class<?> entityClass, String collectionName); + + /** + * Remove all documents from the specified collection that match the provided query document criteria. There is no + * conversion/mapping done for any criteria using the id field.
            + * NOTE: Any additional support for field mapping is not available due to the lack of domain type + * information. Use {@link #remove(Query, Class, String)} to get full type specific support. + * + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link DeleteResult} which lets you access the results of the previous delete. + */ + Mono<DeleteResult> remove(Query query, String collectionName); + + /** + * Returns and removes all documents from the specified collection that match the provided query.
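A removal sketch covering both flavors, under the same illustrative assumptions: remove reports only the DeleteResult, while findAllAndRemove, documented next, additionally emits the mapped documents it deleted:

Mono<DeleteResult> deleted = template.remove(
        Query.query(Criteria.where("lastname").is("Miller")), Person.class);

Flux<Person> removedPeople = template.findAllAndRemove(
        Query.query(Criteria.where("lastname").is("Miller")), Person.class);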
            + * NOTE: Any additional support for field mapping is not available due to the lack of domain type + * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. + * + * @param query the query document that specifies the criteria used to find and remove documents. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link Flux} of converted objects deleted by this operation. + */ + <T> Flux<T> findAllAndRemove(Query query, String collectionName); + + /** + * Returns and removes all documents matching the given query from the collection used to store the entityClass. + * + * @param query the query document that specifies the criteria used to find and remove documents. + * @param entityClass class of the pojo to be operated on. + * @return the {@link Flux} of converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + */ + <T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass); + + /** + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. + * + * @param query the query document that specifies the criteria used to find and remove documents. + * @param entityClass class of the pojo to be operated on. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. + * @return the {@link Flux} of converted objects deleted by this operation. + */ + <T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName); + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
            + * The collection name is derived from the {@literal replacement} type.
            + * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default <T> Mono<UpdateResult> replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @since 4.2 + */ + default <T> Mono<UpdateResult> replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default <T> Mono<UpdateResult> replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account.
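A sketch of the replace contract under the earlier assumptions: the whole document is exchanged rather than individual fields, and with the defaulted ReplaceOptions.none() nothing happens when no document matches. The two-argument Person constructor is hypothetical:

// Swap out the first matching document for the given replacement.
Mono<UpdateResult> result = template.replace(
        Query.query(Criteria.where("firstname").is("Alice")),
        new Person("Alice", "Miller"));              // options default to ReplaceOptions.none()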
+ * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + <T> Mono<UpdateResult> replace(Query query, T replacement, ReplaceOptions options, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}.
            + * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature-rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. + * @param entityClass the parametrized type of the returned {@link Flux}. + * @return the {@link Flux} of converted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + */ + <T> Flux<T> tail(Query query, Class<T> entityClass); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}.
            + * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
            + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature-rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. + * @param entityClass the parametrized type of the returned {@link Flux}. + * @param collectionName name of the collection to retrieve the objects from. + * @return the {@link Flux} of converted objects. + */ + <T> Flux<T> tail(Query query, Class<T> entityClass, String collectionName); + + /** + * Subscribe to a MongoDB Change Stream for all events in + * the configured default database via the reactive infrastructure. Use the optionally provided {@link Aggregation} to + * filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}.
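A tailing sketch for the cursor-based methods above; tailable cursors require a capped collection, and the resulting Flux stays open until the subscription is cancelled. LogEntry is an assumed mapped entity and Disposable comes from reactor.core:

Disposable tailing = template.tail(
                Query.query(Criteria.where("level").is("ERROR")), LogEntry.class)
        .doOnNext(System.out::println)       // emitted as new documents arrive
        .subscribe();

// Later, when no longer interested:
tailing.dispose();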
            + * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
            + * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} + * for resuming change streams. + * + * @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}. + * @param targetType the result type to use. + * @param <T> the result type. + * @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive. + * @since 2.1 + * @see ReactiveMongoDatabaseFactory#getMongoDatabase() + * @see ChangeStreamOptions#getFilter() + */ + default <T> Flux<ChangeStreamEvent<T>> changeStream(ChangeStreamOptions options, Class<T> targetType) { + return changeStream(null, options, targetType); + } + + /** + * Subscribe to a MongoDB Change Stream for all events in + * the given collection via the reactive infrastructure. Use the optionally provided {@link Aggregation} to filter + * events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}.
            + * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
            + * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} + * for resuming change streams. + * + * @param collectionName the collection to watch. Can be {@literal null} to watch all collections. + * @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}. + * @param targetType the result type to use. + * @param <T> the result type. + * @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive. + * @since 2.1 + * @see ChangeStreamOptions#getFilter() + */ + default <T> Flux<ChangeStreamEvent<T>> changeStream(@Nullable String collectionName, ChangeStreamOptions options, + Class<T> targetType) { + + return changeStream(null, collectionName, options, targetType); + } + + /** + * Subscribe to a MongoDB Change Stream via the reactive + * infrastructure. Use the optionally provided {@link Aggregation} to filter events. The stream will not be completed + * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
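A change stream sketch for the overloads above, watching a single collection with empty options; getBody() carries the mapped payload, getRaw() the unmodified change stream document, and process(..) is an assumed handler:

Flux<ChangeStreamEvent<Person>> events = template.changeStream(
        "people", ChangeStreamOptions.empty(), Person.class);

events.doOnNext(event -> process(event.getBody()))   // stays open until cancelled
        .subscribe();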
            + * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
            + * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} + * for resuming change streams. + * + * @param database the database to watch. Can be {@literal null}, uses configured default if so. + * @param collectionName the collection to watch. Can be {@literal null}, watches all collections if so. + * @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}. + * @param targetType the result type to use. + * @param <T> the result type. + * @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive. + * @since 2.1 + * @see ChangeStreamOptions#getFilter() + */ + <T> Flux<ChangeStreamEvent<T>> changeStream(@Nullable String database, @Nullable String collectionName, + ChangeStreamOptions options, Class<T> targetType); + + /** + * Execute a map-reduce operation. Use {@link MapReduceOptions} to optionally specify an output collection and other + * args. + * + * @param filterQuery the selection criteria for the documents going into the map function. Must not be + * {@literal null}. + * @param domainType source type used to determine the input collection name and map the filter {@link Query} against. + * Must not be {@literal null}. + * @param resultType the mapping target of the operation's result documents. Must not be {@literal null}. + * @param mapFunction the JavaScript map function. Must not be {@literal null}. + * @param reduceFunction the JavaScript reduce function. Must not be {@literal null}. + * @param options additional options like output collection. Must not be {@literal null}. + * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. + * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. + */ + @Deprecated + <T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, Class<T> resultType, String mapFunction, + String reduceFunction, MapReduceOptions options); + + /** + * Execute a map-reduce operation. Use {@link MapReduceOptions} to optionally specify an output collection and other + * args. + * + * @param filterQuery the selection criteria for the documents going into the map function. Must not be + * {@literal null}. + * @param domainType source type used to map the filter {@link Query} against. Must not be {@literal null}. + * @param inputCollectionName the input collection. + * @param resultType the mapping target of the operation's result documents. Must not be {@literal null}. + * @param mapFunction the JavaScript map function. Must not be {@literal null}. + * @param reduceFunction the JavaScript reduce function. Must not be {@literal null}. + * @param options additional options like output collection. Must not be {@literal null}. + * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. + * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. + */ + @Deprecated + <T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, String inputCollectionName, Class<T> resultType, + String mapFunction, String reduceFunction, MapReduceOptions options); + + /** + * Returns the underlying {@link MongoConverter}. + * + * @return never {@literal null}. + */ + MongoConverter getConverter(); + + /** + * The collection name used for the specified class by this template. + * + * @param entityClass must not be {@literal null}. + * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type.
+ * @since 2.1 + */ + String getCollectionName(Class entityClass); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java new file mode 100644 index 0000000000..b74ec6aa1c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -0,0 +1,3459 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.SerializationUtils.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; +import reactor.util.function.Tuples; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; + +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationEventPublisherAware; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.context.MappingContextEvent; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import 
org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils; +import org.springframework.data.mongodb.SessionSynchronization; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; +import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.QueryOperations.CountContext; +import org.springframework.data.mongodb.core.QueryOperations.DeleteContext; +import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; +import org.springframework.data.mongodb.core.QueryOperations.QueryContext; +import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; +import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; +import org.springframework.data.mongodb.core.index.ReactiveMongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.event.*; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import 
org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Optionals; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.NumberUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.ResourceUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.CursorType; +import com.mongodb.MongoException; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.CreateViewOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.EstimatedDocumentCountOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.InsertOneResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ChangeStreamPublisher; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.DistinctPublisher; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps + * to avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link ReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. + *

            + * Note: The {@link ReactiveMongoDatabaseFactory} should always be configured as a bean in the application context: in + * the former case it is given to the service directly, in the latter case to the prepared template. + *

            + * {@link ReadPreference} and {@link com.mongodb.ReadConcern}

            + *

            + * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

            + * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Roman Puchkovskiy + * @author Mathieu Ouellet + * @author Yadhukrishna S Pai + * @author Florian Lüdiger + * @since 2.0 + */ +public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { + + public static final DbRefResolver NO_OP_REF_RESOLVER = NoOpDbRefResolver.INSTANCE; + + private static final Log LOGGER = LogFactory.getLog(ReactiveMongoTemplate.class); + private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; + + private final MongoConverter mongoConverter; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final ReactiveMongoDatabaseFactory mongoDatabaseFactory; + private final PersistenceExceptionTranslator exceptionTranslator; + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + private final ApplicationListener> indexCreatorListener; + private final EntityOperations operations; + private final PropertyOperations propertyOperations; + private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; + + private @Nullable WriteConcern writeConcern; + private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; + private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; + private @Nullable ReadPreference readPreference; + private @Nullable ApplicationEventPublisher eventPublisher; + private @Nullable ReactiveEntityCallbacks entityCallbacks; + private @Nullable ReactiveMongoPersistentEntityIndexCreator indexCreator; + + private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + + private CountExecution countExecution = this::doExactCount; + + /** + * Constructor used for a basic template configuration. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + */ + public ReactiveMongoTemplate(MongoClient mongoClient, String databaseName) { + this(new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName), (MongoConverter) null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory) { + this(mongoDatabaseFactory, (MongoConverter) null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + * @param mongoConverter can be {@literal null}. + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, + @Nullable MongoConverter mongoConverter) { + this(mongoDatabaseFactory, mongoConverter, ReactiveMongoTemplate::handleSubscriptionException); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + * @param mongoConverter can be {@literal null}. + * @param subscriptionExceptionHandler exception handler called by {@link Flux#doOnError(Consumer)} on reactive type + * materialization via {@link Publisher#subscribe(Subscriber)}. This callback is used during non-blocking + * subscription of e.g. index creation {@link Publisher}s. Must not be {@literal null}. 
+ * @since 2.1 + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, + @Nullable MongoConverter mongoConverter, Consumer subscriptionExceptionHandler) { + + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null"); + + this.mongoDatabaseFactory = mongoDatabaseFactory; + this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); + this.mongoConverter = mongoConverter == null ? getDefaultMongoConverter() : mongoConverter; + this.queryMapper = new QueryMapper(this.mongoConverter); + this.updateMapper = new UpdateMapper(this.mongoConverter); + this.indexCreatorListener = new IndexCreatorEventListener(subscriptionExceptionHandler); + + // We always have a mapping context in the converter, whether it's a simple one or not + this.mappingContext = this.mongoConverter.getMappingContext(); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); + this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, + mongoDatabaseFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); + + // We create indexes based on mapping events + if (this.mappingContext instanceof MongoMappingContext mongoMappingContext) { + + if (mongoMappingContext.isAutoIndexCreation()) { + this.indexCreator = new ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps); + this.eventPublisher = new MongoMappingEventPublisher(this.indexCreatorListener); + + mongoMappingContext.setApplicationEventPublisher(this.eventPublisher); + this.mappingContext.getPersistentEntities() + .forEach(entity -> onCheckForIndexes(entity, subscriptionExceptionHandler)); + } + } + } + + private ReactiveMongoTemplate(ReactiveMongoDatabaseFactory dbFactory, ReactiveMongoTemplate that) { + + this.mongoDatabaseFactory = dbFactory; + this.exceptionTranslator = that.exceptionTranslator; + this.mongoConverter = that.mongoConverter; + this.queryMapper = that.queryMapper; + this.updateMapper = that.updateMapper; + this.indexCreator = that.indexCreator; + this.indexCreatorListener = that.indexCreatorListener; + this.mappingContext = that.mappingContext; + this.operations = that.operations; + this.propertyOperations = that.propertyOperations; + this.sessionSynchronization = that.sessionSynchronization; + this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; + } + + private void onCheckForIndexes(MongoPersistentEntity entity, Consumer subscriptionExceptionHandler) { + + if (indexCreator != null) { + indexCreator.checkForIndexes(entity).subscribe(v -> {}, subscriptionExceptionHandler); + } + } + + private static void handleSubscriptionException(Throwable t) { + LOGGER.error("Unexpected exception during asynchronous execution", t); + } + + /** + * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the + * default of {@link ReactiveMongoTemplate#DEFAULT_WRITE_RESULT_CHECKING}. + * + * @param resultChecking + */ + public void setWriteResultChecking(@Nullable WriteResultChecking resultChecking) { + this.writeResultChecking = resultChecking == null ? DEFAULT_WRITE_RESULT_CHECKING : resultChecking; + } + + /** + * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} + * configured on the {@link MongoDatabaseFactory} will apply. 
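A configuration sketch tying the constructors and setters together; direct instantiation as shown here is typical for tests, while applications usually define the factory and template as beans. The connection string and database name are illustrative:

MongoClient client = MongoClients.create("mongodb://localhost:27017");
ReactiveMongoTemplate template = new ReactiveMongoTemplate(client, "test");

template.setWriteConcern(WriteConcern.MAJORITY);                 // template-wide default
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);  // surface write errors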
+ * + * @param writeConcern can be {@literal null}. + */ + public void setWriteConcern(@Nullable WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * Configures the {@link WriteConcernResolver} to be used with the template. + * + * @param writeConcernResolver can be {@literal null}. + */ + public void setWriteConcernResolver(@Nullable WriteConcernResolver writeConcernResolver) { + this.writeConcernResolver = writeConcernResolver; + } + + /** + * Used by {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations + * are performed. + * + * @param readPreference the {@link ReadPreference} to use. + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. + * @since 4.0 + * @see MongoMappingEvent + */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + + prepareIndexCreator(applicationContext); + + eventPublisher = applicationContext; + eventDelegate.setPublisher(eventPublisher); + + if (entityCallbacks == null) { + setEntityCallbacks(ReactiveEntityCallbacks.create(applicationContext)); + } + + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); + } + } + + /** + * Set the {@link ReactiveEntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the + * {@link ReactiveBeforeSaveCallback}.
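+ * <p> + * For example (a sketch; {@code MyBeforeSaveCallback} stands in for an application-provided callback implementation): + * <pre class="code"> + * template.setEntityCallbacks(ReactiveEntityCallbacks.create(new MyBeforeSaveCallback())); + * </pre>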
+ * Overrides potentially existing {@link ReactiveEntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 2.2 + */ + public void setEntityCallbacks(ReactiveEntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiFunction estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiFunction filter} deciding whether a count can be estimated. + * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiFunction<Document, CountOptions, Mono<Boolean>> estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionName, filter, options) -> { + + return estimationFilter.apply(filter, options).flatMap(canEstimate -> { + if (!canEstimate) { + return doExactCount(collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionName, estimatedDocumentCountOptions); + }); + }; + } else { + this.countExecution = this::doExactCount; + } + } + + /** + * Inspects the given {@link ApplicationContext} for {@link ReactiveMongoPersistentEntityIndexCreator} beans and + * checks whether any of them is registered for the current {@link MappingContext}. If no creator for the current + * {@link MappingContext} can be found we manually add the internally created one as {@link ApplicationListener} to + * make sure indexes get created appropriately for entity types persisted through this {@link ReactiveMongoTemplate} + * instance. + * + * @param context must not be {@literal null}. + */ + private void prepareIndexCreator(ApplicationContext context) { + + String[] indexCreators = context.getBeanNamesForType(ReactiveMongoPersistentEntityIndexCreator.class); + + for (String creator : indexCreators) { + ReactiveMongoPersistentEntityIndexCreator creatorBean = context.getBean(creator, + ReactiveMongoPersistentEntityIndexCreator.class); + if (creatorBean.isIndexCreatorFor(mappingContext)) { + return; + } + } + + if (context instanceof ConfigurableApplicationContext configurableApplicationContext) { + configurableApplicationContext.addApplicationListener(indexCreatorListener); + } + } + + /** + * Returns the default {@link MongoConverter}.
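+ * <p> + * The converter can be used to map raw documents manually (a sketch; {@code Person} is a placeholder domain type and {@code document} a previously loaded {@link Document}): + * <pre class="code"> + * Person person = template.getConverter().read(Person.class, document); + * </pre>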
+ * + * @return the underlying {@link MongoConverter}; never {@literal null}. + */ + @Override + public MongoConverter getConverter() { + return this.mongoConverter; + } + + @Override + public ReactiveIndexOperations indexOps(String collectionName) { + return new DefaultReactiveIndexOperations(this, collectionName, this.queryMapper); + } + + @Override + public ReactiveIndexOperations indexOps(Class entityClass) { + return new DefaultReactiveIndexOperations(this, getCollectionName(entityClass), this.queryMapper, entityClass); + } + + @Override + public String getCollectionName(Class entityClass) { + return operations.determineCollectionName(entityClass); + } + + @Override + public Mono executeCommand(String jsonCommand) { + + Assert.notNull(jsonCommand, "Command must not be empty"); + + return executeCommand(Document.parse(jsonCommand)); + } + + @Override + public Mono executeCommand(Document command) { + return executeCommand(command, null); + } + + @Override + public Mono executeCommand(Document command, @Nullable ReadPreference readPreference) { + + Assert.notNull(command, "Command must not be null"); + + return createFlux(db -> readPreference != null ? db.runCommand(command, readPreference, Document.class) + : db.runCommand(command, Document.class)).next(); + } + + @Override + public Flux execute(Class entityClass, ReactiveCollectionCallback action) { + return createFlux(getCollectionName(entityClass), action); + } + + @Override + public Flux execute(ReactiveDatabaseCallback action) { + return createFlux(action); + } + + @Override + public Flux execute(String collectionName, ReactiveCollectionCallback callback) { + + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); + + return createFlux(collectionName, callback); + } + + @Override + public ReactiveSessionScoped withSession(Publisher sessionProvider) { + + Mono cachedSession = Mono.from(sessionProvider).cache(); + + return new ReactiveSessionScoped() { + + @Override + public Flux execute(ReactiveSessionCallback action, Consumer doFinally) { + + return cachedSession.flatMapMany(session -> { + + return ReactiveMongoTemplate.this.withSession(action, session) // + .doFinally(signalType -> { + doFinally.accept(session); + }); + }); + } + }; + } + + /** + * Define whether {@link ReactiveMongoTemplate} should participate in transactions. The default is + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION}.
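+ * <p> + * For example, to participate in any session or transaction that is already present (a sketch): + * <pre class="code"> + * template.setSessionSynchronization(SessionSynchronization.ALWAYS); + * </pre>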
+ * NOTE: MongoDB transactions require at least MongoDB 4.0. + * + * @since 2.2 + */ + public void setSessionSynchronization(SessionSynchronization sessionSynchronization) { + this.sessionSynchronization = sessionSynchronization; + } + + private Flux withSession(ReactiveSessionCallback action, ClientSession session) { + + ReactiveSessionBoundMongoTemplate operations = new ReactiveSessionBoundMongoTemplate(session, + ReactiveMongoTemplate.this); + + return Flux.from(action.doInSession(operations)) // + .contextWrite(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session))); + } + + @Override + public ReactiveMongoOperations withSession(ClientSession session) { + return new ReactiveSessionBoundMongoTemplate(session, ReactiveMongoTemplate.this); + } + + @Override + public ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions) { + return withSession(mongoDatabaseFactory.getSession(sessionOptions)); + } + + /** + * Create a reusable Flux for a {@link ReactiveDatabaseCallback}. Whether to obtain a new {@link Flux} or to reuse an + * existing one is up to the caller. + * + * @param callback must not be {@literal null}. + * @return a {@link Flux} wrapping the {@link ReactiveDatabaseCallback}. + */ + public Flux createFlux(ReactiveDatabaseCallback callback) { + + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); + + return Mono.defer(this::doGetDatabase).flatMapMany(database -> callback.doInDB(prepareDatabase(database))) + .onErrorMap(translateException()); + } + + /** + * Create a reusable Mono for a {@link ReactiveDatabaseCallback}. Whether to obtain a new {@link Mono} or to reuse an + * existing one is up to the caller. + * + * @param callback must not be {@literal null}. + * @return a {@link Mono} wrapping the {@link ReactiveDatabaseCallback}. + */ + public Mono createMono(ReactiveDatabaseCallback callback) { + + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); + + return Mono.defer(this::doGetDatabase).flatMap(database -> Mono.from(callback.doInDB(prepareDatabase(database)))) + .onErrorMap(translateException()); + } + + /** + * Create a reusable {@link Flux} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @return a reusable {@link Flux} wrapping the {@link ReactiveCollectionCallback}. + */ + public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); + + Mono<MongoCollection<Document>> collectionPublisher = doGetDatabase() + .map(database -> getAndPrepareCollection(database, collectionName)); + + return collectionPublisher.flatMapMany(callback::doInCollection).onErrorMap(translateException()); + } + + /** + * Create a reusable {@link Mono} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @param <T> the element type emitted by the resulting {@link Mono}. + * @return a reusable {@link Mono} wrapping the {@link ReactiveCollectionCallback}.
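+ * <p> + * For example (a sketch; the collection name is a placeholder): + * <pre class="code"> + * Mono&lt;Long&gt; count = template.createMono("people", collection -&gt; collection.countDocuments()); + * </pre>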
+ */ + public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); + + Mono> collectionPublisher = doGetDatabase() + .map(database -> getAndPrepareCollection(database, collectionName)); + + return collectionPublisher.flatMap(collection -> Mono.from(callback.doInCollection(collection))) + .onErrorMap(translateException()); + } + + @Override + public Mono> createCollection(Class entityClass) { + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); + } + + @Override + public Mono> createCollection(Class entityClass, + @Nullable CollectionOptions collectionOptions) { + + Assert.notNull(entityClass, "EntityClass must not be null"); + + CollectionOptions options = collectionOptions != null ? collectionOptions : CollectionOptions.empty(); + options = Optionals + .firstNonEmpty(() -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> operations.forType(entityClass).getCollation()) // + .map(options::collation).orElse(options); + + return doCreateCollection(getCollectionName(entityClass), convertToCreateCollectionOptions(options, entityClass)); + } + + @Override + public Mono> createCollection(String collectionName) { + return doCreateCollection(collectionName, new CreateCollectionOptions()); + } + + @Override + public Mono> createCollection(String collectionName, + @Nullable CollectionOptions collectionOptions) { + return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); + } + + @Override + public Mono> createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); + } + + @Override + public Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private Mono> createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected Mono> doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + return Flux.from(db.createView(name, source, pipeline, viewOptions)) + .then(Mono.fromSupplier(() -> db.getCollection(name))); + }).next(); + } + + @Override + public Mono> getCollection(String collectionName) { + + Assert.notNull(collectionName, "Collection name must not be null"); + + return createMono(db -> Mono.just(db.getCollection(collectionName))); + } + + @Override + public Mono collectionExists(Class entityClass) { + return collectionExists(getCollectionName(entityClass)); + } + + @Override + public Mono collectionExists(String collectionName) { + return createMono(db -> Flux.from(MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(db).listCollectionNames()) // + .filter(s -> 
s.equals(collectionName)) // + .map(s -> true) // + .single(false)); + } + + @Override + public Mono dropCollection(Class entityClass) { + return dropCollection(getCollectionName(entityClass)); + } + + @Override + public Mono dropCollection(String collectionName) { + + return createMono(collectionName, MongoCollection::drop).doOnSuccess(success -> { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Dropped collection [" + collectionName + "]"); + } + }).then(); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass) { + return bulkOps(mode, entityClass, getCollectionName(entityClass)); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { + + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + DefaultReactiveBulkOperations operations = new DefaultReactiveBulkOperations(this, collectionName, + new ReactiveBulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, + updateMapper, eventPublisher, entityCallbacks)); + + operations.setDefaultWriteConcern(writeConcern); + + return operations; + } + + @Override + public Flux getCollectionNames() { + return createFlux(db -> MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(db).listCollectionNames()); + } + + public Mono getMongoDatabase() { + return mongoDatabaseFactory.getMongoDatabase(); + } + + protected Mono doGetDatabase() { + return ReactiveMongoDatabaseUtils.getDatabase(mongoDatabaseFactory, sessionSynchronization); + } + + @Override + public Mono findOne(Query query, Class entityClass) { + return findOne(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono findOne(Query query, Class entityClass, String collectionName) { + + if (ObjectUtils.isEmpty(query.getSortObject())) { + return doFindOne(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); + } + + query.limit(1); + return find(query, entityClass, collectionName).next(); + } + + @Override + public Mono exists(Query query, Class entityClass) { + return exists(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono exists(Query query, String collectionName) { + return exists(query, null, collectionName); + } + + @Override + public Mono exists(Query query, @Nullable Class entityClass, String collectionName) { + + if (query == null) { + throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); + } + + return createFlux(collectionName, collection -> { + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + QueryContext queryContext = queryOperations.createQueryContext(query); + Document filter = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); + + FindPublisher findPublisher = collectionPreparer.prepare(collection).find(filter, Document.class) + .projection(new Document(FieldName.ID.name(), 1)); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("exists: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + queryContext.applyCollation(entityClass, findPublisher::collation); + + return 
findPublisher.limit(1); + }).hasElements(); + } + + @Override + public Flux find(Query query, Class entityClass) { + return find(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Flux find(@Nullable Query query, Class entityClass, String collectionName) { + + if (query == null) { + return findAll(entityClass, collectionName); + } + + return doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); + } + + @Override + public Mono> scroll(Query query, Class entityType) { + + Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); + } + + @Override + public Mono> scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, collectionName); + } + + Mono> doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); + + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; + + if (query.hasKeyset()) { + + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); + + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), + keysetPaginationQuery.query(), keysetPaginationQuery.fields(), sourceClass, + new QueryFindPublisherPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback) + .collectList(); + + return result.map(it -> ScrollUtils.createWindow(query, it, sourceClass, operations)); + } + + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), sourceClass, + new QueryFindPublisherPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), callback) + .collectList(); + + return result.map( + it -> ScrollUtils.createWindow(it, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip()))); + } + + @Override + public Mono findById(Object id, Class entityClass) { + return findById(id, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono findById(Object id, Class entityClass, String collectionName) { + + String idKey = operations.getIdPropertyName(entityClass); + + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), null, entityClass, + (Collation) null); + } + + @Override + public Flux findDistinct(Query query, String field, Class entityClass, Class resultClass) { + return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); + } + + @Override + @SuppressWarnings("unchecked") + public Flux findDistinct(Query query, String field, String collectionName, Class entityClass, + Class resultClass) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not 
be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); + + MongoPersistentEntity entity = getPersistentEntity(entityClass); + DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); + + Document mappedQuery = distinctQueryContext.getMappedQuery(entity); + String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); + Class mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + + Flux result = execute(collectionName, collection -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); + } + + FindPublisherPreparer preparer = new QueryFindPublisherPreparer(query, entityClass); + + DistinctPublisher publisher = collectionPreparer.prepare(collection).distinct(mappedFieldName, mappedQuery, + mongoDriverCompatibleType); + distinctQueryContext.applyCollation(entityClass, publisher::collation); + return publisher; + }); + + if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { + + Class targetType = distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass); + MongoConverter converter = getConverter(); + + result = result.map(it -> converter.mapValueToTargetType(it, targetType, NO_OP_REF_RESOLVER)); + } + + return (Flux) result; + } + + @Override + public Flux aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return doAggregate(aggregation, inputCollectionName, aggregation.getInputType(), outputType); + } + + @Override + public Flux aggregate(TypedAggregation aggregation, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); + } + + @Override + public Flux aggregate(Aggregation aggregation, Class inputType, Class outputType) { + return doAggregate(aggregation, getCollectionName(inputType), inputType, outputType); + } + + @Override + public Flux aggregate(Aggregation aggregation, String collectionName, Class outputType) { + return doAggregate(aggregation, collectionName, null, outputType); + } + + protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); + + AggregationOptions options = aggregation.getOptions(); + Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming"); + + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Streaming aggregation: %s in collection %s", + serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName)); + } + + ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); + return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), + ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); + } + + private Flux 
aggregateAndMap(MongoCollection collection, List pipeline, + boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback readCallback, + @Nullable Class inputType) { + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(options); + AggregatePublisher cursor = collectionPreparer.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } + + if (options.getCursorBatchSize() != null) { + cursor = cursor.batchSize(options.getCursorBatchSize()); + } + + options.getComment().ifPresent(cursor::comment); + + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + cursor = hintFunction.apply(mongoDatabaseFactory, cursor::hintString, cursor::hint); + } + + Optionals.firstNonEmpty(options::getCollation, () -> operations.forType(inputType).getCollation()) // + .map(Collation::toMongoCollation) // + .ifPresent(cursor::collation); + + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + if (options.isSkipResults()) { + return (isOutOrMerge ? Flux.from(cursor.toCollection()) : Flux.from(cursor.first())).thenMany(Mono.empty()); + } + + return Flux.from(cursor).flatMapSequential(readCallback::doWith); + } + + @Override + public Flux> geoNear(NearQuery near, Class entityClass) { + return geoNear(near, entityClass, getCollectionName(entityClass)); + } + + @Override + public Flux> geoNear(NearQuery near, Class entityClass, String collectionName) { + return geoNear(near, entityClass, collectionName, entityClass); + } + + @SuppressWarnings("unchecked") + protected Flux> geoNear(NearQuery near, Class entityClass, String collectionName, + Class returnType) { + + if (near == null) { + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); + } + + if (entityClass == null) { + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); + } + + String collection = StringUtils.hasText(collectionName) ? 
collectionName : getCollectionName(entityClass); + String distanceField = operations.nearQueryDistanceFieldName(entityClass); + EntityProjection projection = operations.introspectProjection(returnType, entityClass); + + GeoNearResultDocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, + new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); + + Builder optionsBuilder = AggregationOptions.builder(); + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); + } + + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); + } + + optionsBuilder.collation(near.getCollation()); + + Aggregation $geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, distanceField)) + .withOptions(optionsBuilder.build()); + + return aggregate($geoNear, collection, Document.class) // + .flatMapSequential(callback::doWith); + } + + @Override + public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); + } + + @Override + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass) { + return findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass, String collectionName) { + + Assert.notNull(options, "Options must not be null"); + Assert.notNull(entityClass, "Entity class must not be null"); + + FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); + + Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { + throw new IllegalArgumentException( + "Both Query and FindAndModifyOptions define a collation; please provide the collation only via one of the two"); + }); + + if (!optionsToUse.getCollation().isPresent()) { + operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); + } + + return doFindAndModify(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); + } + + @Override + public Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + String collectionName, Class resultType) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null; use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "Entity class must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null; use Object.class instead"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 or none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection =
operations.introspectProjection(resultType, entityType); + + Document mappedQuery = queryContext.getMappedQuery(entity); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedSort = queryContext.getMappedSort(entity); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + + return Mono.defer(() -> { + + PersistableEntityModel pem = PersistableEntityModel.of(replacement, collectionName); + + maybeEmitEvent(new BeforeConvertEvent<>(pem.getSource(), pem.getCollection())); + + return maybeCallBeforeConvert(pem.getSource(), pem.getCollection()).map(pem::mutate).flatMap(it -> { + PersistableEntityModel mapped = it + .addTargetDocument(operations.forEntity(it.getSource()).toMappedDocument(mongoConverter).getDocument()); + maybeEmitEvent(new BeforeSaveEvent(mapped.getSource(), mapped.getTarget(), mapped.getCollection())); + + return maybeCallBeforeSave(it.getSource(), mapped.getTarget(), mapped.getCollection()) + .map(potentiallyModified -> PersistableEntityModel.of(potentiallyModified, mapped.getTarget(), + mapped.getCollection())); + }).flatMap(it -> { + + Mono afterFindAndReplace = doFindAndReplace(it.getCollection(), collectionPreparer, mappedQuery, + mappedFields, mappedSort, queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), + options, projection); + return afterFindAndReplace.flatMap(saved -> { + maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection())); + return maybeCallAfterSave(saved, it.getTarget(), it.getCollection()); + }); + }); + }); + } + + @Override + public Mono findAndRemove(Query query, Class entityClass) { + return findAndRemove(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono findAndRemove(Query query, Class entityClass, String collectionName) { + + operations.forType(entityClass).getCollation(query); + return doFindAndRemove(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), + operations.forType(entityClass).getCollation(query).orElse(null), entityClass); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + @Override + public Mono count(Query query, Class entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + + return count(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono count(Query query, String collectionName) { + return count(query, null, collectionName); + } + + @Override + public Mono count(Query query, @Nullable Class entityClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + return createMono(collectionName, collection -> { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document filter = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return doCount(collectionName, filter, options); + }); + } + + /** + * Run the actual count operation against the collection with given name. 
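+ * <p> + * Callers usually reach this method through the public count API (a sketch; {@code Person} and the criteria are placeholders, assuming static imports for {@code query} and {@code where}): + * <pre class="code"> + * Mono&lt;Long&gt; count = template.count(query(where("lastname").is("Matthews")), Person.class); + * </pre>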
+ * + * @param collectionName the name of the collection to count matching documents in. + * @param filter the filter to apply. Must not be {@literal null}. + * @param options options to apply. Like collation and the such. + * @return + */ + protected Mono doCount(String collectionName, Document filter, CountOptions options) { + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return countExecution.countDocuments(collectionName, filter, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#estimatedCount(java.lang.String) + */ + @Override + public Mono estimatedCount(String collectionName) { + return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions()); + } + + protected Mono doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) { + return createMono(collectionName, collection -> collection.estimatedDocumentCount(options)); + } + + @Override + public Mono exactCount(Query query, @Nullable Class entityClass, String collectionName) { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(collectionName, mappedQuery, options); + } + + protected Mono doExactCount(String collectionName, Document filter, CountOptions options) { + + return createMono(collectionName, + collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + } + + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + + if (!filter.isEmpty() || !isEmptyOptions(options)) { + return Mono.just(false); + } + return ReactiveMongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()).map(it -> !it); + } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono) + */ + @Override + public Mono insert(Mono objectToSave) { + + Assert.notNull(objectToSave, "Mono to insert must not be null"); + + return objectToSave.flatMap(this::insert); + } + + @Override + public Flux insertAll(Mono> batchToSave, Class entityClass) { + return insertAll(batchToSave, getCollectionName(entityClass)); + } + + @Override + public Flux insertAll(Mono> batchToSave, String collectionName) { + + Assert.notNull(batchToSave, "Batch to insert must not be null"); + + return Flux.from(batchToSave).flatMapSequential(collection -> insert(collection, collectionName)); + } + + @Override + public Mono insert(T objectToSave) { + + Assert.notNull(objectToSave, "Object to insert must not be null"); + + ensureNotCollectionLike(objectToSave); + return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); + } + + @Override + public Mono insert(T objectToSave, String collectionName) { + + Assert.notNull(objectToSave, "Object to insert must not be null"); + + ensureNotCollectionLike(objectToSave); + return doInsert(collectionName, objectToSave, this.mongoConverter); + } + + protected Mono doInsert(String collectionName, T objectToSave, MongoWriter writer) { + + return Mono.just(PersistableEntityModel.of(objectToSave, collectionName)) // + .doOnNext(it -> maybeEmitEvent(new 
BeforeConvertEvent<>(it.getSource(), it.getCollection()))) // + .flatMap(it -> maybeCallBeforeConvert(it.getSource(), it.getCollection()).map(it::mutate)) // + .map(it -> { + + AdaptibleEntity entity = operations.forEntity(it.getSource(), mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); + + PersistableEntityModel model = PersistableEntityModel.of(entity.initializeVersionProperty(), + entity.toMappedDocument(writer).getDocument(), it.getCollection()); + + maybeEmitEvent(new BeforeSaveEvent<>(model.getSource(), model.getTarget(), model.getCollection())); + return model; + })// + .flatMap(it -> { + return maybeCallBeforeSave(it.getSource(), it.getTarget(), it.getCollection()).map(it::mutate); + }).flatMap(it -> { + + return insertDocument(it.getCollection(), it.getTarget(), it.getSource().getClass()).flatMap(id -> { + + T saved = operations.forEntity(it.getSource(), mongoConverter.getConversionService()) + .populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), collectionName)); + return maybeCallAfterSave(saved, it.getTarget(), collectionName); + }); + }); + } + + @Override + public Flux insert(Collection batchToSave, Class entityClass) { + return doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); + } + + @Override + public Flux insert(Collection batchToSave, String collectionName) { + return doInsertBatch(collectionName, batchToSave, this.mongoConverter); + } + + @Override + public Flux insertAll(Collection objectsToSave) { + return doInsertAll(objectsToSave, this.mongoConverter); + } + + @Override + public Flux insertAll(Mono> objectsToSave) { + return Flux.from(objectsToSave).flatMapSequential(this::insertAll); + } + + protected Flux doInsertAll(Collection listToSave, MongoWriter writer) { + + Map> elementsByCollection = new HashMap<>(); + + listToSave.forEach(element -> { + + String collection = getCollectionName(element.getClass()); + List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); + + collectionElements.add(element); + }); + + return Flux.fromIterable(elementsByCollection.keySet()) + .concatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); + } + + protected Flux doInsertBatch(String collectionName, Collection batchToSave, + MongoWriter writer) { + + Assert.notNull(writer, "MongoWriter must not be null"); + + Mono, Document>>> prepareDocuments = Flux.fromIterable(batchToSave) + .flatMap(uninitialized -> { + + BeforeConvertEvent event = new BeforeConvertEvent<>(uninitialized, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + + return maybeCallBeforeConvert(toConvert, collectionName).flatMap(it -> { + + AdaptibleEntity entity = operations.forEntity(it, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); + + T initialized = entity.initializeVersionProperty(); + MappedDocument mapped = entity.toMappedDocument(writer); + + maybeEmitEvent(new BeforeSaveEvent<>(initialized, mapped.getDocument(), collectionName)); + return maybeCallBeforeSave(initialized, mapped.getDocument(), collectionName).map(toSave -> { + + MappedDocument mappedDocument = queryOperations.createInsertContext(mapped) + .prepareId(uninitialized.getClass()); + + return Tuples.of(entity, mappedDocument.getDocument()); + }); + }); + }).collectList(); + + Flux, Document>> insertDocuments = prepareDocuments.flatMapMany(tuples -> { + + List documents = 
tuples.stream().map(Tuple2::getT2).collect(Collectors.toList()); + + return insertDocumentList(collectionName, documents).thenMany(Flux.fromIterable(tuples)); + }); + + return insertDocuments.flatMapSequential(tuple -> { + + Document document = tuple.getT2(); + Object id = MappedDocument.of(document).getId(); + + T saved = tuple.getT1().populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, document, collectionName)); + return maybeCallAfterSave(saved, document, collectionName); + }); + } + + @Override + public Mono save(Mono objectToSave) { + + Assert.notNull(objectToSave, "Mono to save must not be null"); + + return objectToSave.flatMap(this::save); + } + + @Override + public Mono save(Mono objectToSave, String collectionName) { + + Assert.notNull(objectToSave, "Mono to save must not be null"); + + return objectToSave.flatMap(o -> save(o, collectionName)); + } + + @Override + public Mono save(T objectToSave) { + + Assert.notNull(objectToSave, "Object to save must not be null"); + return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); + } + + @Override + public Mono save(T objectToSave, String collectionName) { + + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); + + return source.isVersionedEntity() ? doSaveVersioned(source, collectionName) + : doSave(collectionName, objectToSave, this.mongoConverter); + } + + private Mono doSaveVersioned(AdaptibleEntity source, String collectionName) { + + if (source.isNew()) { + return doInsert(collectionName, source.getBean(), this.mongoConverter); + } + + return createMono(collectionName, collection -> { + + // Create query for entity with the id and old version + Query query = source.getQueryForVersion(); + + // Bump version number + T toSave = source.incrementVersion(); + + source.assertUpdateableIdIfNotSet(); + + BeforeConvertEvent event = new BeforeConvertEvent<>(toSave, collectionName); + T afterEvent = maybeEmitEvent(event).getSource(); + + return maybeCallBeforeConvert(afterEvent, collectionName).flatMap(toConvert -> { + + MappedDocument mapped = operations.forEntity(toConvert).toMappedDocument(mongoConverter); + Document document = mapped.getDocument(); + + maybeEmitEvent(new BeforeSaveEvent<>(toConvert, document, collectionName)); + return maybeCallBeforeSave(toConvert, document, collectionName).flatMap(it -> { + + return doUpdate(collectionName, query, mapped.updateWithoutId(), it.getClass(), false, false) + .flatMap(result -> { + maybeEmitEvent(new AfterSaveEvent(it, document, collectionName)); + return maybeCallAfterSave(it, document, collectionName); + }); + }); + }); + }); + } + + protected Mono doSave(String collectionName, T objectToSave, MongoWriter writer) { + + assertUpdateableIdIfNotSet(objectToSave); + + return createMono(collectionName, collection -> { + + T toSave = maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)).getSource(); + + return maybeCallBeforeConvert(toSave, collectionName).flatMap(toConvert -> { + + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + Document dbDoc = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent(toConvert, dbDoc, collectionName)); + + return maybeCallBeforeSave(toConvert, dbDoc, collectionName).flatMap(it -> { + + return saveDocument(collectionName, dbDoc, 
it.getClass()).flatMap(id -> { + + T saved = entity.populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); + return maybeCallAfterSave(saved, dbDoc, collectionName); + }); + }); + }); + }); + } + + protected Mono insertDocument(String collectionName, Document dbDoc, Class entityClass) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String + .format("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName)); + } + + MappedDocument document = MappedDocument.of(dbDoc); + queryOperations.createInsertContext(document).prepareId(entityClass); + + Flux execute = execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, + dbDoc, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + return collectionToUse.insertOne(document.getDocument()); + }); + + return Flux.from(execute).last().map(success -> document.getId()); + } + + protected Flux insertDocumentList(String collectionName, List dbDocList) { + + if (dbDocList.isEmpty()) { + return Flux.empty(); + } + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Inserting list of Documents containing %d items", dbDocList.size())); + } + + List documents = new ArrayList<>(dbDocList.size()); + + return execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null, + null, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + documents.addAll(toDocuments(dbDocList)); + + return collectionToUse.insertMany(documents); + + }).flatMapSequential(s -> { + + return Flux.fromStream(documents.stream() // + .map(MappedDocument::of) // + .filter(it -> it.isIdPresent(ObjectId.class)) // + .map(it -> it.getId(ObjectId.class))); + }); + } + + private MongoCollection prepareCollection(MongoCollection collection, + @Nullable WriteConcern writeConcernToUse) { + MongoCollection collectionToUse = collection; + + if (writeConcernToUse != null) { + collectionToUse = collectionToUse.withWriteConcern(writeConcernToUse); + } + return collectionToUse; + } + + protected Mono saveDocument(String collectionName, Document document, Class entityClass) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Saving Document containing fields: %s", document.keySet())); + } + + return createMono(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, + document, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MappedDocument mapped = MappedDocument.of(document); + + MongoCollection collectionToUse = writeConcernToUse == null // + ? 
collection // + : collection.withWriteConcern(writeConcernToUse); + + Publisher publisher; + if (!mapped.hasId()) { + publisher = collectionToUse + .insertOne(queryOperations.createInsertContext(mapped).prepareId(entityClass).getDocument()); + } else { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); + Document filter = updateContext.getReplacementQuery(); + Document replacement = updateContext.getMappedUpdate(entity); + + Mono deferredFilter; + + if (updateContext.requiresShardKey(filter, entity)) { + if (entity.getShardKey().isImmutable()) { + deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null)); + } else { + deferredFilter = Mono + .from( + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()) + .defaultIfEmpty(replacement).map(it -> updateContext.applyShardKey(entity, filter, it)); + } + } else { + deferredFilter = Mono.just(filter); + } + + publisher = deferredFilter.flatMapMany( + it -> collectionToUse.replaceOne(it, replacement, updateContext.getReplaceOptions(entityClass))); + } + + return Mono.from(publisher).map(o -> mapped.getId()); + }); + + } + + @Override + public Mono upsert(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); + } + + @Override + public Mono upsert(Query query, UpdateDefinition update, String collectionName) { + return doUpdate(collectionName, query, update, null, true, false); + } + + @Override + public Mono upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { + return doUpdate(collectionName, query, update, entityClass, true, false); + } + + /* + * (non-Javadoc)) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) + */ + @Override + public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); + } + + @Override + public Mono updateFirst(Query query, UpdateDefinition update, String collectionName) { + return doUpdate(collectionName, query, update, null, false, false); + } + + @Override + public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass, + String collectionName) { + return doUpdate(collectionName, query, update, entityClass, false, false); + } + + @Override + public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); + } + + @Override + public Mono updateMulti(Query query, UpdateDefinition update, String collectionName) { + return doUpdate(collectionName, query, update, null, false, true); + } + + @Override + public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, + String collectionName) { + return doUpdate(collectionName, query, update, entityClass, false, true); + } + + protected Mono doUpdate(String collectionName, Query query, @Nullable UpdateDefinition update, + @Nullable Class entityClass, boolean upsert, boolean multi) { + + MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); + + UpdateContext updateContext = multi ? 
queryOperations.updateContext(update, query, upsert) + : queryOperations.updateSingleContext(update, query, upsert); + updateContext.increaseVersionForUpdateIfNecessary(entity); + + Document queryObj = updateContext.getMappedQuery(entity); + UpdateOptions updateOptions = updateContext.getUpdateOptions(entityClass, query); + + Flux result; + + if (updateContext.isAggregationUpdate()) { + + List pipeline = updateContext.getUpdatePipeline(entityClass); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + update.getUpdateObject(), queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + result = execute(collectionName, collection -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); + } + + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + + return multi ? collection.updateMany(queryObj, pipeline, updateOptions) + : collection.updateOne(queryObj, pipeline, updateOptions); + }); + } else { + + Document updateObj = updateContext.getMappedUpdate(entity); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + updateObj, queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + result = execute(collectionName, collection -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } + + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + if (!UpdateMapper.isUpdateObject(updateObj)) { + + Document filter = new Document(queryObj); + Mono deferredFilter; + + if (updateContext.requiresShardKey(filter, entity)) { + if (entity.getShardKey().isImmutable()) { + deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null)); + } else { + deferredFilter = Mono.from( + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()) + .defaultIfEmpty(updateObj).map(it -> updateContext.applyShardKey(entity, filter, it)); + } + } else { + deferredFilter = Mono.just(filter); + } + + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + return deferredFilter.flatMap(it -> Mono.from(collectionToUse.replaceOne(it, updateObj, replaceOptions))); + } + + return multi ? 
collectionToUse.updateMany(queryObj, updateObj, updateOptions) + : collectionToUse.updateOne(queryObj, updateObj, updateOptions); + }); + } + + result = result.doOnNext(updateResult -> { + + if (entity != null && entity.hasVersionProperty() && !multi) { + if (updateResult.wasAcknowledged() && updateResult.getMatchedCount() == 0) { + + Document updateObj = updateContext.getMappedUpdate(entity); + if (containsVersionProperty(queryObj, entity)) + throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity %s to collection %s".formatted(entity.getName(), collectionName)); + } + } + }); + + return result.next(); + } + + private boolean containsVersionProperty(Document document, @Nullable MongoPersistentEntity persistentEntity) { + + if (persistentEntity == null || !persistentEntity.hasVersionProperty()) { + return false; + } + + return document.containsKey(persistentEntity.getRequiredVersionProperty().getFieldName()); + } + + @Override + public Mono remove(Mono objectToRemove) { + return objectToRemove.flatMap(this::remove); + } + + @Override + public Mono remove(Mono objectToRemove, String collectionName) { + return objectToRemove.flatMap(it -> remove(it, collectionName)); + } + + @Override + public Mono remove(Object object) { + + Assert.notNull(object, "Object must not be null"); + + return remove(operations.forEntity(object).getRemoveByQuery(), object.getClass()); + } + + @Override + public Mono remove(Object object, String collectionName) { + + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + return doRemove(collectionName, operations.forEntity(object).getRemoveByQuery(), object.getClass()); + } + + private void assertUpdateableIdIfNotSet(Object value) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(value.getClass()); + + if (entity != null && entity.hasIdProperty()) { + + MongoPersistentProperty property = entity.getRequiredIdProperty(); + Object propertyValue = entity.getPropertyAccessor(value).getProperty(property); + + if (propertyValue != null) { + return; + } + + if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { + throw new InvalidDataAccessApiUsageException( + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), + value.getClass().getName())); + } + } + } + + @Override + public Mono remove(Query query, String collectionName) { + return remove(query, null, collectionName); + } + + @Override + public Mono remove(Query query, Class entityClass) { + return remove(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Mono remove(Query query, @Nullable Class entityClass, String collectionName) { + return doRemove(collectionName, query, entityClass); + } + + protected Mono doRemove(String collectionName, Query query, @Nullable Class entityClass) { + + if (query == null) { + throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null"); + } + + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + MongoPersistentEntity entity = getPersistentEntity(entityClass); + + DeleteContext deleteContext = queryOperations.deleteQueryContext(query); + Document queryObject = deleteContext.getMappedQuery(entity); + DeleteOptions deleteOptions = deleteContext.getDeleteOptions(entityClass); + Document removeQuery = deleteContext.getMappedQuery(entity); + MongoAction mongoAction = new MongoAction(writeConcern, 
MongoActionOperation.REMOVE, collectionName, entityClass, + null, removeQuery); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + + return execute(collectionName, collection -> { + + maybeEmitEvent(new BeforeDeleteEvent<>(removeQuery, entityClass, collectionName)); + + MongoCollection collectionToUse = collectionPreparer + .prepare(prepareCollection(collection, writeConcernToUse)); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); + } + + if (query.getLimit() > 0 || query.getSkip() > 0) { + + FindPublisher cursor = new QueryFindPublisherPreparer(query, entityClass) + .prepare(collection.find(removeQuery)) // + .projection(MappedDocument.getIdOnlyProjection()); + + return Flux.from(cursor) // + .map(MappedDocument::of) // + .map(MappedDocument::getId) // + .collectList() // + .flatMapMany(val -> { + return collectionToUse.deleteMany(MappedDocument.getIdIn(val), deleteOptions); + }); + } else { + return collectionToUse.deleteMany(removeQuery, deleteOptions); + } + + }).doOnNext(it -> maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName))) // + .next(); + } + + @Override + public Flux findAll(Class entityClass) { + return findAll(entityClass, getCollectionName(entityClass)); + } + + @Override + public Flux findAll(Class entityClass, String collectionName) { + return executeFindMultiInternal(new FindCallback(CollectionPreparer.identity(), null), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); + } + + @Override + @SuppressWarnings("unchecked") + public Flux findAllAndRemove(Query query, String collectionName) { + return (Flux) findAllAndRemove(query, Object.class, collectionName); + } + + @Override + public Flux findAllAndRemove(Query query, Class entityClass) { + return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Flux findAllAndRemove(Query query, Class entityClass, String collectionName) { + return doFindAndDelete(collectionName, query, entityClass); + } + + @Override + public Mono replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected Mono replace(Query query, Class entityType, T replacement, ReplaceOptions options, + String collectionName) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + return createMono(collectionName, collection -> { + + Document mappedUpdate = updateContext.getMappedUpdate(entity); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedUpdate, updateContext.getQueryObject()); + + MongoCollection collectionToUse = createCollectionPreparer(query, action).prepare(collection); + + return collectionToUse.replaceOne(updateContext.getMappedQuery(entity), mappedUpdate, + updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + }); + } + + @Override + 
public Flux tail(Query query, Class entityClass) { + return tail(query, entityClass, getCollectionName(entityClass)); + } + + @Override + public Flux tail(@Nullable Query query, Class entityClass, String collectionName) { + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + if (query == null) { + + LOGGER.debug(String.format("Tail for class: %s in collection: %s", entityClass, collectionName)); + + return executeFindMultiInternal( + collection -> new FindCallback(collectionPreparer, null).doInCollection(collection) + .cursorType(CursorType.TailableAwait), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); + } + + return doFind(collectionName, collectionPreparer, query.getQueryObject(), query.getFieldsObject(), entityClass, + new TailingQueryFindPublisherPreparer(query, entityClass)); + } + + @Override + public Flux> changeStream(@Nullable String database, @Nullable String collectionName, + ChangeStreamOptions options, Class targetType) { + + List filter = prepareFilter(options); + FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT + : FullDocument.UPDATE_LOOKUP; + + return ReactiveMongoDatabaseUtils.getDatabase(database, mongoDatabaseFactory) // + .map(db -> { + ChangeStreamPublisher publisher; + if (StringUtils.hasText(collectionName)) { + publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class) + : db.getCollection(collectionName).watch(filter, Document.class); + + } else { + publisher = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class); + } + + if (options.isResumeAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter) + .orElse(publisher); + } else if (options.isStartAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::startAfter) + .orElse(publisher); + } + publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation) + .orElse(publisher); + publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher); + + if (options.getFullDocumentBeforeChangeLookup().isPresent()) { + publisher = publisher.fullDocumentBeforeChange(options.getFullDocumentBeforeChangeLookup().get()); + } + return publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument)); + }) // + .flatMapMany(publisher -> Flux.from(publisher) + .map(document -> new ChangeStreamEvent<>(document, targetType, getConverter()))); + } + + List prepareFilter(ChangeStreamOptions options) { + + Object filter = options.getFilter().orElse(Collections.emptyList()); + + if (filter instanceof Aggregation agg) { + AggregationOperationContext context = agg instanceof TypedAggregation typedAggregation + ? 
new TypeBasedAggregationOperationContext(typedAggregation.getInputType(), + getConverter().getMappingContext(), queryMapper) + : new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper); + + return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", + Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns"))); + } + + if (filter instanceof List) { + return (List) filter; + } + + throw new IllegalArgumentException( + "ChangeStreamRequestOptions.filter must be either an Aggregation or a plain list of Documents"); + } + + @Override + public Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, + String reduceFunction, MapReduceOptions options) { + + return mapReduce(filterQuery, domainType, getCollectionName(domainType), resultType, mapFunction, reduceFunction, + options); + } + + @Override + public Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, + String mapFunction, String reduceFunction, MapReduceOptions options) { + + Assert.notNull(filterQuery, "Filter query must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + Assert.hasText(inputCollectionName, "Input collection name must not be null or empty"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); + Assert.notNull(options, "MapReduceOptions must not be null"); + + assertLocalFunctionNames(mapFunction, reduceFunction); + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(filterQuery); + return createFlux(inputCollectionName, collection -> { + + Document mappedQuery = queryMapper.getMappedObject(filterQuery.getQueryObject(), + mappingContext.getPersistentEntity(domainType)); + + MapReducePublisher publisher = collectionPreparer.prepare(collection).mapReduce(mapFunction, + reduceFunction, Document.class); + + publisher.filter(mappedQuery); + + Document mappedSort = getMappedSortObject(filterQuery, domainType); + if (mappedSort != null && !mappedSort.isEmpty()) { + publisher.sort(mappedSort); + } + + Meta meta = filterQuery.getMeta(); + if (meta.hasMaxTime()) { + publisher.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (filterQuery.getLimit() > 0 || (options.getLimit() != null)) { + + if (filterQuery.getLimit() > 0 && (options.getLimit() != null)) { + throw new IllegalArgumentException( + "Both Query and MapReduceOptions define a limit; Please provide the limit only via one of the two."); + } + + if (filterQuery.getLimit() > 0) { + publisher.limit(filterQuery.getLimit()); + } + + if (options.getLimit() != null) { + publisher.limit(options.getLimit()); + } + } + + Optional collation = filterQuery.getCollation(); + + Optionals.ifAllPresent(filterQuery.getCollation(), options.getCollation(), (l, r) -> { + throw new IllegalArgumentException( + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); + }); + + if (options.getCollation().isPresent()) { + collation = options.getCollation(); + } + + if (!CollectionUtils.isEmpty(options.getScopeVariables())) { + publisher = publisher.scope(new Document(options.getScopeVariables())); + } + + if (options.getLimit() != null && options.getLimit() > 0) { + publisher = publisher.limit(options.getLimit()); + } + + if
(options.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) { + publisher = publisher.finalizeFunction(options.getFinalizeFunction().get()); + } + + if (options.getJavaScriptMode() != null) { + publisher = publisher.jsMode(options.getJavaScriptMode()); + } + + if (options.getOutputSharded().isPresent()) { + MongoCompatibilityAdapter.mapReducePublisherAdapter(publisher).sharded(options.getOutputSharded().get()); + } + + if (StringUtils.hasText(options.getOutputCollection()) && !options.usesInlineOutput()) { + publisher = publisher.collectionName(options.getOutputCollection()).action(options.getMapReduceAction()); + + if (options.getOutputDatabase().isPresent()) { + publisher = publisher.databaseName(options.getOutputDatabase().get()); + } + } + + publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); + + return Flux.from(publisher) + .flatMapSequential(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith); + }); + } + + private static void assertLocalFunctionNames(String... functions) { + + for (String function : functions) { + + if (ResourceUtils.isUrl(function)) { + + throw new IllegalArgumentException(String.format( + "Blocking access to resource %s is not allowed using reactive infrastructure; You may load the resource at startup and cache its value.", + function)); + } + } + } + + @Override + public ReactiveFind query(Class domainType) { + return new ReactiveFindOperationSupport(this).query(domainType); + } + + @Override + public ReactiveUpdate update(Class domainType) { + return new ReactiveUpdateOperationSupport(this).update(domainType); + } + + @Override + public ReactiveRemove remove(Class domainType) { + return new ReactiveRemoveOperationSupport(this).remove(domainType); + } + + @Override + public ReactiveInsert insert(Class domainType) { + return new ReactiveInsertOperationSupport(this).insert(domainType); + } + + @Override + public ReactiveAggregation aggregateAndReturn(Class domainType) { + return new ReactiveAggregationOperationSupport(this).aggregateAndReturn(domainType); + } + + @Override + public ReactiveMapReduce mapReduce(Class domainType) { + return new ReactiveMapReduceOperationSupport(this).mapReduce(domainType); + } + + @Override + public ReactiveChangeStream changeStream(Class domainType) { + return new ReactiveChangeStreamOperationSupport(this).changeStream(domainType); + } + + /** + * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} + * and {@link #remove(Query, Class, String)}, where the {@link Query} for {@link #remove(Query, Class, String)} is + * constructed from the find result.
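+ * A rough usage sketch via the public {@link #findAllAndRemove(Query, Class, String)} entry point (illustrative only;
+ * {@code Person} is a hypothetical mapped entity):
+ * <pre>
+ *     <code>
+ *         Flux&lt;Person&gt; removed = template.findAllAndRemove(
+ *                 query(where("lastname").is("skywalker")), Person.class, "people");
+ *     </code>
+ * </pre>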
+ * + * @param collectionName + * @param query + * @param entityClass + * @return + */ + protected Flux doFindAndDelete(String collectionName, Query query, Class entityClass) { + + Flux flux = find(query, entityClass, collectionName); + + return Flux.from(flux).collectList().filter(it -> !it.isEmpty()) + .flatMapMany(list -> Flux.from(remove(operations.getByIdInQuery(list), entityClass, collectionName)) + .flatMapSequential(deleteResult -> Flux.fromIterable(list))); + } + + /** + * Create the specified collection using the provided options + * + * @param collectionName + * @param collectionOptions + * @return the collection that was created + */ + protected Mono> doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { + + return createMono(db -> db.createCollection(collectionName, collectionOptions)).doOnSuccess(it -> { + + // TODO: Emit a collection created event + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Created collection [%s]", collectionName)); + } + + }).then(getCollection(collectionName)); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. + * @param collation can be {@literal null}. + * @return the {@link List} of converted objects. + */ + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, + Class entityClass, @Nullable Collation collation) { + + return doFindOne(collectionName, collectionPreparer, query, fields, entityClass, + findPublisher -> collation != null ? findPublisher.collation(collation.toMongoCollation()) : findPublisher); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. + * @param preparer the preparer modifying collection and publisher to fit the needs. + * @return the {@link List} of converted objects. + * @since 2.2 + */ + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, + Class entityClass, FindPublisherPreparer preparer) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + QueryContext queryContext = queryOperations + .createQueryContext(new BasicQuery(query, fields != null ? 
fields : new Document())); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); + } + + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to a List using the template's converter. The + * query document is specified as a standard Document and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record + * @param fields the document that specifies the fields to be returned + * @param entityClass the parameterized type of the returned list. + * @return the List of converted objects. + */ + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is + * converted from the MongoDB native representation using an instance of {@see MongoConverter}. The query document is + * specified as a standard Document and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. + * @param preparer allows for customization of the {@link com.mongodb.client.FindIterable} used when iterating over + * the result set, (apply limits, skips and so on). + * @return the {@link List} of converted objects. 
+ */ + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, FindPublisherPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, + new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); + } + + protected Flux doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName)); + } + + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + objectCallback, collectionName); + } + + CollectionPreparer> createCollectionPreparer(Query query) { + return ReactiveCollectionPreparerDelegate.of(query); + } + + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createCollectionPreparer(query); + if (action == null) { + return collectionPreparer; + } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while + * using sourceClass for mapping the query. 
+ * + * @since 2.0 + */ + Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class sourceClass, Class targetClass, FindPublisherPreparer preparer) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedQuery = queryContext.getMappedQuery(entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName)); + } + + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); + } + + protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions) { + return convertToCreateCollectionOptions(collectionOptions, Object.class); + } + + protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, + Class entityType) { + return operations.convertToCreateCollectionOptions(collectionOptions, entityType); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The first document that matches the query is returned and also removed from the collection in the database.
            + * The query document is specified as a standard Document and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param collation collation. + * @param entityClass the parameterized type of the returned list. + * @return the List of converted objects. + */ + protected Mono doFindAndRemove(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, Document sort, + @Nullable Collation collation, Class entityClass) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); + } + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); + } + + protected Mono doFindAndModify(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, Document sort, + Class entityClass, UpdateDefinition update, FindAndModifyOptions options) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false); + updateContext.increaseVersionForUpdateIfNecessary(entity); + + return Mono.defer(() -> { + + Document mappedQuery = updateContext.getMappedQuery(entity); + Object mappedUpdate = updateContext.isAggregationUpdate() ? updateContext.getUpdatePipeline(entityClass) + : updateContext.getMappedUpdate(entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format( + "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s " + "in collection: %s", + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), + collectionName)); + } + + return executeFindOneInternal( + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, + update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); + }); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param resultType the target domain type. 
+ * @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 2.1 + */ + protected Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, + Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, Class resultType) { + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionName, collectionPreparer, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param projection the projection descriptor. + * @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + private Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, + Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, EntityProjection projection) { + + return Mono.defer(() -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityType, + serializeToJsonSafely(replacement), collectionName)); + } + + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, + mappedSort, replacement, collation, options), + new ProjectingReadCallback<>(this.mongoConverter, projection, collectionName), collectionName); + + }); + } + + protected , T> E maybeEmitEvent(E event) { + eventDelegate.publishEvent(event); + return event; + } + + protected Mono maybeCallBeforeConvert(T object, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveBeforeConvertCallback.class, object, collection); + } + + return Mono.just(object); + } + + protected Mono maybeCallBeforeSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveBeforeSaveCallback.class, object, document, collection); + } + + return Mono.just(object); + } + + protected Mono maybeCallAfterSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveAfterSaveCallback.class, object, document, collection); + } + + return Mono.just(object); + } + + protected Mono 
maybeCallAfterConvert(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveAfterConvertCallback.class, object, document, collection); + } + + return Mono.just(object); + } + + private MongoCollection getAndPrepareCollection(MongoDatabase db, String collectionName) { + + try { + MongoCollection collection = db.getCollection(collectionName, Document.class); + return prepareCollection(collection); + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, exceptionTranslator); + } + } + + /** + * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or + * {@link Iterator}. + * + * @param source can be {@literal null}. + * @since 3.2. + */ + protected void ensureNotCollectionLike(@Nullable Object source) { + + if (EntityOperations.isCollectionLike(source) || source instanceof Publisher) { + throw new IllegalArgumentException("Cannot use a collection here."); + } + } + + /** + * Prepare the collection before any processing is done using it. This allows a convenient way to apply settings like + * withCodecRegistry() etc. Can be overridden in sub-classes. + * + * @param collection + */ + protected MongoCollection prepareCollection(MongoCollection collection) { + + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { + return collection.withReadPreference(readPreference); + } + + return collection; + } + + /** + * @param database + * @return + * @since 2.1 + */ + protected MongoDatabase prepareDatabase(MongoDatabase database) { + return database; + } + + /** + * Prepare the WriteConcern before any processing is done using it. This allows a convenient way to apply custom + * settings in sub-classes.
+ * The returned {@link WriteConcern} will be defaulted to {@link WriteConcern#ACKNOWLEDGED} when + * {@link WriteResultChecking} is set to {@link WriteResultChecking#EXCEPTION}. + * + * @param mongoAction any WriteConcern already configured or {@literal null}. + * @return The prepared WriteConcern or {@literal null}. + * @see #setWriteConcern(WriteConcern) + * @see #setWriteConcernResolver(WriteConcernResolver) + */ + @Nullable + protected WriteConcern prepareWriteConcern(MongoAction mongoAction) { + + WriteConcern wc = writeConcernResolver.resolve(mongoAction); + return potentiallyForceAcknowledgedWrite(wc); + } + + /** + * @return the {@link ReactiveMongoDatabaseFactory} in use. + * @since 3.1.4 + */ + public ReactiveMongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDatabaseFactory; + } + + @Nullable + private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) { + + if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { + if (wc == null || wc.getWObject() == null + || (wc.getWObject() instanceof Number concern && concern.intValue() < 1)) { + return WriteConcern.ACKNOWLEDGED; + } + } + return wc; + } + + /** + * Internal method using callbacks to do queries against the datastore that require reading a single object from a + * collection of objects. It will take the following steps + *
+ * <ol>
+ * <li>Execute the given {@link ReactiveCollectionCallback} for a {@link Document}.</li>
+ * <li>Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.</li>
+ * </ol>
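+ * A rough caller-side sketch, mirroring how {@code doFindOne} uses this method (illustrative only):
+ * <pre>
+ *     <code>
+ *         executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer),
+ *                 new ReadDocumentCallback&lt;&gt;(mongoConverter, entityClass, collectionName), collectionName);
+ *     </code>
+ * </pre>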
+ * + * @param collectionCallback the callback to retrieve the {@link Document} + * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param collectionName the collection to be queried + * @return + */ + private Mono executeFindOneInternal(ReactiveCollectionCallback collectionCallback, + DocumentCallback objectCallback, String collectionName) { + + return createMono(collectionName, + collection -> Mono.from(collectionCallback.doInCollection(collection)).flatMap(objectCallback::doWith)); + } + + /** + * Internal method using a callback to do queries against the datastore that require reading a collection of objects. + * It will take the following steps + *
+ * <ol>
+ * <li>Execute the given {@link ReactiveCollectionCallback} for a {@link FindPublisher}.</li>
+ * <li>Prepare that {@link FindPublisher} with the given {@link FindPublisherPreparer} (will be skipped if
+ * {@link FindPublisherPreparer} is {@literal null}).</li>
+ * <li>Apply the given {@link DocumentCallback} in {@link Flux#map(Function)} of {@link FindPublisher}.</li>
+ * </ol>
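+ * A rough caller-side sketch, mirroring how {@code doFind} uses this method (illustrative only):
+ * <pre>
+ *     <code>
+ *         executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer,
+ *                 new ReadDocumentCallback&lt;&gt;(mongoConverter, entityClass, collectionName), collectionName);
+ *     </code>
+ * </pre>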
                    + * + * @param collectionCallback the callback to retrieve the {@link FindPublisher} with, must not be {@literal null}. + * @param preparer the {@link FindPublisherPreparer} to potentially modify the {@link FindPublisher} before iterating + * over it, may be {@literal null}. + * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type, must + * not be {@literal null}. + * @param collectionName the collection to be queried, must not be {@literal null}. + * @return + */ + private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback collectionCallback, + @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback, String collectionName) { + + return createFlux(collectionName, collection -> { + return Flux.from(preparer.initiateFind(collection, collectionCallback::doInCollection)) + .flatMapSequential(objectCallback::doWith); + }); + } + + /** + * Exception translation {@link Function} intended for {@link Flux#onErrorMap(Function)} usage. + * + * @return the exception translation {@link Function} + */ + private Function translateException() { + + return throwable -> { + + if (throwable instanceof RuntimeException runtimeException) { + return potentiallyConvertRuntimeException(runtimeException, exceptionTranslator); + } + + return throwable; + }; + } + + /** + * Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original + * exception if the conversation failed. Thus allows safe re-throwing of the return value. + * + * @param ex the exception to translate + * @param exceptionTranslator the {@link PersistenceExceptionTranslator} to be used for translation + * @return + */ + private static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, + PersistenceExceptionTranslator exceptionTranslator) { + RuntimeException resolved = exceptionTranslator.translateExceptionIfPossible(ex); + return resolved == null ? ex : resolved; + } + + @Nullable + private MongoPersistentEntity getPersistentEntity(@Nullable Class type) { + return type == null ? null : mappingContext.getPersistentEntity(type); + } + + private MappingMongoConverter getDefaultMongoConverter() { + + MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); + + MongoMappingContext context = new MongoMappingContext(); + context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + context.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NO_OP_REF_RESOLVER, context); + converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(this.mongoDatabaseFactory); + converter.afterPropertiesSet(); + + return converter; + } + + @Nullable + private Document getMappedSortObject(Query query, Class type) { + + if (query == null) { + return null; + } + + return getMappedSortObject(query.getSortObject(), type); + } + + @Nullable + private Document getMappedSortObject(Document sortObject, Class type) { + + if (ObjectUtils.isEmpty(sortObject)) { + return null; + } + + return queryMapper.getMappedSort(sortObject, mappingContext.getPersistentEntity(type)); + } + + // Callback implementations + + /** + * Simple {@link ReactiveCollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link MongoCollection}. 
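+ * A rough sketch of the call this callback issues against the driver (illustrative only):
+ * <pre>
+ *     <code>
+ *         collection.find(query, Document.class).projection(fields).limit(1).first();
+ *     </code>
+ * </pre>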
+ * + * @author Oliver Gierke + * @author Thomas Risberg + * @author Christoph Strobl + */ + private static class FindOneCallback implements ReactiveCollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Optional fields; + private final FindPublisherPreparer preparer; + + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, FindPublisherPreparer preparer) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = Optional.ofNullable(fields); + this.preparer = preparer; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + FindPublisher publisher = preparer.initiateFind(collectionPreparer.prepare(collection), + col -> col.find(query, Document.class)); + + if (fields.isPresent()) { + publisher = publisher.projection(fields.get()); + } + + return publisher.limit(1).first(); + } + } + + /** + * Simple {@link ReactiveCollectionQueryCallback} that takes a query {@link Document} plus an optional fields + * specification {@link Document} and executes that against the {@link MongoCollection}. + * + * @author Mark Paluch + */ + private static class FindCallback implements ReactiveCollectionQueryCallback { + + private final CollectionPreparer> collectionPreparer; + + private final @Nullable Document query; + private final @Nullable Document fields; + + FindCallback(CollectionPreparer> collectionPreparer, @Nullable Document query) { + this(collectionPreparer, query, null); + } + + FindCallback(CollectionPreparer> collectionPreparer, Document query, Document fields) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + } + + @Override + public FindPublisher doInCollection(MongoCollection collection) { + + MongoCollection collectionToUse = collectionPreparer.prepare(collection); + FindPublisher findPublisher; + if (ObjectUtils.isEmpty(query)) { + findPublisher = collectionToUse.find(Document.class); + } else { + findPublisher = collectionToUse.find(query, Document.class); + } + + if (ObjectUtils.isEmpty(fields)) { + return findPublisher; + } else { + return findPublisher.projection(fields); + } + } + } + + /** + * Simple {@link ReactiveCollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link MongoCollection}. 
+ * + * @author Mark Paluch + */ + private static class FindAndRemoveCallback implements ReactiveCollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Optional collation; + + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.collation = Optional.ofNullable(collation); + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); + collation.map(Collation::toMongoCollation).ifPresent(findOneAndDeleteOptions::collation); + + return collectionPreparer.prepare(collection).findOneAndDelete(query, findOneAndDeleteOptions); + } + } + + /** + * @author Mark Paluch + */ + private static class FindAndModifyCallback implements ReactiveCollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Object update; + private final List arrayFilters; + private final FindAndModifyOptions options; + + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.arrayFilters = arrayFilters; + this.options = options; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + MongoCollection collectionToUse = collectionPreparer.prepare(collection); + if (options.isRemove()) { + FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); + + findOneAndDeleteOptions = options.getCollation().map(Collation::toMongoCollation) + .map(findOneAndDeleteOptions::collation).orElse(findOneAndDeleteOptions); + + return collectionToUse.findOneAndDelete(query, findOneAndDeleteOptions); + } + + FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort, + arrayFilters); + if (update instanceof Document document) { + return collection.findOneAndUpdate(query, document, findOneAndUpdateOptions); + } else if (update instanceof List) { + return collectionToUse.findOneAndUpdate(query, (List) update, findOneAndUpdateOptions); + } + + return Flux + .error(new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update))); + } + + private static FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, + Document fields, Document sort, List arrayFilters) { + + FindOneAndUpdateOptions result = new FindOneAndUpdateOptions(); + + result = result.projection(fields).sort(sort).upsert(options.isUpsert()); + + if (options.isReturnNew()) { + result = result.returnDocument(ReturnDocument.AFTER); + } else { + result = result.returnDocument(ReturnDocument.BEFORE); + } + + result = options.getCollation().map(Collation::toMongoCollation).map(result::collation).orElse(result); + + if (!CollectionUtils.isEmpty(arrayFilters)) { + 
result.arrayFilters(arrayFilters); + } + + return result; + } + } + + /** + * {@link ReactiveCollectionCallback} specific for find and remove operation. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + private static class FindAndReplaceCallback implements ReactiveCollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Document update; + private final @Nullable com.mongodb.client.model.Collation collation; + private final FindAndReplaceOptions options; + + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, com.mongodb.client.model.Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.collation = collation; + this.options = options; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + FindOneAndReplaceOptions findOneAndReplaceOptions = convertToFindOneAndReplaceOptions(options, fields, sort); + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, findOneAndReplaceOptions); + } + + private FindOneAndReplaceOptions convertToFindOneAndReplaceOptions(FindAndReplaceOptions options, Document fields, + Document sort) { + + FindOneAndReplaceOptions result = new FindOneAndReplaceOptions().collation(collation); + + result = result.projection(fields).sort(sort).upsert(options.isUpsert()); + + if (options.isReturnNew()) { + result = result.returnDocument(ReturnDocument.AFTER); + } else { + result = result.returnDocument(ReturnDocument.BEFORE); + } + + return result; + } + } + + private static FindOneAndDeleteOptions convertToFindOneAndDeleteOptions(Document fields, Document sort) { + + FindOneAndDeleteOptions result = new FindOneAndDeleteOptions(); + result = result.projection(fields).sort(sort); + + return result; + } + + /** + * Simple internal callback to allow operations on a {@link Document}. + * + * @author Mark Paluch + */ + + interface DocumentCallback { + + Mono doWith(Document object); + } + + /** + * Simple internal callback to allow operations on a {@link MongoDatabase}. + * + * @author Mark Paluch + */ + + interface MongoDatabaseCallback { + + T doInDatabase(MongoDatabase db); + } + + /** + * Simple internal callback to allow operations on a {@link MongoDatabase}. + * + * @author Mark Paluch + */ + interface ReactiveCollectionQueryCallback extends ReactiveCollectionCallback { + + @Override + FindPublisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; + } + + /** + * Simple {@link DocumentCallback} that will transform {@link Document} into the given target type using the given + * {@link EntityReader}. 
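+ * A rough usage sketch (illustrative only; {@code Person} is a hypothetical mapped entity and {@code rawDocument} a
+ * previously fetched {@link Document}):
+ * <pre>
+ *     <code>
+ *         Mono&lt;Person&gt; entity = new ReadDocumentCallback&lt;&gt;(mongoConverter, Person.class, "people")
+ *                 .doWith(rawDocument);
+ *     </code>
+ * </pre>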
+ * + * @author Mark Paluch + * @author Roman Puchkovskiy + */ + class ReadDocumentCallback implements DocumentCallback { + + private final EntityReader reader; + private final Class type; + private final String collectionName; + + ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { + + Assert.notNull(reader, "EntityReader must not be null"); + Assert.notNull(type, "Entity type must not be null"); + + this.reader = reader; + this.type = type; + this.collectionName = collectionName; + } + + @Override + public Mono doWith(Document document) { + + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + + T entity = reader.read(type, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); + } + + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return maybeCallAfterConvert(entity, document, collectionName); + } + } + + /** + * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the + * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. + * + * @param + * @param + * @author Christoph Strobl + * @author Roman Puchkovskiy + * @since 2.0 + */ + private class ProjectingReadCallback implements DocumentCallback { + + private final MongoConverter reader; + private final EntityProjection projection; + private final String collectionName; + + ProjectingReadCallback(MongoConverter reader, EntityProjection projection, String collectionName) { + this.reader = reader; + this.projection = projection; + this.collectionName = collectionName; + } + + @Override + @SuppressWarnings("unchecked") + public Mono doWith(Document document) { + + Class returnType = projection.getMappedType().getType(); + maybeEmitEvent(new AfterLoadEvent<>(document, returnType, collectionName)); + + Object entity = reader.project(projection, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); + } + + T castEntity = (T) entity; + maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName)); + return maybeCallAfterConvert(castEntity, document, collectionName); + } + } + + /** + * {@link DocumentCallback} that assumes a {@link GeoResult} to be created, delegates actual content unmarshalling to + * a delegate and creates a {@link GeoResult} from the result. + * + * @author Mark Paluch + * @author Chrstoph Strobl + * @author Roman Puchkovskiy + */ + static class GeoNearResultDocumentCallback implements DocumentCallback> { + + private final String distanceField; + private final DocumentCallback delegate; + private final Metric metric; + + /** + * Creates a new {@link GeoNearResultDocumentCallback} using the given {@link DocumentCallback} delegate for + * {@link GeoResult} content unmarshalling. + * + * @param distanceField the field to read the distance from. + * @param delegate must not be {@literal null}. + * @param metric the {@link Metric} to apply to the result distance. 
+ */ + GeoNearResultDocumentCallback(String distanceField, DocumentCallback delegate, Metric metric) { + + Assert.notNull(delegate, "DocumentCallback must not be null"); + + this.distanceField = distanceField; + this.delegate = delegate; + this.metric = metric; + } + + @Override + public Mono> doWith(Document object) { + + double distance = getDistance(object); + + return delegate.doWith(object).map(doWith -> new GeoResult<>(doWith, new Distance(distance, metric))); + } + + double getDistance(Document object) { + + if (object.containsKey(distanceField)) { + return NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class); + } + + return Double.NaN; + } + } + + /** + * @author Mark Paluch + */ + class QueryFindPublisherPreparer implements FindPublisherPreparer { + + private final Query query; + + private final Document sortObject; + + private final int limit; + + private final long skip; + private final @Nullable Class type; + + QueryFindPublisherPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + + QueryFindPublisherPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { + + this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; + this.type = type; + } + + @Override + public FindPublisher prepare(FindPublisher findPublisher) { + + FindPublisher findPublisherToUse = operations.forType(type) // + .getCollation(query) // + .map(Collation::toMongoCollation) // + .map(findPublisher::collation) // + .orElse(findPublisher); + + HintFunction hintFunction = HintFunction.from(query.getHint()); + Meta meta = query.getMeta(); + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues()) { + return findPublisherToUse; + } + + try { + + if (skip > 0) { + findPublisherToUse = findPublisherToUse.skip((int) skip); + } + + if (limit > 0) { + findPublisherToUse = findPublisherToUse.limit(limit); + } + + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? 
getMappedSortObject(sortObject, type) : sortObject; + findPublisherToUse = findPublisherToUse.sort(sort); + } + + if (hintFunction.isPresent()) { + findPublisherToUse = hintFunction.apply(mongoDatabaseFactory, findPublisherToUse::hintString, + findPublisherToUse::hint); + } + + if (meta.hasValues()) { + + if (meta.hasComment()) { + findPublisherToUse = findPublisherToUse.comment(meta.getRequiredComment()); + } + + if (meta.hasMaxTime()) { + findPublisherToUse = findPublisherToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.getCursorBatchSize() != null) { + findPublisherToUse = findPublisherToUse.batchSize(meta.getCursorBatchSize()); + } + + if (meta.getAllowDiskUse() != null) { + findPublisherToUse = findPublisherToUse.allowDiskUse(meta.getAllowDiskUse()); + } + } + + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, exceptionTranslator); + } + + return findPublisherToUse; + } + + } + + class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { + + TailingQueryFindPublisherPreparer(Query query, Class type) { + super(query, type); + } + + @Override + public FindPublisher prepare(FindPublisher findPublisher) { + return super.prepare(findPublisher.cursorType(CursorType.TailableAwait)); + } + } + + private static List toDocuments(Collection documents) { + return new ArrayList<>(documents); + } + + /** + * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the + * server through the driver API.
                    + * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired + * target method matching the actual arguments plus a {@link ClientSession}. + * + * @author Christoph Strobl + * @since 2.1 + */ + static class ReactiveSessionBoundMongoTemplate extends ReactiveMongoTemplate { + + private final ReactiveMongoTemplate delegate; + private final ClientSession session; + + /** + * @param session must not be {@literal null}. + * @param that must not be {@literal null}. + */ + ReactiveSessionBoundMongoTemplate(ClientSession session, ReactiveMongoTemplate that) { + + super(that.mongoDatabaseFactory.withSession(session), that); + + this.delegate = that; + this.session = session; + } + + @Override + public Mono> getCollection(String collectionName) { + + // native MongoDB objects that offer methods with ClientSession must not be proxied. + return delegate.getCollection(collectionName); + } + + @Override + public Mono getMongoDatabase() { + + // native MongoDB objects that offer methods with ClientSession must not be proxied. + return delegate.getMongoDatabase(); + } + + @Override + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + return Mono.just(false); + } + } + + class IndexCreatorEventListener implements ApplicationListener> { + + final Consumer subscriptionExceptionHandler; + + public IndexCreatorEventListener(Consumer subscriptionExceptionHandler) { + this.subscriptionExceptionHandler = subscriptionExceptionHandler; + } + + @Override + public void onApplicationEvent(MappingContextEvent event) { + + if (!event.wasEmittedBy(mappingContext)) { + return; + } + + PersistentEntity entity = event.getPersistentEntity(); + + // Double check type as Spring infrastructure does not consider nested generics + if (entity instanceof MongoPersistentEntity mongoPersistentProperties) { + + onCheckForIndexes(mongoPersistentProperties, subscriptionExceptionHandler); + } + } + } + + /** + * Value object chaining together a given source document with its mapped representation and the collection to persist + * it to. 
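+ * A rough usage sketch (illustrative only; {@code person} and {@code mappedDocument} are hypothetical values):
+ * <pre>
+ *     <code>
+ *         PersistableEntityModel&lt;Person&gt; model = PersistableEntityModel.of(person, "people");
+ *         model = model.addTargetDocument(mappedDocument);
+ *     </code>
+ * </pre>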
+ * + * @param + * @author Christoph Strobl + * @since 2.2 + */ + private static class PersistableEntityModel { + + private final T source; + private final @Nullable Document target; + private final String collection; + + private PersistableEntityModel(T source, @Nullable Document target, String collection) { + + this.source = source; + this.target = target; + this.collection = collection; + } + + static PersistableEntityModel of(T source, String collection) { + return new PersistableEntityModel<>(source, null, collection); + } + + static PersistableEntityModel of(T source, Document target, String collection) { + return new PersistableEntityModel<>(source, target, collection); + } + + PersistableEntityModel mutate(T source) { + return new PersistableEntityModel(source, target, collection); + } + + PersistableEntityModel addTargetDocument(Document target) { + return new PersistableEntityModel(source, target, collection); + } + + T getSource() { + return source; + } + + @Nullable + Document getTarget() { + return target; + } + + String getCollection() { + return collection; + } + } + + @FunctionalInterface + interface CountExecution { + Mono countDocuments(String collection, Document filter, CountOptions options); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java new file mode 100644 index 0000000000..378f13d917 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java @@ -0,0 +1,125 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Query; + +import com.mongodb.client.result.DeleteResult; + +/** + * {@link ReactiveRemoveOperation} allows creation and execution of reactive MongoDB remove / findAndRemove operations + * in a fluent API style.
+ * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching} into the
+ * MongoDB specific representation. The collection to operate on is by default derived from the initial
+ * {@literal domainType} and can be defined there via {@link org.springframework.data.mongodb.core.mapping.Document}.
+ * Using {@code inCollection} allows overriding the collection name for the execution.
+ *
+ * <pre>
+ *     <code>
                    + *         remove(Jedi.class)
                    + *             .inCollection("star-wars")
                    + *             .matching(query(where("firstname").is("luke")))
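+ *             // all() deletes every match and emits a single DeleteResult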
                    + *             .all();
+ *     </code>
+ * </pre>
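+ *
+ * To receive the removed documents instead of a result summary, terminate with {@code findAndRemove()}
+ * (an illustrative sketch reusing the {@code Jedi} placeholder type from above):
+ *
+ * <pre>
+ *     <code>
+ *         remove(Jedi.class)
+ *             .matching(query(where("firstname").is("luke")))
+ *             .findAndRemove();
+ *     </code>
+ * </pre>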
                    + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public interface ReactiveRemoveOperation { + + /** + * Start creating a remove operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ReactiveRemove}. Never {@literal null}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ReactiveRemove remove(Class domainType); + + /** + * Compose remove execution by calling one of the terminating methods. + */ + interface TerminatingRemove { + + /** + * Remove all documents matching. + * + * @return {@link Mono} emitting the {@link DeleteResult}. Never {@literal null}. + */ + Mono all(); + + /** + * Remove and return all matching documents.
+		 * <strong>NOTE:</strong> The entire list of documents will be fetched before sending the actual delete commands.
+		 * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
+		 * operation.
+		 *
+		 * @return empty {@link Flux} if no match found. Never {@literal null}.
+		 */
+		Flux<T> findAndRemove();
+	}
+
+	/**
+	 * Collection override (optional).
+	 */
+	interface RemoveWithCollection<T> extends RemoveWithQuery<T> {
+
+		/**
+		 * Explicitly set the name of the collection to perform the query on. <br />
                    + * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link RemoveWithCollection}. Never {@literal null}. + * @throws IllegalArgumentException if collection is {@literal null} or empty. + */ + RemoveWithQuery inCollection(String collection); + } + + /** + * Provide a {@link Query} override (optional). + */ + interface RemoveWithQuery extends TerminatingRemove { + + /** + * Define the query filtering elements. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. Never {@literal null}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingRemove matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingRemove matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + interface ReactiveRemove extends RemoveWithCollection {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java new file mode 100644 index 0000000000..97c9cb0d0e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java @@ -0,0 +1,104 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.result.DeleteResult; + +/** + * Implementation of {@link ReactiveRemoveOperation}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation { + + private static final Query ALL_QUERY = new Query(); + + private final ReactiveMongoTemplate template; + + ReactiveRemoveOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveRemove remove(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveRemoveSupport<>(template, domainType, ALL_QUERY, null); + } + + static class ReactiveRemoveSupport implements ReactiveRemove, RemoveWithCollection { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Query query; + private final String collection; + + ReactiveRemoveSupport(ReactiveMongoTemplate template, Class domainType, Query query, String collection) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.collection = collection; + } + + @Override + public RemoveWithQuery inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ReactiveRemoveSupport<>(template, domainType, query, collection); + } + + @Override + public TerminatingRemove matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ReactiveRemoveSupport<>(template, domainType, query, collection); + } + + @Override + public Mono all() { + + String collectionName = getCollectionName(); + + return template.doRemove(collectionName, query, domainType); + } + + @Override + public Flux findAndRemove() { + + String collectionName = getCollectionName(); + + return template.doFindAndDelete(collectionName, query, domainType); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java new file mode 100644 index 0000000000..aeb0e88e24 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.reactivestreams.Publisher; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Callback interface for executing operations within a {@link com.mongodb.reactivestreams.client.ClientSession} using + * reactive infrastructure. 
+ * + * @author Christoph Strobl + * @since 2.1 + * @see com.mongodb.reactivestreams.client.ClientSession + */ +@FunctionalInterface +public interface ReactiveSessionCallback { + + /** + * Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is + * inferred directly into the operation so that no further interaction is necessary. + *
                    + * Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and + * others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway + * objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or + * {@link com.mongodb.reactivestreams.client.MongoDatabase} via eg. + * {@link ReactiveMongoOperations#getCollection(String)} we leave responsibility for + * {@link com.mongodb.session.ClientSession} again up to the caller. + * + * @param operations will never be {@literal null}. + * @return never {@literal null}. + */ + Publisher doInSession(ReactiveMongoOperations operations); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java new file mode 100644 index 0000000000..f0ffc1ba60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.util.function.Consumer; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a + * {@link ReactiveSessionCallback}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface ReactiveSessionScoped { + + /** + * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. + *
+	 * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
+	 * closed} when done.
+	 *
+	 * @param action callback object that specifies the MongoDB action. Must not be {@literal null}.
+	 * @param <T> return type.
+	 * @return a result object returned by the action, can be {@link Flux#empty()}.
+	 */
+	default <T> Flux<T> execute(ReactiveSessionCallback<T> action) {
+		return execute(action, (session) -> {});
+	}
+
+	/**
+	 * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
+	 * <br />
                    + * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() + * closed} when done. + * + * @param action callback object that specifies the MongoDB action the callback action. Must not be {@literal null}. + * @param doFinally callback object that accepts {@link ClientSession} after invoking {@link ReactiveSessionCallback}. + * This {@link Consumer} is guaranteed to be notified in any case (successful and exceptional outcome of + * {@link ReactiveSessionCallback}). + * @param return type. + * @return a result object returned by the action, can be {@link Flux#empty()}. + */ + Flux execute(ReactiveSessionCallback action, Consumer doFinally); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java new file mode 100644 index 0000000000..51f75f3265 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java @@ -0,0 +1,279 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; + +import com.mongodb.client.result.UpdateResult; + +/** + * {@link ReactiveUpdateOperation} allows creation and execution of reactive MongoDB update / findAndModify / + * findAndReplace operations in a fluent API style.
+ * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching}, as well as
+ * the {@link org.springframework.data.mongodb.core.query.Update} via {@code apply}, into the MongoDB specific
+ * representations. The collection to operate on is by default derived from the initial {@literal domainType} and can
+ * be defined there via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection}
+ * allows overriding the collection name for the execution.
+ *
+ * <pre>
+ *     <code>
                    + *         update(Jedi.class)
                    + *             .inCollection("star-wars")
                    + *             .matching(query(where("firstname").is("luke")))
                    + *             .apply(new Update().set("lastname", "skywalker"))
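+ *             // upsert() updates matching documents, inserting a new one if none match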
                    + *             .upsert();
+ *     </code>
+ * </pre>
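+ *
+ * Replacement operations follow the same pattern; an illustrative sketch using {@code replaceWith} (the
+ * {@code Jedi} constructor is a placeholder, not part of this API):
+ *
+ * <pre>
+ *     <code>
+ *         update(Jedi.class)
+ *             .matching(query(where("firstname").is("luke")))
+ *             .replaceWith(new Jedi("luke", "skywalker"))
+ *             .findAndReplace();
+ *     </code>
+ * </pre>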
                    + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public interface ReactiveUpdateOperation { + + /** + * Start creating an update operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link ReactiveUpdate}. Never {@literal null}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ReactiveUpdate update(Class domainType); + + /** + * Compose findAndModify execution by calling one of the terminating methods. + */ + interface TerminatingFindAndModify { + + /** + * Find, modify and return the first matching document. + * + * @return {@link Mono#empty()} if nothing found. Never {@literal null}. + */ + Mono findAndModify(); + } + + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + Mono replaceFirst(); + } + + /** + * Compose findAndReplace execution by calling one of the terminating methods. + * + * @author Mark Paluch + * @since 2.1 + */ + interface TerminatingFindAndReplace extends TerminatingReplace { + + /** + * Find, replace and return the first matching document. + * + * @return {@link Mono#empty()} if nothing found. Never {@literal null}. + */ + Mono findAndReplace(); + } + + /** + * Compose update execution by calling one of the terminating methods. + */ + interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModifyWithOptions { + + /** + * Update all matching documents in the collection. + * + * @return never {@literal null}. + */ + Mono all(); + + /** + * Update the first document in the collection. + * + * @return never {@literal null}. + */ + Mono first(); + + /** + * Creates a new document if no documents match the filter query or updates the matching ones. + * + * @return never {@literal null}. + */ + Mono upsert(); + } + + /** + * Declare the {@link org.springframework.data.mongodb.core.query.Update} to apply. + */ + interface UpdateWithUpdate { + + /** + * Set the {@link UpdateDefinition} to be applied. + * + * @param update must not be {@literal null}. + * @return new instance of {@link TerminatingUpdate}. Never {@literal null}. + * @throws IllegalArgumentException if update is {@literal null}. + * @since 3.0 + * @see Update + * @see AggregationUpdate + */ + TerminatingUpdate apply(UpdateDefinition update); + + /** + * Specify {@code replacement} object. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + * @since 2.1 + */ + FindAndReplaceWithProjection replaceWith(T replacement); + } + + /** + * Explicitly define the name of the collection to perform operation in (optional). + */ + interface UpdateWithCollection { + + /** + * Explicitly set the name of the collection to perform the query on.
                    + * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link UpdateWithCollection}. Never {@literal null}. + * @throws IllegalArgumentException if collection is {@literal null} or empty. + */ + UpdateWithQuery inCollection(String collection); + } + + /** + * Define a filter query for the {@link org.springframework.data.mongodb.core.query.Update} (optional). + */ + interface UpdateWithQuery extends UpdateWithUpdate { + + /** + * Filter documents by given {@literal query}. + * + * @param query must not be {@literal null}. + * @return new instance of {@link UpdateWithQuery}. Never {@literal null}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + UpdateWithUpdate matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link UpdateWithUpdate}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default UpdateWithUpdate matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Define {@link FindAndModifyOptions} (optional). + */ + interface FindAndModifyWithOptions { + + /** + * Explicitly define {@link FindAndModifyOptions} for the + * {@link org.springframework.data.mongodb.core.query.Update}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link TerminatingFindAndModify}. Never {@literal null}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingFindAndModify withOptions(FindAndModifyOptions options); + } + + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + + /** + * Define {@link FindAndReplaceOptions}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { + + /** + * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options); + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithProjection extends FindAndReplaceWithOptions { + + /** + * Define the target type fields should be mapped to.
                    + * Skip this step if you are anyway only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link FindAndReplaceWithProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + FindAndReplaceWithOptions as(Class resultType); + + } + + interface ReactiveUpdate extends UpdateWithCollection, UpdateWithQuery, UpdateWithUpdate {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java new file mode 100644 index 0000000000..51cd99dc93 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java @@ -0,0 +1,208 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.result.UpdateResult; + +/** + * Implementation of {@link ReactiveUpdateOperation}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation { + + private static final Query ALL_QUERY = new Query(); + + private final ReactiveMongoTemplate template; + + ReactiveUpdateOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveUpdate update(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); + } + + static class ReactiveUpdateSupport + implements ReactiveUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate, + FindAndReplaceWithOptions, FindAndReplaceWithProjection, TerminatingFindAndReplace { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Query query; + private final org.springframework.data.mongodb.core.query.UpdateDefinition update; + @Nullable private final String collection; + @Nullable private final FindAndModifyOptions findAndModifyOptions; + @Nullable private final FindAndReplaceOptions findAndReplaceOptions; + @Nullable private final Object replacement; + private final Class targetType; + + ReactiveUpdateSupport(ReactiveMongoTemplate template, Class domainType, Query query, UpdateDefinition update, + String collection, FindAndModifyOptions findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions, + Object replacement, Class targetType) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.update = update; + this.collection = collection; + this.findAndModifyOptions = findAndModifyOptions; + this.findAndReplaceOptions = findAndReplaceOptions; + this.replacement = replacement; + this.targetType = targetType; + } + + @Override + public TerminatingUpdate apply(org.springframework.data.mongodb.core.query.UpdateDefinition update) { + + Assert.notNull(update, "Update must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public UpdateWithQuery inCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public Mono first() { + return doUpdate(false, false); + } + + @Override + public Mono upsert() { + return doUpdate(true, true); + } + + @Override + public Mono findAndModify() { + + String collectionName = getCollectionName(); + + return template.findAndModify(query, update, + findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), targetType, + collectionName); + } + + @Override + public Mono findAndReplace() { + return template.findAndReplace(query, replacement, + findAndReplaceOptions != null ? 
findAndReplaceOptions : FindAndReplaceOptions.none(), (Class) domainType, + getCollectionName(), targetType); + } + + @Override + public UpdateWithUpdate matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public Mono all() { + return doUpdate(true, false); + } + + @Override + public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { + + Assert.notNull(options, "Options must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection replaceWith(T replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { + + Assert.notNull(options, "Options must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, options, + replacement, targetType); + } + + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType); + } + + @Override + public FindAndReplaceWithOptions as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, resultType); + } + + @Override + public Mono replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + private Mono doUpdate(boolean multi, boolean upsert) { + return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java new file mode 100644 index 0000000000..00c5815fc9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadConcern}. + *
+ * <p>
                    + * Typically implemented by cursor or query preparer objects. + * + * @author Mark Paluch + * @since 4.1 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadConcernAware { + + /** + * @return {@literal true} if a {@link ReadConcern} is set. + */ + default boolean hasReadConcern() { + return getReadConcern() != null; + } + + /** + * @return the {@link ReadConcern} to apply or {@literal null} if none set. + */ + @Nullable + ReadConcern getReadConcern(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java new file mode 100644 index 0000000000..74bca9abea --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.ReadPreference; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadPreference}. + *
+ * <p>
                    + * Typically implemented by cursor or query preparer objects. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadPreferenceAware { + + /** + * @return {@literal true} if a {@link ReadPreference} is set. + */ + default boolean hasReadPreference() { + return getReadPreference() != null; + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none set. + */ + @Nullable + ReadPreference getReadPreference(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveDBCollectionInvoker.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveDBCollectionInvoker.java deleted file mode 100644 index 5bbba03de3..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveDBCollectionInvoker.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.util.ReflectionUtils.*; - -import java.lang.reflect.Method; - -import org.springframework.data.mongodb.util.MongoClientVersion; - -import com.mongodb.DBCollection; -import com.mongodb.DBObject; - -/** - * {@link ReflectiveDBCollectionInvoker} provides reflective access to {@link DBCollection} API that is not consistently - * available for various driver versions. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -class ReflectiveDBCollectionInvoker { - - private static final Method GEN_INDEX_NAME_METHOD; - private static final Method RESET_INDEX_CHACHE_METHOD; - - static { - - GEN_INDEX_NAME_METHOD = findMethod(DBCollection.class, "genIndexName", DBObject.class); - RESET_INDEX_CHACHE_METHOD = findMethod(DBCollection.class, "resetIndexCache"); - } - - private ReflectiveDBCollectionInvoker() {} - - /** - * Convenience method to generate an index name from the set of fields it is over. Will fall back to a MongoDB Java - * driver version 2 compatible way of generating index name in case of {@link MongoClientVersion#isMongo3Driver()}. - * - * @param keys the names of the fields used in this index - * @return - */ - public static String generateIndexName(DBObject keys) { - - if (isMongo3Driver()) { - return genIndexName(keys); - } - return (String) invokeMethod(GEN_INDEX_NAME_METHOD, null, keys); - } - - /** - * In case of MongoDB Java driver version 2 all indices that have not yet been applied to this collection will be - * cleared. Since this method is not available for the MongoDB Java driver version 3 the operation will throw - * {@link UnsupportedOperationException}. 
- * - * @param dbCollection - * @throws UnsupportedOperationException - */ - public static void resetIndexCache(DBCollection dbCollection) { - - if (isMongo3Driver()) { - throw new UnsupportedOperationException("The mongo java driver 3 does no loger support resetIndexCache!"); - } - - invokeMethod(RESET_INDEX_CHACHE_METHOD, dbCollection); - } - - /** - * Borrowed from MongoDB Java driver version 2. See http://github.com/mongodb/mongo-java-driver/blob/r2.13.0/src/main/com/mongodb/DBCollection.java#L754 - * - * @param keys - * @return - */ - private static String genIndexName(DBObject keys) { - - StringBuilder name = new StringBuilder(); - - for (String s : keys.keySet()) { - - if (name.length() > 0) { - name.append('_'); - } - - name.append(s).append('_'); - Object val = keys.get(s); - - if (val instanceof Number || val instanceof String) { - name.append(val.toString().replace(' ', '_')); - } - } - - return name.toString(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveDbInvoker.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveDbInvoker.java deleted file mode 100644 index 5abe74c0a0..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveDbInvoker.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.util.ReflectionUtils.*; - -import java.lang.reflect.Method; - -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.CannotGetMongoDbConnectionException; -import org.springframework.data.mongodb.util.MongoClientVersion; - -import com.mongodb.DB; -import com.mongodb.Mongo; - -/** - * {@link ReflectiveDbInvoker} provides reflective access to {@link DB} API that is not consistently available for - * various driver versions. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -final class ReflectiveDbInvoker { - - private static final Method DB_IS_AUTHENTICATED_METHOD; - private static final Method DB_AUTHENTICATE_METHOD; - private static final Method DB_REQUEST_DONE_METHOD; - private static final Method DB_ADD_USER_METHOD; - private static final Method DB_REQUEST_START_METHOD; - - static { - - DB_IS_AUTHENTICATED_METHOD = findMethod(DB.class, "isAuthenticated"); - DB_AUTHENTICATE_METHOD = findMethod(DB.class, "authenticate", String.class, char[].class); - DB_REQUEST_DONE_METHOD = findMethod(DB.class, "requestDone"); - DB_ADD_USER_METHOD = findMethod(DB.class, "addUser", String.class, char[].class); - DB_REQUEST_START_METHOD = findMethod(DB.class, "requestStart"); - } - - private ReflectiveDbInvoker() {} - - /** - * Authenticate against database using provided credentials in case of a MongoDB Java driver version 2. 
- * - * @param mongo must not be {@literal null}. - * @param db must not be {@literal null}. - * @param credentials must not be {@literal null}. - * @param authenticationDatabaseName - */ - public static void authenticate(Mongo mongo, DB db, UserCredentials credentials, String authenticationDatabaseName) { - - String databaseName = db.getName(); - - DB authDb = databaseName.equals(authenticationDatabaseName) ? db : mongo.getDB(authenticationDatabaseName); - - synchronized (authDb) { - - Boolean isAuthenticated = (Boolean) invokeMethod(DB_IS_AUTHENTICATED_METHOD, authDb); - if (!isAuthenticated) { - - String username = credentials.getUsername(); - String password = credentials.hasPassword() ? credentials.getPassword() : null; - - Boolean authenticated = (Boolean) invokeMethod(DB_AUTHENTICATE_METHOD, authDb, username, - password == null ? null : password.toCharArray()); - if (!authenticated) { - throw new CannotGetMongoDbConnectionException("Failed to authenticate to database [" + databaseName + "], " - + credentials.toString(), databaseName, credentials); - } - } - } - } - - /** - * Starts a new 'consistent request' in case of MongoDB Java driver version 2. Will do nothing for MongoDB Java driver - * version 3 since the operation is no longer available. - * - * @param db - */ - public static void requestStart(DB db) { - - if (isMongo3Driver()) { - return; - } - - invokeMethod(DB_REQUEST_START_METHOD, db); - } - - /** - * Ends the current 'consistent request'. a new 'consistent request' in case of MongoDB Java driver version 2. Will do - * nothing for MongoDB Java driver version 3 since the operation is no longer available - * - * @param db - */ - public static void requestDone(DB db) { - - if (MongoClientVersion.isMongo3Driver()) { - return; - } - - invokeMethod(DB_REQUEST_DONE_METHOD, db); - } - - /** - * @param db - * @param username - * @param password - * @throws UnsupportedOperationException - */ - public static void addUser(DB db, String username, char[] password) { - - if (isMongo3Driver()) { - throw new UnsupportedOperationException( - "Please use DB.command(…) to call either the createUser or updateUser command!"); - } - - invokeMethod(DB_ADD_USER_METHOD, db, username, password); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveMapReduceInvoker.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveMapReduceInvoker.java deleted file mode 100644 index 0146326134..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveMapReduceInvoker.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.util.ReflectionUtils.*; - -import java.lang.reflect.Method; - -import org.springframework.util.Assert; - -import com.mongodb.MapReduceCommand; - -/** - * {@link ReflectiveMapReduceInvoker} provides reflective access to {@link MapReduceCommand} API that is not - * consistently available for various driver versions. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -final class ReflectiveMapReduceInvoker { - - private static final Method ADD_EXTRA_OPTION_METHOD; - - static { - - ADD_EXTRA_OPTION_METHOD = findMethod(MapReduceCommand.class, "addExtraOption", String.class, Object.class); - } - - private ReflectiveMapReduceInvoker() {} - - /** - * Sets the extra option for MongoDB Java driver version 2. Will do nothing for MongoDB Java driver version 2. - * - * @param cmd can be {@literal null} for MongoDB Java driver version 2. - * @param key - * @param value - */ - public static void addExtraOption(MapReduceCommand cmd, String key, Object value) { - - if (isMongo3Driver()) { - return; - } - - Assert.notNull(cmd, "MapReduceCommand must not be null!"); - invokeMethod(ADD_EXTRA_OPTION_METHOD, cmd, key, value); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveMongoOptionsInvoker.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveMongoOptionsInvoker.java deleted file mode 100644 index 14f040e20a..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveMongoOptionsInvoker.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.util.ReflectionUtils.*; - -import java.lang.reflect.Method; - -import org.springframework.beans.DirectFieldAccessor; -import org.springframework.util.ReflectionUtils; - -import com.mongodb.MongoOptions; - -/** - * {@link ReflectiveMongoOptionsInvoker} provides reflective access to {@link MongoOptions} API that is not consistently - * available for various driver versions. 
- * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -@SuppressWarnings("deprecation") -class ReflectiveMongoOptionsInvoker { - - private static final Method GET_AUTO_CONNECT_RETRY_METHOD; - private static final Method SET_AUTO_CONNECT_RETRY_METHOD; - private static final Method GET_MAX_AUTO_CONNECT_RETRY_TIME_METHOD; - private static final Method SET_MAX_AUTO_CONNECT_RETRY_TIME_METHOD; - - static { - - SET_AUTO_CONNECT_RETRY_METHOD = ReflectionUtils - .findMethod(MongoOptions.class, "setAutoConnectRetry", boolean.class); - GET_AUTO_CONNECT_RETRY_METHOD = ReflectionUtils.findMethod(MongoOptions.class, "isAutoConnectRetry"); - SET_MAX_AUTO_CONNECT_RETRY_TIME_METHOD = ReflectionUtils.findMethod(MongoOptions.class, - "setMaxAutoConnectRetryTime", long.class); - GET_MAX_AUTO_CONNECT_RETRY_TIME_METHOD = ReflectionUtils.findMethod(MongoOptions.class, - "getMaxAutoConnectRetryTime"); - } - - private ReflectiveMongoOptionsInvoker() {} - - /** - * Sets the retry connection flag for MongoDB Java driver version 2. Will do nothing for MongoDB Java driver version 3 - * since the method has been removed. - * - * @param options can be {@literal null} for MongoDB Java driver version 3. - * @param autoConnectRetry - */ - public static void setAutoConnectRetry(MongoOptions options, boolean autoConnectRetry) { - - if (isMongo3Driver()) { - return; - } - - invokeMethod(SET_AUTO_CONNECT_RETRY_METHOD, options, autoConnectRetry); - } - - /** - * Sets the maxAutoConnectRetryTime attribute for MongoDB Java driver version 2. Will do nothing for MongoDB Java - * driver version 3 since the method has been removed. - * - * @param options can be {@literal null} for MongoDB Java driver version 3. - * @param maxAutoConnectRetryTime - */ - public static void setMaxAutoConnectRetryTime(MongoOptions options, long maxAutoConnectRetryTime) { - - if (isMongo3Driver()) { - return; - } - - invokeMethod(SET_MAX_AUTO_CONNECT_RETRY_TIME_METHOD, options, maxAutoConnectRetryTime); - } - - /** - * Sets the slaveOk attribute for MongoDB Java driver version 2. Will do nothing for MongoDB Java driver version 3 - * since the method has been removed. - * - * @param options can be {@literal null} for MongoDB Java driver version 3. - * @param slaveOk - */ - public static void setSlaveOk(MongoOptions options, boolean slaveOk) { - - if (isMongo3Driver()) { - return; - } - - new DirectFieldAccessor(options).setPropertyValue("slaveOk", slaveOk); - } - - /** - * Gets the slaveOk attribute for MongoDB Java driver version 2. Throws {@link UnsupportedOperationException} for - * MongoDB Java driver version 3 since the method has been removed. - * - * @param options can be {@literal null} for MongoDB Java driver version 3. - * @return - * @throws UnsupportedOperationException - */ - public static boolean getSlaveOk(MongoOptions options) { - - if (isMongo3Driver()) { - throw new UnsupportedOperationException( - "Cannot get value for autoConnectRetry which has been removed in MongoDB Java driver version 3."); - } - - return ((Boolean) new DirectFieldAccessor(options).getPropertyValue("slaveOk")).booleanValue(); - } - - /** - * Gets the autoConnectRetry attribute for MongoDB Java driver version 2. Throws {@link UnsupportedOperationException} - * for MongoDB Java driver version 3 since the method has been removed. - * - * @param options can be {@literal null} for MongoDB Java driver version 3. 
- * @return - * @throws UnsupportedOperationException - */ - public static boolean getAutoConnectRetry(MongoOptions options) { - - if (isMongo3Driver()) { - throw new UnsupportedOperationException( - "Cannot get value for autoConnectRetry which has been removed in MongoDB Java driver version 3."); - } - - return ((Boolean) invokeMethod(GET_AUTO_CONNECT_RETRY_METHOD, options)).booleanValue(); - } - - /** - * Gets the maxAutoConnectRetryTime attribute for MongoDB Java driver version 2. Throws - * {@link UnsupportedOperationException} for MongoDB Java driver version 3 since the method has been removed. - * - * @param options can be {@literal null} for MongoDB Java driver version 3. - * @return - * @throws UnsupportedOperationException - */ - public static long getMaxAutoConnectRetryTime(MongoOptions options) { - - if (isMongo3Driver()) { - throw new UnsupportedOperationException( - "Cannot get value for maxAutoConnectRetryTime which has been removed in MongoDB Java driver version 3."); - } - - return ((Long) invokeMethod(GET_MAX_AUTO_CONNECT_RETRY_TIME_METHOD, options)).longValue(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveWriteConcernInvoker.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveWriteConcernInvoker.java deleted file mode 100644 index 49abdf60f2..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveWriteConcernInvoker.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; - -import org.springframework.beans.DirectFieldAccessor; - -import com.mongodb.WriteConcern; - -/** - * {@link ReflectiveWriteConcernInvoker} provides reflective access to {@link WriteConcern} API that is not consistently - * available for various driver versions. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -class ReflectiveWriteConcernInvoker { - - private static final WriteConcern NONE_OR_UNACKNOWLEDGED; - - static { - - NONE_OR_UNACKNOWLEDGED = isMongo3Driver() ? WriteConcern.UNACKNOWLEDGED : (WriteConcern) new DirectFieldAccessor( - new WriteConcern()).getPropertyValue("NONE"); - } - - /** - * @return {@link WriteConcern#NONE} for MongoDB Java driver version 2, otherwise {@link WriteConcern#UNACKNOWLEDGED}. 
- */ - public static WriteConcern noneOrUnacknowledged() { - return NONE_OR_UNACKNOWLEDGED; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveWriteResultInvoker.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveWriteResultInvoker.java deleted file mode 100644 index 24c8eb7241..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReflectiveWriteResultInvoker.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.util.ReflectionUtils.*; - -import java.lang.reflect.Method; - -import com.mongodb.MongoException; -import com.mongodb.WriteResult; - -/** - * {@link ReflectiveWriteResultInvoker} provides reflective access to {@link WriteResult} API that is not consistently - * available for various driver versions. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -final class ReflectiveWriteResultInvoker { - - private static final Method GET_ERROR_METHOD; - private static final Method WAS_ACKNOWLEDGED_METHOD; - - private ReflectiveWriteResultInvoker() {} - - static { - - GET_ERROR_METHOD = findMethod(WriteResult.class, "getError"); - WAS_ACKNOWLEDGED_METHOD = findMethod(WriteResult.class, "wasAcknowledged"); - } - - /** - * @param writeResult can be {@literal null} for MongoDB Java driver version 3. - * @return null in case of MongoDB Java driver version 3 since errors are thrown as {@link MongoException}. - */ - public static String getError(WriteResult writeResult) { - - if (isMongo3Driver()) { - return null; - } - - return (String) invokeMethod(GET_ERROR_METHOD, writeResult); - } - - /** - * @param writeResult - * @return return in case of MongoDB Java driver version 2. - */ - public static boolean wasAcknowledged(WriteResult writeResult) { - return isMongo3Driver() ? ((Boolean) invokeMethod(WAS_ACKNOWLEDGED_METHOD, writeResult)).booleanValue() : true; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java new file mode 100644 index 0000000000..a2e2ba24c0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; + +/** + * Options for {@link org.springframework.data.mongodb.core.MongoOperations#replace(Query, Object) replace operations}. Defaults to + *
+ * <dl>
+ * <dt>upsert</dt>
+ * <dd>false</dd>
+ * </dl>
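+ *
+ * A minimal usage sketch (assuming a {@code template} of a type exposing {@code replace(…)}, for example
+ * {@code MongoOperations}):
+ *
+ * <pre>
+ *     <code>
+ *         template.replace(query, replacement, ReplaceOptions.replaceOptions().upsert());
+ *     </code>
+ * </pre>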
+ *
+ * @author Jakub Zurawa
+ * @author Christoph Strobl
+ * @since 4.2
+ */
+public class ReplaceOptions {
+
+	private boolean upsert;
+
+	private static final ReplaceOptions NONE = new ReplaceOptions() {
+
+		private static final String ERROR_MSG = "ReplaceOptions.none() cannot be changed; Please use ReplaceOptions.options() instead";
+
+		@Override
+		public ReplaceOptions upsert() {
+			throw new UnsupportedOperationException(ERROR_MSG);
+		}
+	};
+
+	/**
+	 * Static factory method to create a {@link ReplaceOptions} instance.
+	 * <dl>
+	 * <dt>upsert</dt>
+	 * <dd>false</dd>
+	 * </dl>
                    + * + * @return new instance of {@link ReplaceOptions}. + */ + public static ReplaceOptions replaceOptions() { + return new ReplaceOptions(); + } + + /** + * Static factory method returning an unmodifiable {@link ReplaceOptions} instance. + * + * @return unmodifiable {@link ReplaceOptions} instance. + */ + public static ReplaceOptions none() { + return NONE; + } + + /** + * Insert a new document if not exists. + * + * @return this. + */ + public ReplaceOptions upsert() { + + this.upsert = true; + return this; + } + + /** + * Get the bit indicating if to create a new document if not exists. + * + * @return {@literal true} if set. + */ + public boolean isUpsert() { + return upsert; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java index 673717ddb4..a01760368a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,22 +19,24 @@ import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; +import org.springframework.lang.Nullable; -import com.mongodb.DB; /** - * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions. - * + * Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions. + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public interface ScriptOperations { /** - * Store given {@link ExecutableMongoScript} generating a syntheitcal name so that it can be called by it + * Store given {@link ExecutableMongoScript} generating a synthetic name so that it can be called by it * subsequently. - * + * * @param script must not be {@literal null}. * @return {@link NamedMongoScript} with name under which the {@code JavaScript} function can be called. */ @@ -42,7 +44,7 @@ public interface ScriptOperations { /** * Registers the given {@link NamedMongoScript} in the database. - * + * * @param script the {@link NamedMongoScript} to be registered. * @return */ @@ -50,34 +52,36 @@ public interface ScriptOperations { /** * Executes the {@literal script} by either calling it via its {@literal name} or directly sending it. - * + * * @param script must not be {@literal null}. * @param args arguments to pass on for script execution. * @return the script evaluation result. * @throws org.springframework.dao.DataAccessException */ + @Nullable Object execute(ExecutableMongoScript script, Object... args); /** * Call the {@literal JavaScript} by its name. 
- * + * * @param scriptName must not be {@literal null} or empty. * @param args * @return */ + @Nullable Object call(String scriptName, Object... args); /** - * Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name. - * + * Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name. + * * @param scriptName must not be {@literal null} or empty. - * @return false if no {@link ServerSideJavaScript} with given name exists. + * @return false if no {@literal ServerSideJavaScript} with given name exists. */ boolean exists(String scriptName); /** * Returns names of {@literal JavaScript} functions that can be called. - * + * * @return empty {@link Set} if no scripts found. */ Set getScriptNames(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java new file mode 100644 index 0000000000..85ddce7656 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java @@ -0,0 +1,268 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.IntFunction; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.ScrollPosition.Direction; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.EntityOperations.Entity; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Utilities to run scroll queries and create {@link Window} results. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class ScrollUtils { + + /** + * Create the actual query to run keyset-based pagination. Affects projection, sorting, and the criteria. 
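+ *
+ * For illustration, a sketch of the derived query under assumed field names and values: given a sort of
+ * {@code {lastname: 1, _id: 1}} and a keyset of {@code {lastname: "White", _id: 42}}, the resulting query reads
+ * <pre class="code">
+ * { $or : [ { lastname : { $gt : "White" } }, { lastname : "White", _id : { $gt : 42 } } ] }
+ * </pre>
+ * so that scrolling resumes after the last returned document.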
+ * + * @param query + * @param idPropertyName + * @return + */ + static KeysetScrollQuery createKeysetPaginationQuery(Query query, String idPropertyName) { + + KeysetScrollPosition keyset = query.getKeyset(); + KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection()); + Document sortObject = director.getSortObject(idPropertyName, query); + Document fieldsObject = director.getFieldsObject(query.getFieldsObject(), sortObject); + Document queryObject = director.createQuery(keyset, query.getQueryObject(), sortObject); + + return new KeysetScrollQuery(queryObject, fieldsObject, sortObject); + } + + static Window createWindow(Query query, List result, Class sourceType, EntityOperations operations) { + + Document sortObject = query.getSortObject(); + KeysetScrollPosition keyset = query.getKeyset(); + Direction direction = keyset.getDirection(); + KeysetScrollDirector director = KeysetScrollDirector.of(direction); + + List resultsToUse = director.postPostProcessResults(result, query.getLimit()); + + IntFunction positionFunction = value -> { + + T last = resultsToUse.get(value); + Entity entity = operations.forEntity(last); + + Map keys = entity.extractKeys(sortObject, sourceType); + return ScrollPosition.of(keys, direction); + }; + + return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit())); + } + + static Window createWindow(List result, int limit, IntFunction positionFunction) { + return Window.from(getSubList(result, limit), positionFunction, hasMoreElements(result, limit)); + } + + static boolean hasMoreElements(List result, int limit) { + return !result.isEmpty() && result.size() > limit; + } + + static List getSubList(List result, int limit) { + + if (limit > 0 && result.size() > limit) { + return result.subList(0, limit); + } + + return result; + } + + record KeysetScrollQuery(Document query, Document fields, Document sort) { + + } + + /** + * Director for keyset scrolling. + */ + static class KeysetScrollDirector { + + private static final KeysetScrollDirector FORWARD = new KeysetScrollDirector(); + private static final KeysetScrollDirector REVERSE = new ReverseKeysetScrollDirector(); + + /** + * Factory method to obtain the right {@link KeysetScrollDirector}. + * + * @param direction + * @return + */ + public static KeysetScrollDirector of(ScrollPosition.Direction direction) { + return direction == Direction.FORWARD ? FORWARD : REVERSE; + } + + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = query.isSorted() ? 
query.getSortObject() : new Document();
+ sortObject.put(idPropertyName, 1);
+
+ return sortObject;
+ }
+
+ public Document getFieldsObject(Document fieldsObject, Document sortObject) {
+
+ // make sure we can extract the keyset
+ if (!fieldsObject.isEmpty()) {
+ for (String field : sortObject.keySet()) {
+ fieldsObject.put(field, 1);
+ }
+ }
+
+ return fieldsObject;
+ }
+
+ public Document createQuery(KeysetScrollPosition keyset, Document queryObject, Document sortObject) {
+
+ Map<String, Object> keysetValues = keyset.getKeys();
+ List<Document> or = (List<Document>) queryObject.getOrDefault("$or", new ArrayList<>());
+ List<String> sortKeys = new ArrayList<>(sortObject.keySet());
+
+ // first query doesn't come with a keyset
+ if (keysetValues.isEmpty()) {
+ return queryObject;
+ }
+
+ if (!keysetValues.keySet().containsAll(sortKeys)) {
+ throw new IllegalStateException("KeysetScrollPosition does not contain all keyset values");
+ }
+
+ // build matrix query for keyset paging that contains sort^2 queries
+ // reflecting a query that follows sort order semantics starting from the last returned keyset
+ for (int i = 0; i < sortKeys.size(); i++) {
+
+ Document sortConstraint = new Document();
+
+ for (int j = 0; j < sortKeys.size(); j++) {
+
+ String sortSegment = sortKeys.get(j);
+ int sortOrder = sortObject.getInteger(sortSegment);
+ Object o = keysetValues.get(sortSegment);
+
+ if (j >= i) { // tail segment
+ if (o instanceof BsonNull) {
+ throw new IllegalStateException(
+ "Cannot resume from KeysetScrollPosition. Offending key: '%s' is 'null'".formatted(sortSegment));
+ }
+ sortConstraint.put(sortSegment, new Document(getComparator(sortOrder), o));
+ break;
+ }
+
+ sortConstraint.put(sortSegment, o);
+ }
+
+ if (!sortConstraint.isEmpty()) {
+ or.add(sortConstraint);
+ }
+ }
+
+ if (!or.isEmpty()) {
+ queryObject.put("$or", or);
+ }
+
+ return queryObject;
+ }
+
+ protected String getComparator(int sortOrder) {
+ return sortOrder == 1 ? "$gt" : "$lt";
+ }
+
+ protected <T> List<T> postPostProcessResults(List<T> list, int limit) {
+ return getFirst(limit, list);
+ }
+
+ }
+
+ /**
+ * Reverse scrolling director variant applying {@link KeysetScrollPosition.Direction#BACKWARD}. In reverse scrolling,
+ * we need to flip directions for the actual query so that we do not get everything from the top position and apply
+ * the limit but rather flip the sort direction, apply the limit and then reverse the result to restore the actual
+ * sort order.
+ */
+ private static class ReverseKeysetScrollDirector extends KeysetScrollDirector {
+
+ @Override
+ public Document getSortObject(String idPropertyName, Query query) {
+
+ Document sortObject = super.getSortObject(idPropertyName, query);
+
+ // flip sort direction for backward scrolling
+
+ for (String field : sortObject.keySet()) {
+ sortObject.put(field, sortObject.getInteger(field) == 1 ? -1 : 1);
+ }
+
+ return sortObject;
+ }
+
+ @Override
+ public <T> List<T> postPostProcessResults(List<T> list, int limit) {
+
+ // flip direction of the result list as we need to accommodate the flipped sort order for proper offset
+ // querying.
+ Collections.reverse(list);
+
+ return getLast(limit, list);
+ }
+
+ }
+
+ /**
+ * Return the first {@code count} items from the list.
+ *
+ * @param count
+ * @param list
+ * @return
+ * @param <T>
+ */
+ static <T> List<T> getFirst(int count, List<T> list) {
+
+ if (count > 0 && list.size() > count) {
+ return list.subList(0, count);
+ }
+
+ return list;
+ }
+
+ /**
+ * Return the last {@code count} items from the list.
+ * + * @param count + * @param list + * @return + * @param + */ + static List getLast(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(list.size() - count, list.size()); + } + + return list; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java new file mode 100644 index 0000000000..55a87ecadf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java @@ -0,0 +1,46 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; + +/** + * Callback interface for executing operations within a {@link com.mongodb.session.ClientSession}. + * + * @author Christoph Strobl + * @since 2.1 + * @see com.mongodb.session.ClientSession + */ +@FunctionalInterface +public interface SessionCallback { + + /** + * Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred + * directly into the operation so that no further interaction is necessary. + *
+ * Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others
+ * are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like
+ * {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via e.g.
+ * {@link MongoOperations#getCollection(String)} we leave the responsibility for the
+ * {@link com.mongodb.session.ClientSession} up to the caller again.
+ *
+ * @param operations will never be {@literal null}.
+ * @return can be {@literal null}.
+ */
+ @Nullable
+ T doInSession(MongoOperations operations);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java
new file mode 100644
index 0000000000..33ad9d7318
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import java.util.function.Consumer;
+
+import org.springframework.lang.Nullable;
+
+import com.mongodb.client.ClientSession;
+
+/**
+ * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}.
+ *
                    + * The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface SessionScoped { + + /** + * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. + *
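+ * A minimal usage sketch; the {@code template} variable and {@code Person} type are assumed for illustration:
+ * <pre class="code">
+ * Long count = template.withSession(ClientSessionOptions.builder().build())
+ *     .execute(ops -> ops.count(new Query(), Person.class));
+ * </pre>
+ *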
+ * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
+ * closed} when done.
+ *
+ * @param action callback object that specifies the MongoDB action to execute. Must not be {@literal null}.
+ * @param <T> return type.
+ * @return a result object returned by the action. Can be {@literal null}.
+ */
+ @Nullable
+ default <T> T execute(SessionCallback<T> action) {
+ return execute(action, session -> {});
+ }
+
+ /**
+ * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}.
+ *
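+ * A sketch that closes the session once the callback completes; the {@code sessionScoped} gateway and {@code person}
+ * value are assumed for illustration:
+ * <pre class="code">
+ * sessionScoped.execute(ops -> ops.insert(person), ClientSession::close);
+ * </pre>
+ *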
+ * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
+ * closed} when done.
+ *
+ * @param action callback object that specifies the MongoDB action to execute. Must not be {@literal null}.
+ * @param doFinally callback object that accepts {@link ClientSession} after invoking {@link SessionCallback}. This
+ * {@link Consumer} is guaranteed to be notified in any case (successful and exceptional outcome of
+ * {@link SessionCallback}).
+ * @param <T> return type.
+ * @return a result object returned by the action. Can be {@literal null}.
+ */
+ @Nullable
+ <T> T execute(SessionCallback<T> action, Consumer<ClientSession> doFinally);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java
new file mode 100644
index 0000000000..2b51b5e077
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import org.springframework.beans.factory.DisposableBean;
+
+import com.mongodb.ClientSessionOptions;
+import com.mongodb.ConnectionString;
+import com.mongodb.client.ClientSession;
+import com.mongodb.client.MongoClient;
+import com.mongodb.client.MongoClients;
+import com.mongodb.client.MongoDatabase;
+
+/**
+ * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance.
+ *
+ * @author Christoph Strobl
+ * @since 3.0
+ */
+public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySupport<MongoClient>
+ implements DisposableBean {
+
+ /**
+ * Creates a new {@link SimpleMongoClientDatabaseFactory} instance for the given {@code connectionString}.
+ *
+ * @param connectionString connection coordinates for a database connection. Must contain a database name and must not
+ * be {@literal null} or empty.
+ * @see MongoDB Connection String reference
+ */
+ public SimpleMongoClientDatabaseFactory(String connectionString) {
+ this(new ConnectionString(connectionString));
+ }
+
+ /**
+ * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link ConnectionString}.
+ *
+ * @param connectionString connection coordinates for a database connection. Must also contain a database name and
+ * must not be {@literal null}.
+ */
+ public SimpleMongoClientDatabaseFactory(ConnectionString connectionString) {
+ this(MongoClients.create(connectionString), connectionString.getDatabase(), true);
+ }
+
+ /**
+ * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
+ *
+ * @param mongoClient must not be {@literal null}.
+ * @param databaseName must not be {@literal null} or empty.
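+ *
+ * For illustration, a typical wiring sketch (host and database name assumed):
+ * <pre class="code">
+ * MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(
+ *     MongoClients.create("mongodb://localhost:27017"), "database");
+ * </pre>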
+ */ + public SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName) { + this(mongoClient, databaseName, false); + } + + /** + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + * @param mongoInstanceCreated + */ + SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { + super(mongoClient, databaseName, mongoInstanceCreated, MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR); + } + + @Override + public ClientSession getSession(ClientSessionOptions options) { + return getMongoClient().startSession(options); + } + + @Override + protected void closeClient() { + getMongoClient().close(); + } + + @Override + protected MongoDatabase doGetMongoDatabase(String dbName) { + return getMongoClient().getDatabase(dbName); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java deleted file mode 100644 index 572dea0385..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import java.net.UnknownHostException; - -import org.springframework.beans.factory.DisposableBean; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoException; -import com.mongodb.MongoURI; -import com.mongodb.WriteConcern; - -/** - * Factory to create {@link DB} instances from a {@link Mongo} instance. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory { - - private final Mongo mongo; - private final String databaseName; - private final boolean mongoInstanceCreated; - private final UserCredentials credentials; - private final PersistenceExceptionTranslator exceptionTranslator; - private final String authenticationDatabaseName; - - private WriteConcern writeConcern; - - /** - * Create an instance of {@link SimpleMongoDbFactory} given the {@link Mongo} instance and database name. 
- * - * @param mongo Mongo instance, must not be {@literal null}. - * @param databaseName database name, not be {@literal null} or empty. - * @deprecated since 1.7. Please use {@link #SimpleMongoDbFactory(MongoClient, String)}. - */ - @Deprecated - public SimpleMongoDbFactory(Mongo mongo, String databaseName) { - this(mongo, databaseName, null); - } - - /** - * Create an instance of SimpleMongoDbFactory given the Mongo instance, database name, and username/password - * - * @param mongo Mongo instance, must not be {@literal null}. - * @param databaseName Database name, must not be {@literal null} or empty. - * @param credentials username and password. - * @deprecated since 1.7. The credentials used should be provided by {@link MongoClient#getCredentialsList()}. - */ - @Deprecated - public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials) { - this(mongo, databaseName, credentials, false, null); - } - - /** - * Create an instance of SimpleMongoDbFactory given the Mongo instance, database name, and username/password - * - * @param mongo Mongo instance, must not be {@literal null}. - * @param databaseName Database name, must not be {@literal null} or empty. - * @param credentials username and password. - * @param authenticationDatabaseName the database name to use for authentication - * @deprecated since 1.7. The credentials used should be provided by {@link MongoClient#getCredentialsList()}. - */ - @Deprecated - public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials, - String authenticationDatabaseName) { - this(mongo, databaseName, credentials, false, authenticationDatabaseName); - } - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoURI}. - * - * @param uri must not be {@literal null}. - * @throws MongoException - * @throws UnknownHostException - * @see MongoURI - * @deprecated since 1.7. Please use {@link #SimpleMongoDbFactory(MongoClientURI)} instead. - */ - @Deprecated - public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException { - this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true, - uri.getDatabase()); - } - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClientURI}. - * - * @param uri must not be {@literal null}. - * @throws UnknownHostException - * @since 1.7 - */ - public SimpleMongoDbFactory(MongoClientURI uri) throws UnknownHostException { - this(new MongoClient(uri), uri.getDatabase(), true); - } - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClient}. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null}. - * @since 1.7 - */ - public SimpleMongoDbFactory(MongoClient mongoClient, String databaseName) { - this(mongoClient, databaseName, false); - } - - private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials, - boolean mongoInstanceCreated, String authenticationDatabaseName) { - - if (mongo instanceof MongoClient && (credentials != null && !UserCredentials.NO_CREDENTIALS.equals(credentials))) { - throw new InvalidDataAccessApiUsageException( - "Usage of 'UserCredentials' with 'MongoClient' is no longer supported. 
Please use 'MongoCredential' for 'MongoClient' or just 'Mongo'."); - } - - Assert.notNull(mongo, "Mongo must not be null"); - Assert.hasText(databaseName, "Database name must not be empty"); - Assert.isTrue(databaseName.matches("[\\w-]+"), - "Database name must only contain letters, numbers, underscores and dashes!"); - - this.mongo = mongo; - this.databaseName = databaseName; - this.mongoInstanceCreated = mongoInstanceCreated; - this.credentials = credentials == null ? UserCredentials.NO_CREDENTIALS : credentials; - this.exceptionTranslator = new MongoExceptionTranslator(); - this.authenticationDatabaseName = StringUtils.hasText(authenticationDatabaseName) ? authenticationDatabaseName - : databaseName; - - Assert.isTrue(this.authenticationDatabaseName.matches("[\\w-]+"), - "Authentication database name must only contain letters, numbers, underscores and dashes!"); - } - - /** - * @param client - * @param databaseName - * @param mongoInstanceCreated - * @since 1.7 - */ - private SimpleMongoDbFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) { - - Assert.notNull(client, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); - - this.mongo = client; - this.databaseName = databaseName; - this.mongoInstanceCreated = mongoInstanceCreated; - this.exceptionTranslator = new MongoExceptionTranslator(); - this.credentials = UserCredentials.NO_CREDENTIALS; - this.authenticationDatabaseName = databaseName; - } - - /** - * Configures the {@link WriteConcern} to be used on the {@link DB} instance being created. - * - * @param writeConcern the writeConcern to set - */ - public void setWriteConcern(WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb() - */ - public DB getDb() throws DataAccessException { - return getDb(databaseName); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String) - */ - @SuppressWarnings("deprecation") - public DB getDb(String dbName) throws DataAccessException { - - Assert.hasText(dbName, "Database name must not be empty."); - - DB db = MongoDbUtils.getDB(mongo, dbName, credentials, authenticationDatabaseName); - - if (writeConcern != null) { - db.setWriteConcern(writeConcern); - } - - return db; - } - - /** - * Clean up the Mongo instance if it was created by the factory itself. - * - * @see DisposableBean#destroy() - */ - public void destroy() throws Exception { - if (mongoInstanceCreated) { - mongo.close(); - } - } - - private static String parseChars(char[] chars) { - return chars == null ? null : String.valueOf(chars); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator() - */ - @Override - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java new file mode 100644 index 0000000000..84edf13d57 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java @@ -0,0 +1,272 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.ConnectionString; +import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.0 + */ +public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory { + + private final MongoClient mongo; + private final String databaseName; + private final boolean mongoInstanceCreated; + + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; + private @Nullable WriteConcern writeConcern; + + /** + * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link ConnectionString}. + * + * @param connectionString must not be {@literal null}. + */ + public SimpleReactiveMongoDatabaseFactory(ConnectionString connectionString) { + this(MongoClients.create(connectionString), connectionString.getDatabase(), true); + } + + /** + * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link MongoClient}. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null}. + * @since 1.7 + */ + public SimpleReactiveMongoDatabaseFactory(MongoClient mongoClient, String databaseName) { + this(mongoClient, databaseName, false); + } + + private SimpleReactiveMongoDatabaseFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) { + + Assert.notNull(client, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); + Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); + + this.mongo = client; + this.databaseName = databaseName; + this.mongoInstanceCreated = mongoInstanceCreated; + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. 
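+ *
+ * A sketch restoring the default translation; the {@code factory} variable is assumed for illustration:
+ * <pre class="code">
+ * factory.setExceptionTranslator(MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR);
+ * </pre>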
+ * + * @param exceptionTranslator the exception translator to set. + * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; + } + + /** + * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created. + * + * @param writeConcern the writeConcern to set + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + @Override + public Mono getMongoDatabase() throws DataAccessException { + return getMongoDatabase(databaseName); + } + + @Override + public Mono getMongoDatabase(String dbName) throws DataAccessException { + + Assert.hasText(dbName, "Database name must not be empty"); + + return Mono.fromSupplier(() -> { + + MongoDatabase db = mongo.getDatabase(dbName); + + return writeConcern != null ? db.withWriteConcern(writeConcern) : db; + }); + } + + /** + * Clean up the Mongo instance if it was created by the factory itself. + * + * @see DisposableBean#destroy() + */ + @Override + public void destroy() throws Exception { + + if (mongoInstanceCreated) { + mongo.close(); + } + } + + @Override + public CodecRegistry getCodecRegistry() { + return this.mongo.getDatabase(databaseName).getCodecRegistry(); + } + + @Override + public Mono getSession(ClientSessionOptions options) { + return Mono.from(mongo.startSession(options)); + } + + @Override + public ReactiveMongoDatabaseFactory withSession(ClientSession session) { + return new ClientSessionBoundMongoDbFactory(session, this); + } + + /** + * {@link ClientSession} bound {@link ReactiveMongoDatabaseFactory} decorating the database with a + * {@link SessionAwareMethodInterceptor}. 
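+ *
+ * Typically obtained from the surrounding factory, e.g. (subscription omitted):
+ * <pre class="code">
+ * factory.getSession(ClientSessionOptions.builder().build()).map(factory::withSession);
+ * </pre>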
+ * + * @author Christoph Strobl + * @since 2.1 + */ + static final class ClientSessionBoundMongoDbFactory implements ReactiveMongoDatabaseFactory { + + private final ClientSession session; + private final ReactiveMongoDatabaseFactory delegate; + + ClientSessionBoundMongoDbFactory(ClientSession session, ReactiveMongoDatabaseFactory delegate) { + + this.session = session; + this.delegate = delegate; + } + + @Override + public Mono getMongoDatabase() throws DataAccessException { + return delegate.getMongoDatabase().map(this::decorateDatabase); + } + + @Override + public Mono getMongoDatabase(String dbName) throws DataAccessException { + return delegate.getMongoDatabase(dbName).map(this::decorateDatabase); + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return delegate.getExceptionTranslator(); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } + + @Override + public Mono getSession(ClientSessionOptions options) { + return delegate.getSession(options); + } + + @Override + public ReactiveMongoDatabaseFactory withSession(ClientSession session) { + return delegate.withSession(session); + } + + @Override + public boolean isTransactionActive() { + return session != null && session.hasActiveTransaction(); + } + + private MongoDatabase decorateDatabase(MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, MongoCollection collection) { + return createProxyInstance(session, collection, MongoCollection.class); + } + + private T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class targetType) { + + ProxyFactory factory = new ProxyFactory(); + factory.setTarget(target); + factory.setInterfaces(targetType); + factory.setOpaque(true); + + factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, + this::proxyDatabase, MongoCollection.class, this::proxyCollection)); + + return targetType.cast(factory.getProxy(target.getClass().getClassLoader())); + } + + public ClientSession getSession() { + return this.session; + } + + public ReactiveMongoDatabaseFactory getDelegate() { + return this.delegate; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o; + + if (!ObjectUtils.nullSafeEquals(this.session, that.session)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(this.session); + result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate); + return result; + } + + public String toString() { + return "SimpleReactiveMongoDatabaseFactory.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + + ", delegate=" + this.getDelegate() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java new file mode 100644 index 
0000000000..c69fb4ad15
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import org.bson.Document;
+import org.springframework.lang.Nullable;
+
+/**
+ * {@link CursorPreparer} that exposes its {@link Document sort document}.
+ *
+ * @author Christoph Strobl
+ * @since 4.4.3
+ */
+interface SortingQueryCursorPreparer extends CursorPreparer {
+
+ @Nullable
+ Document getSortObject();
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java
new file mode 100644
index 0000000000..e50e1088cb
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import java.util.Optional;
+
+import org.springframework.data.mongodb.core.query.Collation;
+import org.springframework.lang.Nullable;
+
+/**
+ * Immutable object holding additional options to be applied when creating MongoDB views.
+ *
+ * @author Christoph Strobl
+ * @since 4.0
+ */
+public class ViewOptions {
+
+ private final @Nullable Collation collation;
+
+ static ViewOptions none() {
+ return new ViewOptions();
+ }
+
+ /**
+ * Creates a new instance of {@link ViewOptions}.
+ */
+ public ViewOptions() {
+ this(null);
+ }
+
+ private ViewOptions(@Nullable Collation collation) {
+ this.collation = collation;
+ }
+
+ /**
+ * Get the {@link Collation} to be set.
+ *
+ * @return {@link Optional#empty()} if not set.
+ */
+ public Optional<Collation> getCollation() {
+ return Optional.ofNullable(collation);
+ }
+
+ /**
+ * @param collation the {@link Collation} to use for language-specific string comparison.
+ * @return new instance of {@link ViewOptions}.
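+ *
+ * For illustration (locale assumed):
+ * <pre class="code">
+ * ViewOptions options = new ViewOptions().collation(Collation.of("en"));
+ * </pre>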
+ */ + public ViewOptions collation(Collation collation) { + return new ViewOptions(collation); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java new file mode 100644 index 0000000000..d6e4119b20 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.WriteConcern; + +/** + * Interface indicating a component that contains and exposes an {@link WriteConcern}. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface WriteConcernAware { + + /** + * @return the {@link WriteConcern} to apply or {@literal null} if none set. + */ + @Nullable + WriteConcern getWriteConcern(); + + /** + * @return {@literal true} if a {@link com.mongodb.WriteConcern} is set. + */ + default boolean hasWriteConcern() { + return getWriteConcern() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java index fa3858237c..8df4171844 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2012 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,14 @@ */ package org.springframework.data.mongodb.core; +import org.springframework.lang.Nullable; + import com.mongodb.WriteConcern; /** * A strategy interface to determine the {@link WriteConcern} to use for a given {@link MongoAction}. Return the passed * in default {@link WriteConcern} (a property on {@link MongoAction}) if no determination can be made. - * + * * @author Mark Pollack * @author Oliver Gierke */ @@ -28,10 +30,11 @@ public interface WriteConcernResolver { /** * Resolve the {@link WriteConcern} given the {@link MongoAction}. - * + * * @param action describes the context of the Mongo action. Contains a default {@link WriteConcern} to use if one * should not be resolved. 
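+ * A resolver sketch that tightens the concern for removes; the chosen operation and concern are assumptions for
+ * illustration:
+ * <pre class="code">
+ * WriteConcernResolver resolver = action -> MongoActionOperation.REMOVE.equals(action.getMongoActionOperation())
+ *     ? WriteConcern.MAJORITY : action.getDefaultWriteConcern();
+ * </pre>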
* @return a {@link WriteConcern} based on the passed in {@link MongoAction} value, maybe {@literal null}. */ + @Nullable WriteConcern resolve(MongoAction action); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java index 9d4e4b5b30..fbefe4a075 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,13 +16,13 @@ package org.springframework.data.mongodb.core; /** - * Enum to represent how strict the check of {@link com.mongodb.WriteResult} shall be. It can either be skipped entirely - * (use {@link #NONE}), or errors can be logged ({@link #LOG}) or cause an exception to be thrown {@link #EXCEPTION}. - * + * Enum to represent how strict the check of {@link com.mongodb.WriteConcernResult} shall be. It can either be skipped + * entirely (use {@link #NONE}) or cause an exception to be thrown {@link #EXCEPTION}. + * * @author Thomas Risberg * @author Oliver Gierke */ public enum WriteResultChecking { - NONE, LOG, EXCEPTION + NONE, EXCEPTION } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java new file mode 100644 index 0000000000..d4cdece411 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java @@ -0,0 +1,328 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.bson.Document; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Support class for {@link AggregationExpression} implementations. + * + * @author Christoph Strobl + * @author Matt Morrissette + * @author Mark Paluch + * @since 1.10 + */ +abstract class AbstractAggregationExpression implements AggregationExpression { + + private final Object value; + + protected AbstractAggregationExpression(Object value) { + this.value = value; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return toDocument(this.value, context); + } + + public Document toDocument(Object value, AggregationOperationContext context) { + return new Document(getMongoMethod(), unpack(value, context)); + } + + protected static List asFields(String... fieldRefs) { + + if (ObjectUtils.isEmpty(fieldRefs)) { + return Collections.emptyList(); + } + + return Fields.fields(fieldRefs).asList(); + } + + @SuppressWarnings("unchecked") + private Object unpack(Object value, AggregationOperationContext context) { + + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (value instanceof Field field) { + return context.getReference(field).toString(); + } + + if (value instanceof Fields fields) { + + List mapped = new ArrayList<>(fields.size()); + + for (Field field : fields) { + mapped.add(unpack(field, context)); + } + + return mapped; + } + + if (value instanceof Sort sort) { + + Document sortDoc = new Document(); + for (Order order : sort) { + + // Check reference + FieldReference reference = context.getReference(order.getProperty()); + sortDoc.put(reference.getRaw(), order.isAscending() ? 1 : -1); + } + return sortDoc; + } + + if (value instanceof List) { + + List sourceList = (List) value; + List mappedList = new ArrayList<>(sourceList.size()); + + for (Object o : sourceList) { + mappedList.add(unpack(o, context)); + } + + return mappedList; + } + + if (value instanceof Map) { + + Document targetDocument = new Document(); + + Map sourceMap = (Map) value; + sourceMap.forEach((k, v) -> targetDocument.append(k, unpack(v, context))); + + return targetDocument; + } + + if (value instanceof SystemVariable) { + return value.toString(); + } + + return value; + } + + @SuppressWarnings("unchecked") + protected List append(Object value, Expand expandList) { + + if (this.value instanceof List) { + + List clone = new ArrayList<>((List) this.value); + + if (value instanceof Collection collection && Expand.EXPAND_VALUES.equals(expandList)) { + clone.addAll(collection); + } else { + clone.add(value); + } + + return clone; + } + + return Arrays.asList(this.value, value); + } + + /** + * Expand a nested list of values to single entries or keep the list. 
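+ * For illustration: with a current value of {@code [a]}, appending {@code [b, c]} with {@code EXPAND_VALUES} yields
+ * {@code [a, b, c]}, whereas {@code KEEP_SOURCE} yields {@code [a, [b, c]]}.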
+ */ + protected enum Expand { + EXPAND_VALUES, KEEP_SOURCE + } + + protected List append(Object value) { + return append(value, Expand.EXPAND_VALUES); + } + + @SuppressWarnings({ "unchecked" }) + protected Map append(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return append((Map) this.value, key, value); + } + + private Map append(Map existing, String key, Object value) { + + Map clone = new LinkedHashMap<>(existing); + clone.put(key, value); + return clone; + } + + @SuppressWarnings("rawtypes") + protected Map appendTo(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + if (this.value instanceof Map map) { + + Map target = new HashMap<>(map); + if (!target.containsKey(key)) { + target.put(key, value); + return target; + } + target.computeIfPresent(key, (k, v) -> { + + if (v instanceof List list) { + List targetList = new ArrayList<>(list); + targetList.add(value); + return targetList; + } + return Arrays.asList(v, value); + }); + return target; + } + throw new IllegalStateException( + String.format("Cannot append value to %s type", ObjectUtils.nullSafeClassName(this.value))); + + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + protected Map remove(String key) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + Map clone = new LinkedHashMap<>((java.util.Map) this.value); + clone.remove(key); + return clone; + } + + /** + * Append the given key at the position in the underlying {@link LinkedHashMap}. + * + * @param index + * @param key + * @param value + * @return + * @since 3.1 + */ + @SuppressWarnings({ "unchecked" }) + protected Map appendAt(int index, String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + Map clone = new LinkedHashMap<>(); + + int i = 0; + for (Map.Entry entry : ((Map) this.value).entrySet()) { + + if (i == index) { + clone.put(key, value); + } + if (!entry.getKey().equals(key)) { + clone.put(entry.getKey(), entry.getValue()); + } + i++; + } + if (i <= index) { + clone.put(key, value); + } + return clone; + + } + + @SuppressWarnings({ "rawtypes" }) + protected List values() { + + if (value instanceof List) { + return new ArrayList((List) value); + } + + if (value instanceof java.util.Map) { + return new ArrayList(((java.util.Map) value).values()); + } + + return new ArrayList<>(Collections.singletonList(value)); + } + + /** + * Get the value at a given index. + * + * @param index + * @param + * @return + * @since 2.1 + */ + @SuppressWarnings("unchecked") + protected T get(int index) { + return (T) values().get(index); + } + + /** + * Get the value for a given key. + * + * @param key + * @param + * @return + * @since 2.1 + */ + @SuppressWarnings("unchecked") + protected T get(Object key) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return (T) ((Map) this.value).get(key); + } + + protected boolean isArgumentMap() { + return this.value instanceof Map; + } + + /** + * Get the argument map. + * + * @since 2.1 + * @return + */ + @SuppressWarnings("unchecked") + protected Map argumentMap() { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return Collections.unmodifiableMap((java.util.Map) value); + } + + /** + * Check if the given key is available. 
+ * + * @param key + * @return + * @since 2.1 + */ + @SuppressWarnings("unchecked") + protected boolean contains(Object key) { + + if (!(this.value instanceof java.util.Map)) { + return false; + } + + return ((Map) this.value).containsKey(key); + } + + protected abstract String getMongoMethod(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java new file mode 100644 index 0000000000..cf6485c230 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -0,0 +1,1169 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal accumulator} aggregation operations. + * + * @author Christoph Strobl + * @author Julia Lee + * @since 1.10 + * @soundtrack Rage Against The Machine - Killing In The Name + */ +public class AccumulatorOperators { + + /** + * Take the numeric value referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link AccumulatorOperatorFactory}. + */ + public static AccumulatorOperatorFactory valueOf(String fieldReference) { + return new AccumulatorOperatorFactory(fieldReference); + } + + /** + * Take the numeric value referenced resulting from given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link AccumulatorOperatorFactory}. + */ + public static AccumulatorOperatorFactory valueOf(AggregationExpression expression) { + return new AccumulatorOperatorFactory(expression); + } + + /** + * @author Christoph Strobl + * @author Julia Lee + */ + public static class AccumulatorOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link AccumulatorOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public AccumulatorOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link AccumulatorOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. 
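+ * For illustration, a projection sketch; {@code project()} is statically imported from {@code Aggregation} and the
+ * field names are assumed:
+ * <pre class="code">
+ * // renders {$sum: "$score"}
+ * project().and(AccumulatorOperators.valueOf("score").sum()).as("total");
+ * </pre>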
+ */
+ public AccumulatorOperatorFactory(AggregationExpression expression) {
+
+ Assert.notNull(expression, "Expression must not be null");
+ this.fieldReference = null;
+ this.expression = expression;
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates and
+ * returns the sum.
+ *
+ * @return new instance of {@link Sum}.
+ */
+ public Sum sum() {
+ return usesFieldRef() ? Sum.sumOf(fieldReference) : Sum.sumOf(expression);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the
+ * average value.
+ *
+ * @return new instance of {@link Avg}.
+ */
+ public Avg avg() {
+ return usesFieldRef() ? Avg.avgOf(fieldReference) : Avg.avgOf(expression);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the
+ * maximum value.
+ *
+ * @return new instance of {@link Max}.
+ */
+ public Max max() {
+ return usesFieldRef() ? Max.maxOf(fieldReference) : Max.maxOf(expression);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the
+ * requested number of maximum values.
+ *
+ * @return new instance of {@link Max}.
+ * @since 4.0
+ */
+ public Max max(int numberOfResults) {
+ return max().limit(numberOfResults);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the
+ * minimum value.
+ *
+ * @return new instance of {@link Min}.
+ */
+ public Min min() {
+ return usesFieldRef() ? Min.minOf(fieldReference) : Min.minOf(expression);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the
+ * requested number of minimum values.
+ *
+ * @return new instance of {@link Min}.
+ * @since 4.0
+ */
+ public Min min(int numberOfResults) {
+ return min().limit(numberOfResults);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the
+ * population standard deviation of the input values.
+ *
+ * @return new instance of {@link StdDevPop}.
+ */
+ public StdDevPop stdDevPop() {
+ return usesFieldRef() ? StdDevPop.stdDevPopOf(fieldReference) : StdDevPop.stdDevPopOf(expression);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the
+ * sample standard deviation of the input values.
+ *
+ * @return new instance of {@link StdDevSamp}.
+ */
+ public StdDevSamp stdDevSamp() {
+ return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
+ * given field to calculate the population covariance of the two.
+ *
+ * @param fieldReference must not be {@literal null}.
+ * @return new instance of {@link CovariancePop}.
+ * @since 3.3
+ */
+ public CovariancePop covariancePop(String fieldReference) {
+ return covariancePop().and(fieldReference);
+ }
+
+ /**
+ * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
+ * given {@link AggregationExpression expression} to calculate the population covariance of the two.
+ *
+ * @param expression must not be {@literal null}.
+ * @return new instance of {@link CovariancePop}.
+		 * @since 3.3
+		 */
+		public CovariancePop covariancePop(AggregationExpression expression) {
+			return covariancePop().and(expression);
+		}
+
+		private CovariancePop covariancePop() {
+			return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
+		 * given field to calculate the sample covariance of the two.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link CovarianceSamp}.
+		 * @since 3.3
+		 */
+		public CovarianceSamp covarianceSamp(String fieldReference) {
+			return covarianceSamp().and(fieldReference);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
+		 * given {@link AggregationExpression expression} to calculate the sample covariance of the two.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link CovarianceSamp}.
+		 * @since 3.3
+		 */
+		public CovarianceSamp covarianceSamp(AggregationExpression expression) {
+			return covarianceSamp().and(expression);
+		}
+
+		private CovarianceSamp covarianceSamp() {
+			return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference)
+					: CovarianceSamp.covarianceSampOf(expression);
+		}
+
+		/**
+		 * Creates new {@link ExpMovingAvgBuilder} to build an {@link AggregationExpression expMovingAvg} that calculates
+		 * the exponential moving average of numeric values.
+		 *
+		 * @return new instance of {@link ExpMovingAvg}.
+		 * @since 3.3
+		 */
+		public ExpMovingAvgBuilder expMovingAvg() {
+
+			ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference)
+					: ExpMovingAvg.expMovingAvgOf(expression);
+			return new ExpMovingAvgBuilder() {
+
+				@Override
+				public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) {
+					return expMovingAvg.n(numberOfHistoricalDocuments);
+				}
+
+				@Override
+				public ExpMovingAvg alpha(double exponentialDecayValue) {
+					return expMovingAvg.alpha(exponentialDecayValue);
+				}
+			};
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the
+		 * associated numeric value expression.
+		 *
+		 * @param percentages must not be {@literal null}.
+		 * @return new instance of {@link Percentile}.
+		 * @since 4.2
+		 */
+		public Percentile percentile(Double... percentages) {
+			Percentile percentile = usesFieldRef() ? Percentile.percentileOf(fieldReference)
+					: Percentile.percentileOf(expression);
+			return percentile.percentages(percentages);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that calculates the median of the associated numeric value expression.
+		 *
+		 * @return new instance of {@link Median}.
+		 * @since 4.2
+		 */
+		public Median median() {
+			return usesFieldRef() ? Median.medianOf(fieldReference) : Median.medianOf(expression);
+		}
+
+		private boolean usesFieldRef() {
+			return fieldReference != null;
+		}
+	}
+
+	/**
+	 * Builder for {@link ExpMovingAvg}.
+	 *
+	 * @since 3.3
+	 */
+	public interface ExpMovingAvgBuilder {
+
+		/**
+		 * Define the number of historical documents with significant mathematical weight.
+		 *
+		 * @param numberOfHistoricalDocuments the number of historical documents to consider.
+		 * @return new instance of {@link ExpMovingAvg}.
+		 */
+		ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments);
+
+		/**
+		 * Define the exponential decay value.
+		 *
+		 * @param exponentialDecayValue the decay value (alpha).
+		 * @return new instance of {@link ExpMovingAvg}.
+ */ + ExpMovingAvg alpha(double exponentialDecayValue); + + } + + /** + * {@link AggregationExpression} for {@code $sum}. + * + * @author Christoph Strobl + */ + public static class Sum extends AbstractAggregationExpression { + + private Sum(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$sum"; + } + + /** + * Creates new {@link Sum}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Sum}. + */ + public static Sum sumOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Sum(asFields(fieldReference)); + } + + /** + * Creates new {@link Sum}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Sum}. + */ + public static Sum sumOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Sum(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Sum} with all previously added arguments appending the given one.
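+	 * For illustration, a hedged usage sketch; the field names {@code netPrice} and {@code tax} are placeholders, not
+	 * part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $sum: [ "$netPrice", "$tax" ] }
+	 * Sum.sumOf("netPrice").and("tax");
+	 *
+	 * // a single argument collapses to { $sum: "$netPrice" }
+	 * Sum.sumOf("netPrice");
+	 * </pre>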
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Sum}. + */ + public Sum and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Sum(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Sum} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Sum}. + */ + public Sum and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Sum(append(expression)); + } + + /** + * Creates new {@link Sum} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param value the value to add. + * @return new instance of {@link Sum}. + * @since 2.2 + */ + public Sum and(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Sum(append(value)); + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument(Object value, AggregationOperationContext context) { + + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); + } + + return super.toDocument(value, context); + } + } + + /** + * {@link AggregationExpression} for {@code $avg}. + * + * @author Christoph Strobl + */ + public static class Avg extends AbstractAggregationExpression { + + private Avg(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$avg"; + } + + /** + * Creates new {@link Avg}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Avg}. + */ + public static Avg avgOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Avg(asFields(fieldReference)); + } + + /** + * Creates new {@link Avg}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Avg}. + */ + public static Avg avgOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Avg(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Avg} with all previously added arguments appending the given one.
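+	 * For illustration, a hedged usage sketch; the field names {@code quizScore} and {@code examScore} are
+	 * placeholders, not part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $avg: [ "$quizScore", "$examScore" ] }
+	 * Avg.avgOf("quizScore").and("examScore");
+	 * </pre>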
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Avg}. + */ + public Avg and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Avg(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Avg} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Avg}. + */ + public Avg and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Avg(append(expression)); + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument(Object value, AggregationOperationContext context) { + + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); + } + + return super.toDocument(value, context); + } + } + + /** + * {@link AggregationExpression} for {@code $max}. + * + * @author Christoph Strobl + */ + public static class Max extends AbstractAggregationExpression { + + private Max(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return contains("n") ? "$maxN" : "$max"; + } + + /** + * Creates new {@link Max}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Max}. + */ + public static Max maxOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Max}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Max}. + */ + public static Max maxOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Max(Collections.singletonMap("input", expression)); + } + + /** + * Creates new {@link Max} with all previously added arguments appending the given one.
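+	 * For illustration, a hedged usage sketch; the field name {@code score} is a placeholder, not part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $max: "$score" }
+	 * Max.maxOf("score");
+	 *
+	 * // renders as { $maxN: { input: "$score", n: 3 } }
+	 * Max.maxOf("score").limit(3);
+	 * </pre>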
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Max}. + */ + public Max and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Max} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Max}. + */ + public Max and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Max(appendTo("input", expression)); + } + + /** + * Creates new {@link Max} that returns the given number of maximum values ({@literal $maxN}). + * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Max}. + */ + public Max limit(int numberOfResults) { + return new Max(append("n", numberOfResults)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument(Object value, AggregationOperationContext context) { + + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); + } + + return super.toDocument(value, context); + } + } + + /** + * {@link AggregationExpression} for {@code $min}. + * + * @author Christoph Strobl + */ + public static class Min extends AbstractAggregationExpression { + + private Min(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return contains("n") ? "$minN" : "$min"; + } + + /** + * Creates new {@link Min}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Min}. + */ + public static Min minOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Min}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Min}. + */ + public static Min minOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Min(Collections.singletonMap("input", expression)); + } + + /** + * Creates new {@link Min} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Min}. + */ + public Min and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Min} with all previously added arguments appending the given one.
+		 * NOTE: Only possible in {@code $project} stage.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Min}.
+		 */
+		public Min and(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new Min(appendTo("input", expression));
+		}
+
+		/**
+		 * Creates new {@link Min} that returns the given number of minimum values ({@literal $minN}).
+		 * NOTE: Cannot be used with more than one {@literal input} value.
+		 *
+		 * @param numberOfResults the number of minimum values to return.
+		 * @return new instance of {@link Min}.
+		 */
+		public Min limit(int numberOfResults) {
+			return new Min(append("n", numberOfResults));
+		}
+
+		@Override
+		public Document toDocument(AggregationOperationContext context) {
+
+			if (get("n") == null) {
+				return toDocument(get("input"), context);
+			}
+			return super.toDocument(context);
+		}
+
+		@Override
+		@SuppressWarnings("unchecked")
+		public Document toDocument(Object value, AggregationOperationContext context) {
+
+			if (value instanceof List list && list.size() == 1) {
+				return super.toDocument(list.iterator().next(), context);
+			}
+
+			return super.toDocument(value, context);
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $stdDevPop}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class StdDevPop extends AbstractAggregationExpression {
+
+		private StdDevPop(Object value) {
+			super(value);
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$stdDevPop";
+		}
+
+		/**
+		 * Creates new {@link StdDevPop}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link StdDevPop}.
+		 */
+		public static StdDevPop stdDevPopOf(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			return new StdDevPop(asFields(fieldReference));
+		}
+
+		/**
+		 * Creates new {@link StdDevPop}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link StdDevPop}.
+		 */
+		public static StdDevPop stdDevPopOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new StdDevPop(Collections.singletonList(expression));
+		}
+
+		/**
+		 * Creates new {@link StdDevPop} with all previously added arguments appending the given one.
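+	 * For illustration, a hedged usage sketch; the field name {@code score} is a placeholder, not part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $stdDevPop: "$score" }
+	 * StdDevPop.stdDevPopOf("score");
+	 * </pre>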
+		 * NOTE: Only possible in {@code $project} stage.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link StdDevPop}.
+		 */
+		public StdDevPop and(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			return new StdDevPop(append(Fields.field(fieldReference)));
+		}
+
+		/**
+		 * Creates new {@link StdDevPop} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StdDevPop}. + */ + public StdDevPop and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StdDevPop(append(expression)); + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument(Object value, AggregationOperationContext context) { + + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); + } + + return super.toDocument(value, context); + } + } + + /** + * {@link AggregationExpression} for {@code $stdDevSamp}. + * + * @author Christoph Strobl + */ + public static class StdDevSamp extends AbstractAggregationExpression { + + private StdDevSamp(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$stdDevSamp"; + } + + /** + * Creates new {@link StdDevSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StdDevSamp}. + */ + public static StdDevSamp stdDevSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new StdDevSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link StdDevSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StdDevSamp}. + */ + public static StdDevSamp stdDevSampOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StdDevSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link StdDevSamp} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StdDevSamp}. + */ + public StdDevSamp and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new StdDevSamp(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link StdDevSamp} with all previously added arguments appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StdDevSamp}. + */ + public StdDevSamp and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StdDevSamp(append(expression)); + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument(Object value, AggregationOperationContext context) { + + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); + } + + return super.toDocument(value, context); + } + } + + /** + * {@link AggregationExpression} for {@code $covariancePop}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovariancePop extends AbstractAggregationExpression { + + private CovariancePop(Object value) { + super(value); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovariancePop(asFields(fieldReference)); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(AggregationExpression expression) { + return new CovariancePop(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(String fieldReference) { + return new CovariancePop(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(AggregationExpression expression) { + return new CovariancePop(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covariancePop"; + } + } + + /** + * {@link AggregationExpression} for {@code $covarianceSamp}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovarianceSamp extends AbstractAggregationExpression { + + private CovarianceSamp(Object value) { + super(value); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovarianceSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(AggregationExpression expression) { + return new CovarianceSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. 
+ */ + public CovarianceSamp and(String fieldReference) { + return new CovarianceSamp(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(AggregationExpression expression) { + return new CovarianceSamp(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covarianceSamp"; + } + } + + /** + * {@link ExpMovingAvg} calculates the exponential moving average of numeric values. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class ExpMovingAvg extends AbstractAggregationExpression { + + private ExpMovingAvg(Object value) { + super(value); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(String fieldReference) { + return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value + * to be used as input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) { + return new ExpMovingAvg(Collections.singletonMap("input", expression)); + } + + /** + * Define the number of historical documents with significant mathematical weight.
+		 * Specify either {@link #n(int) N} or {@link #alpha(double) alpha}. Not both!
+		 *
+		 * @param numberOfHistoricalDocuments the number of historical documents to consider.
+		 * @return new instance of {@link ExpMovingAvg}.
+		 */
+		public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) {
+			return new ExpMovingAvg(append("N", numberOfHistoricalDocuments));
+		}
+
+		/**
+		 * Define the exponential decay value.
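+		 * For illustration, a hedged usage sketch; the field name {@code price} and the decay value are placeholders,
+		 * not part of this change:
+		 *
+		 * <pre>
+		 * // renders as { $expMovingAvg: { input: "$price", alpha: 0.75 } }
+		 * ExpMovingAvg.expMovingAvgOf("price").alpha(0.75);
+		 * </pre>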
+		 * Specify either {@link #alpha(double) alpha} or {@link #n(int) N}. Not both!
+		 *
+		 * @param exponentialDecayValue the decay value (alpha).
+		 * @return new instance of {@link ExpMovingAvg}.
+		 */
+		public ExpMovingAvg alpha(double exponentialDecayValue) {
+			return new ExpMovingAvg(append("alpha", exponentialDecayValue));
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$expMovingAvg";
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $percentile}.
+	 *
+	 * @author Julia Lee
+	 * @since 4.2
+	 */
+	public static class Percentile extends AbstractAggregationExpression {
+
+		private Percentile(Object value) {
+			super(value);
+		}
+
+		/**
+		 * Creates new {@link Percentile}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Percentile}.
+		 */
+		public static Percentile percentileOf(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			Map<String, Object> fields = new HashMap<>();
+			fields.put("input", Fields.field(fieldReference));
+			fields.put("method", "approximate");
+			return new Percentile(fields);
+		}
+
+		/**
+		 * Creates new {@link Percentile}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Percentile}.
+		 */
+		public static Percentile percentileOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			Map<String, Object> fields = new HashMap<>();
+			fields.put("input", expression);
+			fields.put("method", "approximate");
+			return new Percentile(fields);
+		}
+
+		/**
+		 * Define the percentile value(s) that must resolve to percentages in the range {@code 0.0 - 1.0} inclusive.
+		 *
+		 * @param percentages must not be {@literal null}.
+		 * @return new instance of {@link Percentile}.
+		 */
+		public Percentile percentages(Double... percentages) {
+
+			Assert.notEmpty(percentages, "Percentages must not be null or empty");
+			return new Percentile(append("p", Arrays.asList(percentages)));
+		}
+
+		/**
+		 * Creates new {@link Percentile} with all previously added inputs appending the given one.
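+	 * For illustration, a hedged usage sketch; the field name {@code score} and the percentages are placeholders, not
+	 * part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $percentile: { input: "$score", p: [ 0.9, 0.95 ], method: "approximate" } }
+	 * AccumulatorOperators.valueOf("score").percentile(0.9, 0.95);
+	 * </pre>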
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Percentile(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
+		 * NOTE: Only possible in {@code $project} stage.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Percentile}.
+		 */
+		public Percentile and(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new Percentile(appendTo("input", expression));
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$percentile";
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $median}.
+	 *
+	 * @author Julia Lee
+	 * @since 4.2
+	 */
+	public static class Median extends AbstractAggregationExpression {
+
+		private Median(Object value) {
+			super(value);
+		}
+
+		/**
+		 * Creates new {@link Median}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Median}.
+		 */
+		public static Median medianOf(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			Map<String, Object> fields = new HashMap<>();
+			fields.put("input", Fields.field(fieldReference));
+			fields.put("method", "approximate");
+			return new Median(fields);
+		}
+
+		/**
+		 * Creates new {@link Median}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Median}.
+		 */
+		public static Median medianOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			Map<String, Object> fields = new HashMap<>();
+			fields.put("input", expression);
+			fields.put("method", "approximate");
+			return new Median(fields);
+		}
+
+		/**
+		 * Creates new {@link Median} with all previously added inputs appending the given one.
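+	 * For illustration, a hedged usage sketch; the field name {@code score} is a placeholder, not part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $median: { input: "$score", method: "approximate" } }
+	 * AccumulatorOperators.valueOf("score").median();
+	 * </pre>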
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public Median and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Median(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
                    + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public Median and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Median(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$median"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java new file mode 100644 index 0000000000..0dc1588bf8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java @@ -0,0 +1,201 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder.ValueAppender; +import org.springframework.lang.Nullable; + +/** + * Adds new fields to documents. {@code $addFields} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + *
                    + * AddFieldsOperation.addField("totalHomework").withValue("A+").and().addField("totalQuiz").withValue("B-")
                    + * 
+ *
+ * @author Christoph Strobl
+ * @since 3.0
+ * @see MongoDB Aggregation Framework: $addFields
+ */
+public class AddFieldsOperation extends DocumentEnhancingOperation {
+
+	/**
+	 * Create new instance of {@link AddFieldsOperation} adding map keys as exposed fields.
+	 *
+	 * @param source must not be {@literal null}.
+	 */
+	private AddFieldsOperation(Map<Object, Object> source) {
+		super(source);
+	}
+
+	/**
+	 * Create new instance of {@link AddFieldsOperation}.
+	 *
+	 * @param field must not be {@literal null}.
+	 * @param value can be {@literal null}.
+	 */
+	public AddFieldsOperation(Object field, @Nullable Object value) {
+		this(Collections.singletonMap(field, value));
+	}
+
+	/**
+	 * Define the {@link AddFieldsOperation} via {@link AddFieldsOperationBuilder}.
+	 *
+	 * @return new instance of {@link AddFieldsOperationBuilder}.
+	 */
+	public static AddFieldsOperationBuilder builder() {
+		return new AddFieldsOperationBuilder();
+	}
+
+	/**
+	 * Concatenate another field to add.
+	 *
+	 * @param field must not be {@literal null}.
+	 * @return new instance of {@link AddFieldsOperationBuilder}.
+	 */
+	public static ValueAppender addField(String field) {
+		return new AddFieldsOperationBuilder().addField(field);
+	}
+
+	/**
+	 * Append the value for a specific field to the operation.
+	 *
+	 * @param field the target field to add.
+	 * @param value the value to assign.
+	 * @return new instance of {@link AddFieldsOperation}.
+	 */
+	public AddFieldsOperation addField(Object field, Object value) {
+
+		LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
+		target.put(field, value);
+
+		return new AddFieldsOperation(target);
+	}
+
+	/**
+	 * Concatenate additional fields to add.
+	 *
+	 * @return new instance of {@link AddFieldsOperationBuilder}.
+	 */
+	public AddFieldsOperationBuilder and() {
+		return new AddFieldsOperationBuilder(getValueMap());
+	}
+
+	@Override
+	protected String mongoOperator() {
+		return "$addFields";
+	}
+
+	/**
+	 * @author Christoph Strobl
+	 * @since 3.0
+	 */
+	public static class AddFieldsOperationBuilder {
+
+		private final Map<Object, Object> valueMap;
+
+		private AddFieldsOperationBuilder() {
+			this.valueMap = new LinkedHashMap<>();
+		}
+
+		private AddFieldsOperationBuilder(Map<Object, Object> source) {
+			this.valueMap = new LinkedHashMap<>(source);
+		}
+
+		public AddFieldsOperationBuilder addFieldWithValue(String field, @Nullable Object value) {
+			return addField(field).withValue(value);
+		}
+
+		public AddFieldsOperationBuilder addFieldWithValueOf(String field, Object value) {
+			return addField(field).withValueOf(value);
+		}
+
+		/**
+		 * Define the field to add.
+		 *
+		 * @param field must not be {@literal null}.
+		 * @return new instance of {@link ValueAppender}.
+		 */
+		public ValueAppender addField(String field) {
+
+			return new ValueAppender() {
+
+				@Override
+				public AddFieldsOperationBuilder withValue(Object value) {
+
+					valueMap.put(field, value);
+					return AddFieldsOperationBuilder.this;
+				}
+
+				@Override
+				public AddFieldsOperationBuilder withValueOf(Object value) {
+
+					valueMap.put(field, value instanceof String stringValue ? Fields.fields(stringValue) : value);
+					return AddFieldsOperationBuilder.this;
+				}
+
+				@Override
+				public AddFieldsOperationBuilder withValueOfExpression(String operation, Object...
values) { + + valueMap.put(field, new ExpressionProjection(operation, values)); + return AddFieldsOperationBuilder.this; + } + }; + } + + public AddFieldsOperation build() { + return new AddFieldsOperation(valueMap); + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public interface ValueAppender { + + /** + * Define the value to assign as is. + * + * @param value can be {@literal null}. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValue(@Nullable Object value); + + /** + * Define the value to assign. Plain {@link String} values are treated as {@link Field field references}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValueOf(Object value); + + /** + * Adds a generic projection for the current field. + * + * @param operation the operation key, e.g. {@code $add}. + * @param values the values to be set for the projection operation. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 1c98ebd315..45de38ed21 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2016 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,24 +17,28 @@ import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.bson.Document; +import org.bson.conversions.Bson; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; -import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; -import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder; +import org.springframework.data.mongodb.core.aggregation.LookupOperation.LookupOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.SerializationUtils; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * An {@code Aggregation} is a representation of a list of aggregation steps to be performed by the MongoDB Aggregation * Framework. @@ -44,6 +48,11 @@ * @author Oliver Gierke * @author Mark Paluch * @author Alessio Fachechi + * @author Christoph Strobl + * @author Nikolay Bogdanov + * @author Gustavo de Geus + * @author Jérôme Guyon + * @author Sangyong Choi * @since 1.3 */ public class Aggregation { @@ -60,10 +69,37 @@ public class Aggregation { */ public static final String CURRENT = SystemVariable.CURRENT.toString(); - public static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext(); + /** + * A variable to conditionally exclude a field. In a {@code $projection}, a field set to the variable + * {@literal REMOVE} is excluded from the output. + * + *
+	 * <pre>
+	 * <code>
                    +	 * db.books.aggregate( [
                    +	 * {
                    +	 *     $project: {
                    +	 *         title: 1,
                    +	 *         "author.first": 1,
                    +	 *         "author.last" : 1,
                    +	 *         "author.middle": {
                    +	 *             $cond: {
                    +	 *                 if: { $eq: [ "", "$author.middle" ] },
                    +	 *                 then: "$$REMOVE",
                    +	 *                 else: "$author.middle"
                    +	 *             }
                    +	 *         }
                    +	 *     }
                    +	 * } ] )
+	 * </code>
+	 * </pre>
                    + */ + public static final String REMOVE = SystemVariable.REMOVE.toString(); + + public static final AggregationOperationContext DEFAULT_CONTEXT = AggregationOperationRenderer.DEFAULT_CONTEXT; public static final AggregationOptions DEFAULT_OPTIONS = newAggregationOptions().build(); - protected final List operations; + protected final AggregationPipeline pipeline; private final AggregationOptions options; /** @@ -84,18 +120,29 @@ public static Aggregation newAggregation(AggregationOperation... operations) { return new Aggregation(operations); } + /** + * Creates a new {@link AggregationUpdate} from the given {@link AggregationOperation}s. + * + * @param operations can be {@literal empty} but must not be {@literal null}. + * @return new instance of {@link AggregationUpdate}. + * @since 3.0 + */ + public static AggregationUpdate newUpdate(AggregationOperation... operations) { + return AggregationUpdate.from(Arrays.asList(operations)); + } + /** * Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are * supported in MongoDB version 2.6+. * * @param options must not be {@literal null}. - * @return + * @return new instance of {@link Aggregation}. * @since 1.6 */ public Aggregation withOptions(AggregationOptions options) { - Assert.notNull(options, "AggregationOptions must not be null."); - return new Aggregation(this.operations, options); + Assert.notNull(options, "AggregationOptions must not be null"); + return new Aggregation(this.pipeline.getOperations(), options); } /** @@ -133,7 +180,7 @@ protected Aggregation(AggregationOperation... aggregationOperations) { */ protected static List asAggregationList(AggregationOperation... aggregationOperations) { - Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty!"); + Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty"); return Arrays.asList(aggregationOperations); } @@ -150,63 +197,210 @@ protected Aggregation(List aggregationOperations) { /** * Creates a new {@link Aggregation} from the given {@link AggregationOperation}s. * - * @param aggregationOperations must not be {@literal null} or empty. + * @param aggregationOperations must not be {@literal null}. * @param options must not be {@literal null} or empty. */ protected Aggregation(List aggregationOperations, AggregationOptions options) { - Assert.notNull(aggregationOperations, "AggregationOperations must not be null!"); - Assert.isTrue(!aggregationOperations.isEmpty(), "At least one AggregationOperation has to be provided"); - Assert.notNull(options, "AggregationOptions must not be null!"); + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + Assert.notNull(options, "AggregationOptions must not be null"); - this.operations = aggregationOperations; + this.pipeline = new AggregationPipeline(aggregationOperations); this.options = options; } + /** + * Get the {@link AggregationOptions}. + * + * @return never {@literal null}. + * @since 2.1 + */ + public AggregationOptions getOptions() { + return options; + } + /** * A pointer to the previous {@link AggregationOperation}. * * @return */ public static String previousOperation() { - return "_id"; + return FieldName.ID.name(); + } + + /** + * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
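+	 * For illustration, a hedged usage sketch; the field name and value are placeholders, not part of this change:
+	 *
+	 * <pre>
+	 * // renders as { $addFields: { totalHomework: "A+" } }
+	 * Aggregation.addFields().addFieldWithValue("totalHomework", "A+").build();
+	 * </pre>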
                    + * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is + * an alias for {@code $addFields}. + * + * @return new instance of {@link AddFieldsOperationBuilder}. + * @see AddFieldsOperation + * @since 3.0 + */ + public static AddFieldsOperationBuilder addFields() { + return AddFieldsOperation.builder(); + } + + /** + * Creates a new {@link AggregationOperation} taking the given {@link Bson bson value} as is.
                    + * + *
                    +	 * Aggregation.stage(Aggregates.search(exists(fieldPath("..."))));
                    +	 * 
+	 *
+	 * Field mapping against a potential domain type or previous aggregation stages will not happen.
+	 *
+	 * @param aggregationOperation must not be {@literal null}.
+	 * @return new instance of {@link AggregationOperation}.
+	 * @since 4.0
+	 */
+	public static AggregationOperation stage(Bson aggregationOperation) {
+		return new BasicAggregationOperation(aggregationOperation);
+	}
+
+	/**
+	 * Creates a new {@link AggregationOperation} taking the given {@link String json value} as is.
                    + * + *
                    +	 * Aggregation.stage("{ $search : { near : { path : 'released' , origin : ... } } }");
                    +	 * 
+	 *
+	 * Field mapping against a potential domain type or previous aggregation stages will not happen.
+	 *
+	 * @param json the JSON representation of the pipeline stage. Must not be {@literal null}.
+	 * @return new instance of {@link AggregationOperation}.
+	 * @since 4.0
+	 */
+	public static AggregationOperation stage(String json) {
+		return new BasicAggregationOperation(json);
 	}
 
 	/**
 	 * Creates a new {@link ProjectionOperation} including the given fields.
 	 *
 	 * @param fields must not be {@literal null}.
-	 * @return
+	 * @return new instance of {@link ProjectionOperation}.
 	 */
 	public static ProjectionOperation project(String... fields) {
 		return project(fields(fields));
 	}
 
 	/**
-	 * Creates a new {@link ProjectionOperation} includeing the given {@link Fields}.
+	 * Creates a new {@link ProjectionOperation} including the given {@link Fields}.
 	 *
 	 * @param fields must not be {@literal null}.
-	 * @return
+	 * @return new instance of {@link ProjectionOperation}.
 	 */
 	public static ProjectionOperation project(Fields fields) {
 		return new ProjectionOperation(fields);
 	}
 
+	/**
+	 * Creates a new {@link ProjectionOperation} including all top level fields of the given {@link Class}.
+	 *
+	 * @param type must not be {@literal null}.
+	 * @return new instance of {@link ProjectionOperation}.
+	 * @since 2.2
+	 */
+	public static ProjectionOperation project(Class<?> type) {
+
+		Assert.notNull(type, "Type must not be null");
+		return new ProjectionOperation(type);
+	}
+
 	/**
 	 * Factory method to create a new {@link UnwindOperation} for the field with the given name.
 	 *
-	 * @param fieldName must not be {@literal null} or empty.
-	 * @return
+	 * @param field must not be {@literal null} or empty.
+	 * @return new instance of {@link UnwindOperation}.
 	 */
 	public static UnwindOperation unwind(String field) {
 		return new UnwindOperation(field(field));
 	}
 
+	/**
+	 * Factory method to create a new {@link ReplaceRootOperation} for the field with the given name.
+	 *
+	 * @param fieldName must not be {@literal null} or empty.
+	 * @return new instance of {@link ReplaceRootOperation}.
+	 * @since 1.10
+	 */
+	public static ReplaceRootOperation replaceRoot(String fieldName) {
+		return ReplaceRootOperation.builder().withValueOf(fieldName);
+	}
+
+	/**
+	 * Factory method to create a new {@link ReplaceRootOperation} for the field with the given
+	 * {@link AggregationExpression}.
+	 *
+	 * @param aggregationExpression must not be {@literal null}.
+	 * @return new instance of {@link ReplaceRootOperation}.
+	 * @since 1.10
+	 */
+	public static ReplaceRootOperation replaceRoot(AggregationExpression aggregationExpression) {
+		return ReplaceRootOperation.builder().withValueOf(aggregationExpression);
+	}
+
+	/**
+	 * Factory method to create a new {@link ReplaceRootDocumentOperationBuilder} to configure a
+	 * {@link ReplaceRootOperation}.
+	 *
+	 * @return the {@literal ReplaceRootDocumentOperationBuilder}.
+	 * @since 1.10
+	 */
+	public static ReplaceRootOperationBuilder replaceRoot() {
+		return ReplaceRootOperation.builder();
+	}
+
+	/**
+	 * Factory method to create a new {@link UnwindOperation} for the field with the given name and
+	 * {@code preserveNullAndEmptyArrays}. Note that extended unwind is supported in MongoDB version 3.2+.
+	 *
+	 * @param field must not be {@literal null} or empty.
+	 * @param preserveNullAndEmptyArrays {@literal true} to output the document if path is {@literal null}, missing or
+	 *          array is empty.
+ * @return new {@link UnwindOperation} + * @since 1.10 + */ + public static UnwindOperation unwind(String field, boolean preserveNullAndEmptyArrays) { + return new UnwindOperation(field(field), preserveNullAndEmptyArrays); + } + + /** + * Factory method to create a new {@link UnwindOperation} for the field with the given name including the name of a + * new field to hold the array index of the element as {@code arrayIndex}. Note that extended unwind is supported in + * MongoDB version 3.2+. + * + * @param field must not be {@literal null} or empty. + * @param arrayIndex must not be {@literal null} or empty. + * @return new {@link UnwindOperation} + * @since 1.10 + */ + public static UnwindOperation unwind(String field, String arrayIndex) { + return new UnwindOperation(field(field), field(arrayIndex), false); + } + + /** + * Factory method to create a new {@link UnwindOperation} for the field with the given name, including the name of a + * new field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. + * Note that extended unwind is supported in MongoDB version 3.2+. + * + * @param field must not be {@literal null} or empty. + * @param arrayIndex must not be {@literal null} or empty. + * @param preserveNullAndEmptyArrays {@literal true} to output the document if path is {@literal null}, missing or + * array is empty. + * @return new {@link UnwindOperation} + * @since 1.10 + */ + public static UnwindOperation unwind(String field, String arrayIndex, boolean preserveNullAndEmptyArrays) { + return new UnwindOperation(field(field), field(arrayIndex), preserveNullAndEmptyArrays); + } + /** * Creates a new {@link GroupOperation} for the given fields. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link GroupOperation}. */ public static GroupOperation group(String... fields) { return group(fields(fields)); @@ -222,11 +416,37 @@ public static GroupOperation group(Fields fields) { return new GroupOperation(fields); } + /** + * Creates a new {@link GraphLookupOperation.GraphLookupOperationFromBuilder} to construct a + * {@link GraphLookupOperation} given {@literal fromCollection}. + * + * @param fromCollection must not be {@literal null} or empty. + * @return new instance of {@link StartWithBuilder} for creating a {@link GraphLookupOperation}. + * @since 1.10 + */ + public static StartWithBuilder graphLookup(String fromCollection) { + return GraphLookupOperation.builder().from(fromCollection); + } + + /** + * Creates a new {@link VectorSearchOperation} by starting from the {@code indexName} to use. + * + * @param indexName must not be {@literal null} or empty. + * @return new instance of {@link VectorSearchOperation.PathContributor}. + * @since 4.5 + */ + public static VectorSearchOperation.PathContributor vectorSearch(String indexName) { + + Assert.hasText(indexName, "Index name must not be null or empty"); + + return VectorSearchOperation.search(indexName); + } + /** * Factory method to create a new {@link SortOperation} for the given {@link Sort}. * * @param sort must not be {@literal null}. - * @return + * @return new instance of {@link SortOperation}. */ public static SortOperation sort(Sort sort) { return new SortOperation(sort); @@ -237,19 +457,41 @@ public static SortOperation sort(Sort sort) { * * @param direction must not be {@literal null}. * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link SortOperation}. 
*/ public static SortOperation sort(Direction direction, String... fields) { - return new SortOperation(new Sort(direction, fields)); + return new SortOperation(Sort.by(direction, fields)); + } + + /** + * Creates a new {@link SortByCountOperation} given {@literal groupByField}. + * + * @param field must not be {@literal null} or empty. + * @return new instance of {@link SortByCountOperation}. + * @since 2.1 + */ + public static SortByCountOperation sortByCount(String field) { + return new SortByCountOperation(field(field)); + } + + /** + * Creates a new {@link SortByCountOperation} given {@link AggregationExpression group and sort expression}. + * + * @param groupAndSortExpression must not be {@literal null}. + * @return new instance of {@link SortByCountOperation}. + * @since 2.1 + */ + public static SortByCountOperation sortByCount(AggregationExpression groupAndSortExpression) { + return new SortByCountOperation(groupAndSortExpression); } /** * Creates a new {@link SkipOperation} skipping the given number of elements. * * @param elementsToSkip must not be less than zero. - * @return + * @return new instance of {@link SkipOperation}. */ - public static SkipOperation skip(int elementsToSkip) { + public static SkipOperation skip(long elementsToSkip) { return new SkipOperation(elementsToSkip); } @@ -257,22 +499,160 @@ public static SkipOperation skip(int elementsToSkip) { * Creates a new {@link LimitOperation} limiting the result to the given number of elements. * * @param maxElements must not be less than zero. - * @return + * @return new instance of {@link LimitOperation}. */ public static LimitOperation limit(long maxElements) { return new LimitOperation(maxElements); } + /** + * Creates a new {@link SampleOperation} to select the specified number of documents from its input randomly. + * + * @param sampleSize must not be less than zero. + * @return new instance of {@link SampleOperation}. + * @since 2.0 + */ + public static SampleOperation sample(long sampleSize) { + return new SampleOperation(sampleSize); + } + /** * Creates a new {@link MatchOperation} using the given {@link Criteria}. * * @param criteria must not be {@literal null}. - * @return + * @return new instance of {@link MatchOperation}. */ public static MatchOperation match(Criteria criteria) { return new MatchOperation(criteria); } + /** + * Creates a new {@link MatchOperation} using the given {@link CriteriaDefinition}. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link MatchOperation}. + * @since 1.10 + */ + public static MatchOperation match(CriteriaDefinition criteria) { + return new MatchOperation(criteria); + } + + /** + * Creates a new {@link MatchOperation} using the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MatchOperation}. + * @since 3.3 + */ + public static MatchOperation match(AggregationExpression expression) { + return new MatchOperation(expression); + } + + /** + * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The + * {@code distanceField} defines output field that contains the calculated distance. + * + * @param query must not be {@literal null}. + * @param distanceField must not be {@literal null} or empty. + * @return new instance of {@link GeoNearOperation}. 
+ * @since 1.7 + */ + public static GeoNearOperation geoNear(NearQuery query, String distanceField) { + return new GeoNearOperation(query, distanceField); + } + + /** + * Obtain a {@link MergeOperationBuilder builder} instance to create a new {@link MergeOperation}. + * + * @return new instance of {@link MergeOperationBuilder}. + * @see MergeOperation + * @since 3.0 + */ + public static MergeOperationBuilder merge() { + return MergeOperation.builder(); + } + + /** + * Creates a new {@link OutOperation} using the given collection name. This operation must be the last operation in + * the pipeline. + * + * @param outCollectionName collection name to export aggregation results. The {@link OutOperation} creates a new + * collection in the current database if one does not already exist. The collection is not visible until the + * aggregation completes. If the aggregation fails, MongoDB does not create the collection. Must not be + * {@literal null}. + * @return new instance of {@link OutOperation}. + */ + public static OutOperation out(String outCollectionName) { + return new OutOperation(outCollectionName); + } + + /** + * Creates a new {@link BucketOperation} given {@literal groupByField}. + * + * @param groupByField must not be {@literal null} or empty. + * @return new instance of {@link BucketOperation}. + * @since 1.10 + */ + public static BucketOperation bucket(String groupByField) { + return new BucketOperation(field(groupByField)); + } + + /** + * Creates a new {@link BucketOperation} given {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + * @return new instance of {@link BucketOperation}. + * @since 1.10 + */ + public static BucketOperation bucket(AggregationExpression groupByExpression) { + return new BucketOperation(groupByExpression); + } + + /** + * Creates a new {@link BucketAutoOperation} given {@literal groupByField}. + * + * @param groupByField must not be {@literal null} or empty. + * @param buckets number of buckets, must be a positive integer. + * @return new instance of {@link BucketAutoOperation}. + * @since 1.10 + */ + public static BucketAutoOperation bucketAuto(String groupByField, int buckets) { + return new BucketAutoOperation(field(groupByField), buckets); + } + + /** + * Creates a new {@link BucketAutoOperation} given {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + * @param buckets number of buckets, must be a positive integer. + * @return new instance of {@link BucketAutoOperation}. + * @since 1.10 + */ + public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpression, int buckets) { + return new BucketAutoOperation(groupByExpression, buckets); + } + + /** + * Creates a new {@link FacetOperation}. + * + * @return new instance of {@link FacetOperation}. + * @since 1.10 + */ + public static FacetOperation facet() { + return FacetOperation.EMPTY; + } + + /** + * Creates a new {@link FacetOperationBuilder} given {@link Aggregation}. + * + * @param aggregationOperations the sub-pipeline, must not be {@literal null}. + * @return new instance of {@link FacetOperation}. + * @since 1.10 + */ + public static FacetOperationBuilder facet(AggregationOperation... aggregationOperations) { + return facet().and(aggregationOperations); + } + /** * Creates a new {@link LookupOperation}. 
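	 * For illustration, a hedged usage sketch; collection and field names are placeholders, not part of this change:
	 *
	 * <pre>
	 * // pulls documents from "orders" whose "customerId" matches the local "_id" into "customerOrders"
	 * Aggregation.lookup("orders", "_id", "customerId", "customerOrders");
	 * </pre>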
* @@ -301,11 +681,58 @@ public static LookupOperation lookup(Field from, Field localField, Field foreign return new LookupOperation(from, localField, foreignField, as); } + /** + * Entrypoint for creating {@link LookupOperation $lookup} using a fluent builder API. + * + *
                    +	 * Aggregation.lookup().from("restaurants").localField("restaurant_name").foreignField("name")
                    +	 * 		.let(newVariable("orders_drink").forField("drink"))
                    +	 * 		.pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
                    +	 * 		.as("matches")
                    +	 * 
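For reference, the builder chain shown above can be assigned and embedded in a pipeline. A sketch assuming static imports of Aggregation.* and of newVariable (assumed to live in VariableOperators.Let.ExpressionVariable), plus java.util.List and org.bson.Document:

    Aggregation aggregation = newAggregation(
            lookup().from("restaurants")
                    .localField("restaurant_name").foreignField("name")
                    .let(newVariable("orders_drink").forField("drink"))
                    .pipeline(match(ctx -> new Document("$expr",
                            new Document("$in", List.of("$$orders_drink", "$beverages")))))
                    .as("matches"));
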
                    + * + * @return new instance of {@link LookupOperationBuilder}. + * @since 4.1 + */ + public static LookupOperationBuilder lookup() { + return new LookupOperationBuilder(); + } + + /** + * Creates a new {@link CountOperationBuilder}. + * + * @return never {@literal null}. + * @since 1.10 + */ + public static CountOperationBuilder count() { + return new CountOperationBuilder(); + } + + /** + * Creates a new {@link RedactOperation} that can restrict the content of a document based on information stored + * within the document itself. + * + *
                    +	 *
                    +	 * Aggregation.redact(ConditionalOperators.when(Criteria.where("level").is(5)) //
                    +	 * 		.then(RedactOperation.PRUNE) //
                    +	 * 		.otherwise(RedactOperation.DESCEND));
                    +	 * 
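The snippet above renders to a complete $redact stage. Executing it is then a one-liner; the "accounts" collection name and the mongoTemplate instance are assumptions for illustration:

    Aggregation aggregation = newAggregation(
            redact(ConditionalOperators.when(Criteria.where("level").is(5))
                    .then(RedactOperation.PRUNE)
                    .otherwise(RedactOperation.DESCEND)));

    AggregationResults<Document> results =
            mongoTemplate.aggregate(aggregation, "accounts", Document.class);
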
                    + * + * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or + * {@literal $$KEEP}. Must not be {@literal null}. + * @return new instance of {@link RedactOperation}. Never {@literal null}. + * @since 3.0 + */ + public static RedactOperation redact(AggregationExpression condition) { + return new RedactOperation(condition); + } + /** * Creates a new {@link Fields} instance for the given field names. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link Fields}. * @see Fields#fields(String...) */ public static Fields fields(String... fields) { @@ -317,29 +744,16 @@ public static Fields fields(String... fields) { * * @param name must not be {@literal null} or empty. * @param target must not be {@literal null} or empty. - * @return + * @return new instance of {@link Fields}. */ public static Fields bind(String name, String target) { return Fields.from(field(name, target)); } - /** - * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The - * {@code distanceField} defines output field that contains the calculated distance. - * - * @param query must not be {@literal null}. - * @param distanceField must not be {@literal null} or empty. - * @return - * @since 1.7 - */ - public static GeoNearOperation geoNear(NearQuery query, String distanceField) { - return new GeoNearOperation(query, distanceField); - } - /** * Returns a new {@link AggregationOptions.Builder}. * - * @return + * @return new instance of {@link AggregationOptions.Builder}. * @since 1.6 */ public static AggregationOptions.Builder newAggregationOptions() { @@ -347,129 +761,42 @@ public static AggregationOptions.Builder newAggregationOptions() { } /** - * Converts this {@link Aggregation} specification to a {@link DBObject}. + * Renders this {@link Aggregation} specification to an aggregation pipeline returning a {@link List} of + * {@link Document}. * - * @param inputCollectionName the name of the input collection - * @return the {@code DBObject} representing this aggregation - */ - public DBObject toDbObject(String inputCollectionName, AggregationOperationContext rootContext) { - - AggregationOperationContext context = rootContext; - List operationDocuments = new ArrayList(operations.size()); - - for (AggregationOperation operation : operations) { - - operationDocuments.add(operation.toDBObject(context)); - - if (operation instanceof FieldsExposingAggregationOperation) { - - FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation; - - if (operation instanceof InheritsFieldsAggregationOperation) { - context = new InheritingExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context); - } else { - context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context); - } - } - } - - DBObject command = new BasicDBObject("aggregate", inputCollectionName); - command.put("pipeline", operationDocuments); - - command = options.applyAndReturnPotentiallyChangedCommand(command); - - return command; - } - - /* - * (non-Javadoc) - * @see java.lang.Object#toString() + * @return the aggregation pipeline representing this aggregation. 
+ * @since 2.1 */ - @Override - public String toString() { - return SerializationUtils - .serializeToJsonSafely(toDbObject("__collection__", new NoOpAggregationOperationContext())); + public List toPipeline(AggregationOperationContext rootContext) { + return pipeline.toDocuments(rootContext); } /** - * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. - * - * @author Oliver Gierke + * @return the {@link AggregationPipeline}. + * @since 3.0.2 */ - private static class NoOpAggregationOperationContext implements AggregationOperationContext { - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject) - */ - @Override - public DBObject getMappedObject(DBObject dbObject) { - return dbObject; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ - @Override - public FieldReference getReference(Field field) { - return new FieldReference(new ExposedField(field, true)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ - @Override - public FieldReference getReference(String name) { - return new FieldReference(new ExposedField(new AggregationField(name), true)); - } + public AggregationPipeline getPipeline() { + return pipeline; } /** - * Describes the system variables available in MongoDB aggregation framework pipeline expressions. + * Converts this {@link Aggregation} specification to a {@link Document}.
                    + * MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render + * an aggregation pipeline. * - * @author Thomas Darimont - * @see http://docs.mongodb.org/manual/reference/aggregation-variables + * @param inputCollectionName the name of the input collection. + * @return the {@code Document} representing this aggregation. */ - enum SystemVariable { - - ROOT, CURRENT; + public Document toDocument(String inputCollectionName, AggregationOperationContext rootContext) { - private static final String PREFIX = "$$"; + Document command = new Document("aggregate", inputCollectionName); + command.put("pipeline", toPipeline(rootContext)); - /** - * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false} - * otherwise. - * - * @param fieldRef may be {@literal null}. - * @return - */ - public static boolean isReferingToSystemVariable(String fieldRef) { - - if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) { - return false; - } - - int indexOfFirstDot = fieldRef.indexOf('.'); - String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? fieldRef.length() : indexOfFirstDot); - - for (SystemVariable value : values()) { - if (value.name().equals(candidate)) { - return true; - } - } - - return false; - } + return options.applyAndReturnPotentiallyChangedCommand(command); + } - /* - * (non-Javadoc) - * @see java.lang.Enum#toString() - */ - @Override - public String toString() { - return PREFIX.concat(name()); - } + @Override + public String toString() { + return SerializationUtils.serializeToJsonSafely(toDocument("__collection__", DEFAULT_CONTEXT)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java index c25ae17bab..1cb38ef362 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,59 @@ */ package org.springframework.data.mongodb.core.aggregation; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.data.mongodb.MongoExpression; /** * An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like * {@code project} and {@code group}. - * + *

+ * The {@link AggregationExpression expressions} {@link #toDocument(AggregationOperationContext)} method is called during + * the mapping process to obtain the mapped, ready-to-use representation that can be handed over to the driver as part + * of an {@link AggregationOperation pipeline stage}. + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl */ -interface AggregationExpression { +public interface AggregationExpression extends MongoExpression { + + /** + * Create an {@link AggregationExpression} out of a given {@link MongoExpression} to ensure the resulting + * {@link MongoExpression#toDocument() Document} is mapped against the {@link AggregationOperationContext}.
                    + * If the given expression is already an {@link AggregationExpression} the very same instance is returned. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 3.2 + */ + static AggregationExpression from(MongoExpression expression) { + + if (expression instanceof AggregationExpression aggregationExpression) { + return aggregationExpression; + } + + return context -> context.getMappedObject(expression.toDocument()); + } + + /** + * Obtain the as is (unmapped) representation of the {@link AggregationExpression}. Use + * {@link #toDocument(AggregationOperationContext)} with a matching {@link AggregationOperationContext context} to + * engage domain type mapping including field name resolution. + * + * @see org.springframework.data.mongodb.MongoExpression#toDocument() + */ + @Override + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } /** - * Turns the {@link AggregationExpression} into a {@link DBObject} within the given + * Turns the {@link AggregationExpression} into a {@link Document} within the given * {@link AggregationOperationContext}. - * - * @param context - * @return + * + * @param context must not be {@literal null}. + * @return the MongoDB native ({@link Document}) form of the expression. */ - DBObject toDbObject(AggregationOperationContext context); + Document toDocument(AggregationOperationContext context); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java new file mode 100644 index 0000000000..1ae935a92b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java @@ -0,0 +1,58 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; + +/** + * A {@link CriteriaDefinition criteria} to use {@code $expr} within a + * {@link org.springframework.data.mongodb.core.query.Query}. + * + * @author Christoph Strobl + * @since 4.1 + */ +public class AggregationExpressionCriteria implements CriteriaDefinition { + + private final AggregationExpression expression; + + AggregationExpressionCriteria(AggregationExpression expression) { + this.expression = expression; + } + + /** + * @param expression must not be {@literal null}. + * @return new instance of {@link AggregationExpressionCriteria}. 
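Because toDocument(AggregationOperationContext) is the only abstract method of AggregationExpression, ad-hoc expressions can be written as lambdas and reused as an $expr criteria. A sketch with invented field names ("spent", "budget"):

    import java.util.List;

    import org.bson.Document;
    import org.springframework.data.mongodb.core.query.Query;

    // Compare two fields of the same document: { $expr: { $gt: ["$spent", "$budget"] } }
    AggregationExpression overBudget =
            context -> new Document("$gt", List.of("$spent", "$budget"));

    Query query = new Query(AggregationExpressionCriteria.whereExpr(overBudget));
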
+ */ + public static AggregationExpressionCriteria whereExpr(AggregationExpression expression) { + return new AggregationExpressionCriteria(expression); + } + + @Override + public Document getCriteriaObject() { + + if (expression instanceof Expr expr) { + return new Document(getKey(), expr.get(0)); + } + return new Document(getKey(), expression); + } + + @Override + public String getKey() { + return "$expr"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java index 0d1ce4996a..00db38329f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,56 +15,59 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.data.mongodb.core.spel.ExpressionNode; import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport; import org.springframework.data.mongodb.core.spel.ExpressionTransformer; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.DBObject; - /** * Interface to type an {@link ExpressionTransformer} to the contained * {@link AggregationExpressionTransformationContext}. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + * @since 1.4 */ -interface AggregationExpressionTransformer extends - ExpressionTransformer> { +interface AggregationExpressionTransformer + extends ExpressionTransformer> { /** * A special {@link ExpressionTransformationContextSupport} to be aware of the {@link AggregationOperationContext}. - * + * * @author Oliver Gierke * @author Thomas Darimont */ - public static class AggregationExpressionTransformationContext extends - ExpressionTransformationContextSupport { + class AggregationExpressionTransformationContext + extends ExpressionTransformationContextSupport { private final AggregationOperationContext aggregationContext; /** * Creates an {@link AggregationExpressionTransformationContext}. - * + * * @param currentNode must not be {@literal null}. - * @param parentNode - * @param previousOperationObject - * @param aggregationContext must not be {@literal null}. + * @param parentNode may be {@literal null}. + * @param previousOperationObject may be {@literal null}. + * @param context must not be {@literal null}. 
*/ - public AggregationExpressionTransformationContext(T currentNode, ExpressionNode parentNode, - DBObject previousOperationObject, AggregationOperationContext context) { + public AggregationExpressionTransformationContext(T currentNode, @Nullable ExpressionNode parentNode, + @Nullable Document previousOperationObject, AggregationOperationContext context) { super(currentNode, parentNode, previousOperationObject); - Assert.notNull(context, "AggregationOperationContext must not be null!"); + Assert.notNull(context, "AggregationOperationContext must not be null"); this.aggregationContext = context; } /** * Returns the underlying {@link AggregationOperationContext}. - * - * @return + * + * @return never {@literal null}. */ public AggregationOperationContext getAggregationContext() { return aggregationContext; @@ -72,8 +75,8 @@ public AggregationOperationContext getAggregationContext() { /** * Returns the {@link FieldReference} for the current {@link ExpressionNode}. - * - * @return + * + * @return never {@literal null}. */ public FieldReference getFieldReference() { return aggregationContext.getReference(getCurrentNode().getName()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java deleted file mode 100644 index 0b88c039ce..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.aggregation; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.springframework.util.Assert; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - -/** - * An enum of supported {@link AggregationExpression}s in aggregation pipeline stages. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @since 1.10 - */ -public enum AggregationFunctionExpressions { - - SIZE; - - /** - * Returns an {@link AggregationExpression} build from the current {@link Enum} name and the given parameters. - * - * @param parameters must not be {@literal null} - * @return - */ - public AggregationExpression of(Object... parameters) { - - Assert.notNull(parameters, "Parameters must not be null!"); - return new FunctionExpression(name().toLowerCase(), parameters); - } - - /** - * An {@link AggregationExpression} representing a function call. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @since 1.10 - */ - static class FunctionExpression implements AggregationExpression { - - private final String name; - private final List values; - - /** - * Creates a new {@link FunctionExpression} for the given name and values. - * - * @param name must not be {@literal null} or empty. 
- * @param values must not be {@literal null}. - */ - public FunctionExpression(String name, Object[] values) { - - Assert.hasText(name, "Name must not be null!"); - Assert.notNull(values, "Values must not be null!"); - - this.name = name; - this.values = Arrays.asList(values); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Expression#toDbObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ - @Override - public DBObject toDbObject(AggregationOperationContext context) { - - List args = new ArrayList(values.size()); - - for (Object value : values) { - args.add(unpack(value, context)); - } - - return new BasicDBObject("$" + name, args); - } - - private static Object unpack(Object value, AggregationOperationContext context) { - - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDbObject(context); - } - - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } - - return value; - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java index 8d67f94f0c..923a1e73cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,54 @@ */ package org.springframework.data.mongodb.core.aggregation; -import com.mongodb.DBObject; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; /** * Represents one single operation in an aggregation pipeline. - * + * * @author Sebastian Herold * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl * @since 1.3 */ public interface AggregationOperation { /** - * Turns the {@link AggregationOperation} into a {@link DBObject} by using the given + * Turns the {@link AggregationOperation} into a {@link Document} by using the given * {@link AggregationOperationContext}. - * - * @return the DBObject + * + * @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}. + * @return the Document + * @deprecated since 2.2 in favor of {@link #toPipelineStages(AggregationOperationContext)}. + */ + @Deprecated + Document toDocument(AggregationOperationContext context); + + /** + * Turns the {@link AggregationOperation} into list of {@link Document stages} by using the given + * {@link AggregationOperationContext}. This allows a single {@link AggregationOptions} to add additional stages for + * eg. {@code $sort} or {@code $limit}. + * + * @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}. + * @return the pipeline stages to run through. Never {@literal null}. 
+ * @since 2.2 + */ + default List toPipelineStages(AggregationOperationContext context) { + return Collections.singletonList(toDocument(context)); + } + + /** + * Return the MongoDB operator that is used for this {@link AggregationOperation}. Aggregation operations should + * implement this method to avoid document rendering. + * + * @return the operator used for this {@link AggregationOperation}. + * @since 3.0.2 */ - DBObject toDBObject(AggregationOperationContext context); + default String getOperator() { + return toDocument(Aggregation.DEFAULT_CONTEXT).keySet().iterator().next(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java index d2a2a952be..a49c7e46d5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,146 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.beans.PropertyDescriptor; +import java.lang.reflect.Method; +import java.util.Arrays; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.beans.BeanUtils; +import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ReflectionUtils; -import com.mongodb.DBObject; +import com.mongodb.MongoClientSettings; /** * The context for an {@link AggregationOperation}. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 */ -public interface AggregationOperationContext { +public interface AggregationOperationContext extends CodecRegistryProvider { + + /** + * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata etc. + * + * @param document will never be {@literal null}. + * @return must not be {@literal null}. + */ + default Document getMappedObject(Document document) { + return getMappedObject(document, null); + } /** - * Returns the mapped {@link DBObject}, potentially converting the source considering mapping metadata etc. - * - * @param dbObject will never be {@literal null}. + * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata for the given + * type. + * + * @param document will never be {@literal null}. + * @param type can be {@literal null}. * @return must not be {@literal null}. 
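With the two default methods above, a custom stage only needs to provide toDocument(..). A minimal sketch; the $replaceRoot usage and the field name are illustrative, not part of the API:

    import org.bson.Document;

    // getOperator() and toPipelineStages(..) fall back to the defaults shown above.
    class ReplaceRootWithField implements AggregationOperation {

        private final String field;

        ReplaceRootWithField(String field) {
            this.field = field;
        }

        @Override
        public Document toDocument(AggregationOperationContext context) {
            // getReference(..) is assumed to render the (possibly mapped) "$field" reference.
            return new Document("$replaceRoot",
                    new Document("newRoot", context.getReference(field).toString()));
        }
    }
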
+ * @since 2.2 */ - DBObject getMappedObject(DBObject dbObject); + Document getMappedObject(Document document, @Nullable Class type); /** - * Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given - * field. - * + * Returns a {@link FieldReference} for the given field. + * * @param field must not be {@literal null}. - * @return + * @return the {@link FieldReference} for the given {@link Field}. + * @throws IllegalArgumentException if the context does not expose a field with the given name */ FieldReference getReference(Field field); /** - * Returns the {@link FieldReference} for the field with the given name or {@literal null} if the context does not - * expose a field with the given name. - * + * Returns the {@link FieldReference} for the field with the given name. + * * @param name must not be {@literal null} or empty. - * @return + * @return the {@link FieldReference} for the field with given {@literal name}. + * @throws IllegalArgumentException if the context does not expose a field with the given name */ FieldReference getReference(String name); + + /** + * Returns the {@link Fields} exposed by the type. May be a {@literal class} or an {@literal interface}. The default + * implementation uses {@link BeanUtils#getPropertyDescriptors(Class) property descriptors} discover fields from a + * {@link Class}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + * @since 2.2 + * @see BeanUtils#getPropertyDescriptor(Class, String) + */ + default Fields getFields(Class type) { + + Assert.notNull(type, "Type must not be null"); + + return Fields.fields(Arrays.stream(BeanUtils.getPropertyDescriptors(type)) // + .filter(it -> { // object and default methods + Method method = it.getReadMethod(); + if (method == null) { + return false; + } + if (ReflectionUtils.isObjectMethod(method)) { + return false; + } + return !method.isDefault(); + }) // + .map(PropertyDescriptor::getName) // + .toArray(String[]::new)); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that exposes {@link ExposedFields fields}. + *

                    + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that inherits exposed fields from this + * context and exposes {@link ExposedFields fields}. + *

                    + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for + * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that + * are not present in one of the previous stages or the input source, throughout the pipeline. + * + * @return a more relaxed {@link AggregationOperationContext}. + * @since 3.0 + * @deprecated since 4.3.1, {@link FieldLookupPolicy} should be specified explicitly when creating the + * AggregationOperationContext. + */ + @Deprecated(since = "4.3.1", forRemoval = true) + default AggregationOperationContext continueOnMissingFieldReference() { + return this; + } + + @Override + default CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java new file mode 100644 index 0000000000..fd5f7ed979 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java @@ -0,0 +1,131 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.lang.Nullable; + +/** + * Rendering support for {@link AggregationOperation} into a {@link List} of {@link org.bson.Document}. 
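In practice the renderer threads such a context through the stages: a $group exposes its _id and aggregated fields, so later stages may reference them by the exposed names, while unknown names fail under strict lookup. A sketch with invented field names:

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

    import org.springframework.data.domain.Sort;

    // "total" is exposed by $group and is therefore a valid $sort target.
    Aggregation aggregation = newAggregation(
            group("customerId").sum("amount").as("total"),
            sort(Sort.Direction.DESC, "total"));
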
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.10 + */ +class AggregationOperationRenderer { + + static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext(); + + /** + * Render a {@link List} of {@link AggregationOperation} given {@link AggregationOperationContext} into their + * {@link Document} representation. + * + * @param operations must not be {@literal null}. + * @param rootContext must not be {@literal null}. + * @return the {@link List} of {@link Document}. + */ + static List toDocument(List operations, AggregationOperationContext rootContext) { + + List operationDocuments = new ArrayList(operations.size()); + + AggregationOperationContext contextToUse = rootContext; + + for (AggregationOperation operation : operations) { + + operationDocuments.addAll(operation.toPipelineStages(contextToUse)); + + if (operation instanceof FieldsExposingAggregationOperation exposedFieldsOperation) { + + ExposedFields fields = exposedFieldsOperation.getFields(); + + if (operation instanceof InheritsFieldsAggregationOperation || exposedFieldsOperation.inheritsFields()) { + contextToUse = contextToUse.inheritAndExpose(fields); + } else { + contextToUse = fields.exposesNoFields() ? ConverterAwareNoOpContext.instance(rootContext) + : contextToUse.expose(fields); + } + } + + } + + return operationDocuments; + } + + private static class ConverterAwareNoOpContext implements AggregationOperationContext { + + AggregationOperationContext ctx; + + static ConverterAwareNoOpContext instance(AggregationOperationContext ctx) { + + if(ctx instanceof ConverterAwareNoOpContext noOpContext) { + return noOpContext; + } + + return new ConverterAwareNoOpContext(ctx); + } + + ConverterAwareNoOpContext(AggregationOperationContext ctx) { + this.ctx = ctx; + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return ctx.getMappedObject(document, null); + } + + @Override + public FieldReference getReference(Field field) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + @Override + public FieldReference getReference(String name) { + return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); + } + } + + /** + * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. + * + * @author Oliver Gierke + * @author Christoph Strobl + */ + private static class NoOpAggregationOperationContext implements AggregationOperationContext { + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return document; + } + + @Override + public FieldReference getReference(Field field) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + @Override + public FieldReference getReference(String name) { + return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java index c7c17b8355..327d40b8c7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,137 +15,434 @@ */ package org.springframework.data.mongodb.core.aggregation; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import java.time.Duration; +import java.util.Optional; + +import org.bson.Document; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** * Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of supported - * aggregation options can be found in the MongoDB reference documentation - * http://docs.mongodb.org/manual/reference/command/aggregate/#aggregate - * + * aggregation options can be found in the + * MongoDB reference documentation. + *

+ * As of 4.3 {@link #allowDiskUse} can be {@literal null}, indicating use of the server default, and may only be applied if + * {@link #isAllowDiskUseSet() explicitly set}. For compatibility reasons {@link #isAllowDiskUse()} will continue to + * return {@literal false} if no value has been set. + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + * @author Yadhukrishna S Pai + * @author Soumya Prakash Behera * @see Aggregation#withOptions(AggregationOptions) * @see TypedAggregation#withOptions(AggregationOptions) * @since 1.6 */ -public class AggregationOptions { +public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware { + private static final String BATCH_SIZE = "batchSize"; private static final String CURSOR = "cursor"; private static final String EXPLAIN = "explain"; private static final String ALLOW_DISK_USE = "allowDiskUse"; + private static final String COLLATION = "collation"; + private static final String COMMENT = "comment"; + private static final String MAX_TIME = "maxTimeMS"; + private static final String HINT = "hint"; - private final boolean allowDiskUse; + private final Optional allowDiskUse; private final boolean explain; - private final DBObject cursor; + private final Optional cursor; + private final Optional collation; + private final Optional comment; + private final Optional hint; + + private Optional readConcern; + + private Optional readPreference; + private Duration maxTime = Duration.ZERO; + private ResultOptions resultOptions = ResultOptions.READ; + private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED; /** * Creates a new {@link AggregationOptions}. - * + * * @param allowDiskUse whether to off-load intensive sort-operations to disk. * @param explain whether to get the execution plan for the aggregation instead of the actual results. * @param cursor can be {@literal null}, used to pass additional options to the aggregation. */ - public AggregationOptions(boolean allowDiskUse, boolean explain, DBObject cursor) { + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor) { + this(allowDiskUse, explain, cursor, null); + } + + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @since 2.0 + */ + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation) { + this(allowDiskUse, explain, cursor, collation, null, null); + } - this.allowDiskUse = allowDiskUse; + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @param comment execution comment. Can be {@literal null}.
+ * @since 2.2 + */ + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment) { + this(allowDiskUse, explain, cursor, collation, comment, null); + } + + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @param comment execution comment. Can be {@literal null}. + * @param hint can be {@literal null}, used to provide an index that would be forcibly used by query optimizer. + * @since 3.1 + */ + private AggregationOptions(@Nullable Boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment, @Nullable Object hint) { + + this.allowDiskUse = Optional.ofNullable(allowDiskUse); this.explain = explain; - this.cursor = cursor; + this.cursor = Optional.ofNullable(cursor); + this.collation = Optional.ofNullable(collation); + this.comment = Optional.ofNullable(comment); + this.hint = Optional.ofNullable(hint); + this.readConcern = Optional.empty(); + this.readPreference = Optional.empty(); } /** - * Enables writing to temporary files. When set to true, aggregation stages can write data to the _tmp subdirectory in - * the dbPath directory. - * - * @return + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursorBatchSize initial cursor batch size. + * @since 2.0 + */ + public AggregationOptions(boolean allowDiskUse, boolean explain, int cursorBatchSize) { + this(allowDiskUse, explain, createCursor(cursorBatchSize), null); + } + + /** + * Creates new {@link AggregationOptions} given {@link Document} containing aggregation options. + * + * @param document must not be {@literal null}. + * @return the {@link AggregationOptions}. + * @since 2.0 + */ + public static AggregationOptions fromDocument(Document document) { + + Assert.notNull(document, "Document must not be null"); + + Boolean allowDiskUse = document.get(ALLOW_DISK_USE, Boolean.class); + boolean explain = document.getBoolean(EXPLAIN, false); + Document cursor = document.get(CURSOR, Document.class); + Collation collation = document.containsKey(COLLATION) ? Collation.from(document.get(COLLATION, Document.class)) + : null; + String comment = document.getString(COMMENT); + Document hint = document.get(HINT, Document.class); + + AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint); + if (document.containsKey(MAX_TIME)) { + options.maxTime = Duration.ofMillis(document.getLong(MAX_TIME)); + } + return options; + } + + /** + * Obtain a new {@link Builder} for constructing {@link AggregationOptions}. + * + * @return never {@literal null}. + * @since 2.0 + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Enables writing to temporary files. When set to {@literal true}, aggregation stages can write data to the + * {@code _tmp} subdirectory in the {@code dbPath} directory. 
+ * + * @return {@literal true} if enabled; {@literal false} otherwise (or if not set). */ public boolean isAllowDiskUse() { - return allowDiskUse; + return allowDiskUse.orElse(false); + } + + /** + * Return whether {@link #isAllowDiskUse} is configured. + * + * @return {@literal true} if is {@code allowDiskUse} is configured, {@literal false} otherwise. + * @since 4.2.5 + */ + public boolean isAllowDiskUseSet() { + return allowDiskUse.isPresent(); } /** * Specifies to return the information on the processing of the pipeline. - * - * @return + * + * @return {@literal true} if enabled. */ public boolean isExplain() { return explain; } + /** + * The initial cursor batch size, if available, otherwise {@literal null}. + * + * @return the batch size or {@literal null}. + * @since 2.0 + */ + @Nullable + public Integer getCursorBatchSize() { + + if (cursor.filter(val -> val.containsKey(BATCH_SIZE)).isPresent()) { + return cursor.get().get(BATCH_SIZE, Integer.class); + } + + return null; + } + /** * Specify a document that contains options that control the creation of the cursor object. - * - * @return + * + * @return never {@literal null}. */ - public DBObject getCursor() { + public Optional getCursor() { return cursor; } + /** + * Get collation settings for string comparison. + * + * @return never {@literal null}. + * @since 2.0 + */ + public Optional getCollation() { + return collation; + } + + /** + * Get the comment for the aggregation. + * + * @return never {@literal null}. + * @since 2.2 + */ + public Optional getComment() { + return comment; + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 3.1 + * @deprecated since 4.1, use {@link #getHintObject()} instead. + */ + public Optional getHint() { + return hint.map(it -> { + if (it instanceof Document doc) { + return doc; + } + if (it instanceof String hintString) { + if (BsonUtils.isJsonDocument(hintString)) { + return BsonUtils.parse(hintString, null); + } + } + throw new IllegalStateException("Unable to read hint of type %s".formatted(it.getClass())); + }); + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 4.1 + */ + public Optional getHintObject() { + return hint; + } + + @Override + public boolean hasReadConcern() { + return readConcern.isPresent(); + } + + @Override + public ReadConcern getReadConcern() { + return readConcern.orElse(null); + } + + @Override + public boolean hasReadPreference() { + return readPreference.isPresent(); + } + + @Override + public ReadPreference getReadPreference() { + return readPreference.orElse(null); + } + + /** + * @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior. + * @since 3.0 + */ + public Duration getMaxTime() { + return maxTime; + } + + /** + * @return {@literal true} to skip results when running an aggregation. Useful in combination with {@code $merge} or + * {@code $out}. + * @since 3.0.2 + */ + public boolean isSkipResults() { + return ResultOptions.SKIP.equals(resultOptions); + } + + /** + * @return the domain type mapping strategy do apply. Never {@literal null}. + * @since 3.2 + */ + public DomainTypeMapping getDomainTypeMapping() { + return domainTypeMapping; + } + /** * Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration * applied. - * + * * @param command the aggregation command. 
* @return */ - DBObject applyAndReturnPotentiallyChangedCommand(DBObject command) { + Document applyAndReturnPotentiallyChangedCommand(Document command) { - DBObject result = new BasicDBObject(command.toMap()); + Document result = new Document(command); - if (allowDiskUse && !result.containsField(ALLOW_DISK_USE)) { - result.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet() && !result.containsKey(ALLOW_DISK_USE)) { + result.put(ALLOW_DISK_USE, isAllowDiskUse()); } - if (explain && !result.containsField(EXPLAIN)) { + if (explain && !result.containsKey(EXPLAIN)) { result.put(EXPLAIN, explain); } - if (cursor != null && !result.containsField(CURSOR)) { - result.put("cursor", cursor); + if (result.containsKey(HINT)) { + hint.ifPresent(val -> result.append(HINT, val)); + } + + if (!result.containsKey(CURSOR)) { + cursor.ifPresent(val -> result.put(CURSOR, val)); + } + + if (!result.containsKey(COLLATION)) { + collation.map(Collation::toDocument).ifPresent(val -> result.append(COLLATION, val)); + } + + if (hasExecutionTimeLimit() && !result.containsKey(MAX_TIME)) { + result.append(MAX_TIME, maxTime.toMillis()); } return result; } /** - * Returns a {@link DBObject} representation of this {@link AggregationOptions}. - * - * @return + * Returns a {@link Document} representation of this {@link AggregationOptions}. + * + * @return never {@literal null}. */ - public DBObject toDbObject() { + public Document toDocument() { - DBObject dbo = new BasicDBObject(); - dbo.put(ALLOW_DISK_USE, allowDiskUse); - dbo.put(EXPLAIN, explain); - dbo.put(CURSOR, cursor); + Document document = new Document(); + if (isAllowDiskUseSet()) { + document.put(ALLOW_DISK_USE, isAllowDiskUse()); + } + document.put(EXPLAIN, explain); + + cursor.ifPresent(val -> document.put(CURSOR, val)); + collation.ifPresent(val -> document.append(COLLATION, val.toDocument())); + comment.ifPresent(val -> document.append(COMMENT, val)); + hint.ifPresent(val -> document.append(HINT, val)); + + if (hasExecutionTimeLimit()) { + document.append(MAX_TIME, maxTime.toMillis()); + } - return dbo; + return document; } - /* (non-Javadoc) - * @see java.lang.Object#toString() + /** + * @return {@literal true} if {@link #maxTime} is set to a positive value. + * @since 3.0 */ + public boolean hasExecutionTimeLimit() { + return !maxTime.isZero() && !maxTime.isNegative(); + } + @Override public String toString() { - return toDbObject().toString(); + return toDocument().toJson(); + } + + static Document createCursor(int cursorBatchSize) { + return new Document("batchSize", cursorBatchSize); } /** * A Builder for {@link AggregationOptions}. - * + * * @author Thomas Darimont + * @author Mark Paluch */ public static class Builder { - private boolean allowDiskUse; + private Boolean allowDiskUse; private boolean explain; - private DBObject cursor; + private @Nullable Document cursor; + private @Nullable Collation collation; + private @Nullable String comment; + private @Nullable Object hint; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; + private @Nullable Duration maxTime; + private @Nullable ResultOptions resultOptions; + private @Nullable DomainTypeMapping domainTypeMapping; /** * Defines whether to off-load intensive sort-operations to disk. - * - * @param allowDiskUse - * @return + * + * @param allowDiskUse use {@literal true} to allow disk use during the aggregation. + * @return this. 
*/ public Builder allowDiskUse(boolean allowDiskUse) { @@ -155,9 +452,9 @@ public Builder allowDiskUse(boolean allowDiskUse) { /** * Defines whether to get the execution plan for the aggregation instead of the actual results. - * - * @param explain - * @return + * + * @param explain use {@literal true} to enable explain feature. + * @return this. */ public Builder explain(boolean explain) { @@ -167,23 +464,236 @@ public Builder explain(boolean explain) { /** * Additional options to the aggregation. - * - * @param cursor - * @return + * + * @param cursor must not be {@literal null}. + * @return this. */ - public Builder cursor(DBObject cursor) { + public Builder cursor(Document cursor) { this.cursor = cursor; return this; } + /** + * Define the initial cursor batch size. + * + * @param batchSize use a positive int. + * @return this. + * @since 2.0 + */ + public Builder cursorBatchSize(int batchSize) { + + this.cursor = createCursor(batchSize); + return this; + } + + /** + * Define collation settings for string comparison. + * + * @param collation can be {@literal null}. + * @return this. + * @since 2.0 + */ + public Builder collation(@Nullable Collation collation) { + + this.collation = collation; + return this; + } + + /** + * Define a comment to describe the execution. + * + * @param comment can be {@literal null}. + * @return this. + * @since 2.2 + */ + public Builder comment(@Nullable String comment) { + + this.comment = comment; + return this; + } + + /** + * Define a hint that is used by query optimizer to to fulfill the aggregation. + * + * @param hint can be {@literal null}. + * @return this. + * @since 3.1 + */ + public Builder hint(@Nullable Document hint) { + + this.hint = hint; + return this; + } + + /** + * Define a hint that is used by query optimizer to to fulfill the aggregation. + * + * @param indexName can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder hint(@Nullable String indexName) { + + this.hint = indexName; + return this; + } + + /** + * Define a {@link ReadConcern} to apply to the aggregation. + * + * @param readConcern can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder readConcern(@Nullable ReadConcern readConcern) { + + this.readConcern = readConcern; + return this; + } + + /** + * Define a {@link ReadPreference} to apply to the aggregation. + * + * @param readPreference can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder readPreference(@Nullable ReadPreference readPreference) { + + this.readPreference = readPreference; + return this; + } + + /** + * Set the time limit for processing. + * + * @param maxTime {@link Duration#ZERO} is used for the default unbounded behavior. {@link Duration#isNegative() + * Negative} values will be ignored. + * @return this. + * @since 3.0 + */ + public Builder maxTime(@Nullable Duration maxTime) { + + this.maxTime = maxTime; + return this; + } + + /** + * Run the aggregation, but do NOT read the aggregation result from the store.
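Chained together, the builder methods above read like the following sketch; the comment string and limits are arbitrary, and static imports of Aggregation.* plus Criteria are assumed:

    import java.time.Duration;

    AggregationOptions options = AggregationOptions.builder()
            .allowDiskUse(true)
            .cursorBatchSize(100)
            .comment("nightly-report")
            .maxTime(Duration.ofSeconds(30))
            .build();

    Aggregation aggregation = newAggregation(
            match(Criteria.where("active").is(true)))
            .withOptions(options);
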
                    + * If the expected result of the aggregation is rather large, eg. when using an {@literal $out} operation, this + * option allows to execute the aggregation without having the cursor return the operation result. + * + * @return this. + * @since 3.0.2 + */ + public Builder skipOutput() { + + this.resultOptions = ResultOptions.SKIP; + return this; + } + + /** + * Apply a strict domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations throwing errors for non-existent, but referenced fields. + * + * @return this. + * @since 3.2 + */ + public Builder strictMapping() { + + this.domainTypeMapping = DomainTypeMapping.STRICT; + return this; + } + + /** + * Apply a relaxed domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations using the user provided name if a referenced field does not exist. + * + * @return this. + * @since 3.2 + */ + public Builder relaxedMapping() { + + this.domainTypeMapping = DomainTypeMapping.RELAXED; + return this; + } + + /** + * Apply no domain type mapping at all taking the pipeline as-is. + * + * @return this. + * @since 3.2 + */ + public Builder noMapping() { + + this.domainTypeMapping = DomainTypeMapping.NONE; + return this; + } + /** * Returns a new {@link AggregationOptions} instance with the given configuration. - * - * @return + * + * @return new instance of {@link AggregationOptions}. */ public AggregationOptions build() { - return new AggregationOptions(allowDiskUse, explain, cursor); + + AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint); + if (maxTime != null) { + options.maxTime = maxTime; + } + if (resultOptions != null) { + options.resultOptions = resultOptions; + } + if (domainTypeMapping != null) { + options.domainTypeMapping = domainTypeMapping; + } + if (readConcern != null) { + options.readConcern = Optional.of(readConcern); + } + if (readPreference != null) { + options.readPreference = Optional.of(readPreference); + } + + return options; } } + + /** + * @since 3.0 + */ + private enum ResultOptions { + + /** + * Just do it!, and do not read the operation result. + */ + SKIP, + /** + * Read the aggregation result from the cursor. + */ + READ + } + + /** + * Aggregation pipeline Domain type mappings supported by the mapping layer. + * + * @since 3.2 + */ + public enum DomainTypeMapping { + + /** + * Mapping throws errors for non-existent, but referenced fields. + */ + STRICT, + + /** + * Fields that do not exist in the model are treated as-is. + */ + RELAXED, + + /** + * Do not attempt to map fields against the model and treat the entire pipeline as-is. + */ + NONE + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java new file mode 100644 index 0000000000..68662ec0df --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java @@ -0,0 +1,167 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Predicate; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * The {@link AggregationPipeline} holds the collection of {@link AggregationOperation aggregation stages}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0.2 + */ +public class AggregationPipeline { + + private final List pipeline; + + public static AggregationPipeline of(AggregationOperation... stages) { + return new AggregationPipeline(Arrays.asList(stages)); + } + + /** + * Create an empty pipeline + */ + public AggregationPipeline() { + this(new ArrayList<>()); + } + + /** + * Create a new pipeline with given {@link AggregationOperation stages}. + * + * @param aggregationOperations must not be {@literal null}. + */ + public AggregationPipeline(List aggregationOperations) { + + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + pipeline = new ArrayList<>(aggregationOperations); + } + + /** + * Append the given {@link AggregationOperation stage} to the pipeline. + * + * @param aggregationOperation must not be {@literal null}. + * @return this. + */ + public AggregationPipeline add(AggregationOperation aggregationOperation) { + + Assert.notNull(aggregationOperation, "AggregationOperation must not be null"); + + pipeline.add(aggregationOperation); + return this; + } + + /** + * Get the list of {@link AggregationOperation aggregation stages}. + * + * @return never {@literal null}. + */ + public List getOperations() { + return Collections.unmodifiableList(pipeline); + } + + List toDocuments(AggregationOperationContext context) { + + verify(); + return AggregationOperationRenderer.toDocument(pipeline, context); + } + + /** + * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}. + */ + public boolean isOutOrMerge() { + + if (isEmpty()) { + return false; + } + + AggregationOperation operation = pipeline.get(pipeline.size() - 1); + return isOut(operation) || isMerge(operation); + } + + void verify() { + + // check $out/$merge is the last operation if it exists + for (AggregationOperation operation : pipeline) { + + if (isOut(operation) && !isLast(operation)) { + throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline"); + } + + if (isMerge(operation) && !isLast(operation)) { + throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline"); + } + } + } + + /** + * Return whether this aggregation pipeline defines a {@code $unionWith} stage that may contribute documents from + * other collections. Checking for presence of union stages is useful when attempting to determine the aggregation + * element type for mapping metadata computation. + * + * @return {@literal true} the aggregation pipeline makes use of {@code $unionWith}. 
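As a usage sketch for the pipeline type above (stage choices and the collection name are made up; `verify()` and `toDocuments(...)` are package-private, so only the public surface appears):

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregationPipelineExample {

	boolean endsInOut() {

		AggregationPipeline pipeline = AggregationPipeline.of( //
				Aggregation.match(Criteria.where("status").is("ACTIVE")), //
				Aggregation.out("archive")); // $out has to remain the last stage

		return pipeline.isOutOrMerge(); // true: the pipeline ends in $out
	}
}
```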
+ * @since 3.1 + */ + public boolean containsUnionWith() { + return containsOperation(AggregationPipeline::isUnionWith); + } + + /** + * @return {@literal true} if the pipeline does not contain any stages. + * @since 3.1 + */ + public boolean isEmpty() { + return pipeline.isEmpty(); + } + + private boolean containsOperation(Predicate predicate) { + + if (isEmpty()) { + return false; + } + + for (AggregationOperation element : pipeline) { + if (predicate.test(element)) { + return true; + } + } + + return false; + } + + private boolean isLast(AggregationOperation aggregationOperation) { + return pipeline.indexOf(aggregationOperation) == pipeline.size() - 1; + } + + private static boolean isUnionWith(AggregationOperation operator) { + return operator instanceof UnionWithOperation || operator.getOperator().equals("$unionWith"); + } + + private static boolean isMerge(AggregationOperation operator) { + return operator instanceof MergeOperation || operator.getOperator().equals("$merge"); + } + + private static boolean isOut(AggregationOperation operator) { + return operator instanceof OutOperation || operator.getOperator().equals("$out"); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java index 2fbf96c4cb..438eb9e49f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,35 +19,37 @@ import java.util.Iterator; import java.util.List; +import org.bson.Document; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.DBObject; - /** * Collects the results of executing an aggregation operation. - * + * * @author Tobias Trelle * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch * @param The class in which the results are mapped onto. * @since 1.3 */ public class AggregationResults implements Iterable { private final List mappedResults; - private final DBObject rawResults; - private final String serverUsed; + private final Document rawResults; + private final @Nullable String serverUsed; /** * Creates a new {@link AggregationResults} instance from the given mapped and raw results. - * + * * @param mappedResults must not be {@literal null}. * @param rawResults must not be {@literal null}. 
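To show how the result type above is typically consumed, a small sketch (the raw `Document` payload type is an arbitrary choice):

```java
import java.util.List;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

class AggregationResultsExample {

	Document firstMappedOrNull(AggregationResults<Document> results) {

		List<Document> mapped = results.getMappedResults(); // never null, possibly empty
		Document raw = results.getRawResults(); // complete server response, e.g. for diagnostics
		return mapped.isEmpty() ? null : mapped.get(0);
	}
}
```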
*/ - public AggregationResults(List mappedResults, DBObject rawResults) { + public AggregationResults(List mappedResults, Document rawResults) { - Assert.notNull(mappedResults); - Assert.notNull(rawResults); + Assert.notNull(mappedResults, "List of mapped results must not be null"); + Assert.notNull(rawResults, "Raw results must not be null"); this.mappedResults = Collections.unmodifiableList(mappedResults); this.rawResults = rawResults; @@ -56,8 +58,8 @@ public AggregationResults(List mappedResults, DBObject rawResults) { /** * Returns the aggregation results. - * - * @return + * + * @return the list of already mapped results or an empty one if none found. */ public List getMappedResults() { return mappedResults; @@ -65,45 +67,44 @@ public List getMappedResults() { /** * Returns the unique mapped result. Assumes no result or exactly one. - * - * @return + * + * @return the single already mapped result object or raise an error if more than one found. * @throws IllegalArgumentException in case more than one result is available. */ + @Nullable public T getUniqueMappedResult() { - Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one!"); + Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one"); return mappedResults.size() == 1 ? mappedResults.get(0) : null; } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ public Iterator iterator() { return mappedResults.iterator(); } /** * Returns the server that has been used to perform the aggregation. - * - * @return + * + * @return can be {@literal null}. */ + @Nullable public String getServerUsed() { return serverUsed; } /** * Returns the raw result that was returned by the server. - * - * @return + * + * @return the raw response. * @since 1.6 */ - public DBObject getRawResults() { + public Document getRawResults() { return rawResults; } + @Nullable private String parseServerUsed() { Object object = rawResults.get("serverUsed"); - return object instanceof String ? (String) object : null; + return object instanceof String stringValue ? stringValue : null; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java new file mode 100644 index 0000000000..1626d672bc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java @@ -0,0 +1,71 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * An {@link AggregationExpression} that renders a MongoDB Aggregation Framework expression from the AST of a + * SpEL + * expression.
                    + *
                    + * Samples:
                    + *
                    + * 
                    + * // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
                    + * expressionOf("qty > 100 && qty < 250);
                    + *
                    + * // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
                    + * expressionOf("cond(a >= 42, 'answer', 'no-answer')");
                    + * 
                    + * 
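Such an expression is usually attached to a projection stage. A brief sketch, assuming a document with a numeric `price` field (field and alias names are made up):

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

class SpELProjectionExample {

	ProjectionOperation discountedPrice() {

		// Renders { discounted : { $multiply : [ "$price", 0.9 ] } } inside $project.
		return Aggregation.project() //
				.and(AggregationSpELExpression.expressionOf("price * 0.9")).as("discounted");
	}
}
```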
                    + * + * @author Christoph Strobl + * @author Mark Paluch + * @see SpelExpressionTransformer + * @since 1.10 + */ +public class AggregationSpELExpression implements AggregationExpression { + + private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer(); + private final String rawExpression; + private final Object[] parameters; + + private AggregationSpELExpression(String rawExpression, Object[] parameters) { + + this.rawExpression = rawExpression; + this.parameters = parameters; + } + + /** + * Creates new {@link AggregationSpELExpression} for the given {@literal expressionString} and {@literal parameters}. + * + * @param expressionString must not be {@literal null}. + * @param parameters can be empty. + * @return new instance of {@link AggregationSpELExpression}. + */ + public static AggregationSpELExpression expressionOf(String expressionString, Object... parameters) { + + Assert.notNull(expressionString, "ExpressionString must not be null"); + return new AggregationSpELExpression(expressionString, parameters); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return (Document) TRANSFORMER.transform(rawExpression, context, parameters); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java new file mode 100644 index 0000000000..15d700309e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java @@ -0,0 +1,300 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.StringJoiner; +import java.util.stream.Collectors; + +import org.bson.Document; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Abstraction for {@code db.collection.update()} using an aggregation pipeline. Aggregation pipeline updates use a more + * expressive update statement expressing conditional updates based on current field values or updating one field using + * the value of another field(s). + * + *
                    + * AggregationUpdate update = AggregationUpdate.update().set("average")
                    + * 		.toValue(ArithmeticOperators.valueOf("tests").avg()).set("grade")
                    + * 		.toValue(ConditionalOperators
                    + * 				.switchCases(CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"),
                    + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"),
                    + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"),
                    + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D"))
                    + * 				.defaultTo("F"));
                    + * 
                    + * + * The above sample is equivalent to the JSON update statement: + * + *
                    + * db.collection.update(
                    + *    { },
                    + *    [
                    + *      { $set: { average : { $avg: "$tests" } } },
                    + *      { $set: { grade: { $switch: {
                    + *                            branches: [
                    + *                                { case: { $gte: [ "$average", 90 ] }, then: "A" },
                    + *                                { case: { $gte: [ "$average", 80 ] }, then: "B" },
                    + *                                { case: { $gte: [ "$average", 70 ] }, then: "C" },
                    + *                                { case: { $gte: [ "$average", 60 ] }, then: "D" }
                    + *                            ],
                    + *                            default: "F"
                    + *      } } } }
                    + *    ],
                    + *    { multi: true }
                    + * )
                    + * 
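As a usage sketch, an update pipeline like the one above is handed to the template API; the empty query, field names, and collection are illustrative:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.query.Query;

class AggregationUpdateExample {

	void recalculateAverages(MongoOperations operations) {

		AggregationUpdate update = AggregationUpdate.update() // start the update pipeline
				.set("average").toValueOf(ArithmeticOperators.valueOf("tests").avg()) // $set stage
				.unset("temporaryScore"); // trailing $unset stage

		operations.updateMulti(new Query(), update, "students"); // executes with { multi: true }
	}
}
```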
                    + * + * @author Christoph Strobl + * @author Mark Paluch + * @see MongoDB + * Reference Documentation + * @since 3.0 + */ +public class AggregationUpdate extends Aggregation implements UpdateDefinition { + + private boolean isolated = false; + private final Set keysTouched = new HashSet<>(); + + /** + * Create new {@link AggregationUpdate}. + */ + protected AggregationUpdate() { + this(new ArrayList<>()); + } + + /** + * Create new {@link AggregationUpdate} with the given aggregation pipeline to apply. + * + * @param pipeline must not be {@literal null}. + */ + protected AggregationUpdate(List pipeline) { + + super(pipeline); + + for (AggregationOperation operation : pipeline) { + if (operation instanceof FieldsExposingAggregationOperation exposingAggregationOperation) { + exposingAggregationOperation.getFields().forEach(it -> keysTouched.add(it.getName())); + } + } + } + + /** + * Start defining the update pipeline to execute. + * + * @return new instance of {@link AggregationUpdate}. + */ + public static AggregationUpdate update() { + return new AggregationUpdate(); + } + + /** + * Create a new AggregationUpdate from the given {@link AggregationOperation}s. + * + * @return new instance of {@link AggregationUpdate}. + */ + public static AggregationUpdate from(List pipeline) { + return new AggregationUpdate(pipeline); + } + + /** + * Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + * @param setOperation must not be {@literal null}. + * @return this. + * @see $set Aggregation Reference + */ + public AggregationUpdate set(SetOperation setOperation) { + + Assert.notNull(setOperation, "SetOperation must not be null"); + + setOperation.getFields().forEach(it -> { + keysTouched.add(it.getName()); + }); + pipeline.add(setOperation); + return this; + } + + /** + * {@code $unset} removes/excludes fields from documents. + * + * @param unsetOperation must not be {@literal null}. + * @return this. + * @see $unset Aggregation + * Reference + */ + public AggregationUpdate unset(UnsetOperation unsetOperation) { + + Assert.notNull(unsetOperation, "UnsetOperation must not be null"); + + pipeline.add(unsetOperation); + keysTouched.addAll(unsetOperation.removedFieldNames()); + return this; + } + + /** + * {@code $replaceWith} replaces the input document with the specified document. The operation replaces all existing + * fields in the input document, including the _id field. + * + * @param replaceWithOperation must not be {@literal null}. + * @return this. + * @see $replaceWith Aggregation + * Reference + */ + public AggregationUpdate replaceWith(ReplaceWithOperation replaceWithOperation) { + + Assert.notNull(replaceWithOperation, "ReplaceWithOperation must not be null"); + pipeline.add(replaceWithOperation); + return this; + } + + /** + * {@code $replaceWith} replaces the input document with the value. + * + * @param value must not be {@literal null}. + * @return this. + */ + public AggregationUpdate replaceWith(Object value) { + + Assert.notNull(value, "Value must not be null"); + return replaceWith(ReplaceWithOperation.replaceWithValue(value)); + } + + /** + * Fluent API variant for {@code $set} adding a single {@link SetOperation pipeline operation} every time. To update + * multiple fields within one {@link SetOperation} use {@link #set(SetOperation)}. + * + * @param key must not be {@literal null}. + * @return new instance of {@link SetValueAppender}. 
+ * @see #set(SetOperation) + */ + public SetValueAppender set(String key) { + + Assert.notNull(key, "Key must not be null"); + + return new SetValueAppender() { + + @Override + public AggregationUpdate toValue(@Nullable Object value) { + return set(SetOperation.builder().set(key).toValue(value)); + } + + @Override + public AggregationUpdate toValueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return set(SetOperation.builder().set(key).toValueOf(value)); + } + }; + } + + /** + * Short for {@link #unset(UnsetOperation)}. + * + * @param keys the fields to remove. + * @return this. + */ + public AggregationUpdate unset(String... keys) { + + Assert.notNull(keys, "Keys must not be null"); + Assert.noNullElements(keys, "Keys must not contain null elements"); + + return unset(new UnsetOperation(Arrays.stream(keys).map(Fields::field).collect(Collectors.toList()))); + } + + /** + * Prevents a write operation that affects multiple documents from yielding to other reads or writes + * once the first document is written.
                    + * Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, UpdateDefinition, Class)}. + * + * @return never {@literal null}. + */ + public AggregationUpdate isolated() { + + isolated = true; + return this; + } + + @Override + public Boolean isIsolated() { + return isolated; + } + + @Override + public Document getUpdateObject() { + return new Document("", toPipeline(Aggregation.DEFAULT_CONTEXT)); + } + + @Override + public boolean modifies(String key) { + return keysTouched.contains(key); + } + + @Override + public void inc(String key) { + set(new SetOperation(key, ArithmeticOperators.valueOf(key).add(1))); + } + + @Override + public List getArrayFilters() { + return Collections.emptyList(); + } + + @Override + public String toString() { + + StringJoiner joiner = new StringJoiner(",\n", "[\n", "\n]"); + toPipeline(Aggregation.DEFAULT_CONTEXT).stream().map(SerializationUtils::serializeToJsonSafely) + .forEach(joiner::add); + return joiner.toString(); + } + + /** + * Fluent API AggregationUpdate builder. + * + * @author Christoph Strobl + */ + public interface SetValueAppender { + + /** + * Define the target value as is. + * + * @param value can be {@literal null}. + * @return never {@literal null}. + */ + AggregationUpdate toValue(@Nullable Object value); + + /** + * Define the target value as value, an {@link AggregationExpression} or a {@link Field} reference. + * + * @param value can be {@literal null}. + * @return never {@literal null}. + */ + AggregationUpdate toValueOf(Object value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java new file mode 100644 index 0000000000..e84f7ed1b0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java @@ -0,0 +1,48 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.data.domain.Range; +import org.springframework.util.Assert; + +/** + * Utility methods for aggregation operation implementations. + * + * @author Oliver Gierke + */ +interface AggregationUtils { + + /** + * Converts the given {@link Range} into an array of values. + * + * @param range must not be {@literal null}. + * @return never {@literal null}. 
+ */ + static List toRangeValues(Range range) { + + Assert.notNull(range, "Range must not be null"); + + List result = new ArrayList(2); + result.add(range.getLowerBound().getValue() + .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded"))); + range.getUpperBound().getValue().ifPresent(result::add); + + return result; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java new file mode 100644 index 0000000000..ed79202345 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java @@ -0,0 +1,133 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * A special field that points to a variable {@code $$} expression. + * + * @author Christoph Strobl + * @since 4.1.3 + */ +public interface AggregationVariable extends Field { + + String PREFIX = "$$"; + + /** + * @return {@literal true} if the fields {@link #getName() name} does not match the defined {@link #getTarget() + * target}. + */ + @Override + default boolean isAliased() { + return !ObjectUtils.nullSafeEquals(getName(), getTarget()); + } + + @Override + default String getName() { + return getTarget(); + } + + @Override + default boolean isInternal() { + return false; + } + + /** + * Create a new {@link AggregationVariable} for the given name. + *

                    + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable variable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + }; + } + + /** + * Create a new {@link #isInternal() local} {@link AggregationVariable} for the given name. + *

                    + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable localVariable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + + @Override + public boolean isInternal() { + return true; + } + }; + } + + /** + * Check if the given field name reference may be variable. + * + * @param fieldRef can be {@literal null}. + * @return true if given value matches the variable identification pattern. + */ + static boolean isVariable(@Nullable String fieldRef) { + return fieldRef != null && fieldRef.stripLeading().matches("^\\$\\$\\w.*"); + } + + /** + * Check if the given field may be variable. + * + * @param field can be {@literal null}. + * @return true if given {@link Field field} is an {@link AggregationVariable} or if its value is a + * {@link #isVariable(String) variable}. + */ + static boolean isVariable(Field field) { + + if (field instanceof AggregationVariable) { + return true; + } + return isVariable(field.getTarget()); + } + + private static String prefixVariable(String variable) { + + var trimmed = variable.stripLeading(); + return trimmed.startsWith(PREFIX) ? trimmed : (PREFIX + trimmed); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java new file mode 100644 index 0000000000..e2c31c6346 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -0,0 +1,3221 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Median; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnit; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Mushtaq Ahmed + * @author Julia Lee + * @since 1.10 + */ +public class ArithmeticOperators { + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ArithmeticOperatorFactory}. + */ + public static ArithmeticOperatorFactory valueOf(String fieldReference) { + return new ArithmeticOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ArithmeticOperatorFactory}. + */ + public static ArithmeticOperatorFactory valueOf(AggregationExpression expression) { + return new ArithmeticOperatorFactory(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns a random float between {@code 0} and {@code 1} each time it + * is called. + * + * @return new instance of {@link Rand}. + * @since 3.3 + */ + public static Rand rand() { + return new Rand(); + } + + /** + * @author Christoph Strobl + */ + public static class ArithmeticOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link ArithmeticOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public ArithmeticOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link ArithmeticOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. 
+ */ + public ArithmeticOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that returns the absolute value of the associated number. + * + * @return new instance of {@link Abs}. + */ + public Abs abs() { + return usesFieldRef() ? Abs.absoluteValueOf(fieldReference) : Abs.absoluteValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that adds the value of {@literal fieldReference} to the associated + * number. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public Add add(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createAdd().add(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that adds the resulting value of the given + * {@link AggregationExpression} to the associated number. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public Add add(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createAdd().add(expression); + } + + /** + * Creates new {@link AggregationExpression} that adds the given {@literal value} to the associated number. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public Add add(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createAdd().add(value); + } + + private Add createAdd() { + return usesFieldRef() ? Add.valueOf(fieldReference) : Add.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the smallest integer greater than or equal to the + * associated number. + * + * @return new instance of {@link Ceil}. + */ + public Ceil ceil() { + return usesFieldRef() ? Ceil.ceilValueOf(fieldReference) : Ceil.ceilValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? 
derivative.unit(unit) : derivative; + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by number referenced via + * {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public Divide divideBy(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createDivide().divideBy(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by number extracted via + * {@literal expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public Divide divideBy(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createDivide().divideBy(expression); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by given {@literal value}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public Divide divideBy(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createDivide().divideBy(value); + } + + private Divide createDivide() { + return usesFieldRef() ? Divide.valueOf(fieldReference) : Divide.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that raises Euler’s number (i.e. e ) on the associated number. + * + * @return new instance of {@link Exp}. + */ + public Exp exp() { + return usesFieldRef() ? Exp.expValueOf(fieldReference) : Exp.expValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the largest integer less than or equal to the associated + * number. + * + * @return new instance of {@link Floor}. + */ + public Floor floor() { + return usesFieldRef() ? Floor.floorValueOf(fieldReference) : Floor.floorValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral() { + return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Integral integral(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return integral(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral(String unit) { + + Assert.hasText(unit, "Unit must not be empty"); + + return integral().unit(unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the associated + * number. + * + * @return new instance of {@link Ln}. + */ + public Ln ln() { + return usesFieldRef() ? 
Ln.lnValueOf(fieldReference) : Ln.lnValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the log of the associated number in the specified base + * referenced via {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public Log log(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createLog().log(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the log of the associated number in the specified base + * extracted by given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public Log log(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createLog().log(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the log of the associated number in the specified + * {@literal base}. + * + * @param base must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public Log log(Number base) { + + Assert.notNull(base, "Base must not be null"); + return createLog().log(base); + } + + private Log createLog() { + return usesFieldRef() ? Log.valueOf(fieldReference) : Log.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the log base 10 for the associated number. + * + * @return new instance of {@link Log10}. + */ + public Log10 log10() { + return usesFieldRef() ? Log10.log10ValueOf(fieldReference) : Log10.log10ValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by another and returns the + * remainder. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public Mod mod(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createMod().mod(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by another and returns the + * remainder. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public Mod mod(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createMod().mod(expression); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by another and returns the + * remainder. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public Mod mod(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createMod().mod(value); + } + + private Mod createMod() { + return usesFieldRef() ? Mod.valueOf(fieldReference) : Mod.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that multiplies the associated number with another. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public Multiply multiplyBy(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createMultiply().multiplyBy(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that multiplies the associated number with another. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Multiply}.
+ */ + public Multiply multiplyBy(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createMultiply().multiplyBy(expression); + } + + /** + * Creates new {@link AggregationExpression} that multiplies the associated number with another. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public Multiply multiplyBy(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createMultiply().multiplyBy(value); + } + + private Multiply createMultiply() { + return usesFieldRef() ? Multiply.valueOf(fieldReference) : Multiply.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public Pow pow(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createPow().pow(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public Pow pow(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createPow().pow(expression); + } + + /** + * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public Pow pow(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createPow().pow(value); + } + + private Pow createPow() { + return usesFieldRef() ? Pow.valueOf(fieldReference) : Pow.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the square root of the associated number. + * + * @return new instance of {@link Sqrt}. + */ + public Sqrt sqrt() { + return usesFieldRef() ? Sqrt.sqrtOf(fieldReference) : Sqrt.sqrtOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that subtracts value of given from the associated number. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public Subtract subtract(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createSubtract().subtract(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that subtracts value of given from the associated number. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public Subtract subtract(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createSubtract().subtract(expression); + } + + /** + * Creates new {@link AggregationExpression} that subtracts value from the associated number. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public Subtract subtract(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createSubtract().subtract(value); + } + + private Subtract createSubtract() { + return usesFieldRef() ? Subtract.valueOf(fieldReference) : Subtract.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that truncates a number to its integer. 
+ * + * @return new instance of {@link Trunc}. + */ + public Trunc trunc() { + return usesFieldRef() ? Trunc.truncValueOf(fieldReference) : Trunc.truncValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates and returns the sum of numeric values. + * + * @return new instance of {@link Sum}. + */ + public Sum sum() { + return usesFieldRef() ? AccumulatorOperators.Sum.sumOf(fieldReference) + : AccumulatorOperators.Sum.sumOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the average value of the numeric values. + * + * @return new instance of {@link Avg}. + */ + public Avg avg() { + return usesFieldRef() ? AccumulatorOperators.Avg.avgOf(fieldReference) + : AccumulatorOperators.Avg.avgOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the maximum value. + * + * @return new instance of {@link Max}. + */ + public Max max() { + return usesFieldRef() ? AccumulatorOperators.Max.maxOf(fieldReference) + : AccumulatorOperators.Max.maxOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the minimum value. + * + * @return new instance of {@link Min}. + */ + public Min min() { + return usesFieldRef() ? AccumulatorOperators.Min.minOf(fieldReference) + : AccumulatorOperators.Min.minOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the population standard deviation of the input values. + * + * @return new instance of {@link StdDevPop}. + */ + public StdDevPop stdDevPop() { + return usesFieldRef() ? AccumulatorOperators.StdDevPop.stdDevPopOf(fieldReference) + : AccumulatorOperators.StdDevPop.stdDevPopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sample standard deviation of the input values. + * + * @return new instance of {@link StdDevSamp}. + */ + public StdDevSamp stdDevSamp() { + return usesFieldRef() ? AccumulatorOperators.StdDevSamp.stdDevSampOf(fieldReference) + : AccumulatorOperators.StdDevSamp.stdDevSampOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + }
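Tying a few of the factory methods above together: each method returns a concrete expression rather than the factory, so nesting goes through `valueOf(...)` again. A sketch with made-up field names:

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

class ArithmeticProjectionExample {

	ProjectionOperation scorePercentage() {

		// { $divide : [ "$score", "$maxScore" ] }
		AggregationExpression ratio = ArithmeticOperators.valueOf("score").divideBy("maxScore");

		// { $multiply : [ { $divide : ... }, 100 ] }
		AggregationExpression percentage = ArithmeticOperators.valueOf(ratio).multiplyBy(100);

		// { $round : [ { $multiply : ... }, 1 ] }, exposed as "percent"
		return Aggregation.project().and(ArithmeticOperators.valueOf(percentage).roundToPlace(1)).as("percent");
	}
}
```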
+ + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal + * place. + * + * @return new instance of {@link Round}. + * @since 3.0 + */ + public Round round() { + return usesFieldRef() ? Round.roundValueOf(fieldReference) : Round.roundValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that rounds a number to a specified decimal place. + * + * @return new instance of {@link Round}. + * @since 3.0 + */ + public Round roundToPlace(int place) { + return round().place(place); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin() { + return sin(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin(AngularUnit unit) { + return usesFieldRef() ? Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh() { + return sinh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic sine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh(AngularUnit unit) { + return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. + * + * @return new instance of {@link ASin}. + * @since 3.3 + */ + public ASin asin() { + return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. + * + * @return new instance of {@link ASinh}. + * @since 3.3 + */ + public ASinh asinh() { + return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cos}.
+ * @since 3.3 + */ + public Cos cos() { + return cos(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Cos}. + * @since 3.3 + */ + public Cos cos(AngularUnit unit) { + return usesFieldRef() ? Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh() { + return cosh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh(AngularUnit unit) { + return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse cosine of a numeric value. + * + * @return new instance of {@link ACos}. + * @since 3.4 + */ + public ACos acos() { + return usesFieldRef() ? ACos.acosOf(fieldReference) : ACos.acosOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a numeric value. + * + * @return new instance of {@link ACosh}. + * @since 3.4 + */ + public ACosh acosh() { + return usesFieldRef() ? ACosh.acoshOf(fieldReference) : ACosh.acoshOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tan tan() { + return tan(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. + * + * @return new instance of {@link ATan}. + * @since 3.3 + */ + public ATan atan() { + return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given numeric value in the argument. + * + * @param value the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createATan2().atan2of(value); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given field reference in the argument. + * + * @param fieldReference the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createATan2().atan2of(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given {@link AggregationExpression} in the argument. + * + * @param expression the expression evaluating to a numeric value + * @return new instance of {@link ATan2}.
+ * @since 3.3 + */ + public ATan2 atan2(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createATan2().atan2of(expression); + } + + private ATan2 createATan2() { + + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. + * + * @return new instance of {@link ATanh}. + * @since 3.3 + */ + public ATanh atanh() { + return usesFieldRef() ? ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tan tan(AngularUnit unit) { + return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tanh tanh() { + return tanh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Tanh}. + * @since 3.3 + */ + public Tanh tanh(AngularUnit unit) { + return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * numeric value. + * + * @return new instance of {@link Percentile}. + * @param percentages must not be {@literal null}. + * @since 4.2 + */ + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? AccumulatorOperators.Percentile.percentileOf(fieldReference) + : AccumulatorOperators.Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * numeric value. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + public Median median() { + return usesFieldRef() ? AccumulatorOperators.Median.medianOf(fieldReference) + : AccumulatorOperators.Median.medianOf(expression); + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $abs}. + * + * @author Christoph Strobl + */ + public static class Abs extends AbstractAggregationExpression { + + private Abs(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$abs"; + } + + /** + * Creates new {@link Abs}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Abs}. + */ + public static Abs absoluteValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Abs(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Abs}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Abs}. + */ + public static Abs absoluteValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Abs(expression); + } + + /** + * Creates new {@link Abs}. 
+ * + * @param value must not be {@literal null}. + * @return new instance of {@link Abs}. + */ + public static Abs absoluteValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Abs(value); + } + } + + /** + * {@link AggregationExpression} for {@code $add}. + * + * @author Christoph Strobl + */ + public static class Add extends AbstractAggregationExpression { + + protected Add(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$add"; + } + + /** + * Creates new {@link Add}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public static Add valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Add(asFields(fieldReference)); + } + + /** + * Creates new {@link Add}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public static Add valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Add(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Add}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public static Add valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Add(Collections.singletonList(value)); + } + + /** + * Add the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public Add add(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Add(append(Fields.field(fieldReference))); + } + + /** + * Add the evaluation result of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public Add add(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Add(append(expression)); + } + + /** + * Add the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Add}. + */ + public Add add(Number value) { + return new Add(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $ceil}. + * + * @author Christoph Strobl + */ + public static class Ceil extends AbstractAggregationExpression { + + private Ceil(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$ceil"; + } + + /** + * Creates new {@link Ceil}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Ceil}. + */ + public static Ceil ceilValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Ceil(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Ceil}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Ceil}. + */ + public static Ceil ceilValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Ceil(expression); + } + + /** + * Creates new {@link Ceil}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Ceil}. 
+ */ + public static Ceil ceilValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Ceil(value); + } + } + + /** + * {@link AggregationExpression} for {@code $divide}. + * + * @author Christoph Strobl + */ + public static class Divide extends AbstractAggregationExpression { + + private Divide(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$divide"; + } + + /** + * Creates new {@link Divide}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public static Divide valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Divide(asFields(fieldReference)); + } + + /** + * Creates new {@link Divide}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public static Divide valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Divide(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Divide}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public static Divide valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Divide(Collections.singletonList(value)); + } + + /** + * Divide by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public Divide divideBy(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Divide(append(Fields.field(fieldReference))); + } + + /** + * Divide by the evaluation results of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public Divide divideBy(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Divide(append(expression)); + } + + /** + * Divide by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + public Divide divideBy(Number value) { + return new Divide(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $exp}. + * + * @author Christoph Strobl + */ + public static class Exp extends AbstractAggregationExpression { + + private Exp(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$exp"; + } + + /** + * Creates new {@link Exp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Exp}. + */ + public static Exp expValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Exp(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Exp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Exp}. + */ + public static Exp expValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Exp(expression); + } + + /** + * Creates new {@link Exp}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Exp}. 
+ */ + public static Exp expValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Exp(value); + } + } + + /** + * {@link AggregationExpression} for {@code $floor}. + * + * @author Christoph Strobl + */ + public static class Floor extends AbstractAggregationExpression { + + private Floor(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$floor"; + } + + /** + * Creates new {@link Floor}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Floor}. + */ + public static Floor floorValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Floor(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Floor}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Floor}. + */ + public static Floor floorValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Floor(expression); + } + + /** + * Creates new {@link Floor}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Floor}. + */ + public static Floor floorValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Floor(value); + } + } + + /** + * {@link AggregationExpression} for {@code $ln}. + * + * @author Christoph Strobl + */ + public static class Ln extends AbstractAggregationExpression { + + private Ln(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$ln"; + } + + /** + * Creates new {@link Ln}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Ln}. + */ + public static Ln lnValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Ln(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Ln}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Ln}. + */ + public static Ln lnValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Ln(expression); + } + + /** + * Creates new {@link Ln}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Ln}. + */ + public static Ln lnValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Ln(value); + } + } + + /** + * {@link AggregationExpression} for {@code $log}. + * + * @author Christoph Strobl + */ + public static class Log extends AbstractAggregationExpression { + + private Log(List values) { + super(values); + } + + @Override + protected String getMongoMethod() { + return "$log"; + } + + /** + * Creates new {@link Log}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public static Log valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Log(asFields(fieldReference)); + } + + /** + * Creates new {@link Log}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public static Log valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Log(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Log}. + * + * @param value must not be {@literal null}.
+ * @return new instance of {@link Log}. + */ + public static Log valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Log(Collections.singletonList(value)); + } + + /** + * Use the value stored at the given field as log base. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public Log log(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Log(append(Fields.field(fieldReference))); + } + + /** + * Use the evaluated value of the given {@link AggregationExpression} as log base. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public Log log(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Log(append(expression)); + } + + /** + * Use the given value as log base. + * + * @param base must not be {@literal null}. + * @return new instance of {@link Log}. + */ + public Log log(Number base) { + return new Log(append(base)); + } + } + + /** + * {@link AggregationExpression} for {@code $log10}. + * + * @author Christoph Strobl + */ + public static class Log10 extends AbstractAggregationExpression { + + private Log10(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$log10"; + } + + /** + * Creates new {@link Log10}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Log10}. + */ + public static Log10 log10ValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Log10(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Log10}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Log10}. + */ + public static Log10 log10ValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Log10(expression); + } + + /** + * Creates new {@link Log10}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Log10}. + */ + public static Log10 log10ValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Log10(value); + } + } + + /** + * {@link AggregationExpression} for {@code $mod}. + * + * @author Christoph Strobl + */ + public static class Mod extends AbstractAggregationExpression { + + private Mod(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$mod"; + } + + /** + * Creates new {@link Mod}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public static Mod valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Mod(asFields(fieldReference)); + } + + /** + * Creates new {@link Mod}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public static Mod valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Mod(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Mod}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Mod}.
+ */ + public static Mod valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Mod(Collections.singletonList(value)); + } + + /** + * Use the value stored at the given field as mod base. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public Mod mod(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Mod(append(Fields.field(fieldReference))); + } + + /** + * Use evaluated value of the given {@link AggregationExpression} as mod base. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public Mod mod(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Mod(append(expression)); + } + + /** + * Use the given value as mod base. + * + * @param base must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + public Mod mod(Number base) { + return new Mod(append(base)); + } + } + + /** + * {@link AggregationExpression} for {@code $multiply}. + * + * @author Christoph Strobl + */ + public static class Multiply extends AbstractAggregationExpression { + + private Multiply(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$multiply"; + } + + /** + * Creates new {@link Multiply}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public static Multiply valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Multiply(asFields(fieldReference)); + } + + /** + * Creates new {@link Multiply}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public static Multiply valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Multiply(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Multiply}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public static Multiply valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Multiply(Collections.singletonList(value)); + } + + /** + * Multiply by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public Multiply multiplyBy(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Multiply(append(Fields.field(fieldReference))); + } + + /** + * Multiply by the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public Multiply multiplyBy(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Multiply(append(expression)); + } + + /** + * Multiply by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + public Multiply multiplyBy(Number value) { + return new Multiply(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $pow}. 
+ * + * @author Christoph Strobl + */ + public static class Pow extends AbstractAggregationExpression { + + private Pow(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$pow"; + } + + /** + * Creates new {@link Pow}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public static Pow valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Pow(asFields(fieldReference)); + } + + /** + * Creates new {@link Pow}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public static Pow valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Pow(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Pow}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public static Pow valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Pow(Collections.singletonList(value)); + } + + /** + * Raise to the power of the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public Pow pow(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Pow(append(Fields.field(fieldReference))); + } + + /** + * Raise to the power of the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public Pow pow(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Pow(append(expression)); + } + + /** + * Raise to the power of the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + public Pow pow(Number value) { + return new Pow(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $sqrt}. + * + * @author Christoph Strobl + */ + public static class Sqrt extends AbstractAggregationExpression { + + private Sqrt(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$sqrt"; + } + + /** + * Creates new {@link Sqrt}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Sqrt}. + */ + public static Sqrt sqrtOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Sqrt(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Sqrt}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Sqrt}. + */ + public static Sqrt sqrtOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Sqrt(expression); + } + + /** + * Creates new {@link Sqrt}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Sqrt}. + */ + public static Sqrt sqrtOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Sqrt(value); + } + } + + /** + * {@link AggregationExpression} for {@code $subtract}. + * + * @author Christoph Strobl + */ + public static class Subtract extends AbstractAggregationExpression { + + private Subtract(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$subtract"; + } + + /** + * Creates new {@link Subtract}. + * + * @param fieldReference must not be {@literal null}.
+ * @return new instance of {@link Subtract}. + */ + public static Subtract valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Subtract(asFields(fieldReference)); + } + + /** + * Creates new {@link Subtract}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public static Subtract valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Subtract(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Subtract}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public static Subtract valueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Subtract(Collections.singletonList(value)); + } + + /** + * Subtract the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public Subtract subtract(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Subtract(append(Fields.field(fieldReference))); + } + + /** + * Subtract the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public Subtract subtract(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Subtract(append(expression)); + } + + /** + * Subtract the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Subtract}. + */ + public Subtract subtract(Number value) { + return new Subtract(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $trunc}. + * + * @author Christoph Strobl + */ + public static class Trunc extends AbstractAggregationExpression { + + private Trunc(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$trunc"; + } + + /** + * Creates new {@link Trunc}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Trunc}. + */ + public static Trunc truncValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Trunc(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Trunc}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trunc}. + */ + public static Trunc truncValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Trunc(expression); + } + + /** + * Creates new {@link Trunc}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Trunc}. + */ + public static Trunc truncValueOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Trunc(value); + } + } + + /** + * {@link Round} rounds a number to a whole integer or to a specified decimal place. + *

+ * <ul>
+ * <li>If {@link Round#place(int)} resolves to a positive integer, {@code $round} rounds to the given decimal
+ * places.</li>
+ * <li>If {@link Round#place(int)} resolves to a negative integer, {@code $round} rounds to the left of the
+ * decimal.</li>
+ * <li>If {@link Round#place(int)} resolves to zero, {@code $round} rounds using the first digit to the right of the
+ * decimal.</li>
+ * </ul>
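+ * <p>
+ * A usage sketch (the {@code price} field name is illustrative only, not part of this patch):
+ *
+ * <pre class="code">
+ * // renders { $round : [ "$price", 2 ] } - rounds "$price" to two decimal places
+ * Round.roundValueOf("price").place(2);
+ * </pre>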
+ * + * @since 3.0 + */ + public static class Round extends AbstractAggregationExpression { + + private Round(Object value) { + super(value); + } + + /** + * Round the value of the field that resolves to an integer, double, decimal, or long. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round roundValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Round(Collections.singletonList(Fields.field(fieldReference))); + } + + /** + * Round the outcome of the given expression that resolves to an integer, double, decimal, or long. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round roundValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Round(Collections.singletonList(expression)); + } + + /** + * Round the given numeric (integer, double, decimal, or long) value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round round(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Round(Collections.singletonList(value)); + } + + /** + * The place to round to. Can be between -20 and 100, exclusive. + * + * @param place value between -20 and 100, exclusive. + * @return new instance of {@link Round}. + */ + public Round place(int place) { + return new Round(append(place)); + } + + /** + * The place to round to, defined by an expression that resolves to an integer between -20 and 100, exclusive. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public Round placeOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Round(append(expression)); + } + + /** + * The place to round to, defined via a field reference that resolves to an integer between -20 and 100, + * exclusive. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public Round placeOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + return new Round(append(Fields.field(fieldReference))); + } + + @Override + protected String getMongoMethod() { + return "$round"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the average rate of change + * within the specified window. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Derivative extends AbstractAggregationExpression { + + private Derivative(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Derivative} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Derivative}. + */ + public static Derivative derivativeOf(String fieldReference) { + return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Derivative} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Derivative}.
+ */ + public static Derivative derivativeOf(AggregationExpression expression) { + return new Derivative(Collections.singletonMap("input", expression)); + } + + public static Derivative derivativeOfValue(Number value) { + return new Derivative(Collections.singletonMap("input", value)); + } + + public Derivative unit(String unit) { + return new Derivative(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$derivative"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the approximation for the + * mathematical integral value. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Integral extends AbstractAggregationExpression { + + private Integral(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Integral} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(String fieldReference) { + return new Integral(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Integral} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(AggregationExpression expression) { + return new Integral(Collections.singletonMap("input", expression)); + } + + /** + * Set the unit of measure. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + */ + public Integral unit(String unit) { + return new Integral(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$integral"; + } + } + + /** + * The unit of measure for computations that operate upon angles. + * + * @author Christoph Strobl + * @since 3.3 + */ + public enum AngularUnit { + RADIANS, DEGREES + } + + /** + * An {@link AggregationExpression expression} that calculates the sine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sin extends AbstractAggregationExpression { + + private Sin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
+ * Use {@code sinOf("angle", DEGREES)} as shortcut for
+ *
+ * <pre class="code">
+ * { $sin : { $degreesToRadians : "$angle" } }
+ * </pre>
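+ * <p>
+ * For illustration (the {@code angle} field is a made-up example), both calls below render the same operator:
+ *
+ * <pre class="code">
+ * Sin.sinOf("angle");                                  // { $sin : "$angle" }, input in radians
+ * Sin.sin(Fields.field("angle"), AngularUnit.RADIANS); // equivalent long form
+ * </pre>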
+ * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference) { + return sinOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference, AngularUnit unit) { + return sin(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression) { + return sinOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression, AngularUnit unit) { + return sin(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value) { + return sin(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sin(value); + } + + @Override + protected String getMongoMethod() { + return "$sin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sinh extends AbstractAggregationExpression { + + private Sinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sinh}.
+ */ + public static Sinh sinhOf(String fieldReference) { + return sinhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + *
+ * Use {@code sinhOf("angle", DEGREES)} as shortcut for
+ *
+ * <pre class="code">
+ * { $sinh : { $degreesToRadians : "$angle" } }
+ * </pre>
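+ * <p>
+ * A usage sketch (the {@code angle} field is illustrative):
+ *
+ * <pre class="code">
+ * Sinh.sinhOf("angle", AngularUnit.DEGREES); // { $sinh : { $degreesToRadians : "$angle" } }
+ * </pre>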
+ * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinhOf(String fieldReference, AngularUnit unit) { + return sinh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
+ * Use {@code sinhOf("angle", DEGREES)} as shortcut for e.g. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinhOf(AggregationExpression expression) { + return sinhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinhOf(AggregationExpression expression, AngularUnit unit) { + return sinh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinh(Object value) { + return sinh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sinh(value); + } + + @Override + protected String getMongoMethod() { + return "$sinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse sine of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASin extends AbstractAggregationExpression { + + private ASin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ASin(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + *
                    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(AggregationExpression expression) { + return new ASin(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(Number value) { + return new ASin(value); + } + + @Override + protected String getMongoMethod() { + return "$asin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASinh extends AbstractAggregationExpression { + + private ASinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(String fieldReference) { + return new ASinh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + *
                    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(AggregationExpression expression) { + return new ASinh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(Object value) { + return new ASinh(value); + } + + @Override + protected String getMongoMethod() { + return "$asinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cos extends AbstractAggregationExpression { + + private Cos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
+ * Use {@code cosOf("angle", DEGREES)} as shortcut for
+ *
+ * <pre class="code">
+ * { $cos : { $degreesToRadians : "$angle" } }
+ * </pre>
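+ * <p>
+ * For example (the {@code angle} field is illustrative):
+ *
+ * <pre class="code">
+ * Cos.cosOf("angle");                      // { $cos : "$angle" }, input in radians
+ * Cos.cosOf("angle", AngularUnit.DEGREES); // { $cos : { $degreesToRadians : "$angle" } }
+ * </pre>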
                    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference) { + return cosOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference, AngularUnit unit) { + return cos(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression) { + return cosOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression, AngularUnit unit) { + return cos(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value) { + return cos(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cos(value); + } + + @Override + protected String getMongoMethod() { + return "$cos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cosh extends AbstractAggregationExpression { + + private Cosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cosh}. 
+ */ + public static Cosh coshOf(String fieldReference) { + return coshOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + *
+ * Use {@code coshOf("angle", DEGREES)} as shortcut for
+ *
+ * <pre class="code">
+ * { $cosh : { $degreesToRadians : "$angle" } }
+ * </pre>
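+ * <p>
+ * A usage sketch (the {@code angle} field is illustrative):
+ *
+ * <pre class="code">
+ * Cosh.coshOf("angle", AngularUnit.DEGREES); // { $cosh : { $degreesToRadians : "$angle" } }
+ * </pre>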
                    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference, AngularUnit unit) { + return cosh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
+ * Use {@code coshOf("angle", DEGREES)} as shortcut for e.g. + * {@code coshOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression) { + return coshOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression, AngularUnit unit) { + return cosh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value) { + return cosh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cosh(value); + } + + @Override + protected String getMongoMethod() { + return "$cosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACos extends AbstractAggregationExpression { + + private ACos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ACos(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + *
                    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(AggregationExpression expression) { + return new ACos(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(Number value) { + return new ACos(value); + } + + @Override + protected String getMongoMethod() { + return "$acos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACosh extends AbstractAggregationExpression { + + private ACosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(String fieldReference) { + return new ACosh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + *
                    + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(AggregationExpression expression) { + return new ACosh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(Object value) { + return new ACosh(value); + } + + @Override + protected String getMongoMethod() { + return "$acosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tan extends AbstractAggregationExpression { + + private Tan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
+ * Use {@code tanOf("angle", DEGREES)} as shortcut for
+ *
+ * <pre class="code">
+ * { $tan : { $degreesToRadians : "$angle" } }
+ * </pre>
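+ * <p>
+ * For example (the {@code angle} field is illustrative):
+ *
+ * <pre class="code">
+ * Tan.tanOf("angle");                      // { $tan : "$angle" }, input in radians
+ * Tan.tanOf("angle", AngularUnit.DEGREES); // { $tan : { $degreesToRadians : "$angle" } }
+ * </pre>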
                    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference) { + return tanOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference, AngularUnit unit) { + return tan(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression) { + return tanOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression, AngularUnit unit) { + return tan(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value) { + return tan(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tan(value); + } + + @Override + protected String getMongoMethod() { + return "$tan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan extends AbstractAggregationExpression { + + private ATan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan}. 
+ */ + public static ATan atanOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(AggregationExpression expression) { + return new ATan(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(Number value) { + return new ATan(value); + } + + @Override + protected String getMongoMethod() { + return "$atan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of y / x, where y and x are the + * first and second values passed to the expression respectively. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan2 extends AbstractAggregationExpression { + + private ATan2(List value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(asFields(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2((Collections.singletonList(expression))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(append(Fields.field(fieldReference))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan2}.
+		 */
+		public ATan2 atan2of(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new ATan2(append(expression));
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are
+		 * the first and second values passed to the expression respectively.
+		 *
+		 * @param value the numeric x value.
+		 * @return new instance of {@link ATan2}.
+		 */
+		public ATan2 atan2of(Number value) {
+			return new ATan2(append(value));
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$atan2";
+		}
+	}
+
+	/**
+	 * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in
+	 * {@link AngularUnit#RADIANS}.
+	 *
+	 * @author Christoph Strobl
+	 * @since 3.3
+	 */
+	public static class Tanh extends AbstractAggregationExpression {
+
+		private Tanh(Object value) {
+			super(value);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured
+		 * in {@link AngularUnit#RADIANS}.
+		 *
+		 * @param fieldReference the name of the {@link Field field} that resolves to a numeric value.
+		 * @return new instance of {@link Tanh}.
+		 */
+		public static Tanh tanhOf(String fieldReference) {
+			return tanhOf(fieldReference, AngularUnit.RADIANS);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured
+		 * in the given {@link AngularUnit unit}.
+		 *
+		 * Use {@code tanhOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre>
+		 * { $tanh : { $degreesToRadians : "$angle" } }
+		 * </pre>
                    + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference, AngularUnit unit) { + return tanh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
+		 * Use {@code tanhOf("angle", DEGREES)} as shortcut for e.g.
+		 * {@code tanhOf(ConvertOperators.valueOf("angle").degreesToRadians())}.
+		 *
+		 * @param expression the {@link AggregationExpression expression} that resolves to a numeric value.
+		 * @return new instance of {@link Tanh}.
+		 */
+		public static Tanh tanhOf(AggregationExpression expression) {
+			return tanhOf(expression, AngularUnit.RADIANS);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured
+		 * in the given {@link AngularUnit unit}.
+		 *
+		 * @param expression the {@link AggregationExpression expression} that resolves to a numeric value.
+		 * @param unit the unit of measure used by the value of the given field.
+		 * @return new instance of {@link Tanh}.
+		 */
+		public static Tanh tanhOf(AggregationExpression expression, AngularUnit unit) {
+			return tanh(expression, unit);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured
+		 * in {@link AngularUnit#RADIANS}.
+		 *
+		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
+		 *          numeric value.
+		 * @return new instance of {@link Tanh}.
+		 */
+		public static Tanh tanh(Object value) {
+			return tanh(value, AngularUnit.RADIANS);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured
+		 * in the given {@link AngularUnit unit}.
+		 *
+		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
+		 *          numeric value.
+		 * @param unit the unit of measure used by the value of the given field.
+		 * @return new instance of {@link Tanh}.
+		 */
+		public static Tanh tanh(Object value, AngularUnit unit) {
+
+			if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) {
+				return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value));
+			}
+			return new Tanh(value);
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$tanh";
+		}
+	}
+
+	/**
+	 * An {@link AggregationExpression expression} that calculates the inverse hyperbolic tangent of a value.
+	 *
+	 * @author Divya Srivastava
+	 * @since 3.3
+	 */
+	public static class ATanh extends AbstractAggregationExpression {
+
+		private ATanh(Object value) {
+			super(value);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value.
+		 *
+		 * @param fieldReference the name of the {@link Field field} that resolves to a numeric value.
+		 * @return new instance of {@link ATanh}.
+		 */
+		public static ATanh atanhOf(String fieldReference) {
+			return new ATanh(Fields.field(fieldReference));
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value.
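+		 * <p>
+		 * A usage sketch, assuming an expression that yields a numeric value (the {@code "a"} and {@code "b"} field
+		 * names are illustrative only):
+		 *
+		 * <pre class="code">
+		 * // renders { $atanh : { $divide : [ "$a", "$b" ] } }
+		 * ATanh.atanhOf(ArithmeticOperators.valueOf("a").divideBy("b"));
+		 * </pre>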
+		 *
+		 * @param expression the {@link AggregationExpression expression} that resolves to a numeric value.
+		 * @return new instance of {@link ATanh}.
+		 */
+		public static ATanh atanhOf(AggregationExpression expression) {
+			return new ATanh(expression);
+		}
+
+		/**
+		 * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value.
+		 *
+		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
+		 *          numeric value.
+		 * @return new instance of {@link ATanh}.
+		 */
+		public static ATanh atanhOf(Object value) {
+			return new ATanh(value);
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$atanh";
+		}
+	}
+
+	/**
+	 * {@link Rand} returns a random floating-point value between 0 and 1.
+	 *
+	 * @author Mushtaq Ahmed
+	 * @since 3.3
+	 */
+	public static class Rand implements AggregationExpression {
+
+		@Override
+		public Document toDocument(AggregationOperationContext context) {
+			return new Document("$rand", new Document());
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java
new file mode 100644
index 0000000000..a8cb58d17c
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java
@@ -0,0 +1,2109 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.aggregation;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.bson.Document;
+import org.springframework.data.domain.Range;
+import org.springframework.data.domain.Sort;
+import org.springframework.data.domain.Sort.Direction;
+import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder;
+import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.PropertyExpression;
+import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+
+/**
+ * Gateway to {@literal array} aggregation operations.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @author Shashank Sharma
+ * @author Divya Srivastava
+ * @since 1.0
+ */
+public class ArrayOperators {
+
+	/**
+	 * Take the array referenced by the given {@literal fieldReference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance of {@link ArrayOperatorFactory}.
+	 */
+	public static ArrayOperatorFactory arrayOf(String fieldReference) {
+		return new ArrayOperatorFactory(fieldReference);
+	}
+
+	/**
+	 * Take the array resulting from the given {@link AggregationExpression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance of {@link ArrayOperatorFactory}.
+	 */
+	public static ArrayOperatorFactory arrayOf(AggregationExpression expression) {
+		return new ArrayOperatorFactory(expression);
+	}
+
+	/**
+	 * Take the given {@link Collection} of values.
+	 *
+	 * @param values must not be {@literal null}.
+	 * @return new instance of {@link ArrayOperatorFactory}.
+	 * @since 2.2
+	 */
+	public static ArrayOperatorFactory arrayOf(Collection<?> values) {
+		return new ArrayOperatorFactory(values);
+	}
+
+	/**
+	 * @author Christoph Strobl
+	 */
+	public static class ArrayOperatorFactory {
+
+		private final @Nullable String fieldReference;
+		private final @Nullable AggregationExpression expression;
+		private final @Nullable Collection<?> values;
+
+		/**
+		 * Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 */
+		public ArrayOperatorFactory(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			this.fieldReference = fieldReference;
+			this.expression = null;
+			this.values = null;
+		}
+
+		/**
+		 * Creates new {@link ArrayOperatorFactory} for given {@link AggregationExpression}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 */
+		public ArrayOperatorFactory(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			this.fieldReference = null;
+			this.expression = expression;
+			this.values = null;
+		}
+
+		/**
+		 * Creates new {@link ArrayOperatorFactory} for given values.
+		 *
+		 * @param values must not be {@literal null}.
+		 * @since 2.2
+		 */
+		public ArrayOperatorFactory(Collection<?> values) {
+
+			Assert.notNull(values, "Values must not be null");
+			this.fieldReference = null;
+			this.expression = null;
+			this.values = values;
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and returns the element at the
+		 * specified array {@literal position}.
+		 *
+		 * @param position the element index.
+		 * @return new instance of {@link ArrayElemAt}.
+		 */
+		public ArrayElemAt elementAt(int position) {
+			return createArrayElemAt().elementAt(position);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and returns the element at the
+		 * position resulting from the given {@literal expression}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link ArrayElemAt}.
+		 */
+		public ArrayElemAt elementAt(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return createArrayElemAt().elementAt(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and returns the element at the
+		 * position defined by the referenced {@literal field}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link ArrayElemAt}.
+		 */
+		public ArrayElemAt elementAt(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			return createArrayElemAt().elementAt(fieldReference);
+		}
+
+		private ArrayElemAt createArrayElemAt() {
+
+			if (usesFieldRef()) {
+				return ArrayElemAt.arrayOf(fieldReference);
+			}
+
+			return usesExpression() ? ArrayElemAt.arrayOf(expression) : ArrayElemAt.arrayOf(values);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and concatenates the given
+		 * {@literal arrayFieldReference} to it.
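+		 * <p>
+		 * A usage sketch (the field names are illustrative only):
+		 *
+		 * <pre class="code">
+		 * // renders { $concatArrays : [ "$tags", "$moreTags" ] }
+		 * ArrayOperators.arrayOf("tags").concat("moreTags");
+		 * </pre>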
+		 *
+		 * @param arrayFieldReference must not be {@literal null}.
+		 * @return new instance of {@link ConcatArrays}.
+		 */
+		public ConcatArrays concat(String arrayFieldReference) {
+
+			Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null");
+			return createConcatArrays().concat(arrayFieldReference);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and concatenates the array resulting
+		 * from the given {@literal expression} to it.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link ConcatArrays}.
+		 */
+		public ConcatArrays concat(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return createConcatArrays().concat(expression);
+		}
+
+		private ConcatArrays createConcatArrays() {
+
+			if (usesFieldRef()) {
+				return ConcatArrays.arrayOf(fieldReference);
+			}
+
+			return usesExpression() ? ConcatArrays.arrayOf(expression) : ConcatArrays.arrayOf(values);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and selects a subset of the array to
+		 * return based on the specified condition.
+		 *
+		 * @return new instance of {@link AsBuilder} to create a {@link Filter}.
+		 */
+		public AsBuilder filter() {
+
+			if (usesFieldRef()) {
+				return Filter.filter(fieldReference);
+			}
+
+			if (usesExpression()) {
+				return Filter.filter(expression);
+			}
+
+			Assert.state(values != null, "Values must not be null");
+			return Filter.filter(new ArrayList<>(values));
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and checks whether it is an array.
+		 *
+		 * @return new instance of {@link IsArray}.
+		 */
+		public IsArray isArray() {
+
+			Assert.state(values == null, "Does it make sense to call isArray on an array; Maybe just skip it");
+
+			return usesFieldRef() ? IsArray.isArray(fieldReference) : IsArray.isArray(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and retrieves its length.
+		 *
+		 * @return new instance of {@link Size}.
+		 */
+		public Size length() {
+
+			if (usesFieldRef()) {
+				return Size.lengthOfArray(fieldReference);
+			}
+
+			return usesExpression() ? Size.lengthOfArray(expression) : Size.lengthOfArray(values);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that takes the associated array and selects a subset from it.
+		 *
+		 * @return new instance of {@link Slice}.
+		 */
+		public Slice slice() {
+
+			if (usesFieldRef()) {
+				return Slice.sliceArrayOf(fieldReference);
+			}
+
+			return usesExpression() ? Slice.sliceArrayOf(expression) : Slice.sliceArrayOf(values);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that searches the associated array for an occurrence of a specified
+		 * value and returns the array index (zero-based) of the first occurrence.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link IndexOfArray}.
+		 */
+		public IndexOfArray indexOf(Object value) {
+
+			if (usesFieldRef()) {
+				return IndexOfArray.arrayOf(fieldReference).indexOf(value);
+			}
+
+			return usesExpression() ? IndexOfArray.arrayOf(expression).indexOf(value)
+					: IndexOfArray.arrayOf(values).indexOf(value);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that returns an array with the elements in reverse order.
+		 *
+		 * @return new instance of {@link ReverseArray}.
+		 */
+		public ReverseArray reverse() {
+
+			if (usesFieldRef()) {
+				return ReverseArray.reverseArrayOf(fieldReference);
+			}
+
+			return usesExpression() ?
ReverseArray.reverseArrayOf(expression) + : ReverseArray.reverseArrayOf(Collections.singletonList(values)); + } + + /** + * Start creating new {@link AggregationExpression} that applies an {@link AggregationExpression} to each element in + * an array and combines them into a single value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReduceInitialValueBuilder} to create {@link Reduce}. + */ + public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(AggregationExpression expression) { + + return initialValue -> (usesFieldRef() ? Reduce.arrayOf(fieldReference) + : Reduce.arrayOf(ArrayOperatorFactory.this.expression)).withInitialValue(initialValue).reduce(expression); + } + + /** + * Start creating new {@link AggregationExpression} that applies an {@link AggregationExpression} to each element in + * an array and combines them into a single value. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link ReduceInitialValueBuilder} to create {@link Reduce}. + */ + public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(PropertyExpression... expressions) { + + return initialValue -> (usesFieldRef() ? Reduce.arrayOf(fieldReference) : Reduce.arrayOf(expression)) + .withInitialValue(initialValue).reduce(expressions); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort + * order}. + * + * @return new instance of {@link SortArray}. + * @since 4.0 + */ + public SortArray sort(Sort sort) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).by(sort); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).by(sort); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given + * {@link Direction order}. + * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray sort(Direction direction) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).direction(direction); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).direction(direction); + } + + /** + * Creates new {@link AggregationExpression} that transposes an array of input arrays so that the first element of + * the output array would be an array containing, the first element of the first input array, the first element of + * the second input array, etc. + * + * @param arrays must not be {@literal null}. + * @return new instance of {@link Zip}. + */ + public Zip zipWith(Object... arrays) { + + if (usesFieldRef()) { + return Zip.arrayOf(fieldReference).zip(arrays); + } + + return (usesExpression() ? Zip.arrayOf(expression) : Zip.arrayOf(values)).zip(arrays); + } + + /** + * Creates new {@link AggregationExpression} that returns a boolean indicating whether a specified value is in the + * associated array. + * + * @param value must not be {@literal null}. + * @return new instance of {@link In}. + */ + public In containsValue(Object value) { + + if (usesFieldRef()) { + return In.arrayOf(fieldReference).containsValue(value); + } + + return (usesExpression() ? In.arrayOf(expression) : In.arrayOf(values)).containsValue(value); + } + + /** + * Creates new {@link AggregationExpression} that converts the associated expression into an object. + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link ArrayToObject}. 
+		 * @since 2.1
+		 */
+		public ArrayToObject toObject() {
+
+			if (usesFieldRef()) {
+				return ArrayToObject.arrayValueOfToObject(fieldReference);
+			}
+
+			return usesExpression() ? ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that returns the first element in the associated array.
+		 * NOTE: Requires MongoDB 4.4 or later.
+		 *
+		 * @return new instance of {@link First}.
+		 * @since 3.4
+		 */
+		public First first() {
+
+			if (usesFieldRef()) {
+				return First.firstOf(fieldReference);
+			}
+
+			return usesExpression() ? First.firstOf(expression) : First.first(values);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that returns the last element in the associated array.
+		 * NOTE: Requires MongoDB 4.4 or later.
+		 *
+		 * @return new instance of {@link Last}.
+		 * @since 3.4
+		 */
+		public Last last() {
+
+			if (usesFieldRef()) {
+				return Last.lastOf(fieldReference);
+			}
+
+			return usesExpression() ? Last.lastOf(expression) : Last.last(values);
+		}
+
+		/**
+		 * @author Christoph Strobl
+		 */
+		public interface ReduceInitialValueBuilder {
+
+			/**
+			 * Define the initial cumulative value set before {@literal in} is applied to the first element of the input
+			 * array.
+			 *
+			 * @param initialValue must not be {@literal null}.
+			 * @return new instance of {@link Reduce}.
+			 */
+			Reduce startingWith(Object initialValue);
+		}
+
+		/**
+		 * @return {@literal true} if {@link #fieldReference} is not {@literal null}.
+		 */
+		private boolean usesFieldRef() {
+			return fieldReference != null;
+		}
+
+		/**
+		 * @return {@literal true} if {@link #expression} is not {@literal null}.
+		 * @since 2.2
+		 */
+		private boolean usesExpression() {
+			return expression != null;
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $arrayElemAt}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class ArrayElemAt extends AbstractAggregationExpression {
+
+		private ArrayElemAt(List<?> value) {
+			super(value);
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$arrayElemAt";
+		}
+
+		/**
+		 * Creates new {@link ArrayElemAt}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link ArrayElemAt}.
+		 */
+		public static ArrayElemAt arrayOf(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			return new ArrayElemAt(asFields(fieldReference));
+		}
+
+		/**
+		 * Creates new {@link ArrayElemAt}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link ArrayElemAt}.
+		 */
+		public static ArrayElemAt arrayOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new ArrayElemAt(Collections.singletonList(expression));
+		}
+
+		/**
+		 * Creates new {@link ArrayElemAt}.
+		 *
+		 * @param values The array members. Must not be {@literal null}.
+		 * @return new instance of {@link ArrayElemAt}.
+		 * @since 2.2
+		 */
+		public static ArrayElemAt arrayOf(Collection<?> values) {
+
+			Assert.notNull(values, "Values must not be null");
+			return new ArrayElemAt(Collections.singletonList(values));
+		}
+
+		/**
+		 * Use the element with given index number.
+		 *
+		 * @param index the index number
+		 * @return new instance of {@link ArrayElemAt}.
+		 */
+		public ArrayElemAt elementAt(int index) {
+			return new ArrayElemAt(append(index));
+		}
+
+		/**
+		 * Use the element at the index number evaluated from the given {@link AggregationExpression}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link ArrayElemAt}.
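+		 * <p>
+		 * A usage sketch (the {@code "favorites"} field name is illustrative only):
+		 *
+		 * <pre class="code">
+		 * // renders { $arrayElemAt : [ "$favorites", 0 ] }
+		 * ArrayElemAt.arrayOf("favorites").elementAt(0);
+		 * </pre>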
+ */ + public ArrayElemAt elementAt(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ArrayElemAt(append(expression)); + } + + /** + * Use the element at the index number taken from the given field. + * + * @param arrayFieldReference the field name. + * @return new instance of {@link ArrayElemAt}. + */ + public ArrayElemAt elementAt(String arrayFieldReference) { + + Assert.notNull(arrayFieldReference, "ArrayReference must not be null"); + return new ArrayElemAt(append(Fields.field(arrayFieldReference))); + } + } + + /** + * {@link AggregationExpression} for {@code $concatArrays}. + * + * @author Christoph Strobl + */ + public static class ConcatArrays extends AbstractAggregationExpression { + + private ConcatArrays(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$concatArrays"; + } + + /** + * Creates new {@link ConcatArrays}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ + public static ConcatArrays arrayOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ConcatArrays(asFields(fieldReference)); + } + + /** + * Creates new {@link ConcatArrays}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ + public static ConcatArrays arrayOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ConcatArrays(Collections.singletonList(expression)); + } + + /** + * Creates new {@link ConcatArrays}. + * + * @param values The array members. Must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + * @since 2.2 + */ + public static ConcatArrays arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new ConcatArrays(Collections.singletonList(values)); + } + + /** + * Concat with the array stored at the given field. + * + * @param arrayFieldReference must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ + public ConcatArrays concat(String arrayFieldReference) { + + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); + return new ConcatArrays(append(Fields.field(arrayFieldReference))); + } + + /** + * Concat with the array resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ + public ConcatArrays concat(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ConcatArrays(append(expression)); + } + } + + /** + * {@code $filter} {@link AggregationExpression} allows to select a subset of the array to return based on the + * specified condition. + * + * @author Christoph Strobl + * @since 1.10 + */ + public static class Filter implements AggregationExpression { + + private @Nullable Object input; + private @Nullable ExposedField as; + private @Nullable Object condition; + + private Filter() { + // used by builder + } + + /** + * Set the {@literal field} to apply the {@code $filter} to. + * + * @param field must not be {@literal null}. + * @return never {@literal null}. + */ + public static AsBuilder filter(String field) { + + Assert.notNull(field, "Field must not be null"); + return filter(Fields.field(field)); + } + + /** + * Set the {@literal field} to apply the {@code $filter} to. 
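+	 * <p>
+	 * A typical composition, sketched with illustrative field names:
+	 *
+	 * <pre class="code">
+	 * // { $filter : { input : "$items", as : "item", cond : { $gte : [ "$$item.price", 100 ] } } }
+	 * Filter.filter("items").as("item")
+	 * 		.by(ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100));
+	 * </pre>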
+ * + * @param field must not be {@literal null}. + * @return never {@literal null}. + */ + public static AsBuilder filter(Field field) { + + Assert.notNull(field, "Field must not be null"); + return new FilterExpressionBuilder().filter(field); + } + + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + public static AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Field must not be null"); + return new FilterExpressionBuilder().filter(expression); + } + + /** + * Set the {@literal values} to apply the {@code $filter} to. + * + * @param values must not be {@literal null}. + * @return new instance of {@link AsBuilder} to create the {@link Filter}. + */ + public static AsBuilder filter(List values) { + + Assert.notNull(values, "Values must not be null"); + return new FilterExpressionBuilder().filter(values); + } + + @Override + public Document toDocument(final AggregationOperationContext context) { + return toFilter(ExposedFields.from(as), context); + } + + private Document toFilter(ExposedFields exposedFields, AggregationOperationContext context) { + + Document filterExpression = new Document(); + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); + + filterExpression.putAll(context.getMappedObject(new Document("input", getMappedInput(context)))); + filterExpression.put("as", as.getTarget()); + + filterExpression.putAll(context.getMappedObject(new Document("cond", getMappedCondition(operationContext)))); + + return new Document("$filter", filterExpression); + } + + private Object getMappedInput(AggregationOperationContext context) { + + if (input instanceof Field field) { + return context.getReference(field).toString(); + } + + if (input instanceof AggregationExpression expression) { + return expression.toDocument(context); + } + + return input; + } + + private Object getMappedCondition(AggregationOperationContext context) { + + if (!(condition instanceof AggregationExpression aggregationExpression)) { + return condition; + } + + NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext( + context, Collections.singleton(as)); + return aggregationExpression.toDocument(nea); + } + + /** + * @author Christoph Strobl + */ + public interface InputBuilder { + + /** + * Set the {@literal values} to apply the {@code $filter} to. + * + * @param array must not be {@literal null}. + * @return + */ + AsBuilder filter(List array); + + /** + * Set the {@literal field} holding an array to apply the {@code $filter} to. + * + * @param field must not be {@literal null}. + * @return + */ + AsBuilder filter(Field field); + + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return + * @since 4.1.1 + */ + AsBuilder filter(AggregationExpression expression); + } + + /** + * @author Christoph Strobl + */ + public interface AsBuilder { + + /** + * Set the {@literal variableName} for the elements in the input array. + * + * @param variableName must not be {@literal null}. + * @return never {@literal null}. 
+ */ + ConditionBuilder as(String variableName); + } + + /** + * @author Christoph Strobl + */ + public interface ConditionBuilder { + + /** + * Set the {@link AggregationExpression} that determines whether to include the element in the resulting array. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + */ + Filter by(AggregationExpression expression); + + /** + * Set the {@literal expression} that determines whether to include the element in the resulting array. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + */ + Filter by(String expression); + + /** + * Set the {@literal expression} that determines whether to include the element in the resulting array. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + */ + Filter by(Document expression); + } + + /** + * @author Christoph Strobl + */ + static final class FilterExpressionBuilder implements InputBuilder, AsBuilder, ConditionBuilder { + + private final Filter filter; + + FilterExpressionBuilder() { + this.filter = new Filter(); + } + + /** + * Creates new {@link InputBuilder}. + * + * @return new instance of {@link FilterExpressionBuilder}. + */ + public static InputBuilder newBuilder() { + return new FilterExpressionBuilder(); + } + + @Override + public AsBuilder filter(List array) { + + Assert.notNull(array, "Array must not be null"); + filter.input = new ArrayList<>(array); + return this; + } + + @Override + public AsBuilder filter(Field field) { + + Assert.notNull(field, "Field must not be null"); + filter.input = field; + return this; + } + + @Override + public AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + filter.input = expression; + return this; + } + + @Override + public ConditionBuilder as(String variableName) { + + Assert.notNull(variableName, "Variable name must not be null"); + filter.as = new ExposedField(variableName, true); + return this; + } + + @Override + public Filter by(AggregationExpression condition) { + + Assert.notNull(condition, "Condition must not be null"); + filter.condition = condition; + return filter; + } + + @Override + public Filter by(String expression) { + + Assert.notNull(expression, "Expression must not be null"); + filter.condition = expression; + return filter; + } + + @Override + public Filter by(Document expression) { + + Assert.notNull(expression, "Expression must not be null"); + filter.condition = expression; + return filter; + } + } + } + + /** + * {@link AggregationExpression} for {@code $isArray}. + * + * @author Christoph Strobl + */ + public static class IsArray extends AbstractAggregationExpression { + + private IsArray(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$isArray"; + } + + /** + * Creates new {@link IsArray}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsArray}. + */ + public static IsArray isArray(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new IsArray(Fields.field(fieldReference)); + } + + /** + * Creates new {@link IsArray}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsArray}. 
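+		 * <p>
+		 * A usage sketch of the field-reference variant (the {@code "tags"} field name is illustrative only):
+		 *
+		 * <pre class="code">
+		 * // renders { $isArray : "$tags" }
+		 * IsArray.isArray("tags");
+		 * </pre>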
+ */ + public static IsArray isArray(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new IsArray(expression); + } + } + + /** + * {@link AggregationExpression} for {@code $size}. + * + * @author Christoph Strobl + */ + public static class Size extends AbstractAggregationExpression { + + private Size(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$size"; + } + + /** + * Creates new {@link Size}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Size}. + */ + public static Size lengthOfArray(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Size(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Size}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Size}. + */ + public static Size lengthOfArray(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Size(expression); + } + + /** + * Creates new {@link Size}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Size}. + * @since 2.2 + */ + public static Size lengthOfArray(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new Size(Collections.singletonList(values)); + } + } + + /** + * {@link AggregationExpression} for {@code $slice}. + * + * @author Christoph Strobl + */ + public static class Slice extends AbstractAggregationExpression { + + private Slice(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$slice"; + } + + /** + * Creates new {@link Slice}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Slice}. + */ + public static Slice sliceArrayOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Slice(asFields(fieldReference)); + } + + /** + * Creates new {@link Slice}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Slice}. + */ + public static Slice sliceArrayOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Slice(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Slice}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Slice}. + * @since 2.2 + */ + public static Slice sliceArrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new Slice(Collections.singletonList(values)); + } + + /** + * Slice the number of elements. + * + * @param count number of elements to slice. + * @return new instance of {@link Slice}. + */ + public Slice itemCount(int count) { + return new Slice(append(count)); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. + * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(count)); + } + + /** + * Slice using offset and count. + * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. 
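+		 * <p>
+		 * A usage sketch (the {@code "favorites"} field name is illustrative only):
+		 *
+		 * <pre class="code">
+		 * // renders { $slice : [ "$favorites", 2, 3 ] }
+		 * Slice.sliceArrayOf("favorites").offset(2).itemCount(3);
+		 * </pre>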
+ */ + public SliceElementsBuilder offset(int position) { + return new SliceElementsBuilder(position); + } + + /** + * Slice using offset and count. + * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. + */ + public SliceElementsBuilder offset(AggregationExpression position) { + return new SliceElementsBuilder(position); + } + + /** + * @author Christoph Strobl + */ + public class SliceElementsBuilder { + + private final Object position; + + SliceElementsBuilder(Object position) { + this.position = position; + } + + /** + * Set the number of elements given {@literal count}. + * + * @param count number of elements to slice. + * @return new instance of {@link Slice}. + */ + public Slice itemCount(int count) { + return new Slice(append(position)).itemCount(count); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. + * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(position)).itemCount(count); + } + } + } + + /** + * {@link AggregationExpression} for {@code $indexOfArray}. + * + * @author Christoph Strobl + */ + public static class IndexOfArray extends AbstractAggregationExpression { + + private IndexOfArray(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$indexOfArray"; + } + + /** + * Start creating new {@link IndexOfArray}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IndexOfArray}. + */ + public static IndexOfArrayBuilder arrayOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new IndexOfArrayBuilder(Fields.field(fieldReference)); + } + + /** + * Start creating new {@link IndexOfArray}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IndexOfArray}. + */ + public static IndexOfArrayBuilder arrayOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new IndexOfArrayBuilder(expression); + } + + /** + * Start creating new {@link IndexOfArray}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link IndexOfArrayBuilder} to create {@link IndexOfArray}. + * @since 2.2 + */ + public static IndexOfArrayBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new IndexOfArrayBuilder(values); + } + + /** + * Lookup within a given range. + * + * @param range the lookup range. + * @return new instance of {@link IndexOfArray}. + */ + public IndexOfArray within(Range range) { + return new IndexOfArray(append(AggregationUtils.toRangeValues(range))); + } + + /** + * @author Christoph Strobl + */ + public static class IndexOfArrayBuilder { + + private final Object targetArray; + + private IndexOfArrayBuilder(Object targetArray) { + this.targetArray = targetArray; + } + + /** + * Set the {@literal value} to check for its index in the array. + * + * @param value must not be {@literal null}. + * @return new instance of {@link IndexOfArray}. + */ + public IndexOfArray indexOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new IndexOfArray(Arrays.asList(targetArray, value)); + } + } + } + + /** + * {@link AggregationExpression} for {@code $range}. 
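+	 * <p>
+	 * A usage sketch producing the even numbers from 0 (inclusive) to 10 (exclusive):
+	 *
+	 * <pre class="code">
+	 * // renders { $range : [ 0, 10, 2 ] }
+	 * RangeOperator.rangeStartingAt(0L).to(10L).withStepSize(2L);
+	 * </pre>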
+ * + * @author Christoph Strobl + */ + public static class RangeOperator extends AbstractAggregationExpression { + + private RangeOperator(List values) { + super(values); + } + + @Override + protected String getMongoMethod() { + return "$range"; + } + + /** + * Start creating new {@link RangeOperator}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RangeOperatorBuilder} to create {@link RangeOperator}. + */ + public static RangeOperatorBuilder rangeStartingAt(String fieldReference) { + return new RangeOperatorBuilder(Fields.field(fieldReference)); + } + + /** + * Start creating new {@link RangeOperator}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RangeOperatorBuilder} to create {@link RangeOperator}. + */ + public static RangeOperatorBuilder rangeStartingAt(AggregationExpression expression) { + return new RangeOperatorBuilder(expression); + } + + /** + * Start creating new {@link RangeOperator}. + * + * @param value + * @return new instance of {@link RangeOperator}. + */ + public static RangeOperatorBuilder rangeStartingAt(long value) { + return new RangeOperatorBuilder(value); + } + + public RangeOperator withStepSize(long stepSize) { + return new RangeOperator(append(stepSize)); + } + + public static class RangeOperatorBuilder { + + private final Object startPoint; + + private RangeOperatorBuilder(Object startPoint) { + this.startPoint = startPoint; + } + + /** + * Creates new {@link RangeOperator}. + * + * @param index + * @return new instance of {@link RangeOperator}. + */ + public RangeOperator to(long index) { + return new RangeOperator(Arrays.asList(startPoint, index)); + } + + /** + * Creates new {@link RangeOperator}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RangeOperator}. + */ + public RangeOperator to(AggregationExpression expression) { + return new RangeOperator(Arrays.asList(startPoint, expression)); + } + + /** + * Creates new {@link RangeOperator}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RangeOperator}. + */ + public RangeOperator to(String fieldReference) { + return new RangeOperator(Arrays.asList(startPoint, Fields.field(fieldReference))); + } + } + } + + /** + * {@link AggregationExpression} for {@code $reverseArray}. + * + * @author Christoph Strobl + */ + public static class ReverseArray extends AbstractAggregationExpression { + + private ReverseArray(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$reverseArray"; + } + + /** + * Creates new {@link ReverseArray} given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReverseArray}. + */ + public static ReverseArray reverseArrayOf(String fieldReference) { + return new ReverseArray(Fields.field(fieldReference)); + } + + /** + * Creates new {@link ReverseArray} given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReverseArray}. + */ + public static ReverseArray reverseArrayOf(AggregationExpression expression) { + return new ReverseArray(expression); + } + + /** + * Creates new {@link ReverseArray}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link ReverseArray}. 
+ * @since 2.2 + */ + public static ReverseArray reverseArrayOf(Collection values) { + return new ReverseArray(values); + } + } + + /** + * {@link AggregationExpression} for {@code $reduce}. + * + * @author Christoph Strobl + */ + public static class Reduce implements AggregationExpression { + + private final Object input; + private final Object initialValue; + private final List reduceExpressions; + + private Reduce(Object input, Object initialValue, List reduceExpressions) { + + this.input = input; + this.initialValue = initialValue; + this.reduceExpressions = reduceExpressions; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document document = new Document(); + + document.put("input", getMappedValue(input, context)); + document.put("initialValue", getMappedValue(initialValue, context)); + + if (reduceExpressions.iterator().next() instanceof PropertyExpression) { + + Document properties = new Document(); + for (AggregationExpression e : reduceExpressions) { + properties.putAll(e.toDocument(context)); + } + document.put("in", properties); + } else { + document.put("in", (reduceExpressions.iterator().next()).toDocument(context)); + } + + return new Document("$reduce", document); + } + + private Object getMappedValue(Object value, AggregationOperationContext context) { + + if (value instanceof Document) { + return value; + } + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else if (value instanceof Field field) { + return context.getReference(field).toString(); + } else { + return context.getMappedObject(new Document("###val###", value)).get("###val###"); + } + } + + /** + * Start creating new {@link Reduce}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link InitialValueBuilder} to create {@link Reduce}. + */ + public static InitialValueBuilder arrayOf(final String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new InitialValueBuilder() { + + @Override + public ReduceBuilder withInitialValue(final Object initialValue) { + + Assert.notNull(initialValue, "Initial value must not be null"); + + return new ReduceBuilder() { + + @Override + public Reduce reduce(AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + return new Reduce(Fields.field(fieldReference), initialValue, Collections.singletonList(expression)); + } + + @Override + public Reduce reduce(PropertyExpression... expressions) { + + Assert.notNull(expressions, "PropertyExpressions must not be null"); + + return new Reduce(Fields.field(fieldReference), initialValue, + Arrays. asList(expressions)); + } + }; + } + }; + } + + /** + * Start creating new {@link Reduce}. + * + * @param arrayValueExpression must not be {@literal null}. + * @return new instance of {@link InitialValueBuilder} to create {@link Reduce}. 
+ */ + public static InitialValueBuilder arrayOf(final AggregationExpression arrayValueExpression) { + + return new InitialValueBuilder() { + + @Override + public ReduceBuilder withInitialValue(final Object initialValue) { + + Assert.notNull(initialValue, "Initial value must not be null"); + + return new ReduceBuilder() { + + @Override + public Reduce reduce(AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + return new Reduce(arrayValueExpression, initialValue, Collections.singletonList(expression)); + } + + @Override + public Reduce reduce(PropertyExpression... expressions) { + + Assert.notNull(expressions, "PropertyExpressions must not be null"); + return new Reduce(arrayValueExpression, initialValue, Arrays.asList(expressions)); + } + }; + } + }; + } + + /** + * @author Christoph Strobl + */ + public interface InitialValueBuilder { + + /** + * Define the initial cumulative value set before in is applied to the first element of the input array. + * + * @param initialValue must not be {@literal null}. + * @return never {@literal null}. + */ + ReduceBuilder withInitialValue(Object initialValue); + } + + /** + * @author Christoph Strobl + */ + public interface ReduceBuilder { + + /** + * Define the {@link AggregationExpression} to apply to each element in the input array in left-to-right order. + *
                    + * NOTE: During evaluation of the in expression the variable references {@link Variable#THIS} and + * {@link Variable#VALUE} are available. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Reduce}. + */ + Reduce reduce(AggregationExpression expression); + + /** + * Define the {@link PropertyExpression}s to apply to each element in the input array in left-to-right order. + *
                    + * NOTE: During evaluation of the in expression the variable references {@link Variable#THIS} and + * {@link Variable#VALUE} are available. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link Reduce}. + */ + Reduce reduce(PropertyExpression... expressions); + } + + /** + * @author Christoph Strobl + */ + public static class PropertyExpression implements AggregationExpression { + + private final String propertyName; + private final AggregationExpression aggregationExpression; + + protected PropertyExpression(String propertyName, AggregationExpression aggregationExpression) { + + Assert.notNull(propertyName, "Property name must not be null"); + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); + + this.propertyName = propertyName; + this.aggregationExpression = aggregationExpression; + } + + /** + * Define a result property for an {@link AggregationExpression} used in {@link Reduce}. + * + * @param name must not be {@literal null}. + * @return new instance of {@link AsBuilder} to create {@link Reduce}. + */ + public static AsBuilder property(final String name) { + + return new AsBuilder() { + + @Override + public PropertyExpression definedAs(AggregationExpression expression) { + return new PropertyExpression(name, expression); + } + }; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(propertyName, aggregationExpression.toDocument(context)); + } + + /** + * @author Christoph Strobl + */ + public interface AsBuilder { + + /** + * Set the {@link AggregationExpression} resulting in the properties value. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + */ + PropertyExpression definedAs(AggregationExpression expression); + } + } + + public enum Variable implements AggregationVariable { + + THIS { + + @Override + public String getTarget() { + return "$$this"; + } + + @Override + public String toString() { + return getName(); + } + }, + + VALUE { + + @Override + public String getTarget() { + return "$$value"; + } + + @Override + public String toString() { + return getName(); + } + }; + + @Override + public boolean isInternal() { + return true; + } + + /** + * Create a {@link Field} reference to a given {@literal property} prefixed with the {@link Variable} identifier. + * eg. {@code $$value.product} + * + * @param property must not be {@literal null}. + * @return never {@literal null}. + */ + public Field referringTo(final String property) { + + return new Field() { + @Override + public String getName() { + return Variable.this.getName() + "." + property; + } + + @Override + public String getTarget() { + return Variable.this.getTarget() + "." + property; + } + + @Override + public boolean isAliased() { + return false; + } + + @Override + public String toString() { + return getName(); + } + }; + } + + public static boolean isVariable(Field field) { + + for (Variable var : values()) { + if (field.getTarget().startsWith(var.getTarget())) { + return true; + } + } + return false; + } + } + } + + /** + * {@link AggregationExpression} for {@code $zip}. + * + * @author Christoph Strobl + */ + public static class Zip extends AbstractAggregationExpression { + + protected Zip(java.util.Map value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$zip"; + } + + /** + * Start creating new {@link Zip}. + * + * @param fieldReference must not be {@literal null}. 
+ * @return new instance of {@link ZipBuilder} to create {@link Zip}. + */ + public static ZipBuilder arrayOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ZipBuilder(Fields.field(fieldReference)); + } + + /** + * Start creating new {@link Zip}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ZipBuilder} to create {@link Zip}. + */ + public static ZipBuilder arrayOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ZipBuilder(expression); + } + + /** + * Start creating new {@link Zip}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Zip}. + * @since 2.2 + */ + public static ZipBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Expression must not be null"); + return new ZipBuilder(values); + } + + /** + * Create new {@link Zip} and set the {@code useLongestLength} property to {@literal true}. + * + * @return new instance of {@link Zip}. + */ + public Zip useLongestLength() { + return new Zip(append("useLongestLength", true)); + } + + /** + * Optionally provide a default value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Zip}. + */ + public Zip defaultTo(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Zip(append("defaults", Fields.field(fieldReference))); + } + + /** + * Optionally provide a default value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Zip}. + */ + public Zip defaultTo(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Zip(append("defaults", expression)); + } + + /** + * Optionally provide a default value. + * + * @param array must not be {@literal null}. + * @return new instance of {@link Zip}. + */ + public Zip defaultTo(Object[] array) { + + Assert.notNull(array, "Array must not be null"); + return new Zip(append("defaults", Arrays.asList(array))); + } + + public static class ZipBuilder { + + private final List sourceArrays; + + private ZipBuilder(Object sourceArray) { + + this.sourceArrays = new ArrayList<>(); + this.sourceArrays.add(sourceArray); + } + + /** + * Creates new {@link Zip} that transposes an array of input arrays so that the first element of the output array + * would be an array containing, the first element of the first input array, the first element of the second input + * array, etc. + * + * @param arrays arrays to zip the referenced one with. must not be {@literal null}. + * @return new instance of {@link Zip}. + */ + public Zip zip(Object... arrays) { + + Assert.notNull(arrays, "Arrays must not be null"); + for (Object value : arrays) { + + if (value instanceof String stringValue) { + sourceArrays.add(Fields.field(stringValue)); + } else { + sourceArrays.add(value); + } + } + + return new Zip(Collections.singletonMap("inputs", sourceArrays)); + } + } + } + + /** + * {@link AggregationExpression} for {@code $in}. + * + * @author Christoph Strobl + * @author Shashank Sharma + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/in/ + * @since 2.2 + */ + public static class In extends AbstractAggregationExpression { + + private In(List values) { + super(values); + } + + @Override + protected String getMongoMethod() { + return "$in"; + } + + /** + * Start creating {@link In}. 
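+		 * <p>
+		 * A usage sketch (the {@code "fruits"} field name is illustrative only):
+		 *
+		 * <pre class="code">
+		 * // renders { $in : [ "banana", "$fruits" ] }
+		 * In.arrayOf("fruits").containsValue("banana");
+		 * </pre>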
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link InBuilder} to create {@link In}.
+		 */
+		public static InBuilder arrayOf(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+
+			return value -> {
+
+				Assert.notNull(value, "Value must not be null");
+				return new In(Arrays.asList(value, Fields.field(fieldReference)));
+			};
+		}
+
+		/**
+		 * Start creating {@link In}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link InBuilder} to create {@link In}.
+		 */
+		public static InBuilder arrayOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+
+			return value -> {
+
+				Assert.notNull(value, "Value must not be null");
+
+				return new In(Arrays.asList(value, expression));
+			};
+		}
+
+		/**
+		 * Start creating {@link In} to search for an element in the given {@link Collection} of values.
+		 *
+		 * @param values must not be {@literal null}.
+		 * @return new instance of {@link InBuilder}.
+		 * @since 2.2
+		 */
+		public static InBuilder arrayOf(Collection<?> values) {
+
+			Assert.notNull(values, "Values must not be null");
+
+			return value -> {
+
+				Assert.notNull(value, "Value must not be null");
+
+				return new In(Arrays.asList(value, values));
+			};
+		}
+
+		/**
+		 * @author Christoph Strobl
+		 */
+		public interface InBuilder {
+
+			/**
+			 * Set the {@literal value} to check for existence in the array.
+			 *
+			 * @param value must not be {@literal null}.
+			 * @return new instance of {@link In}.
+			 */
+			In containsValue(Object value);
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $arrayToObject} that transforms an array into a single document.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/ + * @since 2.1 + */ + public static class ArrayToObject extends AbstractAggregationExpression { + + private ArrayToObject(Object value) { + super(value); + } + + /** + * Converts the given array (e.g. an array of two-element arrays, a field reference to an array,...) to an object. + * + * @param array must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayToObject(Object array) { + return new ArrayToObject(array); + } + + /** + * Converts the array pointed to by the given {@link Field field reference} to an object. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayValueOfToObject(String fieldReference) { + return new ArrayToObject(Fields.field(fieldReference)); + } + + /** + * Converts the result array of the given {@link AggregationExpression expression} to an object. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayValueOfToObject(AggregationExpression expression) { + return new ArrayToObject(expression); + } + + @Override + protected String getMongoMethod() { + return "$arrayToObject"; + } + } + + /** + * {@link AggregationExpression} for {@code $first} that returns the first element in an array.
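+ * <p>
+ * A usage sketch (the {@code scores} field name is an assumption):
+ * <pre>{@code
+ * // renders as { $first: "$scores" }
+ * First.firstOf("scores");
+ * }</pre>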
                    + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class First extends AbstractAggregationExpression { + + private First(Object value) { + super(value); + } + + /** + * Returns the first element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First first(Object array) { + return new First(array); + } + + /** + * Returns the first element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(String fieldReference) { + return new First(Fields.field(fieldReference)); + } + + /** + * Returns the first element of the array computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(AggregationExpression expression) { + return new First(expression); + } + + @Override + protected String getMongoMethod() { + return "$first"; + } + } + + /** + * {@link AggregationExpression} for {@code $last} that returns the last element in an array.
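+ * <p>
+ * A usage sketch (the {@code scores} field name is an assumption):
+ * <pre>{@code
+ * // renders as { $last: "$scores" }
+ * Last.lastOf("scores");
+ * }</pre>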
+ * NOTE: Requires MongoDB 4.4 or later.
+ *
+ * @author Divya Srivastava
+ * @author Christoph Strobl
+ * @since 3.4
+ */
+ public static class Last extends AbstractAggregationExpression {
+
+ private Last(Object value) {
+ super(value);
+ }
+
+ /**
+ * Returns the last element in the given array.
+ *
+ * @param array must not be {@literal null}.
+ * @return new instance of {@link Last}.
+ */
+ public static Last last(Object array) {
+ return new Last(array);
+ }
+
+ /**
+ * Returns the last element in the array pointed to by the given {@link Field field reference}.
+ *
+ * @param fieldReference must not be {@literal null}.
+ * @return new instance of {@link Last}.
+ */
+ public static Last lastOf(String fieldReference) {
+ return new Last(Fields.field(fieldReference));
+ }
+
+ /**
+ * Returns the last element of the array computed by the given {@link AggregationExpression expression}.
+ *
+ * @param expression must not be {@literal null}.
+ * @return new instance of {@link Last}.
+ */
+ public static Last lastOf(AggregationExpression expression) {
+ return new Last(expression);
+ }
+
+ @Override
+ protected String getMongoMethod() {
+ return "$last";
+ }
+ }
+
+ /**
+ * {@link AggregationExpression} for {@code $sortArray} that sorts elements in an array.
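+ * <p>
+ * A usage sketch (the {@code team} array field and its {@code name} property are assumptions):
+ * <pre>{@code
+ * // renders as { $sortArray: { input: "$team", sortBy: { name: 1 } } }
+ * SortArray.sortArrayOf("team").by(Sort.by(Sort.Direction.ASC, "name"));
+ * }</pre>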
+ *
+ * @author Christoph Strobl
+ * @since 4.0
+ */
+ public static class SortArray extends AbstractAggregationExpression {
+
+ private SortArray(Object value) {
+ super(value);
+ }
+
+ /**
+ * Sorts the elements of the given array.
+ *
+ * @param array must not be {@literal null}.
+ * @return new instance of {@link SortArray}.
+ */
+ public static SortArray sortArray(Object array) {
+ return new SortArray(Collections.singletonMap("input", array));
+ }
+
+ /**
+ * Sorts the elements in the array pointed to by the given {@link Field field reference}.
+ *
+ * @param fieldReference must not be {@literal null}.
+ * @return new instance of {@link SortArray}.
+ */
+ public static SortArray sortArrayOf(String fieldReference) {
+ return sortArray(Fields.field(fieldReference));
+ }
+
+ /**
+ * Sorts the elements of the array computed by the given {@link AggregationExpression expression}.
+ *
+ * @param expression must not be {@literal null}.
+ * @return new instance of {@link SortArray}.
+ */
+ public static SortArray sortArrayOf(AggregationExpression expression) {
+ return sortArray(expression);
+ }
+
+ /**
+ * Set the order to put elements in.
+ *
+ * @param sort must not be {@literal null}.
+ * @return new instance of {@link SortArray}.
+ */
+ public SortArray by(Sort sort) {
+ return new SortArray(append("sortBy", sort));
+ }
+
+ /**
+ * Order the values for the array in the given direction.
+ *
+ * @param direction must not be {@literal null}.
+ * @return new instance of {@link SortArray}.
+ * @since 4.5
+ */
+ public SortArray direction(Direction direction) {
+ return new SortArray(append("sortBy", direction.isAscending() ? 1 : -1));
+ }
+
+ /**
+ * Sort the array elements by their values in ascending order. Suitable for arrays of simple types (e.g., integers,
+ * strings).
+ *
+ * @return new instance of {@link SortArray}.
+ * @since 4.5
+ */
+ public SortArray byValueAscending() {
+ return direction(Direction.ASC);
+ }
+
+ /**
+ * Sort the array elements by their values in descending order. Suitable for arrays of simple types (e.g., integers,
+ * strings).
+ *
+ * @return new instance of {@link SortArray}.
+ * @since 4.5
+ */
+ public SortArray byValueDescending() {
+ return direction(Direction.DESC);
+ }
+
+ @Override
+ protected String getMongoMethod() {
+ return "$sortArray";
+ }
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java
new file mode 100644
index 0000000000..4d321c4715
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.bson.conversions.Bson; + +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.ObjectUtils; + +/** + * {@link AggregationOperation} implementation that can return a {@link Document} from a {@link Bson} or {@link String} + * document. + * + * @author Christoph Strobl + * @since 4.0 + */ +record BasicAggregationOperation(Object value) implements AggregationOperation { + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (value instanceof Bson bson) { + return BsonUtils.asDocument(bson, context.getCodecRegistry()); + } + + if (value instanceof String json && BsonUtils.isJsonDocument(json)) { + return BsonUtils.parse(json, context); + } + + throw new IllegalStateException( + String.format("%s cannot be converted to org.bson.Document", ObjectUtils.nullSafeClassName(value))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java new file mode 100644 index 0000000000..69689908c9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java @@ -0,0 +1,353 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.util.Assert; + +/** + * Gateway to {@literal boolean expressions} that evaluate their argument expressions as booleans and return a boolean + * as the result. + * + * @author Christoph Strobl + * @since 1.10 + */ +public class BooleanOperators { + + /** + * Take the array referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link BooleanOperatorFactory}. + */ + public static BooleanOperatorFactory valueOf(String fieldReference) { + return new BooleanOperatorFactory(fieldReference); + } + + /** + * Take the value resulting of the given {@link AggregationExpression}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link BooleanOperatorFactory}. + */ + public static BooleanOperatorFactory valueOf(AggregationExpression fieldReference) { + return new BooleanOperatorFactory(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that evaluates the boolean value of the referenced field and returns the + * opposite boolean value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Not}. 
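+ * <p>
+ * For illustration (the {@code active} field name is an assumption):
+ * <pre>{@code
+ * // renders as { $not: ["$active"] }
+ * BooleanOperators.not("active");
+ * }</pre>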
+ */ + public static Not not(String fieldReference) { + return Not.not(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that evaluates the boolean value of {@link AggregationExpression} result + * and returns the opposite boolean value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Not}. + */ + public static Not not(AggregationExpression expression) { + return Not.not(expression); + } + + /** + * @author Christoph Strobl + */ + public static class BooleanOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link BooleanOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public BooleanOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link BooleanOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public BooleanOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that evaluates one or more expressions and returns {@literal true} if + * all of the expressions are {@literal true}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link And}. + */ + public And and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createAnd().andExpression(expression); + } + + /** + * Creates new {@link AggregationExpression} that evaluates one or more expressions and returns {@literal true} if + * all of the expressions are {@literal true}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link And}. + */ + public And and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createAnd().andField(fieldReference); + } + + private And createAnd() { + return usesFieldRef() ? And.and(Fields.field(fieldReference)) : And.and(expression); + } + + /** + * Creates new {@link AggregationExpression} that evaluates one or more expressions and returns {@literal true} if + * any of the expressions are {@literal true}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Or}. + */ + public Or or(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createOr().orExpression(expression); + } + + /** + * Creates new {@link AggregationExpression} that evaluates one or more expressions and returns {@literal true} if + * any of the expressions are {@literal true}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Or}. + */ + public Or or(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createOr().orField(fieldReference); + } + + private Or createOr() { + return usesFieldRef() ? Or.or(Fields.field(fieldReference)) : Or.or(expression); + } + + /** + * Creates new {@link AggregationExpression} that evaluates a boolean and returns the opposite boolean value. + * + * @return new instance of {@link Not}. + */ + public Not not() { + return usesFieldRef() ? 
Not.not(fieldReference) : Not.not(expression); + } + + private boolean usesFieldRef() { + return this.fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $and}. + * + * @author Christoph Strobl + */ + public static class And extends AbstractAggregationExpression { + + private And(List values) { + super(values); + } + + @Override + protected String getMongoMethod() { + return "$and"; + } + + /** + * Creates new {@link And} that evaluates one or more expressions and returns {@literal true} if all of the + * expressions are {@literal true}. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link And}. + */ + public static And and(Object... expressions) { + return new And(Arrays.asList(expressions)); + } + + /** + * Creates new {@link And} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link And}. + */ + public And andExpression(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new And(append(expression)); + } + + /** + * Creates new {@link And} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link And}. + */ + public And andField(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new And(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link And} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link And}. + */ + public And andValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new And(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $or}. + * + * @author Christoph Strobl + */ + public static class Or extends AbstractAggregationExpression { + + private Or(List values) { + super(values); + } + + @Override + protected String getMongoMethod() { + return "$or"; + } + + /** + * Creates new {@link Or} that evaluates one or more expressions and returns {@literal true} if any of the + * expressions are {@literal true}. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link Or}. + */ + public static Or or(Object... expressions) { + + Assert.notNull(expressions, "Expressions must not be null"); + return new Or(Arrays.asList(expressions)); + } + + /** + * Creates new {@link Or} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Or}. + */ + public Or orExpression(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Or(append(expression)); + } + + /** + * Creates new {@link Or} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Or}. + */ + public Or orField(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Or(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Or} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Or}. 
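+ * <p>
+ * For illustration (the {@code hasDiscount} and {@code isVip} field names are assumptions):
+ * <pre>{@code
+ * // renders as { $or: ["$hasDiscount", "$isVip"] }
+ * BooleanOperators.valueOf("hasDiscount").or("isVip");
+ * }</pre>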
+ */ + public Or orValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Or(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $not}. + * + * @author Christoph Strobl + */ + public static class Not extends AbstractAggregationExpression { + + private Not(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$not"; + } + + /** + * Creates new {@link Not} that evaluates the boolean value of the referenced field and returns the opposite boolean + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Not}. + */ + public static Not not(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Not(asFields(fieldReference)); + } + + /** + * Creates new {@link Not} that evaluates the resulting boolean value of the given {@link AggregationExpression} and + * returns the opposite boolean value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Not}. + */ + public static Not not(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Not(Collections.singletonList(expression)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java new file mode 100644 index 0000000000..36492e2a81 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -0,0 +1,253 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.BucketAutoOperationOutputBuilder; +import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $bucketAuto}-operation.
                    + * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into a + * specific number of groups, called buckets, based on a specified expression. Bucket boundaries are automatically + * determined in an attempt to evenly distribute the documents into the specified number of buckets.
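+ * <p>
+ * A usage sketch (the {@code price} field and the bucket count are assumptions):
+ * <pre>{@code
+ * // renders as { $bucketAuto: { groupBy: "$price", buckets: 5, granularity: "POWERSOF2" } }
+ * Aggregation.bucketAuto("price", 5).withGranularity(Granularities.POWERSOF2);
+ * }</pre>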
+ * We recommend using the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating
+ * instances of this class directly.
+ *
+ * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/
+ * @see BucketOperationSupport
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 1.10
+ */
+public class BucketAutoOperation extends BucketOperationSupport<BucketAutoOperation, BucketAutoOperationOutputBuilder>
+		implements FieldsExposingAggregationOperation {
+
+	private final int buckets;
+	private final String granularity;
+
+	/**
+	 * Creates a new {@link BucketAutoOperation} given a {@link Field group-by field}.
+	 *
+	 * @param groupByField must not be {@literal null}.
+	 * @param buckets number of buckets, must be a positive integer.
+	 */
+	public BucketAutoOperation(Field groupByField, int buckets) {
+
+		super(groupByField);
+
+		Assert.isTrue(buckets > 0, "Number of buckets must be greater than 0");
+
+		this.buckets = buckets;
+		this.granularity = null;
+	}
+
+	/**
+	 * Creates a new {@link BucketAutoOperation} given an {@link AggregationExpression group-by expression}.
+	 *
+	 * @param groupByExpression must not be {@literal null}.
+	 * @param buckets number of buckets, must be a positive integer.
+	 */
+	public BucketAutoOperation(AggregationExpression groupByExpression, int buckets) {
+
+		super(groupByExpression);
+
+		Assert.isTrue(buckets > 0, "Number of buckets must be greater than 0");
+
+		this.buckets = buckets;
+		this.granularity = null;
+	}
+
+	private BucketAutoOperation(BucketAutoOperation bucketOperation, Outputs outputs) {
+
+		super(bucketOperation, outputs);
+
+		this.buckets = bucketOperation.buckets;
+		this.granularity = bucketOperation.granularity;
+	}
+
+	private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, String granularity) {
+
+		super(bucketOperation);
+
+		this.buckets = buckets;
+		this.granularity = granularity;
+	}
+
+	@Override
+	public Document toDocument(AggregationOperationContext context) {
+
+		Document options = new Document();
+
+		options.put("buckets", buckets);
+
+		if (granularity != null) {
+			options.put("granularity", granularity);
+		}
+
+		options.putAll(super.toDocument(context));
+
+		return new Document(getOperator(), options);
+	}
+
+	@Override
+	public String getOperator() {
+		return "$bucketAuto";
+	}
+
+	/**
+	 * Configures the number of {@literal buckets} and returns a new {@link BucketAutoOperation}.
+	 *
+	 * @param buckets must be a positive number.
+	 * @return new instance of {@link BucketAutoOperation}.
+	 */
+	public BucketAutoOperation withBuckets(int buckets) {
+
+		Assert.isTrue(buckets > 0, "Number of buckets must be greater than 0");
+		return new BucketAutoOperation(this, buckets, granularity);
+	}
+
+	/**
+	 * Configures {@link Granularity granularity} that specifies the preferred number series to use to ensure that the
+	 * calculated boundary edges end on preferred round numbers or their powers of 10, and returns a new
+	 * {@link BucketAutoOperation}.
+	 * Use either predefined {@link Granularities} or provide your own.
+	 *
+	 * @param granularity must not be {@literal null}.
+	 * @return new instance of {@link BucketAutoOperation}.
+	 */
+	public BucketAutoOperation withGranularity(Granularity granularity) {
+
+		Assert.notNull(granularity, "Granularity must not be null");
+
+		return new BucketAutoOperation(this, buckets, granularity.getMongoRepresentation());
+	}
+
+	@Override
+	protected BucketAutoOperation newBucketOperation(Outputs outputs) {
+		return new BucketAutoOperation(this, outputs);
+	}
+
+	@Override
+	public ExpressionBucketAutoOperationBuilder andOutputExpression(String expression, Object... params) {
+		return new ExpressionBucketAutoOperationBuilder(expression, this, params);
+	}
+
+	@Override
+	public BucketAutoOperationOutputBuilder andOutput(AggregationExpression expression) {
+		return new BucketAutoOperationOutputBuilder(expression, this);
+	}
+
+	@Override
+	public BucketAutoOperationOutputBuilder andOutput(String fieldName) {
+		return new BucketAutoOperationOutputBuilder(Fields.field(fieldName), this);
+	}
+
+	/**
+	 * {@link OutputBuilder} implementation for {@link BucketAutoOperation}.
+	 */
+	public static class BucketAutoOperationOutputBuilder
+			extends OutputBuilder<BucketAutoOperationOutputBuilder, BucketAutoOperation> {
+
+		/**
+		 * Creates a new {@link BucketAutoOperationOutputBuilder} for the given value and {@link BucketAutoOperation}.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @param operation must not be {@literal null}.
+		 */
+		protected BucketAutoOperationOutputBuilder(Object value, BucketAutoOperation operation) {
+			super(value, operation);
+		}
+
+		@Override
+		protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) {
+			return new BucketAutoOperationOutputBuilder(operationOutput, this.operation);
+		}
+	}
+
+	/**
+	 * {@link ExpressionBucketOperationBuilderSupport} implementation for {@link BucketAutoOperation} using SpEL
+	 * expression based {@link Output}.
+	 *
+	 * @author Mark Paluch
+	 */
+	public static class ExpressionBucketAutoOperationBuilder
+			extends ExpressionBucketOperationBuilderSupport<BucketAutoOperationOutputBuilder, BucketAutoOperation> {
+
+		/**
+		 * Creates a new {@link ExpressionBucketAutoOperationBuilder} for the given value, {@link BucketAutoOperation} and
+		 * parameters.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @param operation must not be {@literal null}.
+		 * @param parameters must not be {@literal null}.
+		 */
+		protected ExpressionBucketAutoOperationBuilder(String expression, BucketAutoOperation operation,
+				Object[] parameters) {
+			super(expression, operation, parameters);
+		}
+
+		@Override
+		protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) {
+			return new BucketAutoOperationOutputBuilder(operationOutput, this.operation);
+		}
+	}
+
+	/**
+	 * @author Mark Paluch
+	 */
+	public interface Granularity {
+
+		/**
+		 * @return a String that represents a MongoDB granularity to be used with {@link BucketAutoOperation}. Never
+		 *         {@literal null}.
+		 */
+		String getMongoRepresentation();
+	}
+
+	/**
+	 * Supported MongoDB granularities.
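+	 * <p>
+	 * For example (a sketch; the field name and bucket count are assumptions):
+	 * <pre>{@code
+	 * // renders granularity as "1-2-5"
+	 * Aggregation.bucketAuto("price", 5).withGranularity(Granularities.SERIES_1_2_5);
+	 * }</pre>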
+ * + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity + * @author Mark Paluch + */ + public enum Granularities implements Granularity { + + R5, R10, R20, R40, R80, // + + SERIES_1_2_5("1-2-5"), // + + E6, E12, E24, E48, E96, E192, // + + POWERSOF2; + + private final String granularity; + + Granularities() { + this.granularity = name(); + } + + Granularities(String granularity) { + this.granularity = granularity; + } + + @Override + public String getMongoRepresentation() { + return granularity; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java new file mode 100644 index 0000000000..525789e628 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -0,0 +1,207 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.BucketOperation.BucketOperationOutputBuilder; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $bucket}-operation.
                    + * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into + * groups, called buckets, based on a specified expression and bucket boundaries.
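+ * <p>
+ * A usage sketch (the {@code price} field, boundaries, and default bucket are assumptions):
+ * <pre>{@code
+ * // renders as { $bucket: { groupBy: "$price", boundaries: [0, 100, 200], default: "other" } }
+ * Aggregation.bucket("price").withBoundaries(0, 100, 200).withDefaultBucket("other");
+ * }</pre>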
+ * We recommend using the static factory method {@link Aggregation#bucket(String)} instead of creating instances of
+ * this class directly.
+ *
+ * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/
+ * @see BucketOperationSupport
+ * @author Mark Paluch
+ * @since 1.10
+ */
+public class BucketOperation extends BucketOperationSupport<BucketOperation, BucketOperationOutputBuilder>
+		implements FieldsExposingAggregationOperation {
+
+	private final List<Object> boundaries;
+	private final Object defaultBucket;
+
+	/**
+	 * Creates a new {@link BucketOperation} given a {@link Field group-by field}.
+	 *
+	 * @param groupByField must not be {@literal null}.
+	 */
+	public BucketOperation(Field groupByField) {
+
+		super(groupByField);
+
+		this.boundaries = Collections.emptyList();
+		this.defaultBucket = null;
+	}
+
+	/**
+	 * Creates a new {@link BucketOperation} given an {@link AggregationExpression group-by expression}.
+	 *
+	 * @param groupByExpression must not be {@literal null}.
+	 */
+	public BucketOperation(AggregationExpression groupByExpression) {
+
+		super(groupByExpression);
+
+		this.boundaries = Collections.emptyList();
+		this.defaultBucket = null;
+	}
+
+	private BucketOperation(BucketOperation bucketOperation, Outputs outputs) {
+
+		super(bucketOperation, outputs);
+
+		this.boundaries = bucketOperation.boundaries;
+		this.defaultBucket = bucketOperation.defaultBucket;
+	}
+
+	private BucketOperation(BucketOperation bucketOperation, List<Object> boundaries, Object defaultBucket) {
+
+		super(bucketOperation);
+
+		this.boundaries = new ArrayList<>(boundaries);
+		this.defaultBucket = defaultBucket;
+	}
+
+	@Override
+	public Document toDocument(AggregationOperationContext context) {
+
+		Document options = new Document();
+
+		options.put("boundaries", context.getMappedObject(new Document("$set", boundaries)).get("$set"));
+
+		if (defaultBucket != null) {
+			options.put("default", context.getMappedObject(new Document("$set", defaultBucket)).get("$set"));
+		}
+
+		options.putAll(super.toDocument(context));
+
+		return new Document(getOperator(), options);
+	}
+
+	@Override
+	public String getOperator() {
+		return "$bucket";
+	}
+
+	/**
+	 * Configures a default bucket {@literal literal} and returns a new {@link BucketOperation}.
+	 *
+	 * @param literal must not be {@literal null}.
+	 * @return new instance of {@link BucketOperation}.
+	 */
+	public BucketOperation withDefaultBucket(Object literal) {
+
+		Assert.notNull(literal, "Default bucket literal must not be null");
+		return new BucketOperation(this, boundaries, literal);
+	}
+
+	/**
+	 * Configures {@literal boundaries} and returns a new {@link BucketOperation}. Existing {@literal boundaries} are
+	 * preserved and the new {@literal boundaries} are appended.
+	 *
+	 * @param boundaries must not be {@literal null}.
+	 * @return new instance of {@link BucketOperation}.
+	 */
+	public BucketOperation withBoundaries(Object... boundaries) {
+
+		Assert.notNull(boundaries, "Boundaries must not be null");
+		Assert.noNullElements(boundaries, "Boundaries must not contain null values");
+
+		List<Object> newBoundaries = new ArrayList<>(this.boundaries.size() + boundaries.length);
+		newBoundaries.addAll(this.boundaries);
+		newBoundaries.addAll(Arrays.asList(boundaries));
+
+		return new BucketOperation(this, newBoundaries, defaultBucket);
+	}
+
+	@Override
+	protected BucketOperation newBucketOperation(Outputs outputs) {
+		return new BucketOperation(this, outputs);
+	}
+
+	@Override
+	public ExpressionBucketOperationBuilder andOutputExpression(String expression, Object... params) {
+		return new ExpressionBucketOperationBuilder(expression, this, params);
+	}
+
+	@Override
+	public BucketOperationOutputBuilder andOutput(AggregationExpression expression) {
+		return new BucketOperationOutputBuilder(expression, this);
+	}
+
+	@Override
+	public BucketOperationOutputBuilder andOutput(String fieldName) {
+		return new BucketOperationOutputBuilder(Fields.field(fieldName), this);
+	}
+
+	/**
+	 * {@link OutputBuilder} implementation for {@link BucketOperation}.
+	 */
+	public static class BucketOperationOutputBuilder
+			extends BucketOperationSupport.OutputBuilder<BucketOperationOutputBuilder, BucketOperation> {
+
+		/**
+		 * Creates a new {@link BucketOperationOutputBuilder} for the given value and {@link BucketOperation}.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @param operation must not be {@literal null}.
+		 */
+		protected BucketOperationOutputBuilder(Object value, BucketOperation operation) {
+			super(value, operation);
+		}
+
+		@Override
+		protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) {
+			return new BucketOperationOutputBuilder(operationOutput, this.operation);
+		}
+	}
+
+	/**
+	 * {@link ExpressionBucketOperationBuilderSupport} implementation for {@link BucketOperation} using SpEL expression
+	 * based {@link Output}.
+	 *
+	 * @author Mark Paluch
+	 */
+	public static class ExpressionBucketOperationBuilder
+			extends ExpressionBucketOperationBuilderSupport<BucketOperationOutputBuilder, BucketOperation> {
+
+		/**
+		 * Creates a new {@link ExpressionBucketOperationBuilder} for the given value, {@link BucketOperation} and
+		 * parameters.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @param operation must not be {@literal null}.
+		 * @param parameters must not be {@literal null}.
+		 */
+		protected ExpressionBucketOperationBuilder(String expression, BucketOperation operation, Object[] parameters) {
+			super(expression, operation, parameters);
+		}
+
+		@Override
+		protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) {
+			return new BucketOperationOutputBuilder(operationOutput, this.operation);
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java
new file mode 100644
index 0000000000..e19ad59a3f
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java
@@ -0,0 +1,657 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.aggregation;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.bson.Document;
+import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder;
+import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
+import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder;
+import org.springframework.util.Assert;
+
+/**
+ * Base class for bucket operations that support output expressions in the aggregation framework.
                    + * Bucket stages collect documents into buckets and can contribute output fields.
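+ * <p>
+ * A sketch of declaring outputs (field names and boundaries are assumptions):
+ * <pre>{@code
+ * // output: { titles: { $push: "$title" }, count: { $sum: 1 } }
+ * Aggregation.bucket("price").withBoundaries(0, 100)
+ *     .andOutput("title").push().as("titles")
+ *     .andOutputCount().as("count");
+ * }</pre>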
                    + * Implementing classes are required to provide an {@link OutputBuilder}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.10 + */ +public abstract class BucketOperationSupport, B extends OutputBuilder> + implements FieldsExposingAggregationOperation { + + private final Field groupByField; + private final AggregationExpression groupByExpression; + private final Outputs outputs; + + /** + * Creates a new {@link BucketOperationSupport} given a {@link Field group-by field}. + * + * @param groupByField must not be {@literal null}. + */ + protected BucketOperationSupport(Field groupByField) { + + Assert.notNull(groupByField, "Group by field must not be null"); + + this.groupByField = groupByField; + this.groupByExpression = null; + this.outputs = Outputs.EMPTY; + } + + /** + * Creates a new {@link BucketOperationSupport} given a {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + */ + protected BucketOperationSupport(AggregationExpression groupByExpression) { + + Assert.notNull(groupByExpression, "Group by AggregationExpression must not be null"); + + this.groupByExpression = groupByExpression; + this.groupByField = null; + this.outputs = Outputs.EMPTY; + } + + /** + * Creates a copy of {@link BucketOperationSupport}. + * + * @param operationSupport must not be {@literal null}. + */ + protected BucketOperationSupport(BucketOperationSupport operationSupport) { + this(operationSupport, operationSupport.outputs); + } + + /** + * Creates a copy of {@link BucketOperationSupport} and applies the new {@link Outputs}. + * + * @param operationSupport must not be {@literal null}. + * @param outputs must not be {@literal null}. + */ + protected BucketOperationSupport(BucketOperationSupport operationSupport, Outputs outputs) { + + Assert.notNull(operationSupport, "BucketOperationSupport must not be null"); + Assert.notNull(outputs, "Outputs must not be null"); + + this.groupByField = operationSupport.groupByField; + this.groupByExpression = operationSupport.groupByExpression; + this.outputs = outputs; + } + + /** + * Creates a new {@link ExpressionBucketOperationBuilderSupport} given a SpEL {@literal expression} and optional + * {@literal params} to add an output field to the resulting bucket documents. + * + * @param expression the SpEL expression, must not be {@literal null} or empty. + * @param params must not be {@literal null} + * @return new instance of {@link ExpressionBucketOperationBuilderSupport} to create {@link BucketOperation}. + */ + public abstract ExpressionBucketOperationBuilderSupport andOutputExpression(String expression, + Object... params); + + /** + * Creates a new {@link BucketOperationSupport} given an {@link AggregationExpression} to add an output field to the + * resulting bucket documents. + * + * @param expression the SpEL expression, must not be {@literal null} or empty. + * @return never {@literal null}. + */ + public abstract B andOutput(AggregationExpression expression); + + /** + * Creates a new {@link BucketOperationSupport} given {@literal fieldName} to add an output field to the resulting + * bucket documents. {@link BucketOperationSupport} exposes accumulation operations that can be applied to + * {@literal fieldName}. + * + * @param fieldName must not be {@literal null} or empty. + * @return never {@literal null}. 
+ */ + public abstract B andOutput(String fieldName); + + /** + * Creates a new {@link BucketOperationSupport} given to add a count field to the resulting bucket documents. + * + * @return never {@literal null}. + */ + public B andOutputCount() { + return andOutput(new AggregationExpression() { + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$sum", 1); + } + }); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document document = new Document(); + + document.put("groupBy", groupByExpression == null ? context.getReference(groupByField).toString() + : groupByExpression.toDocument(context)); + + if (!outputs.isEmpty()) { + document.put("output", outputs.toDocument(context)); + } + + return document; + } + + @Override + public ExposedFields getFields() { + return outputs.asExposedFields(); + } + + /** + * Implementation hook to create a new bucket operation. + * + * @param outputs the outputs + * @return the new bucket operation. + */ + protected abstract T newBucketOperation(Outputs outputs); + + protected T andOutput(Output output) { + return newBucketOperation(outputs.and(output)); + } + + /** + * Builder for SpEL expression-based {@link Output}. + * + * @author Mark Paluch + */ + public abstract static class ExpressionBucketOperationBuilderSupport, T extends BucketOperationSupport> + extends OutputBuilder { + + /** + * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperationSupport} + * and parameters. + * + * @param expression must not be {@literal null}. + * @param operation must not be {@literal null}. + * @param parameters + */ + protected ExpressionBucketOperationBuilderSupport(String expression, T operation, Object[] parameters) { + super(new SpelExpressionOutput(expression, parameters), operation); + } + } + + /** + * Base class for {@link Output} builders that result in a {@link BucketOperationSupport} providing the built + * {@link Output}. + * + * @author Mark Paluch + */ + public abstract static class OutputBuilder, T extends BucketOperationSupport> { + + protected final Object value; + protected final T operation; + + /** + * Creates a new {@link OutputBuilder} for the given value and {@link BucketOperationSupport}. + * + * @param value must not be {@literal null}. + * @param operation must not be {@literal null}. + */ + protected OutputBuilder(Object value, T operation) { + + Assert.notNull(value, "Value must not be null or empty"); + Assert.notNull(operation, "ProjectionOperation must not be null"); + + this.value = value; + this.operation = operation; + } + + /** + * Generates a builder for a {@code $sum}-expression.
+		 * Count expressions are emulated via {@code $sum: 1}.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B count() {
+			return sum(1);
+		}
+
+		/**
+		 * Generates a builder for a {@code $sum}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B sum() {
+			return apply(Accumulators.SUM);
+		}
+
+		/**
+		 * Generates a builder for a {@code $sum}-expression for the given {@literal value}.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return never {@literal null}.
+		 */
+		public B sum(Number value) {
+			return apply(new OperationOutput(Accumulators.SUM.getMongoOperator(), Collections.singleton(value)));
+		}
+
+		/**
+		 * Generates a builder for a {@code $last}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B last() {
+			return apply(Accumulators.LAST);
+		}
+
+		/**
+		 * Generates a builder for a {@code $first}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B first() {
+			return apply(Accumulators.FIRST);
+		}
+
+		/**
+		 * Generates a builder for an {@code $avg}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B avg() {
+			return apply(Accumulators.AVG);
+		}
+
+		/**
+		 * Generates a builder for a {@code $min}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B min() {
+			return apply(Accumulators.MIN);
+		}
+
+		/**
+		 * Generates a builder for a {@code $max}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B max() {
+			return apply(Accumulators.MAX);
+		}
+
+		/**
+		 * Generates a builder for a {@code $push}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B push() {
+			return apply(Accumulators.PUSH);
+		}
+
+		/**
+		 * Generates a builder for an {@code $addToSet}-expression for the current value.
+		 *
+		 * @return never {@literal null}.
+		 */
+		public B addToSet() {
+			return apply(Accumulators.ADDTOSET);
+		}
+
+		/**
+		 * Apply an operator to the current value.
+		 *
+		 * @param operation the operation name, must not be {@literal null} or empty.
+		 * @param values must not be {@literal null}.
+		 * @return never {@literal null}.
+		 */
+		public B apply(String operation, Object... values) {
+
+			Assert.hasText(operation, "Operation must not be empty or null");
+			Assert.notNull(values, "Values must not be null");
+
+			List<Object> objects = new ArrayList<>(values.length + 1);
+			objects.add(value);
+			objects.addAll(Arrays.asList(values));
+			return apply(new OperationOutput(operation, objects));
+		}
+
+		/**
+		 * Apply an {@link OperationOutput} to this output.
+		 *
+		 * @param operationOutput must not be {@literal null}.
+		 * @return never {@literal null}.
+		 */
+		protected abstract B apply(OperationOutput operationOutput);
+
+		private B apply(Accumulators operation) {
+			return this.apply(operation.getMongoOperator());
+		}
+
+		/**
+		 * Returns the {@link BucketOperation} to be finally applied with the given alias.
+		 *
+		 * @param alias will never be {@literal null} or empty.
+		 * @return never {@literal null}.
+ */ + public T as(String alias) { + + if (value instanceof OperationOutput operationOutput) { + return this.operation.andOutput(operationOutput.withAlias(alias)); + } + + if (value instanceof Field) { + throw new IllegalStateException("Cannot add a field as top-level output; Use accumulator expressions"); + } + + return this.operation + .andOutput(new AggregationExpressionOutput(Fields.field(alias), (AggregationExpression) value)); + } + } + + private enum Accumulators { + + SUM("$sum"), AVG("$avg"), FIRST("$first"), LAST("$last"), MAX("$max"), MIN("$min"), PUSH("$push"), ADDTOSET( + "$addToSet"); + + private final String mongoOperator; + + Accumulators(String mongoOperator) { + this.mongoOperator = mongoOperator; + } + + public String getMongoOperator() { + return mongoOperator; + } + } + + /** + * Encapsulates {@link Output}s. + * + * @author Mark Paluch + */ + protected static class Outputs implements AggregationExpression { + + protected static final Outputs EMPTY = new Outputs(); + + private final List outputs; + + /** + * Creates a new, empty {@link Outputs}. + */ + private Outputs() { + this.outputs = new ArrayList(); + } + + /** + * Creates new {@link Outputs} containing all given {@link Output}s. + * + * @param current + * @param output + */ + private Outputs(Collection current, Output output) { + + this.outputs = new ArrayList(current.size() + 1); + this.outputs.addAll(current); + this.outputs.add(output); + } + + /** + * @return the {@link ExposedFields} derived from {@link Output}. + */ + protected ExposedFields asExposedFields() { + + // The count field is included by default when the output is not specified. + if (isEmpty()) { + return ExposedFields.from(new ExposedField("count", true)); + } + + ExposedFields fields = ExposedFields.from(); + + for (Output output : outputs) { + fields = fields.and(output.getExposedField()); + } + + return fields; + } + + /** + * Create a new {@link Outputs} that contains the new {@link Output}. + * + * @param output must not be {@literal null}. + * @return the new {@link Outputs} that contains the new {@link Output} + */ + protected Outputs and(Output output) { + + Assert.notNull(output, "BucketOutput must not be null"); + return new Outputs(this.outputs, output); + } + + /** + * @return {@literal true} if {@link Outputs} contains no {@link Output}. + */ + protected boolean isEmpty() { + return outputs.isEmpty(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document document = new Document(); + + for (Output output : outputs) { + document.put(output.getExposedField().getName(), output.toDocument(context)); + } + + return document; + } + + } + + /** + * Encapsulates an output field in a bucket aggregation stage.
+	 * Output fields can be either top-level fields that define a valid field name or nested output fields using
+	 * operators.
+	 *
+	 * @author Mark Paluch
+	 */
+	protected abstract static class Output implements AggregationExpression {
+
+		private final ExposedField field;
+
+		/**
+		 * Creates a new {@link Output} for the given {@link Field}.
+		 *
+		 * @param field must not be {@literal null}.
+		 */
+		protected Output(Field field) {
+
+			Assert.notNull(field, "Field must not be null");
+			this.field = new ExposedField(field, true);
+		}
+
+		/**
+		 * Returns the field exposed by the {@link Output}.
+		 *
+		 * @return will never be {@literal null}.
+		 */
+		protected ExposedField getExposedField() {
+			return field;
+		}
+	}
+
+	/**
+	 * Output field that uses a Mongo operation (expression object) to generate an output field value.
                    + * {@link OperationOutput} is used either with a regular field name or an operation keyword (e.g. + * {@literal $sum, $count}). + * + * @author Mark Paluch + */ + protected static class OperationOutput extends Output { + + private final String operation; + private final List values; + + /** + * Creates a new {@link Output} for the given field. + * + * @param operation the actual operation key, must not be {@literal null} or empty. + * @param values the values to pass into the operation, must not be {@literal null}. + */ + public OperationOutput(String operation, Collection values) { + + super(Fields.field(operation)); + + Assert.hasText(operation, "Operation must not be null or empty"); + Assert.notNull(values, "Values must not be null"); + + this.operation = operation; + this.values = new ArrayList<>(values); + } + + private OperationOutput(Field field, OperationOutput operationOutput) { + + super(field); + + this.operation = operationOutput.operation; + this.values = operationOutput.values; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + List operationArguments = getOperationArguments(context); + return new Document(operation, operationArguments.size() == 1 ? operationArguments.get(0) : operationArguments); + } + + protected List getOperationArguments(AggregationOperationContext context) { + + List result = new ArrayList<>(values != null ? values.size() : 1); + + for (Object element : values) { + + if (element instanceof Field field) { + result.add(context.getReference(field).toString()); + } else if (element instanceof Fields fields) { + for (Field field : fields) { + result.add(context.getReference(field).toString()); + } + } else if (element instanceof AggregationExpression aggregationExpression) { + result.add(aggregationExpression.toDocument(context)); + } else { + result.add(element); + } + } + + return result; + } + + /** + * Returns the field that holds the {@link ProjectionOperationBuilder.OperationProjection}. + * + * @return never {@literal null}. + */ + protected Field getField() { + return getExposedField(); + } + + /** + * Creates a new instance of this {@link OperationOutput} with the given alias. + * + * @param alias the alias to set + * @return new instance of {@link OperationOutput}. + */ + public OperationOutput withAlias(String alias) { + + final Field aliasedField = Fields.field(alias); + return new OperationOutput(aliasedField, this) { + + @Override + protected Field getField() { + return aliasedField; + } + + @Override + protected List getOperationArguments(AggregationOperationContext context) { + + // We have to make sure that we use the arguments from the "previous" OperationOutput that we replace + // with this new instance. + return OperationOutput.this.getOperationArguments(context); + } + }; + } + } + + /** + * A {@link Output} based on a SpEL expression. + */ + private static class SpelExpressionOutput extends Output { + + private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer(); + + private final String expression; + private final Object[] params; + + /** + * Creates a new {@link SpelExpressionOutput} for the given field, SpEL expression and parameters. + * + * @param expression must not be {@literal null} or empty. + * @param parameters must not be {@literal null}. 
+ */ + public SpelExpressionOutput(String expression, Object[] parameters) { + + super(Fields.field(expression)); + + Assert.hasText(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); + + this.expression = expression; + this.params = parameters.clone(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return (Document) TRANSFORMER.transform(expression, context, params); + } + } + + /** + * @author Mark Paluch + */ + private static class AggregationExpressionOutput extends Output { + + private final AggregationExpression expression; + + /** + * Creates a new {@link AggregationExpressionOutput}. + * + * @param field must not be {@literal null}. + * @param expression must not be {@literal null}. + */ + protected AggregationExpressionOutput(Field field, AggregationExpression expression) { + + super(field); + + this.expression = expression; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return expression.toDocument(context); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java new file mode 100644 index 0000000000..f27b7f16cb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java @@ -0,0 +1,879 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.List; + +import org.springframework.util.Assert; + +/** + * Gateway to {@literal comparison expressions}. + * + * @author Christoph Strobl + * @since 1.10 + */ +public class ComparisonOperators { + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ComparisonOperatorFactory}. + */ + public static ComparisonOperatorFactory valueOf(String fieldReference) { + return new ComparisonOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ComparisonOperatorFactory}. + */ + public static ComparisonOperatorFactory valueOf(AggregationExpression expression) { + return new ComparisonOperatorFactory(expression); + } + + public static class ComparisonOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link ComparisonOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. 
+ */ + public ComparisonOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link ComparisonOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public ComparisonOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that compares two values. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public Cmp compareTo(String fieldReference) { + return createCmp().compareTo(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that compares two values. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public Cmp compareTo(AggregationExpression expression) { + return createCmp().compareTo(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public Cmp compareToValue(Object value) { + return createCmp().compareToValue(value); + } + + private Cmp createCmp() { + return usesFieldRef() ? Cmp.valueOf(fieldReference) : Cmp.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is equal to the value of the referenced field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public Eq equalTo(String fieldReference) { + return createEq().equalTo(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is equal to the expression result. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public Eq equalTo(AggregationExpression expression) { + return createEq().equalTo(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is equal to the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public Eq equalToValue(Object value) { + return createEq().equalToValue(value); + } + + private Eq createEq() { + return usesFieldRef() ? Eq.valueOf(fieldReference) : Eq.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is greater than the value of the referenced field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Gt}. + */ + public Gt greaterThan(String fieldReference) { + return createGt().greaterThan(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is greater than the expression result. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Gt}. 
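+		 * <p>
+		 * For illustration (the {@code qty} field and the value {@literal 250} are assumptions):
+		 * <pre>{@code
+		 * // renders as { $gt: ["$qty", 250] }
+		 * ComparisonOperators.valueOf("qty").greaterThanValue(250);
+		 * }</pre>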
+		 */
+		public Gt greaterThan(AggregationExpression expression) {
+			return createGt().greaterThan(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is greater than the given value.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link Gt}.
+		 */
+		public Gt greaterThanValue(Object value) {
+			return createGt().greaterThanValue(value);
+		}
+
+		private Gt createGt() {
+			return usesFieldRef() ? Gt.valueOf(fieldReference) : Gt.valueOf(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is greater than or equivalent to the value of the referenced field.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Gte}.
+		 */
+		public Gte greaterThanEqualTo(String fieldReference) {
+			return createGte().greaterThanEqualTo(fieldReference);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is greater than or equivalent to the expression result.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Gte}.
+		 */
+		public Gte greaterThanEqualTo(AggregationExpression expression) {
+			return createGte().greaterThanEqualTo(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is greater than or equivalent to the given value.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link Gte}.
+		 */
+		public Gte greaterThanEqualToValue(Object value) {
+			return createGte().greaterThanEqualToValue(value);
+		}
+
+		private Gte createGte() {
+			return usesFieldRef() ? Gte.valueOf(fieldReference) : Gte.valueOf(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is less than the value of the referenced field.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Lt}.
+		 */
+		public Lt lessThan(String fieldReference) {
+			return createLt().lessThan(fieldReference);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is less than the expression result.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Lt}.
+		 */
+		public Lt lessThan(AggregationExpression expression) {
+			return createLt().lessThan(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is less than the given value.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link Lt}.
+		 */
+		public Lt lessThanValue(Object value) {
+			return createLt().lessThanValue(value);
+		}
+
+		private Lt createLt() {
+			return usesFieldRef() ? Lt.valueOf(fieldReference) : Lt.valueOf(expression);
+		}
+
+		/**
+		 * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first
+		 * value is less than or equivalent to the value of the referenced field.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Lte}.
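+		 * <p>
+		 * E.g. for a factory created via {@code valueOf("qty")} (the field names are illustrative),
+		 * {@code lessThanEqualTo("allowed")} renders <code>$lte: ["$qty", "$allowed"]</code>.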
+ */ + public Lte lessThanEqualTo(String fieldReference) { + return createLte().lessThanEqualTo(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is less than or equivalent to the expression result. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Lte}. + */ + public Lte lessThanEqualTo(AggregationExpression expression) { + return createLte().lessThanEqualTo(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first + * value is less than or equivalent to the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Lte}. + */ + public Lte lessThanEqualToValue(Object value) { + return createLte().lessThanEqualToValue(value); + } + + private Lte createLte() { + return usesFieldRef() ? Lte.valueOf(fieldReference) : Lte.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the values + * are not equivalent. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Ne}. + */ + public Ne notEqualTo(String fieldReference) { + return createNe().notEqualTo(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the values + * are not equivalent. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Ne}. + */ + public Ne notEqualTo(AggregationExpression expression) { + return createNe().notEqualTo(expression); + } + + /** + * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the values + * are not equivalent. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Ne}. + */ + public Ne notEqualToValue(Object value) { + return createNe().notEqualToValue(value); + } + + private Ne createNe() { + return usesFieldRef() ? Ne.valueOf(fieldReference) : Ne.valueOf(expression); + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $cmp}. + * + * @author Christoph Strobl + */ + public static class Cmp extends AbstractAggregationExpression { + + private Cmp(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$cmp"; + } + + /** + * Creates new {@link Cmp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public static Cmp valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Cmp(asFields(fieldReference)); + } + + /** + * Creates new {@link Cmp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public static Cmp valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Cmp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Cmp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Cmp}. 
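+		 * <p>
+		 * A usage sketch (the {@code "qty"} field is illustrative); this renders <code>$cmp: ["$qty", 250]</code>,
+		 * i.e. a comparison yielding {@literal -1}, {@literal 0} or {@literal 1}:
+		 *
+		 * <pre class="code">
+		 * Cmp.valueOf("qty").compareToValue(250);
+		 * </pre>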
+ */ + public Cmp compareTo(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Cmp(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Cmp} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public Cmp compareTo(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Cmp(append(expression)); + } + + /** + * Creates new {@link Cmp} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Cmp}. + */ + public Cmp compareToValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Cmp(append(value, Expand.KEEP_SOURCE)); + } + } + + /** + * {@link AggregationExpression} for {@code $eq}. + * + * @author Christoph Strobl + */ + public static class Eq extends AbstractAggregationExpression { + + private Eq(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$eq"; + } + + /** + * Creates new {@link Eq}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public static Eq valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Eq(asFields(fieldReference)); + } + + /** + * Creates new {@link Eq}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public static Eq valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Eq(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Eq} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public Eq equalTo(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Eq(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Eq} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public Eq equalTo(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Eq(append(expression)); + } + + /** + * Creates new {@link Eq} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Eq}. + */ + public Eq equalToValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Eq(append(value, Expand.KEEP_SOURCE)); + } + } + + /** + * {@link AggregationExpression} for {@code $gt}. + * + * @author Christoph Strobl + */ + public static class Gt extends AbstractAggregationExpression { + + private Gt(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$gt"; + } + + /** + * Creates new {@link Gt}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Gt}. + */ + public static Gt valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Gt(asFields(fieldReference)); + } + + /** + * Creates new {@link Gt}. 
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link Gt}. + */ + public static Gt valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Gt(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Gt} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Gt}. + */ + public Gt greaterThan(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Gt(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Gt} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Gt}. + */ + public Gt greaterThan(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Gt(append(expression)); + } + + /** + * Creates new {@link Gt} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Gt}. + */ + public Gt greaterThanValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Gt(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $lt}. + * + * @author Christoph Strobl + */ + public static class Lt extends AbstractAggregationExpression { + + private Lt(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$lt"; + } + + /** + * Creates new {@link Lt}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Lt}. + */ + public static Lt valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Lt(asFields(fieldReference)); + } + + /** + * Creates new {@link Lt}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Lt}. + */ + public static Lt valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Lt(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Lt} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Lt}. + */ + public Lt lessThan(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Lt(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Lt} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Lt}. + */ + public Lt lessThan(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Lt(append(expression)); + } + + /** + * Creates new {@link Lt} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Lt}. + */ + public Lt lessThanValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Lt(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $gte}. 
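+	 * <p>
+	 * A usage sketch (the {@code "qty"} field is illustrative); this renders <code>$gte: ["$qty", 250]</code>:
+	 *
+	 * <pre class="code">
+	 * Gte.valueOf("qty").greaterThanEqualToValue(250);
+	 * </pre>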
+ * + * @author Christoph Strobl + */ + public static class Gte extends AbstractAggregationExpression { + + private Gte(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$gte"; + } + + /** + * Creates new {@link Gte}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Gte}. + */ + public static Gte valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Gte(asFields(fieldReference)); + } + + /** + * Creates new {@link Gte}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Gte}. + */ + public static Gte valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Gte(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Gte} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Gte}. + */ + public Gte greaterThanEqualTo(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Gte(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Gte} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Gte}. + */ + public Gte greaterThanEqualTo(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Gte(append(expression)); + } + + /** + * Creates new {@link Gte} with all previously added arguments appending the given one. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Gte}. + */ + public Gte greaterThanEqualToValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Gte(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $lte}. + * + * @author Christoph Strobl + */ + public static class Lte extends AbstractAggregationExpression { + + private Lte(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$lte"; + } + + /** + * Creates new {@link Lte}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Lte}. + */ + public static Lte valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Lte(asFields(fieldReference)); + } + + /** + * Creates new {@link Lte}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Lte}. + */ + public static Lte valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Lte(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Lte} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Lte}. + */ + public Lte lessThanEqualTo(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Lte(append(Fields.field(fieldReference))); + } + + /** + * Creates new {@link Lte} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Lte}. 
+		 */
+		public Lte lessThanEqualTo(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new Lte(append(expression));
+		}
+
+		/**
+		 * Creates new {@link Lte} with all previously added arguments appending the given one.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link Lte}.
+		 */
+		public Lte lessThanEqualToValue(Object value) {
+
+			Assert.notNull(value, "Value must not be null");
+			return new Lte(append(value));
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $ne}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class Ne extends AbstractAggregationExpression {
+
+		private Ne(List<Object> value) {
+			super(value);
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$ne";
+		}
+
+		/**
+		 * Creates new {@link Ne}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Ne}.
+		 */
+		public static Ne valueOf(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			return new Ne(asFields(fieldReference));
+		}
+
+		/**
+		 * Creates new {@link Ne}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Ne}.
+		 */
+		public static Ne valueOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new Ne(Collections.singletonList(expression));
+		}
+
+		/**
+		 * Creates new {@link Ne} with all previously added arguments appending the given one.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance of {@link Ne}.
+		 */
+		public Ne notEqualTo(String fieldReference) {
+
+			Assert.notNull(fieldReference, "FieldReference must not be null");
+			return new Ne(append(Fields.field(fieldReference)));
+		}
+
+		/**
+		 * Creates new {@link Ne} with all previously added arguments appending the given one.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link Ne}.
+		 */
+		public Ne notEqualTo(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return new Ne(append(expression));
+		}
+
+		/**
+		 * Creates new {@link Ne} with all previously added arguments appending the given one.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link Ne}.
+		 */
+		public Ne notEqualToValue(Object value) {
+
+			Assert.notNull(value, "Value must not be null");
+			return new Ne(append(value, Expand.KEEP_SOURCE));
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java
new file mode 100644
index 0000000000..323a11895b
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java
@@ -0,0 +1,967 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * Gateway to {@literal conditional expressions} that evaluate their argument expressions as booleans to a value. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.10 + */ +public class ConditionalOperators { + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ConditionalOperatorFactory}. + */ + public static ConditionalOperatorFactory when(String fieldReference) { + return new ConditionalOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@literal expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ConditionalOperatorFactory}. + */ + public static ConditionalOperatorFactory when(AggregationExpression expression) { + return new ConditionalOperatorFactory(expression); + } + + /** + * Take the value resulting from the given {@literal criteriaDefinition}. + * + * @param criteriaDefinition must not be {@literal null}. + * @return new instance of {@link ConditionalOperatorFactory}. + */ + public static ConditionalOperatorFactory when(CriteriaDefinition criteriaDefinition) { + return new ConditionalOperatorFactory(criteriaDefinition); + } + + /** + * Creates new {@link AggregationExpression} that evaluates an expression and returns the value of the expression if + * the expression evaluates to a non-null value. If the expression evaluates to a {@literal null} value, including + * instances of undefined values or missing fields, returns the value of the replacement expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IfNull.ThenBuilder} to create {@link IfNull}. + */ + public static IfNull.ThenBuilder ifNull(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return IfNull.ifNull(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that evaluates an expression and returns the value of the expression if + * the expression evaluates to a non-null value. If the expression evaluates to a {@literal null} value, including + * instances of undefined values or missing fields, returns the value of the replacement expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IfNull.ThenBuilder} to create {@link IfNull}. 
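+	 * <p>
+	 * A usage sketch (field name and fallback value are illustrative); this renders
+	 * <code>$ifNull: ["$optionalCount", 0]</code>:
+	 *
+	 * <pre class="code">
+	 * ConditionalOperators.ifNull("optionalCount").then(0);
+	 * </pre>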
+ */ + public static IfNull.ThenBuilder ifNull(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return IfNull.ifNull(expression); + } + + /** + * Creates new {@link AggregationExpression} that evaluates a series of {@link CaseOperator} expressions. When it + * finds an expression which evaluates to {@literal true}, {@code $switch} executes a specified expression and breaks + * out of the control flow. + * + * @param conditions must not be {@literal null}. + * @return new instance of {@link Switch}. + */ + public static Switch switchCases(CaseOperator... conditions) { + return Switch.switchCases(conditions); + } + + /** + * Creates new {@link AggregationExpression} that evaluates a series of {@link CaseOperator} expressions. When it + * finds an expression which evaluates to {@literal true}, {@code $switch} executes a specified expression and breaks + * out of the control flow. + * + * @param conditions must not be {@literal null}. + * @return new instance of {@link Switch}. + */ + public static Switch switchCases(List conditions) { + return Switch.switchCases(conditions); + } + + public static class ConditionalOperatorFactory { + + private final @Nullable String fieldReference; + + private final @Nullable AggregationExpression expression; + + private final @Nullable CriteriaDefinition criteriaDefinition; + + /** + * Creates new {@link ConditionalOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public ConditionalOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + this.fieldReference = fieldReference; + this.expression = null; + this.criteriaDefinition = null; + } + + /** + * Creates new {@link ConditionalOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public ConditionalOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + this.fieldReference = null; + this.expression = expression; + this.criteriaDefinition = null; + } + + /** + * Creates new {@link ConditionalOperatorFactory} for given {@link CriteriaDefinition}. + * + * @param criteriaDefinition must not be {@literal null}. + */ + public ConditionalOperatorFactory(CriteriaDefinition criteriaDefinition) { + + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); + + this.fieldReference = null; + this.expression = null; + this.criteriaDefinition = criteriaDefinition; + } + + /** + * Creates new {@link AggregationExpression} that evaluates a boolean expression to return one of the two specified + * return expressions. + * + * @param value must not be {@literal null}. + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. + */ + public OtherwiseBuilder then(Object value) { + + Assert.notNull(value, "Value must not be null"); + return createThenBuilder().then(value); + } + + /** + * Creates new {@link AggregationExpression} that evaluates a boolean expression to return one of the two specified + * return expressions. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. 
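+		 * <p>
+		 * A usage sketch (criteria and values are illustrative); together with {@code when(...)} and
+		 * {@code otherwise(...)} this builds <code>$cond: {if: {$gte: ["$qty", 250]}, then: 30, else: 20}</code>:
+		 *
+		 * <pre class="code">
+		 * ConditionalOperators.when(Criteria.where("qty").gte(250)).then(30).otherwise(20);
+		 * </pre>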
+ */ + public OtherwiseBuilder thenValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createThenBuilder().then(expression); + } + + /** + * Creates new {@link AggregationExpression} that evaluates a boolean expression to return one of the two specified + * return expressions. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. + */ + public OtherwiseBuilder thenValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createThenBuilder().thenValueOf(fieldReference); + } + + private ThenBuilder createThenBuilder() { + + if (usesFieldRef()) { + return Cond.newBuilder().when(fieldReference); + } + + return usesCriteriaDefinition() ? Cond.newBuilder().when(criteriaDefinition) : Cond.newBuilder().when(expression); + } + + private boolean usesFieldRef() { + return this.fieldReference != null; + } + + private boolean usesCriteriaDefinition() { + return this.criteriaDefinition != null; + } + } + + /** + * Encapsulates the aggregation framework {@code $ifNull} operator. Replacement values can be either {@link Field + * field references}, {@link AggregationExpression expressions}, values of simple MongoDB types or values that can be + * converted to a simple MongoDB type. + * + * @author Mark Paluch + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ + */ + public static class IfNull implements AggregationExpression { + + private final Object condition; + private final Object value; + + private IfNull(Object condition, Object value) { + + this.condition = condition; + this.value = value; + } + + /** + * Creates new {@link IfNull}. + * + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return never {@literal null}. + */ + public static ThenBuilder ifNull(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new IfNullOperatorBuilder().ifNull(fieldReference); + } + + /** + * Creates new {@link IfNull}. + * + * @param expression the expression to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return never {@literal null}. 
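+		 * <p>
+		 * Conditions can be chained via {@code orIfNull} (field names and fallback are illustrative); this renders
+		 * <code>$ifNull: ["$optional", "$fallback", "default"]</code>:
+		 *
+		 * <pre class="code">
+		 * IfNull.ifNull("optional").orIfNull("fallback").then("default");
+		 * </pre>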
+ */ + public static ThenBuilder ifNull(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new IfNullOperatorBuilder().ifNull(expression); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + List list = new ArrayList<>(); + + if (condition instanceof Collection collection) { + for (Object val : collection) { + list.add(mapCondition(val, context)); + } + } else { + list.add(mapCondition(condition, context)); + } + + list.add(resolve(value, context)); + return new Document("$ifNull", list); + } + + private Object mapCondition(Object condition, AggregationOperationContext context) { + + if (condition instanceof Field field) { + return context.getReference(field).toString(); + } else if (condition instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else { + return condition; + } + } + + private Object resolve(Object value, AggregationOperationContext context) { + + if (value instanceof Field field) { + return context.getReference(field).toString(); + } else if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else if (value instanceof Document) { + return value; + } + + return context.getMappedObject(new Document("$set", value)).get("$set"); + } + + /** + * @author Mark Paluch + */ + public interface IfNullBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder ifNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} + * or empty. + * @return the {@link ThenBuilder}. + */ + ThenBuilder ifNull(AggregationExpression expression); + } + + /** + * @author Christoph Strobl + * @since 3.3 + */ + public interface OrBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder orIfNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, + * @return the {@link ThenBuilder}. + */ + ThenBuilder orIfNull(AggregationExpression expression); + } + + /** + * @author Mark Paluch + */ + public interface ThenBuilder extends OrBuilder { + + /** + * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a + * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB + * representation but must not be {@literal null}. + * @return new instance of {@link IfNull}. + */ + IfNull then(Object value); + + /** + * @param fieldReference the field holding the replacement value, must not be {@literal null}. + * @return new instance of {@link IfNull}. + */ + IfNull thenValueOf(String fieldReference); + + /** + * @param expression the expression yielding to the replacement value, must not be {@literal null}. + * @return new instance of {@link IfNull}. + */ + IfNull thenValueOf(AggregationExpression expression); + } + + /** + * Builder for fluent {@link IfNull} creation. 
+		 *
+		 * @author Mark Paluch
+		 */
+		static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder {
+
+			private final List<Object> conditions;
+
+			private IfNullOperatorBuilder() {
+				conditions = new ArrayList<>();
+			}
+
+			/**
+			 * Creates a new builder for {@link IfNull}.
+			 *
+			 * @return never {@literal null}.
+			 */
+			public static IfNullOperatorBuilder newBuilder() {
+				return new IfNullOperatorBuilder();
+			}
+
+			public ThenBuilder ifNull(String fieldReference) {
+
+				Assert.hasText(fieldReference, "FieldReference must not be null or empty");
+				this.conditions.add(Fields.field(fieldReference));
+				return this;
+			}
+
+			@Override
+			public ThenBuilder ifNull(AggregationExpression expression) {
+
+				Assert.notNull(expression, "Expression must not be null");
+				this.conditions.add(expression);
+				return this;
+			}
+
+			@Override
+			public ThenBuilder orIfNull(String fieldReference) {
+				return ifNull(fieldReference);
+			}
+
+			@Override
+			public ThenBuilder orIfNull(AggregationExpression expression) {
+				return ifNull(expression);
+			}
+
+			public IfNull then(Object value) {
+				return new IfNull(conditions, value);
+			}
+
+			public IfNull thenValueOf(String fieldReference) {
+
+				Assert.notNull(fieldReference, "FieldReference must not be null");
+				return new IfNull(conditions, Fields.field(fieldReference));
+			}
+
+			public IfNull thenValueOf(AggregationExpression expression) {
+
+				Assert.notNull(expression, "Expression must not be null");
+				return new IfNull(conditions, expression);
+			}
+		}
+	}
+
+	/**
+	 * {@link AggregationExpression} for {@code $switch}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class Switch extends AbstractAggregationExpression {
+
+		private Switch(java.util.Map<String, Object> values) {
+			super(values);
+		}
+
+		@Override
+		protected String getMongoMethod() {
+			return "$switch";
+		}
+
+		/**
+		 * Creates new {@link Switch}.
+		 *
+		 * @param conditions must not be {@literal null}.
+		 */
+		public static Switch switchCases(CaseOperator... conditions) {
+
+			Assert.notNull(conditions, "Conditions must not be null");
+			return switchCases(Arrays.asList(conditions));
+		}
+
+		/**
+		 * Creates new {@link Switch}.
+		 *
+		 * @param conditions must not be {@literal null}.
+		 */
+		public static Switch switchCases(List<CaseOperator> conditions) {
+
+			Assert.notNull(conditions, "Conditions must not be null");
+			return new Switch(Collections.singletonMap("branches", new ArrayList<>(conditions)));
+		}
+
+		/**
+		 * Set the default value.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return new instance of {@link Switch}.
+		 */
+		public Switch defaultTo(Object value) {
+			return new Switch(append("default", value));
+		}
+
+		/**
+		 * Encapsulates the aggregation framework case document inside a {@code $switch}-operation.
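+		 * <p>
+		 * A usage sketch (score boundaries and grades are illustrative):
+		 *
+		 * <pre class="code">
+		 * ConditionalOperators.switchCases(
+		 * 		CaseOperator.when(Gte.valueOf("score").greaterThanEqualToValue(90)).then("A"),
+		 * 		CaseOperator.when(Gte.valueOf("score").greaterThanEqualToValue(80)).then("B"))
+		 * 		.defaultTo("F");
+		 * </pre>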
+ */ + public static class CaseOperator implements AggregationExpression { + + private final AggregationExpression when; + private final Object then; + + private CaseOperator(AggregationExpression when, Object then) { + + this.when = when; + this.then = then; + } + + public static ThenBuilder when(final AggregationExpression condition) { + + Assert.notNull(condition, "Condition must not be null"); + + return new ThenBuilder() { + + @Override + public CaseOperator then(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new CaseOperator(condition, value); + } + }; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document dbo = new Document("case", when.toDocument(context)); + + if (then instanceof AggregationExpression aggregationExpression) { + dbo.put("then", aggregationExpression.toDocument(context)); + } else if (then instanceof Field field) { + dbo.put("then", context.getReference(field).toString()); + } else { + dbo.put("then", then); + } + + return dbo; + } + + /** + * @author Christoph Strobl + */ + public interface ThenBuilder { + + /** + * Set the then {@literal value}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link CaseOperator}. + */ + CaseOperator then(Object value); + } + } + } + + /** + * Encapsulates the aggregation framework {@code $cond} operator. A {@link Cond} allows nested conditions + * {@code if-then[if-then-else]-else} using {@link Field}, {@link CriteriaDefinition}, {@link AggregationExpression} + * or a {@link Document custom} condition. Replacement values can be either {@link Field field references}, + * {@link AggregationExpression expressions}, values of simple MongoDB types or values that can be converted to a + * simple MongoDB type. + * + * @author Mark Paluch + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ + */ + public static class Cond implements AggregationExpression { + + private final Object condition; + private final Object thenValue; + private final Object otherwiseValue; + + /** + * Creates a new {@link Cond} for a given {@link Field} and {@code then}/{@code otherwise} values. + * + * @param condition must not be {@literal null}. + * @param thenValue must not be {@literal null}. + * @param otherwiseValue must not be {@literal null}. + */ + private Cond(Field condition, Object thenValue, Object otherwiseValue) { + this((Object) condition, thenValue, otherwiseValue); + } + + /** + * Creates a new {@link Cond} for a given {@link CriteriaDefinition} and {@code then}/{@code otherwise} values. + * + * @param condition must not be {@literal null}. + * @param thenValue must not be {@literal null}. + * @param otherwiseValue must not be {@literal null}. 
+ */ + private Cond(CriteriaDefinition condition, Object thenValue, Object otherwiseValue) { + this((Object) condition, thenValue, otherwiseValue); + } + + private Cond(Object condition, Object thenValue, Object otherwiseValue) { + + Assert.notNull(condition, "Condition must not be null"); + Assert.notNull(thenValue, "Then value must not be null"); + Assert.notNull(otherwiseValue, "Otherwise value must not be null"); + + assertNotBuilder(condition, "Condition"); + assertNotBuilder(thenValue, "Then value"); + assertNotBuilder(otherwiseValue, "Otherwise value"); + + this.condition = condition; + this.thenValue = thenValue; + this.otherwiseValue = otherwiseValue; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document condObject = new Document(); + + condObject.append("if", resolveCriteria(context, condition)); + condObject.append("then", resolveValue(context, thenValue)); + condObject.append("else", resolveValue(context, otherwiseValue)); + + return new Document("$cond", condObject); + } + + private Object resolveValue(AggregationOperationContext context, Object value) { + + if (value instanceof Document || value instanceof Field) { + return resolve(context, value); + } + + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + return context.getMappedObject(new Document("$set", value)).get("$set"); + } + + private Object resolveCriteria(AggregationOperationContext context, Object value) { + + if (value instanceof Document || value instanceof Field) { + return resolve(context, value); + } + + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (value instanceof CriteriaDefinition criteriaDefinition) { + + Document mappedObject = context.getMappedObject(criteriaDefinition.getCriteriaObject()); + List clauses = getClauses(context, mappedObject); + return clauses.size() == 1 ? clauses.get(0) : clauses; + } + + throw new InvalidDataAccessApiUsageException( + String.format("Invalid value in condition; Supported: Document, Field references, Criteria, got: %s", value)); + } + + private List getClauses(AggregationOperationContext context, Document mappedObject) { + + List clauses = new ArrayList<>(); + + for (String key : mappedObject.keySet()) { + + Object predicate = mappedObject.get(key); + clauses.addAll(getClauses(context, key, predicate)); + } + + return clauses; + } + + private List getClauses(AggregationOperationContext context, String key, Object predicate) { + + List clauses = new ArrayList<>(); + + if (predicate instanceof List predicates) { + + List args = new ArrayList<>(predicates.size()); + + for (Object clause : predicates) { + if (clause instanceof Document document) { + args.addAll(getClauses(context, document)); + } + } + + clauses.add(new Document(key, args)); + } else if (predicate instanceof Document nested) { + + for (String s : nested.keySet()) { + + if (!isKeyword(s)) { + continue; + } + + List args = new ArrayList<>(2); + args.add("$" + key); + args.add(nested.get(s)); + clauses.add(new Document(s, args)); + } + } else if (!isKeyword(key)) { + + List args = new ArrayList<>(2); + args.add("$" + key); + args.add(predicate); + clauses.add(new Document("$eq", args)); + } + + return clauses; + } + + /** + * Returns whether the given {@link String} is a MongoDB keyword. 
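+		 * <p>
+		 * E.g. {@code "$eq"} and {@code "$and"} are keywords, a plain field name like {@code "qty"} is not.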
+ * + * @param candidate + * @return + */ + private boolean isKeyword(String candidate) { + return candidate.startsWith("$"); + } + + private Object resolve(AggregationOperationContext context, Object value) { + + if (value instanceof Document document) { + return context.getMappedObject(document); + } + + return context.getReference((Field) value).toString(); + } + + private void assertNotBuilder(Object toCheck, String name) { + Assert.isTrue(!ClassUtils.isAssignableValue(ConditionalExpressionBuilder.class, toCheck), + String.format("%s must not be of type %s", name, ConditionalExpressionBuilder.class.getSimpleName())); + } + + /** + * Get a builder that allows fluent creation of {@link Cond}. + * + * @return never {@literal null}. + */ + public static WhenBuilder newBuilder() { + return ConditionalExpressionBuilder.newBuilder(); + } + + /** + * Start creating new {@link Cond} by providing the boolean expression used in {@code if}. + * + * @param booleanExpression must not be {@literal null}. + * @return never {@literal null}. + */ + public static ThenBuilder when(Document booleanExpression) { + return ConditionalExpressionBuilder.newBuilder().when(booleanExpression); + } + + /** + * Start creating new {@link Cond} by providing the {@link AggregationExpression} used in {@code if}. + * + * @param expression expression that yields in a boolean result, must not be {@literal null}. + * @return never {@literal null}. + */ + public static ThenBuilder when(AggregationExpression expression) { + return ConditionalExpressionBuilder.newBuilder().when(expression); + } + + /** + * Start creating new {@link Cond} by providing the field reference used in {@code if}. + * + * @param booleanField name of a field holding a boolean value, must not be {@literal null}. + * @return never {@literal null}. + */ + public static ThenBuilder when(String booleanField) { + return ConditionalExpressionBuilder.newBuilder().when(booleanField); + } + + /** + * Start creating new {@link Cond} by providing the {@link CriteriaDefinition} used in {@code if}. + * + * @param criteria criteria to evaluate, must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + public static ThenBuilder when(CriteriaDefinition criteria) { + return ConditionalExpressionBuilder.newBuilder().when(criteria); + } + + /** + * @author Mark Paluch + */ + public interface WhenBuilder { + + /** + * @param booleanExpression expression that yields in a boolean result, must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder when(Document booleanExpression); + + /** + * @param expression expression that yields in a boolean result, must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder when(AggregationExpression expression); + + /** + * @param booleanField name of a field holding a boolean value, must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder when(String booleanField); + + /** + * @param criteria criteria to evaluate, must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder when(CriteriaDefinition criteria); + } + + /** + * @author Mark Paluch + */ + public interface ThenBuilder { + + /** + * @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. 
+ * @return the {@link OtherwiseBuilder} + */ + OtherwiseBuilder then(Object value); + + /** + * @param fieldReference must not be {@literal null}. + * @return the {@link OtherwiseBuilder} + */ + OtherwiseBuilder thenValueOf(String fieldReference); + + /** + * @param expression must not be {@literal null}. + * @return the {@link OtherwiseBuilder} + */ + OtherwiseBuilder thenValueOf(AggregationExpression expression); + } + + /** + * @author Mark Paluch + */ + public interface OtherwiseBuilder { + + /** + * @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. + * @return the {@link Cond} + */ + Cond otherwise(Object value); + + /** + * @param fieldReference must not be {@literal null}. + * @return the {@link Cond} + */ + Cond otherwiseValueOf(String fieldReference); + + /** + * @param expression must not be {@literal null}. + * @return the {@link Cond} + */ + Cond otherwiseValueOf(AggregationExpression expression); + } + + /** + * Builder for fluent {@link Cond} creation. + * + * @author Mark Paluch + */ + static class ConditionalExpressionBuilder implements WhenBuilder, ThenBuilder, OtherwiseBuilder { + + private @Nullable Object condition; + private @Nullable Object thenValue; + + private ConditionalExpressionBuilder() {} + + /** + * Creates a new builder for {@link Cond}. + * + * @return never {@literal null}. + */ + public static ConditionalExpressionBuilder newBuilder() { + return new ConditionalExpressionBuilder(); + } + + @Override + public ConditionalExpressionBuilder when(Document booleanExpression) { + + Assert.notNull(booleanExpression, "'Boolean expression' must not be null"); + + this.condition = booleanExpression; + return this; + } + + @Override + public ThenBuilder when(CriteriaDefinition criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + this.condition = criteria; + return this; + } + + @Override + public ThenBuilder when(AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression field must not be null"); + this.condition = expression; + return this; + } + + @Override + public ThenBuilder when(String booleanField) { + + Assert.hasText(booleanField, "Boolean field name must not be null or empty"); + this.condition = Fields.field(booleanField); + return this; + } + + @Override + public OtherwiseBuilder then(Object thenValue) { + + Assert.notNull(thenValue, "Then-value must not be null"); + this.thenValue = thenValue; + return this; + } + + @Override + public OtherwiseBuilder thenValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.thenValue = Fields.field(fieldReference); + return this; + } + + @Override + public OtherwiseBuilder thenValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + this.thenValue = expression; + return this; + } + + @Override + public Cond otherwise(Object otherwiseValue) { + + Assert.notNull(otherwiseValue, "Value must not be null"); + return new Cond(condition, thenValue, otherwiseValue); + } + + @Override + public Cond otherwiseValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Cond(condition, thenValue, Fields.field(fieldReference)); + } + + @Override + public Cond otherwiseValueOf(AggregationExpression expression) { + + 
Assert.notNull(expression, "AggregationExpression must not be null"); + return new Cond(condition, thenValue, expression); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java new file mode 100644 index 0000000000..aa085b2a29 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -0,0 +1,754 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; + +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal convert} aggregation operations. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ConvertOperators { + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return + */ + public static ConvertOperatorFactory valueOf(String fieldReference) { + return new ConvertOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return + */ + public static ConvertOperatorFactory valueOf(AggregationExpression expression) { + return new ConvertOperatorFactory(expression); + } + + /** + * @author Christoph Strobl + */ + public static class ConvertOperatorFactory { + + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; + + /** + * Creates new {@link ConvertOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public ConvertOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link ConvertOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public ConvertOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@code stringTypeIdentifier}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param stringTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(String stringTypeIdentifier) { + return createConvert().to(stringTypeIdentifier); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@code numericTypeIdentifier}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param numericTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(int numericTypeIdentifier) { + return createConvert().to(numericTypeIdentifier); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@link Type}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param type must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(Type type) { + return createConvert().to(type); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the value of the given {@link Field field reference}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertToTypeOf(String fieldReference) { + return createConvert().toTypeOf(fieldReference); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@link AggregationExpression expression}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertToTypeOf(AggregationExpression expression) { + return createConvert().toTypeOf(expression); + } + + /** + * Creates new {@link ToBool aggregation expression} for {@code $toBool} that converts a value to boolean. Shorthand + * for {@link #convertTo(String) #convertTo("bool")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToBool}. + */ + public ToBool convertToBoolean() { + return ToBool.toBoolean(valueObject()); + } + + /** + * Creates new {@link ToDate aggregation expression} for {@code $toDate} that converts a value to a date. Shorthand + * for {@link #convertTo(String) #convertTo("date")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDate}. + */ + public ToDate convertToDate() { + return ToDate.toDate(valueObject()); + } + + /** + * Creates new {@link ToDecimal aggregation expression} for {@code $toDecimal} that converts a value to a decimal. + * Shorthand for {@link #convertTo(String) #convertTo("decimal")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDecimal}. + */ + public ToDecimal convertToDecimal() { + return ToDecimal.toDecimal(valueObject()); + } + + /** + * Creates new {@link ToDouble aggregation expression} for {@code $toDouble} that converts a value to a decimal. + * Shorthand for {@link #convertTo(String) #convertTo("double")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDouble}. + */ + public ToDouble convertToDouble() { + return ToDouble.toDouble(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toInt} that converts a value to an int. Shorthand + * for {@link #convertTo(String) #convertTo("int")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToInt convertToInt() { + return ToInt.toInt(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toLong} that converts a value to a long. Shorthand + * for {@link #convertTo(String) #convertTo("long")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToLong convertToLong() { + return ToLong.toLong(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toObjectId} that converts a value to a objectId. Shorthand + * for {@link #convertTo(String) #convertTo("objectId")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToObjectId convertToObjectId() { + return ToObjectId.toObjectId(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toString} that converts a value to a string. Shorthand + * for {@link #convertTo(String) #convertTo("string")}.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToString}. + */ + public ToString convertToString() { + return ToString.toString(valueObject()); + } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to + * radians. + * + * @return new instance of {@link DegreesToRadians}. + * @since 3.3 + */ + public DegreesToRadians convertDegreesToRadians() { + return DegreesToRadians.degreesToRadians(valueObject()); + } + + private Convert createConvert() { + return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression); + } + + private Object valueObject() { + return usesFieldRef() ? Fields.field(fieldReference) : expression; + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $convert} that converts a value to a specified type.
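+ * <p>
+ * A usage sketch of a full conversion including error and null handling (field name and fallback values are
+ * illustrative only):
+ * <pre>
+ * // { $convert: { input: "$price", to: "decimal", onError: "invalid", onNull: 0 } }
+ * Convert.convertValueOf("price").to("decimal").onErrorReturn("invalid").onNullReturn(0);
+ * </pre>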
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/convert/ + * @since 2.1 + */ + public static class Convert extends AbstractAggregationExpression { + + private Convert(Object value) { + super(value); + } + + /** + * Creates new {@link Convert} using the given value for the {@literal input} attribute. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValue(Object value) { + return new Convert(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link Convert} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValueOf(String fieldReference) { + return convertValue(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Convert} using the result of the provided {@link AggregationExpression expression} as + * {@literal input} value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValueOf(AggregationExpression expression) { + return convertValue(expression); + } + + /** + * Specify the conversion target type via its {@link String} representation. + *
+ * <ul>
+ * <li>double</li>
+ * <li>string</li>
+ * <li>objectId</li>
+ * <li>bool</li>
+ * <li>date</li>
+ * <li>int</li>
+ * <li>long</li>
+ * <li>decimal</li>
+ * </ul>
                    + * + * @param stringTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(String stringTypeIdentifier) { + return new Convert(append("to", stringTypeIdentifier)); + } + + /** + * Specify the conversion target type via its numeric representation. + *
+ * <table>
+ * <tr><th>Identifier</th><th>Type</th></tr>
+ * <tr><td>1</td><td>double</td></tr>
+ * <tr><td>2</td><td>string</td></tr>
+ * <tr><td>7</td><td>objectId</td></tr>
+ * <tr><td>8</td><td>bool</td></tr>
+ * <tr><td>9</td><td>date</td></tr>
+ * <tr><td>16</td><td>int</td></tr>
+ * <tr><td>18</td><td>long</td></tr>
+ * <tr><td>19</td><td>decimal</td></tr>
+ * </table>
                    + * + * @param numericTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(int numericTypeIdentifier) { + return new Convert(append("to", numericTypeIdentifier)); + } + + /** + * Specify the conversion target type. + * + * @param type must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(Type type) { + + String typeString = Type.BOOLEAN.equals(type) ? "bool" : type.value().toString(); + return to(typeString); + } + + /** + * Specify the conversion target type via the value of the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert toTypeOf(String fieldReference) { + return new Convert(append("to", Fields.field(fieldReference))); + } + + /** + * Specify the conversion target type via the value of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert toTypeOf(AggregationExpression expression) { + return new Convert(append("to", expression)); + } + + /** + * Optionally specify the value to return on encountering an error during conversion. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturn(Object value) { + return new Convert(append("onError", value)); + } + + /** + * Optionally specify the field holding the value to return on encountering an error during conversion. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturnValueOf(String fieldReference) { + return onErrorReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return on encountering an error during conversion. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturnValueOf(AggregationExpression expression) { + return onErrorReturn(expression); + } + + /** + * Optionally specify the value to return when the input is {@literal null} or missing. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturn(Object value) { + return new Convert(append("onNull", value)); + } + + /** + * Optionally specify the field holding the value to return when the input is {@literal null} or missing. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturnValueOf(String fieldReference) { + return onNullReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return when the input is {@literal null} or missing. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturnValueOf(AggregationExpression expression) { + return onNullReturn(expression); + } + + @Override + protected String getMongoMethod() { + return "$convert"; + } + } + + /** + * {@link AggregationExpression} for {@code $toBool} that converts a value to {@literal boolean}. Shorthand for + * {@link Convert#to(String) Convert#to("bool")}.
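+ * <p>
+ * Usage sketch (the {@code active} field name is illustrative only):
+ * <pre>
+ * // { $toBool: "$active" }
+ * ToBool.toBoolean(Fields.field("active"));
+ * </pre>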
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toBool/ + * @since 2.1 + */ + public static class ToBool extends AbstractAggregationExpression { + + private ToBool(Object value) { + super(value); + } + + /** + * Creates new {@link ToBool} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToBool}. + */ + public static ToBool toBoolean(Object value) { + return new ToBool(value); + } + + @Override + protected String getMongoMethod() { + return "$toBool"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDate} that converts a value to {@literal date}. Shorthand for + * {@link Convert#to(String) Convert#to("date")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDate/ + * @since 2.1 + */ + public static class ToDate extends AbstractAggregationExpression { + + private ToDate(Object value) { + super(value); + } + + /** + * Creates new {@link ToDate} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDate}. + */ + public static ToDate toDate(Object value) { + return new ToDate(value); + } + + @Override + protected String getMongoMethod() { + return "$toDate"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDecimal} that converts a value to {@literal decimal}. Shorthand for + * {@link Convert#to(String) Convert#to("decimal")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDecimal/ + * @since 2.1 + */ + public static class ToDecimal extends AbstractAggregationExpression { + + private ToDecimal(Object value) { + super(value); + } + + /** + * Creates new {@link ToDecimal} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDecimal}. + */ + public static ToDecimal toDecimal(Object value) { + return new ToDecimal(value); + } + + @Override + protected String getMongoMethod() { + return "$toDecimal"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDouble} that converts a value to {@literal double}. Shorthand for + * {@link Convert#to(String) Convert#to("double")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDouble/ + * @since 2.1 + */ + public static class ToDouble extends AbstractAggregationExpression { + + private ToDouble(Object value) { + super(value); + } + + /** + * Creates new {@link ToDouble} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDouble}. + */ + public static ToDouble toDouble(Object value) { + return new ToDouble(value); + } + + @Override + protected String getMongoMethod() { + return "$toDouble"; + } + } + + /** + * {@link AggregationExpression} for {@code $toInt} that converts a value to {@literal integer}. Shorthand for + * {@link Convert#to(String) Convert#to("int")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toInt/ + * @since 2.1 + */ + public static class ToInt extends AbstractAggregationExpression { + + private ToInt(Object value) { + super(value); + } + + /** + * Creates new {@link ToInt} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToInt}. + */ + public static ToInt toInt(Object value) { + return new ToInt(value); + } + + @Override + protected String getMongoMethod() { + return "$toInt"; + } + } + + /** + * {@link AggregationExpression} for {@code $toLong} that converts a value to {@literal long}. Shorthand for + * {@link Convert#to(String) Convert#to("long")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toLong/ + * @since 2.1 + */ + public static class ToLong extends AbstractAggregationExpression { + + private ToLong(Object value) { + super(value); + } + + /** + * Creates new {@link ToLong} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToLong}. + */ + public static ToLong toLong(Object value) { + return new ToLong(value); + } + + @Override + protected String getMongoMethod() { + return "$toLong"; + } + } + + /** + * {@link AggregationExpression} for {@code $toObjectId} that converts a value to {@literal objectId}. Shorthand for + * {@link Convert#to(String) Convert#to("objectId")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toObjectId/ + * @since 2.1 + */ + public static class ToObjectId extends AbstractAggregationExpression { + + private ToObjectId(Object value) { + super(value); + } + + /** + * Creates new {@link ToObjectId} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToObjectId}. + */ + public static ToObjectId toObjectId(Object value) { + return new ToObjectId(value); + } + + @Override + protected String getMongoMethod() { + return "$toObjectId"; + } + } + + /** + * {@link AggregationExpression} for {@code $toString} that converts a value to {@literal string}. Shorthand for + * {@link Convert#to(String) Convert#to("string")}.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toString/ + * @since 2.1 + */ + public static class ToString extends AbstractAggregationExpression { + + private ToString(Object value) { + super(value); + } + + /** + * Creates new {@link ToString} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToString}. + */ + public static ToString toString(Object value) { + return new ToString(value); + } + + @Override + protected String getMongoMethod() { + return "$toString"; + } + } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DegreesToRadians extends AbstractAggregationExpression { + + private DegreesToRadians(Object value) { + super(value); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(String fieldName) { + return degreesToRadians(Fields.field(fieldName)); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) { + return degreesToRadians(expression); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadians(Object value) { + return new DegreesToRadians(value); + } + + @Override + protected String getMongoMethod() { + return "$degreesToRadians"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java new file mode 100644 index 0000000000..6a6108f832 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java @@ -0,0 +1,78 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $count}-operation.
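+ * <p>
+ * Usage sketch (the {@code total} alias is illustrative only):
+ * <pre>
+ * // { $count: "total" }
+ * Aggregation.count().as("total");
+ * </pre>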
+ * We recommend using the static factory method {@link Aggregation#count()} instead of creating instances of this class + * directly. + * + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ + * @author Mark Paluch + * @since 1.10 + */ +public class CountOperation implements FieldsExposingAggregationOperation { + + private final String fieldName; + + /** + * Creates a new {@link CountOperation} given the {@literal fieldName} field name. + * + * @param fieldName must not be {@literal null} or empty. + */ + public CountOperation(String fieldName) { + + Assert.hasText(fieldName, "Field name must not be null or empty"); + this.fieldName = fieldName; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), fieldName); + } + + @Override + public String getOperator() { + return "$count"; + } + + @Override + public ExposedFields getFields() { + return ExposedFields.from(new ExposedField(fieldName, true)); + } + + /** + * Builder for {@link CountOperation}. + * + * @author Mark Paluch + */ + public static class CountOperationBuilder { + + /** + * Returns the {@link CountOperation} to be applied, using the given field name as alias. + * + * @param fieldName must not be {@literal null} or empty. + * @return new instance of {@link CountOperation}. + */ + public CountOperation as(String fieldName) { + return new CountOperation(fieldName); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java new file mode 100644 index 0000000000..26a85bf2c3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java @@ -0,0 +1,67 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.util.Assert; + +/** + * Gateway to {@literal data type} expressions. + * + * @author Christoph Strobl + * @since 1.10 + * @soundtrack Clawfinger - Catch Me + */ +public class DataTypeOperators { + + /** + * Return the BSON data type of the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Type}. + */ + public static Type typeOf(String fieldReference) { + return Type.typeOf(fieldReference); + } + + /** + * {@link AggregationExpression} for {@code $type}. + * + * @author Christoph Strobl + */ + public static class Type extends AbstractAggregationExpression { + + private Type(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$type"; + } + + /** + * Creates new {@link Type}. + * + * @param field must not be {@literal null}. + * @return new instance of {@link Type}.
+ */ + public static Type typeOf(String field) { + + Assert.notNull(field, "Field must not be null"); + return new Type(Fields.field(field)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java new file mode 100644 index 0000000000..ff6ed7e983 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -0,0 +1,3414 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Locale; +import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Gateway to {@literal Date} aggregation operations. + * + * @author Christoph Strobl + * @author Matt Morrissette + * @since 1.10 + */ +public class DateOperators { + + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + */ + public static DateOperatorFactory dateOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new DateOperatorFactory(fieldReference); + } + + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new DateOperatorFactory(fieldReference).withTimezone(timezone); + } + + /** + * Take the date resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + */ + public static DateOperatorFactory dateOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new DateOperatorFactory(expression); + } + + /** + * Take the date resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. 
+ * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) { + + Assert.notNull(expression, "Expression must not be null"); + return new DateOperatorFactory(expression).withTimezone(timezone); + } + + /** + * Take the given value as date.
                    + * This can be one of: + *
+ * <ul>
+ * <li>{@link java.util.Date}</li>
+ * <li>{@link java.util.Calendar}</li>
+ * <li>{@link java.time.Instant}</li>
+ * <li>{@link java.time.ZonedDateTime}</li>
+ * <li>{@link java.lang.Long}</li>
+ * <li>{@link Field}</li>
+ * <li>{@link AggregationExpression}</li>
+ * </ul>
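+ * <p>
+ * For example (a sketch; any of the types listed above works as input):
+ * <pre>
+ * DateOperators.dateValue(java.time.Instant.now()).year();
+ * </pre>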
                    + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 2.1 + */ + public static DateOperatorFactory dateValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new DateOperatorFactory(value); + } + + /** + * Construct a Date object by providing the date’s constituent properties.
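+ * <p>
+ * Usage sketch (the year literal is illustrative only):
+ * <pre>
+ * // { $dateFromParts: { year: 2021 } }
+ * DateOperators.dateFromParts().year(2021);
+ * </pre>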
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateFromPartsOperatorFactory}. + * @since 2.1 + */ + public static DateFromPartsOperatorFactory dateFromParts() { + return new DateFromPartsOperatorFactory(Timezone.none()); + } + + /** + * Construct a Date object from the given date {@link String}.
                    + * To use a {@link Field field reference} or {@link AggregationExpression} as source of the date string consider + * {@link DateOperatorFactory#fromString()} or {@link DateFromString#fromStringOf(AggregationExpression)}.
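+ * <p>
+ * Usage sketch (the date literal is illustrative only):
+ * <pre>
+ * // { $dateFromString: { dateString: "2021-11-01" } }
+ * DateOperators.dateFromString("2021-11-01");
+ * </pre>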
+ * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateFromString}. + * @since 2.1 + */ + public static DateFromString dateFromString(String value) { + return DateFromString.fromString(value); + } + + /** + * Timezone represents a MongoDB timezone abstraction which can be represented with a timezone ID or offset as a + * {@link String}. Also accepts an {@link AggregationExpression} or {@link Field} that resolves to a {@link String} of + * either an Olson Timezone Identifier or a UTC Offset.
+ * <table>
+ * <tr><th>Format</th><th>Example</th></tr>
+ * <tr><td>Olson Timezone Identifier</td><td>"America/New_York"<br />"Europe/London"<br />"GMT"</td></tr>
+ * <tr><td>UTC Offset</td><td>+/-[hh]:[mm], e.g. "+04:45"<br />-[hh][mm], e.g. "-0530"<br />+/-[hh], e.g. "+03"</td></tr>
+ * </table>
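+ * <p>
+ * For example (values are illustrative only):
+ * <pre>
+ * Timezone.valueOf("America/New_York"); // Olson Timezone Identifier
+ * Timezone.valueOf("+04:45");           // UTC Offset
+ * Timezone.ofField("tz");               // timezone read from the "tz" field
+ * </pre>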
+ * NOTE: Support for timezones in aggregations requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ + public static class Timezone { + + private static final Timezone NONE = new Timezone(null); + + private final @Nullable Object value; + + private Timezone(@Nullable Object value) { + this.value = value; + } + + /** + * Return an empty {@link Timezone}. + * + * @return never {@literal null}. + */ + public static Timezone none() { + return NONE; + } + + /** + * Create a {@link Timezone} for the given value which must be a valid expression that resolves to a {@link String} + * representing an Olson Timezone Identifier or UTC Offset. + * + * @param value the plain timezone {@link String}, a {@link Field} holding the timezone or an + * {@link AggregationExpression} resulting in the timezone. + * @return new instance of {@link Timezone}. + */ + public static Timezone valueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Timezone(value); + } + + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link TimeZone} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromOffset(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null"); + + return fromOffset( + ZoneOffset.ofTotalSeconds(Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(timeZone.getRawOffset())))); + } + + /** + * Create a {@link Timezone} for the given {@link ZoneOffset} rendering the offset as UTC offset. + * + * @param offset {@link ZoneOffset} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromOffset(ZoneOffset offset) { + + Assert.notNull(offset, "ZoneOffset must not be null"); + return new Timezone(offset.toString()); + } + + /** + * Create a {@link Timezone} for the given {@link TimeZone} using its zone identifier. + * + * @param timeZone {@link TimeZone} to obtain the zone identifier from. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null"); + + return valueOf(timeZone.getID()); + } + + /** + * Create a {@link Timezone} for the given {@link java.time.ZoneId} using its zone identifier. + * + * @param zoneId {@link ZoneId} to obtain the zone identifier from. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(ZoneId zoneId) { + + Assert.notNull(zoneId, "ZoneId must not be null"); + return new Timezone(zoneId.toString()); + } + + /** + * Create a {@link Timezone} for the {@link Field} reference holding the Olson Timezone Identifier or UTC Offset. + * + * @param fieldReference the {@link Field} holding the timezone. + * @return new instance of {@link Timezone}. + */ + public static Timezone ofField(String fieldReference) { + return valueOf(Fields.field(fieldReference)); + } + + /** + * Create a {@link Timezone} for the {@link AggregationExpression} resulting in the Olson Timezone Identifier or UTC + * Offset. + * + * @param expression the {@link AggregationExpression} resulting in the timezone. + * @return new instance of {@link Timezone}.
+ */ + public static Timezone ofExpression(AggregationExpression expression) { + return valueOf(expression); + } + + @Nullable + Object getValue() { + return value; + } + } + + /** + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DateOperatorFactory { + + private final @Nullable String fieldReference; + private final @Nullable Object dateValue; + private final @Nullable AggregationExpression expression; + private final Timezone timezone; + + /** + * @param fieldReference + * @param expression + * @param value + * @param timezone + * @since 2.1 + */ + private DateOperatorFactory(@Nullable String fieldReference, @Nullable AggregationExpression expression, + @Nullable Object value, Timezone timezone) { + + this.fieldReference = fieldReference; + this.expression = expression; + this.dateValue = value; + this.timezone = timezone; + } + + /** + * Creates new {@link DateOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public DateOperatorFactory(String fieldReference) { + + this(fieldReference, null, null, Timezone.none()); + + Assert.notNull(fieldReference, "FieldReference must not be null"); + } + + /** + * Creates new {@link DateOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public DateOperatorFactory(AggregationExpression expression) { + + this(null, expression, null, Timezone.none()); + + Assert.notNull(expression, "Expression must not be null"); + } + + /** + * Creates new {@link DateOperatorFactory} for given {@code value} that resolves to a Date.
                    + *
+ * <ul>
+ * <li>{@link java.util.Date}</li>
+ * <li>{@link java.util.Calendar}</li>
+ * <li>{@link java.time.Instant}</li>
+ * <li>{@link java.time.ZonedDateTime}</li>
+ * <li>{@link java.lang.Long}</li>
+ * </ul>
                    + * + * @param value must not be {@literal null}. + * @since 2.1 + */ + public DateOperatorFactory(Object value) { + + this(null, null, value, Timezone.none()); + + Assert.notNull(value, "Value must not be null"); + } + + /** + * Create a new {@link DateOperatorFactory} bound to a given {@link Timezone}.
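+ * <p>
+ * A sketch combining the timezone with one of the part extractions (field name and zone are illustrative only):
+ * <pre>
+ * // { $hour: { date: "$purchaseDate", timezone: "America/Chicago" } }
+ * DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")).hour();
+ * </pre>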
+ * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Use {@link Timezone#none()} instead of {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + public DateOperatorFactory withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DateOperatorFactory(fieldReference, expression, dateValue, timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone(DateAdd.addValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, String unit) { + return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateAdd.addValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd add(Object value, String unit) { + return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3
+ */ + public DateAdd add(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateAdd.addValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateSubtract.subtractValueOf(expression, unit).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone( + DateSubtract.subtractValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).fromDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(String fieldReference, String unit) { + return applyTimezone(DateSubtract.subtractValueOf(fieldReference, unit).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateSubtract.subtractValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).fromDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtract(Object value, String unit) { + return applyTimezone(DateSubtract.subtractValue(value, unit).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. 
+ * @since 4.0 + */ + public DateSubtract subtract(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateSubtract.subtractValue(value, unit.name().toLowerCase(Locale.ROOT)).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that truncates a date to the given {@literal unit}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + * @since 4.0 + */ + public DateTrunc truncate(String unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone(DateTrunc.truncateValue(dateReference()).to(unit), timezone); + } + + /** + * Creates new {@link AggregationExpression} that truncates a date to the given {@literal unit}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + * @since 4.0 + */ + public DateTrunc truncate(TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return truncate(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and + * 366. + * + * @return new instance of {@link DayOfYear}. + */ + public DayOfYear dayOfYear() { + return applyTimezone(DayOfYear.dayOfYear(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the day of the month for a date as a number between 1 and + * 31. + * + * @return new instance of {@link DayOfMonth}. + */ + public DayOfMonth dayOfMonth() { + return applyTimezone(DayOfMonth.dayOfMonth(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the day of the week for a date as a number between 1 + * (Sunday) and 7 (Saturday). + * + * @return new instance of {@link DayOfWeek}. + */ + public DayOfWeek dayOfWeek() { + return applyTimezone(DayOfWeek.dayOfWeek(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + * @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + * @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + * @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, String unit) { + return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + * @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the given date + * {@literal value}. + * + * @param value anything that resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + * @since 3.3 + */ + public DateDiff diff(Object value, String unit) { + return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the given date + * {@literal value}. + * + * @param value anything that resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + * @since 3.3 + */ + public DateDiff diff(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateDiff.diffValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the year portion of a date. + * + * @return new instance of {@link Year}. + */ + public Year year() { + return applyTimezone(Year.year(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the month of a date as a number between 1 and 12. + * + * @return new instance of {@link Month}. + */ + public Month month() { + return applyTimezone(Month.month(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the week of the year for a date as a number between 0 and + * 53. + * + * @return new instance of {@link Week}. + */ + public Week week() { + return applyTimezone(Week.week(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the hour portion of a date as a number between 0 and 23. + * + * @return new instance of {@link Hour}. + */ + public Hour hour() { + return applyTimezone(Hour.hour(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the minute portion of a date as a number between 0 and 59. + * + * @return new instance of {@link Minute}. + */ + public Minute minute() { + return applyTimezone(Minute.minute(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the second portion of a date as a number between 0 and 59, + * but can be 60 to account for leap seconds. + * + * @return new instance of {@link Second}.
+ */ + public Second second() { + return applyTimezone(Second.second(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the millisecond portion of a date as an integer between 0 + * and 999. + * + * @return new instance of {@link Millisecond}. + */ + public Millisecond millisecond() { + return applyTimezone(Millisecond.millisecond(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that converts a date object to a string according to a user-specified + * {@literal format}. + * + * @param format must not be {@literal null}. + * @return new instance of {@link DateToString}. + */ + public DateToString toString(String format) { + return applyTimezone(DateToString.dateToString(dateReference()).toString(format), timezone); + } + + /** + * Creates new {@link AggregationExpression} that converts a date object to a string according to the server default + * format. + * + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString toStringWithDefaultFormat() { + return applyTimezone(DateToString.dateToString(dateReference()).defaultFormat(), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the weekday number in ISO 8601-2018 format, ranging from 1 + * (for Monday) to 7 (for Sunday). + * + * @return new instance of {@link IsoDayOfWeek}. + */ + public IsoDayOfWeek isoDayOfWeek() { + return applyTimezone(IsoDayOfWeek.isoDayWeek(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the week number in ISO 8601-2018 format, ranging from 1 to + * 53. + * + * @return new instance of {@link IsoWeek}. + */ + public IsoWeek isoWeek() { + return applyTimezone(IsoWeek.isoWeek(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the year number in ISO 8601-2018 format. + * + * @return new instance of {@link IsoWeekYear}. + */ + public IsoWeekYear isoWeekYear() { + return applyTimezone(IsoWeekYear.isoWeekYear(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns a document containing the constituent parts of the date as + * individual properties.
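+ * <p>
+ * Usage sketch (the field name is illustrative only):
+ * <pre>
+ * // { $dateToParts: { date: "$purchaseDate" } }
+ * DateOperators.dateOf("purchaseDate").toParts();
+ * </pre>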
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateToParts}. + * @since 2.1 + */ + public DateToParts toParts() { + return applyTimezone(DateToParts.dateToParts(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that converts a date/time string to a date object.
+ * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateFromString}. + * @since 2.1 + */ + public DateFromString fromString() { + return applyTimezone(DateFromString.fromString(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the incrementing ordinal from a timestamp. + * + * @return new instance of {@link TsIncrement}. + * @since 4.0 + */ + public TsIncrement tsIncrement() { + + if (timezone != null && !Timezone.none().equals(timezone)) { + throw new IllegalArgumentException("$tsIncrement does not support timezones"); + } + + return TsIncrement.tsIncrement(dateReference()); + } + + /** + * Creates new {@link AggregationExpression} that returns the seconds from a timestamp. + * + * @return new instance of {@link TsSecond}. + * @since 4.0 + */ + public TsSecond tsSecond() { + + if (timezone != null && !Timezone.none().equals(timezone)) { + throw new IllegalArgumentException("$tsSecond does not support timezones"); + } + + return TsSecond.tsSecond(dateReference()); + } + + private Object dateReference() { + + if (usesFieldRef()) { + return Fields.field(fieldReference); + } + + return usesExpression() ? expression : dateValue; + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + + private boolean usesExpression() { + return expression != null; + } + } + + /** + * @author Matt Morrissette + * @author Christoph Strobl + * @since 2.1 + */ + public static class DateFromPartsOperatorFactory { + + private final Timezone timezone; + + private DateFromPartsOperatorFactory(Timezone timezone) { + this.timezone = timezone; + } + + /** + * Set the {@literal week date year} to the given value which must resolve to a week date year in range {@code 0 - 9999}. + * Can be a simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param isoWeekYear must not be {@literal null}. + * @return new instance of {@link IsoDateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal isoWeekYear} is {@literal null}. + */ + public IsoDateFromParts isoWeekYear(Object isoWeekYear) { + return applyTimezone(IsoDateFromParts.dateFromParts().isoWeekYear(isoWeekYear), timezone); + } + + /** + * Set the {@literal week date year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoDateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public IsoDateFromParts isoWeekYearOf(String fieldReference) { + return isoWeekYear(Fields.field(fieldReference)); + } + + /** + * Set the {@literal week date year} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoDateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public IsoDateFromParts isoWeekYearOf(AggregationExpression expression) { + return isoWeekYear(expression); + } + + /** + * Set the {@literal year} to the given value which must resolve to a calendar year. Can be a simple value, + * {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param year must not be {@literal null}. + * @return new instance of {@link DateFromParts} with {@link Timezone} if set.
+ * @throws IllegalArgumentException if given {@literal year} is {@literal null} + */ + public DateFromParts year(Object year) { + return applyTimezone(DateFromParts.dateFromParts().year(year), timezone); + } + + /** + * Set the {@literal year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public DateFromParts yearOf(String fieldReference) { + return year(Fields.field(fieldReference)); + } + + /** + * Set the {@literal year} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public DateFromParts yearOf(AggregationExpression expression) { + return year(expression); + } + + /** + * Create a new {@link DateFromPartsOperatorFactory} bound to a given {@link Timezone}.
+ * + * @param timezone must not be {@literal null}. Use {@link Timezone#none()} instead. + * @return new instance of {@link DateFromPartsOperatorFactory}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + public DateFromPartsOperatorFactory withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DateFromPartsOperatorFactory(timezone); + } + } + + /** + * {@link AggregationExpression} capable of setting a given {@link Timezone}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static abstract class TimezonedDateAggregationExpression extends AbstractAggregationExpression { + + protected TimezonedDateAggregationExpression(Object value) { + super(value); + } + + /** + * Append the {@code timezone} to a given source. The source itself can be a {@link Map} of already set properties + * or a single value. In case of a single value {@code source}, the value is added as the {@code date} property. + * + * @param source must not be {@literal null}. + * @param timezone must not be {@literal null}. Use {@link Timezone#none()} instead. + * @return the map holding the {@code date} and optional {@code timezone} properties. + */ + protected static java.util.Map<String, Object> appendTimezone(Object source, Timezone timezone) { + + java.util.Map<String, Object> args; + + if (source instanceof Map map) { + args = new LinkedHashMap<>(map); + } else { + args = new LinkedHashMap<>(2); + args.put("date", source); + } + + if (!ObjectUtils.nullSafeEquals(Timezone.none(), timezone)) { + args.put("timezone", timezone.value); + } else if (args.containsKey("timezone")) { + args.remove("timezone"); + } + + return args; + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + protected abstract TimezonedDateAggregationExpression withTimezone(Timezone timezone); + + protected boolean hasTimezone() { + return contains("timezone"); + } + } + + /** + * {@link AggregationExpression} for {@code $dayOfYear}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DayOfYear extends TimezonedDateAggregationExpression { + + private DayOfYear(Object value) { + super(value); + } + + /** + * Creates new {@link DayOfYear}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link DayOfYear}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static DayOfYear dayOfYear(Object value) { + + Assert.notNull(value, "value must not be null"); + return new DayOfYear(value); + } + + /** + * Creates new {@link DayOfYear}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DayOfYear}. + */ + public static DayOfYear dayOfYear(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dayOfYear(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DayOfYear}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DayOfYear}. + */ + public static DayOfYear dayOfYear(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dayOfYear((Object) expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DayOfYear}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DayOfYear withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DayOfYear(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dayOfYear"; + } + } + + /** + * {@link AggregationExpression} for {@code $dayOfMonth}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DayOfMonth extends TimezonedDateAggregationExpression { + + private DayOfMonth(Object value) { + super(value); + } + + /** + * Creates new {@link DayOfMonth}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link DayOfMonth}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static DayOfMonth dayOfMonth(Object value) { + + Assert.notNull(value, "value must not be null"); + return new DayOfMonth(value); + } + + /** + * Creates new {@link DayOfMonth}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DayOfMonth}. + */ + public static DayOfMonth dayOfMonth(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dayOfMonth(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DayOfMonth}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DayOfMonth}. + */ + public static DayOfMonth dayOfMonth(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dayOfMonth((Object) expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DayOfMonth}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DayOfMonth withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DayOfMonth(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dayOfMonth"; + } + } + + /** + * {@link AggregationExpression} for {@code $dayOfWeek}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DayOfWeek extends TimezonedDateAggregationExpression { + + private DayOfWeek(Object value) { + super(value); + } + + /** + * Creates new {@link DayOfWeek}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link DayOfWeek}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static DayOfWeek dayOfWeek(Object value) { + + Assert.notNull(value, "value must not be null"); + return new DayOfWeek(value); + } + + /** + * Creates new {@link DayOfWeek}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DayOfWeek}. + */ + public static DayOfWeek dayOfWeek(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dayOfWeek(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DayOfWeek}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DayOfWeek}. + */ + public static DayOfWeek dayOfWeek(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dayOfWeek((Object) expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DayOfWeek}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DayOfWeek withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DayOfWeek(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dayOfWeek"; + } + } + + /** + * {@link AggregationExpression} for {@code $year}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Year extends TimezonedDateAggregationExpression { + + private Year(Object value) { + super(value); + } + + /** + * Creates new {@link Year}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Year}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Year year(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Year(value); + } + + /** + * Creates new {@link Year}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Year}. + */ + public static Year yearOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return year(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Year}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Year}. + */ + public static Year yearOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return year(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Year}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Year withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Year(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$year"; + } + } + + /** + * {@link AggregationExpression} for {@code $month}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Month extends TimezonedDateAggregationExpression { + + private Month(Object value) { + super(value); + } + + /** + * Creates new {@link Month}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Month}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Month month(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Month(value); + } + + /** + * Creates new {@link Month}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Month}. + */ + public static Month monthOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return month(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Month}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Month}. + */ + public static Month monthOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return month(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
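+	 * <p>
+	 * For illustration ({@code "purchaseDate"} is a hypothetical field; {@code Timezone.valueOf} is assumed from the
+	 * surrounding {@link Timezone} API):
+	 * <pre class="code">
+	 * // expected to render { $month : { date : "$purchaseDate", timezone : "+04:45" } }
+	 * Month.monthOf("purchaseDate").withTimezone(Timezone.valueOf("+04:45"));
+	 * </pre>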
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Month}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Month withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Month(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$month"; + } + } + + /** + * {@link AggregationExpression} for {@code $week}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Week extends TimezonedDateAggregationExpression { + + private Week(Object value) { + super(value); + } + + /** + * Creates new {@link Week}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Week}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Week week(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Week(value); + } + + /** + * Creates new {@link Week}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Week}. + */ + public static Week weekOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return week(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Week}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Week}. + */ + public static Week weekOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return week(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Week}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Week withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Week(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$week"; + } + } + + /** + * {@link AggregationExpression} for {@code $hour}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Hour extends TimezonedDateAggregationExpression { + + private Hour(Object value) { + super(value); + } + + /** + * Creates new {@link Hour}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Hour}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Hour hour(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Hour(value); + } + + /** + * Creates new {@link Hour}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Hour}. + */ + public static Hour hourOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return hour(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Hour}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Hour}. + */ + public static Hour hourOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return hour(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Hour}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Hour withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Hour(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$hour"; + } + } + + /** + * {@link AggregationExpression} for {@code $minute}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Minute extends TimezonedDateAggregationExpression { + + private Minute(Object value) { + super(value); + } + + /** + * Creates new {@link Minute}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Minute}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Minute minute(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Minute(value); + } + + /** + * Creates new {@link Minute}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Minute}. + */ + public static Minute minuteOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return minute(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Minute}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Minute}. + */ + public static Minute minuteOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return minute(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Minute}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Minute withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Minute(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$minute"; + } + } + + /** + * {@link AggregationExpression} for {@code $second}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Second extends TimezonedDateAggregationExpression { + + private Second(Object value) { + super(value); + } + + /** + * Creates new {@link Second}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Second}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Second second(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Second(value); + } + + /** + * Creates new {@link Second}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Second}. + */ + public static Second secondOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return second(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Second}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Second}. + */ + public static Second secondOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return second(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Second}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Second withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Second(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$second"; + } + } + + /** + * {@link AggregationExpression} for {@code $millisecond}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Millisecond extends TimezonedDateAggregationExpression { + + private Millisecond(Object value) { + super(value); + } + + /** + * Creates new {@link Millisecond}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Millisecond}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Millisecond millisecond(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Millisecond(value); + } + + /** + * Creates new {@link Millisecond}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Millisecond}. + */ + public static Millisecond millisecondOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return millisecond(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Millisecond}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Millisecond}. + */ + public static Millisecond millisecondOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return millisecond(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
+	 * NOTE: Requires MongoDB 3.6 or later.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link Millisecond}.
+	 * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}.
+	 * @since 2.1
+	 */
+	@Override
+	public Millisecond withTimezone(Timezone timezone) {
+
+		Assert.notNull(timezone, "Timezone must not be null");
+		return new Millisecond(appendTimezone(values().iterator().next(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$millisecond";
+	}
+}
+
+/**
+ * {@link AggregationExpression} for {@code $dateToString}.
+ *
+ * @author Christoph Strobl
+ * @author Matt Morrissette
+ */
+public static class DateToString extends TimezonedDateAggregationExpression {
+
+	private DateToString(Object value) {
+		super(value);
+	}
+
+	/**
+	 * Creates new {@link FormatBuilder}.
+	 *
+	 * @param value must not be {@literal null} and resolve to field, expression or object that represents a date.
+	 * @return new instance of {@link FormatBuilder}.
+	 * @throws IllegalArgumentException if given value is {@literal null}.
+	 * @since 2.1
+	 */
+	public static FormatBuilder dateToString(Object value) {
+
+		Assert.notNull(value, "value must not be null");
+
+		return new FormatBuilder() {
+
+			@Override
+			public DateToString toString(String format) {
+
+				Assert.notNull(format, "Format must not be null");
+				return new DateToString(argumentMap(value, format, Timezone.none()));
+			}
+
+			@Override
+			public DateToString defaultFormat() {
+				return new DateToString(argumentMap(value, null, Timezone.none()));
+			}
+		};
+	}
+
+	/**
+	 * Creates new {@link FormatBuilder} allowing to define the date format to apply.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance of {@link FormatBuilder} to create {@link DateToString}.
+	 */
+	public static FormatBuilder dateOf(final String fieldReference) {
+
+		Assert.notNull(fieldReference, "FieldReference must not be null");
+		return dateToString(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Creates new {@link FormatBuilder} allowing to define the date format to apply.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance of {@link FormatBuilder} to create {@link DateToString}.
+	 */
+	public static FormatBuilder dateOf(final AggregationExpression expression) {
+
+		Assert.notNull(expression, "Expression must not be null");
+		return dateToString(expression);
+	}
+
+	/**
+	 * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
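+	 * <p>
+	 * A sketch for illustration ({@code "date"} is a hypothetical field; {@code Timezone.valueOf} is assumed from the
+	 * surrounding {@link Timezone} API):
+	 * <pre class="code">
+	 * // expected to render { $dateToString : { format : "%H:%M:%S:%L", date : "$date", timezone : "America/Chicago" } }
+	 * DateToString.dateOf("date").toString("%H:%M:%S:%L").withTimezone(Timezone.valueOf("America/Chicago"));
+	 * </pre>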
+	 * NOTE: Requires MongoDB 3.6 or later.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link DateToString}.
+	 * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}.
+	 * @since 2.1
+	 */
+	@Override
+	public DateToString withTimezone(Timezone timezone) {
+
+		Assert.notNull(timezone, "Timezone must not be null");
+		return new DateToString(append("timezone", timezone));
+	}
+
+	/**
+	 * Optionally specify the value to return when the date is {@literal null} or missing.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString onNullReturn(Object value) { + return new DateToString(append("onNull", value)); + } + + /** + * Optionally specify the field holding the value to return when the date is {@literal null} or missing.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString onNullReturnValueOf(String fieldReference) { + return onNullReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return when the date is {@literal null} or missing.
+	 * NOTE: Requires MongoDB 4.0 or later.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance of {@link DateToString}.
+	 * @since 2.1
+	 */
+	public DateToString onNullReturnValueOf(AggregationExpression expression) {
+		return onNullReturn(expression);
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$dateToString";
+	}
+
+	private static java.util.Map<String, Object> argumentMap(Object date, @Nullable String format, Timezone timezone) {
+
+		java.util.Map<String, Object> args = new LinkedHashMap<>(2);
+
+		if (StringUtils.hasText(format)) {
+			args.put("format", format);
+		}
+
+		args.put("date", date);
+
+		if (!ObjectUtils.nullSafeEquals(timezone, Timezone.none())) {
+			args.put("timezone", timezone.value);
+		}
+		return args;
+	}
+
+	protected java.util.Map<String, Object> append(String key, Object value) {
+
+		java.util.Map<String, Object> clone = new LinkedHashMap<>(argumentMap());
+
+		if (value instanceof Timezone timezone) {
+
+			if (ObjectUtils.nullSafeEquals(value, Timezone.none())) {
+				clone.remove("timezone");
+			} else {
+				clone.put("timezone", timezone.value);
+			}
+		} else {
+			clone.put(key, value);
+		}
+
+		return clone;
+	}
+
+	public interface FormatBuilder {
+
+		/**
+		 * Creates new {@link DateToString} with all previously added arguments appending the given one.
+		 *
+		 * @param format must not be {@literal null}.
+		 * @return new instance of {@link DateToString}.
+		 */
+		DateToString toString(String format);
+
+		/**
+		 * Creates new {@link DateToString} using the server default string format ({@code %Y-%m-%dT%H:%M:%S.%LZ}) for
+		 * dates.
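+		 * <p>
+		 * For illustration (the {@code "date"} field is an assumed example):
+		 * <pre class="code">
+		 * // expected to render { $dateToString : { date : "$date" } }, leaving the format to the server default
+		 * DateToString.dateOf("date").defaultFormat();
+		 * </pre>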
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + DateToString defaultFormat(); + } + } + + /** + * {@link AggregationExpression} for {@code $isoDayOfWeek}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class IsoDayOfWeek extends TimezonedDateAggregationExpression { + + private IsoDayOfWeek(Object value) { + super(value); + } + + /** + * Creates new {@link IsoDayOfWeek}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link IsoDayOfWeek}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static IsoDayOfWeek isoDayWeek(Object value) { + + Assert.notNull(value, "value must not be null"); + return new IsoDayOfWeek(value); + } + + /** + * Creates new {@link IsoDayOfWeek}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoDayOfWeek}. + */ + public static IsoDayOfWeek isoDayOfWeek(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return isoDayWeek(Fields.field(fieldReference)); + } + + /** + * Creates new {@link IsoDayOfWeek}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoDayOfWeek}. + */ + public static IsoDayOfWeek isoDayOfWeek(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return isoDayWeek(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link IsoDayOfWeek}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public IsoDayOfWeek withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new IsoDayOfWeek(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$isoDayOfWeek"; + } + } + + /** + * {@link AggregationExpression} for {@code $isoWeek}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class IsoWeek extends TimezonedDateAggregationExpression { + + private IsoWeek(Object value) { + super(value); + } + + /** + * Creates new {@link IsoWeek}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link IsoWeek}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static IsoWeek isoWeek(Object value) { + + Assert.notNull(value, "value must not be null"); + return new IsoWeek(value); + } + + /** + * Creates new {@link IsoWeek}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoWeek}. + */ + public static IsoWeek isoWeekOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return isoWeek(Fields.field(fieldReference)); + } + + /** + * Creates new {@link IsoWeek}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoWeek}. + */ + public static IsoWeek isoWeekOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return isoWeek(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
+	 * NOTE: Requires MongoDB 3.6 or later.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link IsoWeek}.
+	 * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}.
+	 * @since 2.1
+	 */
+	@Override
+	public IsoWeek withTimezone(Timezone timezone) {
+
+		Assert.notNull(timezone, "Timezone must not be null");
+		return new IsoWeek(appendTimezone(values().iterator().next(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$isoWeek";
+	}
+}
+
+/**
+ * {@link AggregationExpression} for {@code $isoWeekYear}.
+ *
+ * @author Christoph Strobl
+ * @author Matt Morrissette
+ */
+public static class IsoWeekYear extends TimezonedDateAggregationExpression {
+
+	private IsoWeekYear(Object value) {
+		super(value);
+	}
+
+	/**
+	 * Creates new {@link IsoWeekYear}.
+	 *
+	 * @param value must not be {@literal null} and resolve to field, expression or object that represents a date.
+	 * @return new instance of {@link IsoWeekYear}.
+	 * @throws IllegalArgumentException if given value is {@literal null}.
+	 * @since 2.1
+	 */
+	public static IsoWeekYear isoWeekYear(Object value) {
+
+		Assert.notNull(value, "value must not be null");
+		return new IsoWeekYear(value);
+	}
+
+	/**
+	 * Creates new {@link IsoWeekYear}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance of {@link IsoWeekYear}.
+	 */
+	public static IsoWeekYear isoWeekYearOf(String fieldReference) {
+
+		Assert.notNull(fieldReference, "FieldReference must not be null");
+		return isoWeekYear(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Creates new {@link IsoWeekYear}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance of {@link IsoWeekYear}.
+	 */
+	public static IsoWeekYear isoWeekYearOf(AggregationExpression expression) {
+
+		Assert.notNull(expression, "Expression must not be null");
+		return isoWeekYear(expression);
+	}
+
+	/**
+	 * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
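+	 * <p>
+	 * Usage sketch ({@code "date"} is an illustrative field name; {@code Timezone.valueOf} is assumed from the
+	 * surrounding {@link Timezone} API):
+	 * <pre class="code">
+	 * // expected to render { $isoWeekYear : { date : "$date", timezone : "Europe/Berlin" } }
+	 * IsoWeekYear.isoWeekYearOf("date").withTimezone(Timezone.valueOf("Europe/Berlin"));
+	 * </pre>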
+	 * NOTE: Requires MongoDB 3.6 or later.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link IsoWeekYear}.
+	 * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}.
+	 * @since 2.1
+	 */
+	@Override
+	public IsoWeekYear withTimezone(Timezone timezone) {
+
+		Assert.notNull(timezone, "Timezone must not be null");
+		return new IsoWeekYear(appendTimezone(values().iterator().next(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$isoWeekYear";
+	}
+}
+
+/**
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+public interface DateParts<T extends DateParts<T>> {
+
+	/**
+	 * Set the {@literal hour} to the given value which must resolve to a value in range {@code 0 - 23}. Can be a
+	 * simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param hour must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal hour} is {@literal null}
+	 */
+	T hour(Object hour);
+
+	/**
+	 * Set the {@literal hour} to the value resolved by following the given {@link Field field reference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+	 */
+	default T hourOf(String fieldReference) {
+		return hour(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Set the {@literal hour} to the result of the given {@link AggregationExpression expression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+	 */
+	default T hourOf(AggregationExpression expression) {
+		return hour(expression);
+	}
+
+	/**
+	 * Set the {@literal minute} to the given value which must resolve to a value in range {@code 0 - 59}. Can be a
+	 * simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param minute must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal minute} is {@literal null}
+	 */
+	T minute(Object minute);
+
+	/**
+	 * Set the {@literal minute} to the value resolved by following the given {@link Field field reference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+	 */
+	default T minuteOf(String fieldReference) {
+		return minute(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Set the {@literal minute} to the result of the given {@link AggregationExpression expression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+	 */
+	default T minuteOf(AggregationExpression expression) {
+		return minute(expression);
+	}
+
+	/**
+	 * Set the {@literal second} to the given value which must resolve to a value in range {@code 0 - 59}. Can be a
+	 * simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param second must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal second} is {@literal null}
+	 */
+	T second(Object second);
+
+	/**
+	 * Set the {@literal second} to the value resolved by following the given {@link Field field reference}.
+ * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + default T secondOf(String fieldReference) { + return second(Fields.field(fieldReference)); + } + + /** + * Set the {@literal second} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default T secondOf(AggregationExpression expression) { + return second(expression); + } + + /** + * Set the {@literal millisecond} to the given value which must resolve to a value in range {@code 0 - 999}. Can be + * a simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param millisecond must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal millisecond} is {@literal null} + * @since 3.2 + */ + T millisecond(Object millisecond); + + /** + * Set the {@literal millisecond} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + * @since 3.2 + */ + default T millisecondOf(String fieldReference) { + return millisecond(Fields.field(fieldReference)); + } + + /** + * Set the {@literal milliseconds} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + * @since 3.2 + */ + default T millisecondOf(AggregationExpression expression) { + return millisecond(expression); + } + } + + /** + * {@link AggregationExpression} for {@code $dateFromParts}.
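+ * <p>
+ * A usage sketch (the part values are illustrative):
+ * <pre class="code">
+ * // expected to render { $dateFromParts : { year : 2018, month : 3, day : 18, hour : 14 } }
+ * DateFromParts.dateFromParts().year(2018).month(3).day(18).hour(14);
+ * </pre>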
+ * NOTE: Requires MongoDB 3.6 or later.
+ *
+ * @author Matt Morrissette
+ * @author Christoph Strobl
+ * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/
+ * @since 2.1
+ */
+public static class DateFromParts extends TimezonedDateAggregationExpression implements DateParts<DateFromParts> {
+
+	private DateFromParts(Object value) {
+		super(value);
+	}
+
+	/**
+	 * Creates new {@link DateFromPartsWithYear}.
+	 *
+	 * @return new instance of {@link DateFromPartsWithYear}.
+	 * @since 2.1
+	 */
+	public static DateFromPartsWithYear dateFromParts() {
+		return year -> new DateFromParts(Collections.singletonMap("year", year));
+	}
+
+	/**
+	 * Set the {@literal month} to the given value which must resolve to a calendar month in range {@code 1 - 12}. Can
+	 * be a simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param month must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal month} is {@literal null}.
+	 */
+	public DateFromParts month(Object month) {
+		return new DateFromParts(append("month", month));
+	}
+
+	/**
+	 * Set the {@literal month} to the value resolved by following the given {@link Field field reference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+	 */
+	public DateFromParts monthOf(String fieldReference) {
+		return month(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Set the {@literal month} to the result of the given {@link AggregationExpression expression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+	 */
+	public DateFromParts monthOf(AggregationExpression expression) {
+		return month(expression);
+	}
+
+	/**
+	 * Set the {@literal day} to the given value which must resolve to a calendar day in range {@code 1 - 31}. Can be a
+	 * simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param day must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal day} is {@literal null}.
+	 */
+	public DateFromParts day(Object day) {
+		return new DateFromParts(append("day", day));
+	}
+
+	/**
+	 * Set the {@literal day} to the value resolved by following the given {@link Field field reference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+	 */
+	public DateFromParts dayOf(String fieldReference) {
+		return day(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Set the {@literal day} to the result of the given {@link AggregationExpression expression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+ */ + public DateFromParts dayOf(AggregationExpression expression) { + return day(expression); + } + + @Override + public DateFromParts hour(Object hour) { + return new DateFromParts(append("hour", hour)); + } + + @Override + public DateFromParts minute(Object minute) { + return new DateFromParts(append("minute", minute)); + } + + @Override + public DateFromParts second(Object second) { + return new DateFromParts(append("second", second)); + } + + @Override + public DateFromParts millisecond(Object millisecond) { + return new DateFromParts(append("millisecond", millisecond)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + @Override + public DateFromParts withTimezone(Timezone timezone) { + return new DateFromParts(appendTimezone(argumentMap(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dateFromParts"; + } + + /** + * @author Christoph Strobl + */ + public interface DateFromPartsWithYear { + + /** + * Set the {@literal year} to the given value which must resolve to a calendar year. Can be a simple value, + * {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param year must not be {@literal null}. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal year} is {@literal null} + */ + DateFromParts year(Object year); + + /** + * Set the {@literal year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + default DateFromParts yearOf(String fieldReference) { + + Assert.hasText(fieldReference, "Field reference must not be null nor empty"); + return year(Fields.field(fieldReference)); + } + + /** + * Set the {@literal year} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default DateFromParts yearOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return year(expression); + } + } + } + + /** + * {@link AggregationExpression} for {@code $dateFromParts} using ISO week date.
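+ * <p>
+ * For illustration (the part values are made up):
+ * <pre class="code">
+ * // expected to render { $dateFromParts : { isoWeekYear : 2018, isoWeek : 12, isoDayOfWeek : 5 } }
+ * IsoDateFromParts.dateFromParts().isoWeekYear(2018).isoWeek(12).isoDayOfWeek(5);
+ * </pre>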
+ * NOTE: Requires MongoDB 3.6 or later.
+ *
+ * @author Matt Morrissette
+ * @author Christoph Strobl
+ * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/
+ * @since 2.1
+ */
+public static class IsoDateFromParts extends TimezonedDateAggregationExpression
+		implements DateParts<IsoDateFromParts> {
+
+	private IsoDateFromParts(Object value) {
+		super(value);
+	}
+
+	/**
+	 * Creates new {@link IsoDateFromPartsWithYear}.
+	 *
+	 * @return new instance of {@link IsoDateFromPartsWithYear}.
+	 * @since 2.1
+	 */
+	public static IsoDateFromPartsWithYear dateFromParts() {
+		return year -> new IsoDateFromParts(Collections.singletonMap("isoWeekYear", year));
+	}
+
+	/**
+	 * Set the {@literal week of year} to the given value which must resolve to a calendar week in range {@code 1 - 53}.
+	 * Can be a simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param isoWeek must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal isoWeek} is {@literal null}.
+	 */
+	public IsoDateFromParts isoWeek(Object isoWeek) {
+		return new IsoDateFromParts(append("isoWeek", isoWeek));
+	}
+
+	/**
+	 * Set the {@literal week of year} to the value resolved by following the given {@link Field field reference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+	 */
+	public IsoDateFromParts isoWeekOf(String fieldReference) {
+		return isoWeek(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Set the {@literal week of year} to the result of the given {@link AggregationExpression expression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+	 */
+	public IsoDateFromParts isoWeekOf(AggregationExpression expression) {
+		return isoWeek(expression);
+	}
+
+	/**
+	 * Set the {@literal day of week} to the given value which must resolve to a weekday in range {@code 1 - 7}. Can be
+	 * a simple value, {@link Field field reference} or {@link AggregationExpression expression}.
+	 *
+	 * @param day must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal day} is {@literal null}.
+	 */
+	public IsoDateFromParts isoDayOfWeek(Object day) {
+		return new IsoDateFromParts(append("isoDayOfWeek", day));
+	}
+
+	/**
+	 * Set the {@literal day of week} to the value resolved by following the given {@link Field field reference}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+	 */
+	public IsoDateFromParts isoDayOfWeekOf(String fieldReference) {
+		return isoDayOfWeek(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Set the {@literal day of week} to the result of the given {@link AggregationExpression expression}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance.
+	 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+ */ + public IsoDateFromParts isoDayOfWeekOf(AggregationExpression expression) { + return isoDayOfWeek(expression); + } + + @Override + public IsoDateFromParts hour(Object hour) { + return new IsoDateFromParts(append("hour", hour)); + } + + @Override + public IsoDateFromParts minute(Object minute) { + return new IsoDateFromParts(append("minute", minute)); + } + + @Override + public IsoDateFromParts second(Object second) { + return new IsoDateFromParts(append("second", second)); + } + + @Override + public IsoDateFromParts millisecond(Object millisecond) { + return new IsoDateFromParts(append("millisecond", millisecond)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
+	 * NOTE: Requires MongoDB 3.6 or later.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link IsoDateFromParts}.
+	 * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}.
+	 */
+	@Override
+	public IsoDateFromParts withTimezone(Timezone timezone) {
+		return new IsoDateFromParts(appendTimezone(argumentMap(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$dateFromParts";
+	}
+
+	/**
+	 * @author Christoph Strobl
+	 */
+	public interface IsoDateFromPartsWithYear {
+
+		/**
+		 * Set the {@literal week date year} to the given value which must resolve to a calendar year in range
+		 * {@code 0 - 9999}. Can be a simple value, {@link Field field reference} or
+		 * {@link AggregationExpression expression}.
+		 *
+		 * @param isoWeekYear must not be {@literal null}.
+		 * @return new instance.
+		 * @throws IllegalArgumentException if given {@literal isoWeekYear} is {@literal null}.
+		 */
+		IsoDateFromParts isoWeekYear(Object isoWeekYear);
+
+		/**
+		 * Set the {@literal week date year} to the value resolved by following the given {@link Field field reference}.
+		 *
+		 * @param fieldReference must not be {@literal null}.
+		 * @return new instance.
+		 * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}.
+		 */
+		default IsoDateFromParts isoWeekYearOf(String fieldReference) {
+
+			Assert.hasText(fieldReference, "Field reference must not be null nor empty");
+			return isoWeekYear(Fields.field(fieldReference));
+		}
+
+		/**
+		 * Set the {@literal week date year} to the result of the given {@link AggregationExpression expression}.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance.
+		 * @throws IllegalArgumentException if given {@literal expression} is {@literal null}.
+		 */
+		default IsoDateFromParts isoWeekYearOf(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Expression must not be null");
+			return isoWeekYear(expression);
+		}
+	}
+}
+
+/**
+ * {@link AggregationExpression} for {@code $dateToParts}.
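+ * <p>
+ * Usage sketch ({@code "date"} is an assumed field name):
+ * <pre class="code">
+ * // expected to render { $dateToParts : { date : "$date", iso8601 : true } }
+ * DateToParts.datePartsOf("date").iso8601();
+ * </pre>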
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Matt Morrissette + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/ + * @since 2.1 + */ + public static class DateToParts extends TimezonedDateAggregationExpression { + + private DateToParts(Object value) { + super(value); + } + + /** + * Creates new {@link DateToParts}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateToParts}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static DateToParts dateToParts(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new DateToParts(Collections.singletonMap("date", value)); + } + + /** + * Creates new {@link DateToParts}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateToParts}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public static DateToParts datePartsOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dateToParts(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DateToParts}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateToParts}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public static DateToParts datePartsOf(AggregationExpression expression) { + return dateToParts(expression); + } + + /** + * Use ISO week date fields in the resulting document. + * + * @return new instance of {@link DateToParts}. + */ + public DateToParts iso8601() { + return new DateToParts(append("iso8601", true)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
+	 * NOTE: Requires MongoDB 3.6 or later.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link DateToParts}.
+	 * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}.
+	 */
+	@Override
+	public DateToParts withTimezone(Timezone timezone) {
+		return new DateToParts(appendTimezone(argumentMap(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$dateToParts";
+	}
+}
+
+/**
+ * {@link AggregationExpression} for {@code $dateFromString}.
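+ * <p>
+ * For illustration (the date string is made up):
+ * <pre class="code">
+ * // expected to render { $dateFromString : { dateString : "2018-03-20" } }
+ * DateFromString.fromString("2018-03-20");
+ * </pre>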
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Matt Morrissette + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/ + * @since 2.1 + */ + public static class DateFromString extends TimezonedDateAggregationExpression { + + private DateFromString(Object value) { + super(value); + } + + /** + * Creates new {@link DateFromString}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static DateFromString fromString(Object value) { + return new DateFromString(Collections.singletonMap("dateString", value)); + } + + /** + * Creates new {@link DateFromString}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public static DateFromString fromStringOf(String fieldReference) { + return fromString(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DateFromString}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public static DateFromString fromStringOf(AggregationExpression expression) { + return fromString(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + @Override + public DateFromString withTimezone(Timezone timezone) { + return new DateFromString(appendTimezone(argumentMap(), timezone)); + } + + /** + * Optionally set the date format to use. If not specified {@code %Y-%m-%dT%H:%M:%S.%LZ} is used.
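+	 * <p>
+	 * A sketch for illustration (the date string and format are examples):
+	 * <pre class="code">
+	 * // expected to render { $dateFromString : { dateString : "15-06-2018", format : "%d-%m-%Y" } }
+	 * DateFromString.fromString("15-06-2018").withFormat("%d-%m-%Y");
+	 * </pre>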
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param format must not be {@literal null}. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal format} is {@literal null}. + */ + public DateFromString withFormat(String format) { + + Assert.notNull(format, "Format must not be null"); + return new DateFromString(append("format", format)); + } + + @Override + protected String getMongoMethod() { + return "$dateFromString"; + } + } + + /** + * {@link AggregationExpression} for {@code $dateAdd}.
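+ * <p>
+ * Usage sketch ({@code "purchaseDate"} is a hypothetical field; key order in the rendered document may vary):
+ * <pre class="code">
+ * // expected to render { $dateAdd : { startDate : "$purchaseDate", unit : "day", amount : 3 } }
+ * DateAdd.addValue(3, "day").toDateOf("purchaseDate");
+ * </pre>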
+ * NOTE: Requires MongoDB 5.0 or later.
+ *
+ * @author Christoph Strobl
+ * @since 3.3
+ */
+public static class DateAdd extends TimezonedDateAggregationExpression {
+
+	private DateAdd(Object value) {
+		super(value);
+	}
+
+	/**
+	 * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a
+	 * {@link #toDate(Object) start date}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @param unit must not be {@literal null}.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public static DateAdd addValueOf(AggregationExpression expression, String unit) {
+		return addValue(expression, unit);
+	}
+
+	/**
+	 * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @param unit must not be {@literal null}.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public static DateAdd addValueOf(String fieldReference, String unit) {
+		return addValue(Fields.field(fieldReference), unit);
+	}
+
+	/**
+	 * Add the number of {@literal units} to a {@link #toDate(Object) start date}.
+	 *
+	 * @param value must not be {@literal null}.
+	 * @param unit must not be {@literal null}.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public static DateAdd addValue(Object value, String unit) {
+
+		Map<String, Object> args = new HashMap<>();
+		args.put("unit", unit);
+		args.put("amount", value);
+		return new DateAdd(args);
+	}
+
+	/**
+	 * Define the start date, in UTC, for the addition operation.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public DateAdd toDateOf(AggregationExpression expression) {
+		return toDate(expression);
+	}
+
+	/**
+	 * Define the start date, in UTC, for the addition operation.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public DateAdd toDateOf(String fieldReference) {
+		return toDate(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Define the start date, in UTC, for the addition operation.
+	 *
+	 * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public DateAdd toDate(Object dateExpression) {
+		return new DateAdd(append("startDate", dateExpression));
+	}
+
+	/**
+	 * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link DateAdd}.
+	 */
+	public DateAdd withTimezone(Timezone timezone) {
+		return new DateAdd(appendTimezone(argumentMap(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$dateAdd";
+	}
+}
+
+/**
+ * {@link AggregationExpression} for {@code $dateSubtract}.
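+ * <p>
+ * For illustration ({@code "purchaseDate"} is a made-up field; key order in the rendered document may vary):
+ * <pre class="code">
+ * // expected to render { $dateSubtract : { startDate : "$purchaseDate", unit : "week", amount : 1 } }
+ * DateSubtract.subtractValue(1, "week").fromDateOf("purchaseDate");
+ * </pre>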
+ * NOTE: Requires MongoDB 5.0 or later.
+ *
+ * @author Christoph Strobl
+ * @since 4.0
+ */
+public static class DateSubtract extends TimezonedDateAggregationExpression {
+
+	private DateSubtract(Object value) {
+		super(value);
+	}
+
+	/**
+	 * Subtract the number of {@literal units} of the result of the given {@link AggregationExpression expression} from
+	 * a {@link #fromDate(Object) start date}.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @param unit must not be {@literal null}.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public static DateSubtract subtractValueOf(AggregationExpression expression, String unit) {
+		return subtractValue(expression, unit);
+	}
+
+	/**
+	 * Subtract the number of {@literal units} from a {@literal field} from a {@link #fromDate(Object) start date}.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @param unit must not be {@literal null}.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public static DateSubtract subtractValueOf(String fieldReference, String unit) {
+		return subtractValue(Fields.field(fieldReference), unit);
+	}
+
+	/**
+	 * Subtract the number of {@literal units} from a {@link #fromDate(Object) start date}.
+	 *
+	 * @param value must not be {@literal null}.
+	 * @param unit must not be {@literal null}.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public static DateSubtract subtractValue(Object value, String unit) {
+
+		Map<String, Object> args = new HashMap<>();
+		args.put("unit", unit);
+		args.put("amount", value);
+		return new DateSubtract(args);
+	}
+
+	/**
+	 * Define the start date, in UTC, for the subtraction operation.
+	 *
+	 * @param expression must not be {@literal null}.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public DateSubtract fromDateOf(AggregationExpression expression) {
+		return fromDate(expression);
+	}
+
+	/**
+	 * Define the start date, in UTC, for the subtraction operation.
+	 *
+	 * @param fieldReference must not be {@literal null}.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public DateSubtract fromDateOf(String fieldReference) {
+		return fromDate(Fields.field(fieldReference));
+	}
+
+	/**
+	 * Define the start date, in UTC, for the subtraction operation.
+	 *
+	 * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public DateSubtract fromDate(Object dateExpression) {
+		return new DateSubtract(append("startDate", dateExpression));
+	}
+
+	/**
+	 * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
+	 *
+	 * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
+	 * @return new instance of {@link DateSubtract}.
+	 */
+	public DateSubtract withTimezone(Timezone timezone) {
+		return new DateSubtract(appendTimezone(argumentMap(), timezone));
+	}
+
+	@Override
+	protected String getMongoMethod() {
+		return "$dateSubtract";
+	}
+}
+
+/**
+ * {@link AggregationExpression} for {@code $dateDiff}.
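+ * <p>
+ * Usage sketch ({@code "purchaseDate"} and {@code "deliveryDate"} are hypothetical fields; key order in the rendered
+ * document may vary):
+ * <pre class="code">
+ * // expected to render { $dateDiff : { startDate : "$purchaseDate", endDate : "$deliveryDate", unit : "day" } }
+ * DateDiff.diffValueOf("deliveryDate", "day").toDateOf("purchaseDate");
+ * </pre>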
+ * NOTE: Requires MongoDB 5.0 or later. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DateDiff extends TimezonedDateAggregationExpression { + + private DateDiff(Object value) { + super(value); + } + + /** + * Compute the difference (in {@literal units}) between a {@link #toDate(Object) start date} and the end date + * resulting from the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public static DateDiff diffValueOf(AggregationExpression expression, String unit) { + return diffValue(expression, unit); + } + + /** + * Compute the difference (in {@literal units}) between a {@link #toDate(Object) start date} and the end date taken + * from the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public static DateDiff diffValueOf(String fieldReference, String unit) { + return diffValue(Fields.field(fieldReference), unit); + } + + /** + * Compute the difference (in {@literal units}) between a {@link #toDate(Object) start date} and the given end date + * {@literal value}. + * + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public static DateDiff diffValue(Object value, String unit) { + + Map<String, Object> args = new HashMap<>(); + args.put("unit", unit); + args.put("endDate", value); + return new DateDiff(args); + } + + /** + * Define the start date, in UTC, for the difference calculation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public DateDiff toDateOf(AggregationExpression expression) { + return toDate(expression); + } + + /** + * Define the start date, in UTC, for the difference calculation. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public DateDiff toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); + } + + /** + * Define the start date, in UTC, for the difference calculation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public DateDiff toDate(Object dateExpression) { + return new DateDiff(append("startDate", dateExpression)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateDiff}. + */ + public DateDiff withTimezone(Timezone timezone) { + return new DateDiff(appendTimezone(argumentMap(), timezone)); + } + + /** + * Set the start day of the week if the unit of measure is set to {@literal week}. Uses {@literal Sunday} by + * default. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateDiff}. + */ + public DateDiff startOfWeek(Object day) { + return new DateDiff(append("startOfWeek", day)); + } + + @Override + protected String getMongoMethod() { + return "$dateDiff"; + } + } + + /** + * {@link AggregationExpression} for {@code $dateTrunc}.
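A sketch of the difference operator above; note that the value handed to `diffValue`/`diffValueOf` becomes the {@literal endDate}, while `toDate` supplies the {@literal startDate} (field names illustrative):

```java
// Difference in days between "ordered" (start) and "delivered" (end); renders
// { $dateDiff : { startDate : "$ordered", endDate : "$delivered", unit : "day" } }
DateDiff leadTime = DateDiff.diffValueOf("delivered", "day").toDateOf("ordered");
```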
+ * NOTE: Requires MongoDB 5.0 or later. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class DateTrunc extends TimezonedDateAggregationExpression { + + private DateTrunc(Object value) { + super(value); + } + + /** + * Truncates the date value computed by the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public static DateTrunc truncateValueOf(AggregationExpression expression) { + return truncateValue(expression); + } + + /** + * Truncates the date value of the referenced {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public static DateTrunc truncateValueOf(String fieldReference) { + return truncateValue(Fields.field(fieldReference)); + } + + /** + * Truncates the date value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public static DateTrunc truncateValue(Object value) { + return new DateTrunc(Collections.singletonMap("date", value)); + } + + /** + * Define the unit of time. + * + * @param unit must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc to(String unit) { + return new DateTrunc(append("unit", unit)); + } + + /** + * Define the unit of time via an {@link AggregationExpression}. + * + * @param unit must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc to(AggregationExpression unit) { + return new DateTrunc(append("unit", unit)); + } + + /** + * Define the starting day of the week if {@link #to(String)} resolves to {@literal week}. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc startOfWeek(java.time.DayOfWeek day) { + return startOfWeek(day.name().toLowerCase(Locale.US)); + } + + /** + * Define the starting day of the week if {@link #to(String)} resolves to {@literal week}. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc startOfWeek(String day) { + return new DateTrunc(append("startOfWeek", day)); + } + + /** + * Define the numeric time value (the number of {@literal units} per bin). + * + * @param binSize must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc binSize(int binSize) { + return binSize((Object) binSize); + } + + /** + * Define the numeric time value via an {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc binSize(AggregationExpression expression) { + return binSize((Object) expression); + } + + /** + * Define the numeric time value (the number of {@literal units} per bin). + * + * @param binSize must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc binSize(Object binSize) { + return new DateTrunc(append("binSize", binSize)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc withTimezone(Timezone timezone) { + return new DateTrunc(appendTimezone(argumentMap(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dateTrunc"; + } + } + + /** + * {@link AggregationExpression} for {@code $tsIncrement}.
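A hedged sketch of the truncation builder above (field name illustrative):

```java
// Truncate "timestamp" into 2-week bins starting on Monday; renders
// { $dateTrunc : { date : "$timestamp", unit : "week", startOfWeek : "monday", binSize : 2 } }
DateTrunc trunc = DateTrunc.truncateValueOf("timestamp")
		.to("week")
		.startOfWeek(java.time.DayOfWeek.MONDAY)
		.binSize(2);
```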
+ * + * @author Christoph Strobl + * @since 4.0 + */ + public static class TsIncrement extends AbstractAggregationExpression { + + private TsIncrement(Object value) { + super(value); + } + + /** + * Creates new {@link TsIncrement} that returns the incrementing ordinal from a timestamp. + * + * @param value must not be {@literal null}. + * @return new instance of {@link TsIncrement}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static TsIncrement tsIncrement(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new TsIncrement(value); + } + + /** + * Creates new {@link TsIncrement} that returns the incrementing ordinal from a timestamp. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link TsIncrement}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public static TsIncrement tsIncrementValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return tsIncrement(Fields.field(fieldReference)); + } + + /** + * Creates new {@link TsIncrement}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link TsIncrement}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public static TsIncrement tsIncrementValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return tsIncrement(expression); + } + + @Override + protected String getMongoMethod() { + return "$tsIncrement"; + } + } + + /** + * {@link AggregationExpression} for {@code $tsSecond}. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class TsSecond extends AbstractAggregationExpression { + + private TsSecond(Object value) { + super(value); + } + + /** + * Creates new {@link TsSecond} that returns the seconds from a timestamp. + * + * @param value must not be {@literal null}. + * @return new instance of {@link TsSecond}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static TsSecond tsSecond(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new TsSecond(value); + } + + /** + * Creates new {@link TsSecond} that returns the seconds from a timestamp. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link TsSecond}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public static TsSecond tsSecondValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return tsSecond(Fields.field(fieldReference)); + } + + /** + * Creates new {@link TsSecond}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link TsSecond}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public static TsSecond tsSecondValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return tsSecond(expression); + } + + @Override + protected String getMongoMethod() { + return "$tsSecond"; + } + } + + /** + * Interface defining a temporal unit for date operators. + * + * @author Mark Paluch + * @since 3.3 + */ + public interface TemporalUnit { + + String name(); + + /** + * Converts the given time unit into a {@link TemporalUnit}.
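A quick sketch of the two timestamp operators above (field name illustrative):

```java
// Decompose a BSON timestamp field "ts" into its two components; renders
// { $tsIncrement : "$ts" } and { $tsSecond : "$ts" }
TsIncrement ordinal = TsIncrement.tsIncrementValueOf("ts");
TsSecond seconds = TsSecond.tsSecondValueOf("ts");
```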
Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static TemporalUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLISECONDS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link TemporalUnit}. Supported units are: years, weeks, months, days, + * hours, minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if the {@link ChronoUnit} is {@literal null} or not supported for conversion. + */ + static TemporalUnit from(ChronoUnit chronoUnit) { + + Assert.notNull(chronoUnit, "ChronoUnit must not be null"); + + switch (chronoUnit) { + case YEARS: + return TemporalUnits.YEAR; + case WEEKS: + return TemporalUnits.WEEK; + case MONTHS: + return TemporalUnits.MONTH; + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLIS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", chronoUnit)); + } + } + + /** + * Supported temporal units. + */ + enum TemporalUnits implements TemporalUnit { + YEAR, QUARTER, WEEK, MONTH, DAY, HOUR, MINUTE, SECOND, MILLISECOND + + } + + @SuppressWarnings("unchecked") + private static <T extends TimezonedDateAggregationExpression> T applyTimezone(T instance, Timezone timezone) { + return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() + ? (T) instance.withTimezone(timezone) + : instance; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DensifyOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DensifyOperation.java new file mode 100644 index 0000000000..0da9343ddf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DensifyOperation.java @@ -0,0 +1,383 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
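A small sketch of the converters above; the lower-casing step is an assumption here, mirroring how e.g. `DateTrunc.startOfWeek(DayOfWeek)` renders its argument:

```java
import java.time.temporal.ChronoUnit;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

// Bridge JDK units to the aggregation framework's unit enum.
TemporalUnit week = TemporalUnit.from(ChronoUnit.WEEKS); // TemporalUnits.WEEK
TemporalUnit hour = TemporalUnit.from(TimeUnit.HOURS);   // TemporalUnits.HOUR

// Lower-casing the enum name yields the server-side unit string ("week").
String unit = week.name().toLowerCase(Locale.US);
```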
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Encapsulates the aggregation framework {@code $densify}-operation. + * + * @author Christoph Strobl + * @since 4.0 + */ +public class DensifyOperation implements AggregationOperation { + + private @Nullable Field field; + private @Nullable List<Object> partitionBy; + private @Nullable Range range; + + protected DensifyOperation(@Nullable Field field, @Nullable List<Object> partitionBy, @Nullable Range range) { + + this.field = field; + this.partitionBy = partitionBy; + this.range = range; + } + + /** + * Obtain a builder to create the {@link DensifyOperation}. + * + * @return new instance of {@link DensifyOperationBuilder}. + */ + public static DensifyOperationBuilder builder() { + return new DensifyOperationBuilder(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document densify = new Document(); + densify.put("field", context.getReference(field).getRaw()); + if (!ObjectUtils.isEmpty(partitionBy)) { + densify.put("partitionByFields", partitionBy.stream().map(it -> { + if (it instanceof Field field) { + return context.getReference(field).getRaw(); + } + if (it instanceof AggregationExpression expression) { + return expression.toDocument(context); + } + return it; + }).collect(Collectors.toList())); + } + densify.put("range", range.toDocument(context)); + return new Document("$densify", densify); + } + + /** + * The {@link Range} specifies how the data is densified. + */ + public interface Range { + + /** + * Add documents spanning the range of values within the given lower (inclusive) and upper (exclusive) bound. + * + * @param lower must not be {@literal null}. + * @param upper must not be {@literal null}. + * @return new instance of {@link DensifyRange}. + */ + static DensifyRange bounded(Object lower, Object upper) { + return new BoundedRange(lower, upper, DensifyUnits.NONE); + } + + /** + * Add documents spanning the full value range. + * + * @return new instance of {@link DensifyRange}. + */ + static DensifyRange full() { + + return new DensifyRange(DensifyUnits.NONE) { + + @Override + Object getBounds(AggregationOperationContext ctx) { + return "full"; + } + }; + } + + /** + * Add documents spanning the full value range for each partition. + * + * @return new instance of {@link DensifyRange}. + */ + static DensifyRange partition() { + return new DensifyRange(DensifyUnits.NONE) { + + @Override + Object getBounds(AggregationOperationContext ctx) { + return "partition"; + } + }; + } + + /** + * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } + + /** + * Obtain the document representation of the window in the given {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + Document toDocument(AggregationOperationContext ctx); + } + + /** + * Base {@link Range} implementation.
+ * + * @author Christoph Strobl + */ + public static abstract class DensifyRange implements Range { + + private @Nullable DensifyUnit unit; + private Number step; + + public DensifyRange(DensifyUnit unit) { + this.unit = unit; + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + + Document range = new Document("step", step); + if (unit != null && !DensifyUnits.NONE.equals(unit)) { + range.put("unit", unit.name().toLowerCase(Locale.US)); + } + range.put("bounds", getBounds(ctx)); + return range; + } + + /** + * Set the increment for the value. + * + * @param step must not be {@literal null}. + * @return this. + */ + public DensifyRange incrementBy(Number step) { + this.step = step; + return this; + } + + /** + * Set the increment for the value. + * + * @param step must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return this. + */ + public DensifyRange incrementBy(Number step, DensifyUnit unit) { + this.step = step; + return unit(unit); + } + + /** + * Set the {@link DensifyUnit unit} for the step field. + * + * @param unit must not be {@literal null}. + * @return this. + */ + public DensifyRange unit(DensifyUnit unit) { + + this.unit = unit; + return this; + } + + abstract Object getBounds(AggregationOperationContext ctx); + } + + /** + * {@link Range} implementation holding lower and upper bound values. + * + * @author Christoph Strobl + */ + public static class BoundedRange extends DensifyRange { + + private List<Object> bounds; + + protected BoundedRange(Object lower, Object upper, DensifyUnit unit) { + + super(unit); + this.bounds = Arrays.asList(lower, upper); + } + + @Override + List<Object> getBounds(AggregationOperationContext ctx) { + return bounds.stream().map(it -> { + if (it instanceof AggregationExpression expression) { + return expression.toDocument(ctx); + } + return it; + }).collect(Collectors.toList()); + } + } + + /** + * The actual time unit to apply to a {@link Range}. + */ + public interface DensifyUnit { + + String name(); + + /** + * Converts the given time unit into a {@link DensifyUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static DensifyUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return DensifyUnits.DAY; + case HOURS: + return DensifyUnits.HOUR; + case MINUTES: + return DensifyUnits.MINUTE; + case SECONDS: + return DensifyUnits.SECOND; + case MILLISECONDS: + return DensifyUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create DensifyUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link DensifyUnit}. Supported units are: years, weeks, months, days, + * hours, minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if the {@link ChronoUnit} is {@literal null} or not supported for conversion.
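Two hedged sketches of the range types above (bound values illustrative):

```java
// Numeric range: densify from 0 (inclusive) to 50 (exclusive) in steps of 10; renders
// { step : 10, bounds : [0, 50] }
DensifyRange numeric = Range.bounded(0, 50).incrementBy(10);

// Per-partition range, hour by hour; renders { step : 1, unit : "hour", bounds : "partition" }
DensifyRange hourly = Range.partition().incrementBy(1, DensifyUnits.HOUR);
```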
+ */ + static DensifyUnits from(ChronoUnit chronoUnit) { + + Assert.notNull(chronoUnit, "ChronoUnit must not be null"); + + switch (chronoUnit) { + case YEARS: + return DensifyUnits.YEAR; + case WEEKS: + return DensifyUnits.WEEK; + case MONTHS: + return DensifyUnits.MONTH; + case DAYS: + return DensifyUnits.DAY; + case HOURS: + return DensifyUnits.HOUR; + case MINUTES: + return DensifyUnits.MINUTE; + case SECONDS: + return DensifyUnits.SECOND; + case MILLIS: + return DensifyUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create DensifyUnit from %s", chronoUnit)); + } + } + + /** + * Quick access to available {@link DensifyUnit units}. + */ + public enum DensifyUnits implements DensifyUnit { + NONE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND + } + + public static class DensifyOperationBuilder { + + DensifyOperation target; + + public DensifyOperationBuilder() { + this.target = new DensifyOperation(null, Collections.emptyList(), null); + } + + /** + * Set the field to densify. + * + * @param fieldname must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder densify(String fieldname) { + this.target.field = Fields.field(fieldname); + return this; + } + + /** + * Set the fields used for grouping documents. + * + * @param fields must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder partitionBy(String... fields) { + target.partitionBy = Fields.fields(fields).asList(); + return this; + } + + /** + * Set the operational range. + * + * @param range must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder range(Range range) { + + target.range = range; + return this; + } + + /** + * Operate on the full value range. + * + * @param consumer must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder fullRange(Consumer<DensifyRange> consumer) { + + Assert.notNull(consumer, "Consumer must not be null"); + + DensifyRange range = Range.full(); + consumer.accept(range); + + return range(range); + } + + /** + * Operate on the full value range of each partition. + * + * @param consumer must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder partitionRange(Consumer<DensifyRange> consumer) { + + Assert.notNull(consumer, "Consumer must not be null"); + + DensifyRange range = Range.partition(); + consumer.accept(range); + + return range(range); + } + + public DensifyOperation build() { + return new DensifyOperation(target.field, target.partitionBy, target.range); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentEnhancingOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentEnhancingOperation.java new file mode 100644 index 0000000000..7f260c3785 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentEnhancingOperation.java @@ -0,0 +1,162 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
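Putting the builder above together, a minimal usage sketch (field names illustrative):

```java
// Fill hourly gaps in "timestamp" for each "sensorId" across the full value range; renders
// { $densify : { field : "timestamp", partitionByFields : ["sensorId"],
//                range : { step : 1, unit : "hour", bounds : "full" } } }
DensifyOperation densify = DensifyOperation.builder()
		.densify("timestamp")
		.partitionBy("sensorId")
		.fullRange(range -> range.incrementBy(1, DensifyUnits.HOUR))
		.build();
```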
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.util.Assert; + +/** + * Base class for common tasks required by {@link SetOperation} and {@link AddFieldsOperation}. + * + * @author Christoph Strobl + * @since 3.0 + */ +abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOperation { + + private final Map<Object, Object> valueMap; + + private ExposedFields exposedFields = ExposedFields.empty(); + + protected DocumentEnhancingOperation(Map<Object, Object> source) { + + this.valueMap = new LinkedHashMap<>(source); + for (Object key : source.keySet()) { + this.exposedFields = add(key); + } + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); + + if (valueMap.size() == 1) { + return context.getMappedObject( + new Document(mongoOperator(), toSetEntry(valueMap.entrySet().iterator().next(), operationContext))); + } + + Document $set = new Document(); + valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll); + return context.getMappedObject(new Document(mongoOperator(), $set)); + } + + /** + * @return the String representation of the native MongoDB operator. + */ + protected abstract String mongoOperator(); + + @Override + public String getOperator() { + return mongoOperator(); + } + + /** + * @return the raw value map + */ + protected Map<Object, Object> getValueMap() { + return this.valueMap; + } + + @Override + public ExposedFields getFields() { + return exposedFields; + } + + private ExposedFields add(Object fieldValue) { + + if (fieldValue instanceof Field field) { + return exposedFields.and(new ExposedField(field, true)); + } + if (fieldValue instanceof String fieldName) { + return exposedFields.and(new ExposedField(Fields.field(fieldName), true)); + } + + throw new IllegalArgumentException(String.format("Expected %s to be a field/property", fieldValue)); + } + + private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) { + + String field = entry.getKey() instanceof String key ? context.getReference(key).getRaw() + : context.getReference((Field) entry.getKey()).getRaw(); + + Object value = computeValue(entry.getValue(), context); + + return new Document(field, value); + } + + private static Object computeValue(Object value, AggregationOperationContext context) { + + if (value instanceof Field field) { + return context.getReference(field).toString(); + } + + if (value instanceof ExpressionProjection expressionProjection) { + return expressionProjection.toExpression(context); + } + + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (value instanceof Collection<?> collection) { + return collection.stream().map(it -> computeValue(it, context)).collect(Collectors.toList()); + } + + return value; + } + + /** + * An {@link AggregationExpression} based on a SpEL expression.
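To make the single- vs. multi-entry rendering above concrete, a hedged sketch using the `SetOperation` subclass named in the Javadoc (the `set(...).toValue(...)` entry point is the released API and an assumption here):

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.SetOperation;

// A single entry renders inline: { $set : { status : "ACTIVE" } };
// several entries are merged into one operator document by toDocument(...).
SetOperation set = SetOperation.set("status").toValue("ACTIVE");
Document document = set.toDocument(Aggregation.DEFAULT_CONTEXT);
```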
+ * + * @author Mark Paluch + */ + static class ExpressionProjection { + + private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer(); + + private final String expression; + private final Object[] params; + + /** + * Creates a new {@link ExpressionProjection} for the given SpEL expression and parameters. + * + * @param expression must not be {@literal null} or empty. + * @param parameters must not be {@literal null}. + */ + ExpressionProjection(String expression, Object[] parameters) { + + Assert.notNull(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); + + this.expression = expression; + this.params = parameters.clone(); + } + + Object toExpression(AggregationOperationContext context) { + return TRANSFORMER.transform(expression, context, params); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java new file mode 100644 index 0000000000..ff63ad834d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -0,0 +1,222 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; + +import org.bson.Document; + +/** + * Gateway to {@literal document expressions} such as {@literal $rank, $documentNumber, etc.} + * + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentOperators { + + /** + * Obtain the document position (including gaps) relative to others (rank). + * + * @return new instance of {@link Rank}. + * @since 3.3 + */ + public static Rank rank() { + return new Rank(); + } + + /** + * Obtain the document position (without gaps) relative to others (dense rank). + * + * @return new instance of {@link DenseRank}. + * @since 3.3 + */ + public static DenseRank denseRank() { + return new DenseRank(); + } + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(String fieldReference) { + return new DocumentOperatorsFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(AggregationExpression expression) { + return new DocumentOperatorsFactory(expression); + } + + /** + * Obtain the current document position. + * + * @return new instance of {@link DocumentNumber}.
+ * @since 3.3 + */ + public static DocumentNumber documentNumber() { + return new DocumentNumber(); + } + + /** + * @author Christoph Strobl + */ + public static class DocumentOperatorsFactory { + + private final Object target; + + public DocumentOperatorsFactory(Object target) { + this.target = target; + } + + /** + * Creates new {@link AggregationExpression} that applies the expression to a document at the specified position + * relative to the current document. + * + * @param by the value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift shift(int by) { + + Shift shift = usesExpression() ? Shift.shift((AggregationExpression) target) : Shift.shift(target.toString()); + return shift.by(by); + } + + private boolean usesExpression() { + return target instanceof AggregationExpression; + } + } + + /** + * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents + * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Rank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rank", new Document()); + } + } + + /** + * {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple + * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next + * rank without any gaps. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DenseRank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$denseRank", new Document()); + } + } + + /** + * {@link DocumentNumber} resolves the current document position. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DocumentNumber implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$documentNumber", new Document()); + } + } + + /** + * Shift applies an expression to a document in a specified position relative to the current document. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Shift extends AbstractAggregationExpression { + + private Shift(Object value) { + super(value); + } + + /** + * Specifies the field to evaluate and return. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(String fieldReference) { + return new Shift(Collections.singletonMap("output", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression expression} to evaluate and return. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(AggregationExpression expression) { + return new Shift(Collections.singletonMap("output", expression)); + } + + /** + * Shift the document position relative to the current. Use a positive value for follow-up documents (e.g. 1 for the + * next) or a negative value for preceding documents (e.g. -1 for the previous). + * + * @param shiftBy value to add to the current position. + * @return new instance of {@link Shift}.
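A hedged sketch of the positional operators above, typically used as `$setWindowFields` output (field name illustrative):

```java
// Render to { $rank : {} } and { $shift : { output : "$quantity", by : 1, default : 0 } }
Rank rank = DocumentOperators.rank();
Shift nextQuantity = DocumentOperators.valueOf("quantity").shift(1).defaultTo(0);
```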
+ */ + public Shift by(int shiftBy) { + return new Shift(append("by", shiftBy)); + } + + /** + * Define the default value if the target document is out of range. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public Shift defaultTo(Object value) { + return new Shift(append("default", value)); + } + + /** + * Define the {@link AggregationExpression expression} to evaluate if the target document is out of range. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public Shift defaultToValueOf(AggregationExpression expression) { + return defaultTo(expression); + } + + @Override + protected String getMongoMethod() { + return "$shift"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java new file mode 100644 index 0000000000..56f20dde17 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java @@ -0,0 +1,206 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal evaluation operators} such as {@literal $expr}. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.3 + */ +public class EvaluationOperators { + + /** + * Take the value resulting from the given fieldReference. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. + */ + public static EvaluationOperatorFactory valueOf(String fieldReference) { + return new EvaluationOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. + */ + public static EvaluationOperatorFactory valueOf(AggregationExpression expression) { + return new EvaluationOperatorFactory(expression); + } + + public static class EvaluationOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public EvaluationOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. 
+ */ + public EvaluationOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link Expr} that allows the use of aggregation expressions within the query language. + * + * @return new instance of {@link Expr}. + */ + public Expr expr() { + return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression); + } + + /** + * Creates new {@link LastObservationCarriedForward} that sets {@literal null} and missing values to the last + * non-null value. + * + * @return new instance of {@link LastObservationCarriedForward}. + */ + public LastObservationCarriedForward locf() { + return usesFieldRef() ? LastObservationCarriedForward.locfValueOf(fieldReference) + : LastObservationCarriedForward.locfValueOf(expression); + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * Allows the use of aggregation expressions within the query language. + */ + public static class Expr extends AbstractAggregationExpression { + + private Expr(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$expr"; + } + + /** + * Creates new {@link Expr}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Expr}. + */ + public static Expr valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Expr(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Expr}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Expr}. + */ + public static Expr valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Expr(expression); + } + + /** + * Creates {@code $expr} as {@link CriteriaDefinition}. + * + * @param context must not be {@literal null}. + * @return the {@link CriteriaDefinition} from this expression. + */ + public CriteriaDefinition toCriteriaDefinition(AggregationOperationContext context) { + + Document criteriaObject = toDocument(context); + + return new CriteriaDefinition() { + @Override + public Document getCriteriaObject() { + return criteriaObject; + } + + @Override + public String getKey() { + return getMongoMethod(); + } + }; + } + } + + /** + * Sets {@literal null} and missing values to the last non-null value. + * + * @since 4.0 + */ + public static class LastObservationCarriedForward extends AbstractAggregationExpression { + + private LastObservationCarriedForward(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$locf"; + } + + /** + * Creates new {@link LastObservationCarriedForward}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LastObservationCarriedForward}. + */ + public static LastObservationCarriedForward locfValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new LastObservationCarriedForward(Fields.field(fieldReference)); + } + + /** + * Creates new {@link LastObservationCarriedForward}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LastObservationCarriedForward}.
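A hedged sketch of both operators above (the `ComparisonOperators` entry point is the released API and an assumption here; field names illustrative):

```java
// $expr lets an aggregation expression act as query criteria; renders
// { $expr : { $gt : ["$spent", 100] } }
CriteriaDefinition criteria = EvaluationOperators
		.valueOf(ComparisonOperators.valueOf("spent").greaterThanValue(100))
		.expr()
		.toCriteriaDefinition(Aggregation.DEFAULT_CONTEXT);

// $locf carries the last observed non-null value forward; renders { $locf : "$price" }
LastObservationCarriedForward locf = EvaluationOperators.valueOf("price").locf();
```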
+ */ + public static LastObservationCarriedForward locfValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new LastObservationCarriedForward(expression); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java index 79bece0f85..458bc43437 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,14 +22,17 @@ import java.util.List; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CompositeIterator; +import org.springframework.util.ObjectUtils; /** * Value object to capture the fields exposed by an {@link AggregationOperation}. - * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch * @since 1.3 */ public final class ExposedFields implements Iterable { @@ -40,11 +43,21 @@ public final class ExposedFields implements Iterable { private final List originalFields; private final List syntheticFields; + /** + * Returns an empty {@link ExposedFields} instance. + * + * @return never {@literal null}. + * @since 2.0 + */ + public static ExposedFields empty() { + return EMPTY; + } + /** * Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s. - * + * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ public static ExposedFields from(ExposedField... fields) { return from(Arrays.asList(fields)); @@ -52,9 +65,9 @@ public static ExposedFields from(ExposedField... fields) { /** * Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s. - * + * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ private static ExposedFields from(List fields) { @@ -69,9 +82,9 @@ private static ExposedFields from(List fields) { /** * Creates synthetic {@link ExposedFields} from the given {@link Fields}. - * + * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ public static ExposedFields synthetic(Fields fields) { return createFields(fields, true); @@ -79,9 +92,9 @@ public static ExposedFields synthetic(Fields fields) { /** * Creates non-synthetic {@link ExposedFields} from the given {@link Fields}. - * + * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ public static ExposedFields nonSynthetic(Fields fields) { return createFields(fields, false); @@ -89,26 +102,26 @@ public static ExposedFields nonSynthetic(Fields fields) { /** * Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way. - * + * * @param fields must not be {@literal null}. * @param synthetic - * @return + * @return never {@literal null}. */ private static ExposedFields createFields(Fields fields, boolean synthetic) { - Assert.notNull(fields, "Fields must not be null!"); - List result = new ArrayList(); + Assert.notNull(fields, "Fields must not be null"); + List result = new ArrayList(fields.size()); for (Field field : fields) { result.add(new ExposedField(field, synthetic)); } - return ExposedFields.from(result); + return from(result); } /** * Creates a new {@link ExposedFields} with the given originals and synthetics. - * + * * @param originals must not be {@literal null}. * @param synthetic must not be {@literal null}. */ @@ -120,15 +133,15 @@ private ExposedFields(List originals, List synthetic /** * Creates a new {@link ExposedFields} adding the given {@link ExposedField}. - * + * * @param field must not be {@literal null}. - * @return + * @return new instance of {@link ExposedFields}. */ public ExposedFields and(ExposedField field) { - Assert.notNull(field, "Exposed field must not be null!"); + Assert.notNull(field, "Exposed field must not be null"); - ArrayList result = new ArrayList(); + ArrayList result = new ArrayList<>(); result.addAll(field.synthetic ? syntheticFields : originalFields); result.add(field); @@ -137,10 +150,11 @@ public ExposedFields and(ExposedField field) { /** * Returns the field with the given name or {@literal null} if no field with the given name is available. - * - * @param name - * @return + * + * @param name must not be {@literal null}. + * @return can be {@literal null}. */ + @Nullable public ExposedField getField(String name) { for (ExposedField field : this) { @@ -154,7 +168,7 @@ public ExposedField getField(String name) { /** * Returns whether the {@link ExposedFields} exposes no non-synthetic fields at all. - * + * * @return */ boolean exposesNoNonSyntheticFields() { @@ -163,7 +177,7 @@ boolean exposesNoNonSyntheticFields() { /** * Returns whether the {@link ExposedFields} exposes a single non-synthetic field only. - * + * * @return */ boolean exposesSingleNonSyntheticFieldOnly() { @@ -172,7 +186,7 @@ boolean exposesSingleNonSyntheticFieldOnly() { /** * Returns whether the {@link ExposedFields} exposes no fields at all. - * + * * @return */ boolean exposesNoFields() { @@ -181,7 +195,7 @@ boolean exposesNoFields() { /** * Returns whether the {@link ExposedFields} exposes a single field only. - * + * * @return */ boolean exposesSingleFieldOnly() { @@ -195,23 +209,24 @@ private int exposedFieldsCount() { return originalFields.size() + syntheticFields.size(); } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ @Override public Iterator iterator() { CompositeIterator iterator = new CompositeIterator(); - iterator.add(syntheticFields.iterator()); - iterator.add(originalFields.iterator()); + if (!syntheticFields.isEmpty()) { + iterator.add(syntheticFields.iterator()); + } + + if (!originalFields.isEmpty()) { + iterator.add(originalFields.iterator()); + } return iterator; } /** * A single exposed field. 
- * + * * @author Oliver Gierke */ static class ExposedField implements Field { @@ -221,7 +236,7 @@ static class ExposedField implements Field { /** * Creates a new {@link ExposedField} with the given key. - * + * * @param key must not be {@literal null} or empty. * @param synthetic whether the exposed field is synthetic. */ @@ -231,7 +246,7 @@ public ExposedField(String key, boolean synthetic) { /** * Creates a new {@link ExposedField} for the given {@link Field}. - * + * * @param delegate must not be {@literal null}. * @param synthetic whether the exposed field is synthetic. */ @@ -241,28 +256,16 @@ public ExposedField(Field delegate, boolean synthetic) { this.synthetic = synthetic; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getKey() - */ @Override public String getName() { return field.getName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getTarget() - */ @Override public String getTarget() { return field.getTarget(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#isAliased() - */ @Override public boolean isAliased() { return field.isAliased(); @@ -277,7 +280,7 @@ public boolean isSynthetic() { /** * Returns whether the field can be referred to using the given name. - * + * * @param name * @return */ @@ -285,39 +288,25 @@ public boolean canBeReferredToBy(String name) { return getName().equals(name) || getTarget().equals(name); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("AggregationField: %s, synthetic: %s", field, synthetic); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof ExposedField)) { + if (!(obj instanceof ExposedField that)) { return false; } - ExposedField that = (ExposedField) obj; - return this.field.equals(that.field) && this.synthetic == that.synthetic; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -332,82 +321,151 @@ public int hashCode() { /** * A reference to an {@link ExposedField}. - * + * + * @author Christoph Strobl + * @since 1.10 + */ + public interface FieldReference { + + /** + * Returns the raw, unqualified reference, i.e. the field reference without a {@literal $} prefix. + * + * @return + */ + String getRaw(); + + /** + * Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the + * raw rendering of the reference otherwise. + * + * @return + */ + Object getReferenceValue(); + } + + /** + * A reference to an {@link ExposedField}. + * * @author Oliver Gierke */ - static class FieldReference { + static class DirectFieldReference implements FieldReference { private final ExposedField field; /** * Creates a new {@link FieldReference} for the given {@link ExposedField}. - * + * * @param field must not be {@literal null}. */ - public FieldReference(ExposedField field) { + public DirectFieldReference(ExposedField field) { - Assert.notNull(field, "ExposedField must not be null!"); + Assert.notNull(field, "ExposedField must not be null"); this.field = field; } - /** - * Returns the raw, unqualified reference, i.e. the field reference without a {@literal $} prefix. 
- * - * @return - */ public String getRaw() { String target = field.getTarget(); return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target); } - /** - * Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the - * raw rendering of the reference otherwise. - * - * @return - */ public Object getReferenceValue() { return field.synthetic && !field.isAliased() ? 1 : toString(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { + + if (getRaw().startsWith("$")) { + return getRaw(); + } + return String.format("$%s", getRaw()); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof FieldReference)) { + if (!(obj instanceof DirectFieldReference fieldReference)) { return false; } - FieldReference that = (FieldReference) obj; + return this.field.equals(fieldReference.field); + } - return this.field.equals(that.field); + @Override + public int hashCode() { + return field.hashCode(); } + } + + /** + * A {@link FieldReference} to a {@link Field} used within a nested {@link AggregationExpression}. + * + * @author Christoph Strobl + * @since 1.10 + */ + static class ExpressionFieldReference implements FieldReference { - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() + private FieldReference delegate; + + /** + * Creates a new {@link FieldReference} for the given {@link ExposedField}. + * + * @param field must not be {@literal null}. */ + public ExpressionFieldReference(FieldReference field) { + delegate = field; + } + + @Override + public String getRaw() { + return delegate.getRaw(); + } + + @Override + public Object getReferenceValue() { + return delegate.getReferenceValue(); + } + + @Override + public String toString() { + + String fieldRef = delegate.toString(); + + if (fieldRef.startsWith("$$")) { + return fieldRef; + } + + if (fieldRef.startsWith("$")) { + return "$" + fieldRef; + } + + return fieldRef; + } + + @Override + public boolean equals(@Nullable Object obj) { + + if (this == obj) { + return true; + } + + if (!(obj instanceof ExpressionFieldReference fieldReference)) { + return false; + } + + return ObjectUtils.nullSafeEquals(this.delegate, fieldReference.delegate); + } + @Override public int hashCode() { - return field.hashCode(); + return delegate.hashCode(); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java index e4c11ae541..131fa8a845 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2016 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,15 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.DBObject; - /** * {@link AggregationOperationContext} that combines the available field references from a given * {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}. @@ -28,12 +31,14 @@ * @author Thomas Darimont * @author Oliver Gierke * @author Mark Paluch + * @author Christoph Strobl * @since 1.4 */ class ExposedFieldsAggregationOperationContext implements AggregationOperationContext { private final ExposedFields exposedFields; private final AggregationOperationContext rootContext; + private final FieldLookupPolicy lookupPolicy; /** * Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given @@ -41,81 +46,92 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo * * @param exposedFields must not be {@literal null}. * @param rootContext must not be {@literal null}. + * @param lookupPolicy must not be {@literal null}. */ - public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, - AggregationOperationContext rootContext) { + public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext, + FieldLookupPolicy lookupPolicy) { - Assert.notNull(exposedFields, "ExposedFields must not be null!"); - Assert.notNull(rootContext, "RootContext must not be null!"); + Assert.notNull(exposedFields, "ExposedFields must not be null"); + Assert.notNull(rootContext, "RootContext must not be null"); + Assert.notNull(lookupPolicy, "FieldLookupPolicy must not be null"); this.exposedFields = exposedFields; this.rootContext = rootContext; + this.lookupPolicy = lookupPolicy; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject) - */ @Override - public DBObject getMappedObject(DBObject dbObject) { - return rootContext.getMappedObject(dbObject); + public Document getMappedObject(Document document, @Nullable Class type) { + return rootContext.getMappedObject(document, type); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ @Override public FieldReference getReference(Field field) { + + if (field.isInternal()) { + return new DirectFieldReference(new ExposedField(field, true)); + } + return getReference(field, field.getTarget()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return getReference(null, name); } + 
@Override + public Fields getFields(Class type) { + return rootContext.getFields(type); + } + /** * Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}. * - * @param field may be {@literal null} - * @param name must not be {@literal null} + * @param field may be {@literal null}. + * @param name must not be {@literal null}. * @return */ - private FieldReference getReference(Field field, String name) { + private FieldReference getReference(@Nullable Field field, String name) { - Assert.notNull(name, "Name must not be null!"); + Assert.notNull(name, "Name must not be null"); FieldReference exposedField = resolveExposedField(field, name); if (exposedField != null) { return exposedField; } - throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name)); + if (lookupPolicy.isStrict()) { + throw new IllegalArgumentException(String.format("Invalid reference '%s'", name)); + } + + if (field != null) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + return new DirectFieldReference(new ExposedField(name, true)); } /** - * Resolves a {@link field}/{@link name} for a {@link FieldReference} if possible. + * Resolves a {@link Field}/{@code name} for a {@link FieldReference} if possible. * - * @param field may be {@literal null} - * @param name must not be {@literal null} - * @return the resolved reference or {@literal null} + * @param field may be {@literal null}. + * @param name must not be {@literal null}. + * @return the resolved reference or {@literal null}. */ - protected FieldReference resolveExposedField(Field field, String name) { + @Nullable + protected FieldReference resolveExposedField(@Nullable Field field, String name) { + ExposedField exposedField = exposedFields.getField(name); if (exposedField != null) { if (field != null) { // we return a FieldReference to the given field directly to make sure that we reference the proper alias here. - return new FieldReference(new ExposedField(field, exposedField.isSynthetic())); + return new DirectFieldReference(new ExposedField(field, exposedField.isSynthetic())); } - return new FieldReference(exposedField); + return new DirectFieldReference(exposedField); } if (name.contains(".")) { @@ -126,9 +142,42 @@ protected FieldReference resolveExposedField(Field field, String name) { if (rootField != null) { // We have to synthetic to true, in order to render the field-name as is. - return new FieldReference(new ExposedField(name, true)); + return new DirectFieldReference(new ExposedField(name, true)); } } return null; } + + /** + * @return obtain the root context used to resolve references. 
+ * @since 3.1 + */ + AggregationOperationContext getRootContext() { + return rootContext; + } + + @Override + public CodecRegistry getCodecRegistry() { + return getRootContext().getCodecRegistry(); + } + + @Override + @Deprecated(since = "4.3.1", forRemoval = true) + public AggregationOperationContext continueOnMissingFieldReference() { + if (!lookupPolicy.isStrict()) { + return this; + } + return new ExposedFieldsAggregationOperationContext(exposedFields, rootContext, FieldLookupPolicy.relaxed()); + } + + @Override + public AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + + @Override + public AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java new file mode 100644 index 0000000000..f5c73dd09c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java @@ -0,0 +1,225 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $facet}-operation.
                    + * Facet of {@link AggregationOperation}s to be used in an {@link Aggregation}. Processes multiple + * {@link AggregationOperation} pipelines within a single stage on the same set of input documents. Each sub-pipeline + * has its own field in the output document where its results are stored as an array of documents. + * {@link FacetOperation} enables various aggregations on the same set of input documents, without needing to retrieve + * the input documents multiple times.
                    + * As of MongoDB 3.4, {@link FacetOperation} cannot be used with nested pipelines containing {@link GeoNearOperation}, + * {@link OutOperation} and {@link FacetOperation}.
+ * We recommend using the static factory method {@link Aggregation#facet()} instead of creating instances of this class + * directly. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.10 + * @see MongoDB Aggregation Framework: $facet + */ +public class FacetOperation implements FieldsExposingAggregationOperation { + + /** + * Empty (initial) {@link FacetOperation}. + */ + public static final FacetOperation EMPTY = new FacetOperation(); + + private final Facets facets; + + /** + * Creates a new {@link FacetOperation}. + */ + public FacetOperation() { + this(Facets.EMPTY); + } + + private FacetOperation(Facets facets) { + this.facets = facets; + } + + /** + * Creates a new {@link FacetOperationBuilder} to append a new facet using {@literal operations}.
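A usage sketch of the facet()/and()/as() API above (illustrative, not part of this change): the field names ("year", "price") and facet names are assumed examples; the static helpers are the usual Aggregation factory methods.

import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.FacetOperation;
import org.springframework.data.mongodb.core.query.Criteria;

class FacetUsageSketch {

	// Builds a $facet stage running two independent sub-pipelines over the same input documents.
	static Aggregation facetAggregation() {

		FacetOperation facet = facet() //
				.and(match(Criteria.where("year").gte(2000)), count().as("total")).as("countsSince2000") //
				.and(sort(Sort.Direction.DESC, "price"), limit(5)).as("topByPrice");

		return newAggregation(facet);
	}
}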
                    + * {@link FacetOperationBuilder} takes a pipeline of {@link AggregationOperation} to categorize documents into a + * single facet. + * + * @param operations must not be {@literal null} or empty. + * @return + */ + public FacetOperationBuilder and(AggregationOperation... operations) { + + Assert.notNull(operations, "AggregationOperations must not be null"); + Assert.notEmpty(operations, "AggregationOperations must not be empty"); + + return new FacetOperationBuilder(facets, Arrays.asList(operations)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), facets.toDocument(context)); + } + + @Override + public String getOperator() { + return "$facet"; + } + + @Override + public ExposedFields getFields() { + return facets.asExposedFields(); + } + + /** + * Builder for {@link FacetOperation} by adding existing and the new pipeline of {@link AggregationOperation} to the + * new {@link FacetOperation}. + * + * @author Mark Paluch + */ + public static class FacetOperationBuilder { + + private final Facets current; + private final List operations; + + private FacetOperationBuilder(Facets current, List operations) { + this.current = current; + this.operations = operations; + } + + /** + * Creates a new {@link FacetOperation} that contains the configured pipeline of {@link AggregationOperation} + * exposed as {@literal fieldName} in the resulting facet document. + * + * @param fieldName must not be {@literal null} or empty. + * @return + */ + public FacetOperation as(String fieldName) { + + Assert.hasText(fieldName, "FieldName must not be null or empty"); + + return new FacetOperation(current.and(fieldName, operations)); + } + } + + /** + * Encapsulates multiple {@link Facet}s + * + * @author Mark Paluch + */ + private static class Facets { + + private static final Facets EMPTY = new Facets(Collections. emptyList()); + + private List facets; + + /** + * Creates a new {@link Facets} given {@link List} of {@link Facet}. + * + * @param facets + */ + private Facets(List facets) { + this.facets = facets; + } + + /** + * @return the {@link ExposedFields} derived from {@link Output}. + */ + ExposedFields asExposedFields() { + + ExposedFields fields = ExposedFields.from(); + + for (Facet facet : facets) { + fields = fields.and(facet.getExposedField()); + } + + return fields; + } + + protected Document toDocument(AggregationOperationContext context) { + + Document document = new Document(); + + for (Facet facet : facets) { + document.put(facet.getExposedField().getName(), facet.toDocuments(context)); + } + + return document; + } + + /** + * Adds a facet to this {@link Facets}. + * + * @param fieldName must not be {@literal null}. + * @param operations must not be {@literal null}. + * @return the new {@link Facets}. + */ + Facets and(String fieldName, List operations) { + + Assert.hasText(fieldName, "FieldName must not be null or empty"); + Assert.notNull(operations, "AggregationOperations must not be null"); + + List facets = new ArrayList(this.facets.size() + 1); + facets.addAll(this.facets); + facets.add(new Facet(new ExposedField(fieldName, true), operations)); + + return new Facets(facets); + } + } + + /** + * A single facet with a {@link ExposedField} and its {@link AggregationOperation} pipeline. 
+ * + * @author Mark Paluch + */ + private static class Facet { + + private final ExposedField exposedField; + private final List operations; + + /** + * Creates a new {@link Facet} given {@link ExposedField} and {@link AggregationOperation} pipeline. + * + * @param exposedField must not be {@literal null}. + * @param operations must not be {@literal null}. + */ + Facet(ExposedField exposedField, List operations) { + + Assert.notNull(exposedField, "ExposedField must not be null"); + Assert.notNull(operations, "AggregationOperations must not be null"); + + this.exposedField = exposedField; + this.operations = operations; + } + + ExposedField getExposedField() { + return exposedField; + } + + protected List toDocuments(AggregationOperationContext context) { + return AggregationOperationRenderer.toDocument(operations, context); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java index 4ac7f5b184..a6737dc574 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,7 @@ /** * Abstraction for a field. - * + * * @author Oliver Gierke * @since 1.3 */ @@ -25,22 +25,30 @@ public interface Field { /** * Returns the name of the field. - * + * * @return must not be {@literal null}. */ String getName(); /** * Returns the target of the field. In case no explicit target is available {@link #getName()} should be returned. - * + * * @return must not be {@literal null}. */ String getTarget(); /** * Returns whether the Field is aliased, which means it has a name set different from the target. - * + * * @return */ boolean isAliased(); + + /** + * @return true if the field name references a local value such as {@code $$this}. + * @since 2.2 + */ + default boolean isInternal() { + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldLookupPolicy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldLookupPolicy.java new file mode 100644 index 0000000000..b438be3f31 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldLookupPolicy.java @@ -0,0 +1,64 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +/** + * Lookup policy for aggregation fields. Allows strict lookups that fail if the field is absent or relaxed ones that + * pass-thru the requested field even if we have to assume that the field isn't present because of the limited scope of + * our input. + * + * @author Mark Paluch + * @since 4.3.1 + */ +public abstract class FieldLookupPolicy { + + private static final FieldLookupPolicy STRICT = new FieldLookupPolicy() { + @Override + boolean isStrict() { + return true; + } + }; + + private static final FieldLookupPolicy RELAXED = new FieldLookupPolicy() { + @Override + boolean isStrict() { + return false; + } + }; + + private FieldLookupPolicy() {} + + /** + * @return a relaxed lookup policy. + */ + public static FieldLookupPolicy relaxed() { + return RELAXED; + } + + /** + * @return a strict lookup policy. + */ + public static FieldLookupPolicy strict() { + return STRICT; + } + + /** + * @return {@code true} if the policy uses a strict lookup; {@code false} to allow references to fields that cannot be + * determined to be exactly present. + */ + abstract boolean isStrict(); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java index 70cd2070a3..83fc7c2b87 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,55 +17,58 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** * Value object to capture a list of {@link Field} instances. - * + * * @author Oliver Gierke * @author Thomas Darimont * @since 1.3 */ public final class Fields implements Iterable { - private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! Please " - + "customize your field definitions to get to unique field names!"; + private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s; Please " + + "customize your field definitions to get to unique field names"; - public static final String UNDERSCORE_ID = "_id"; + public static final String UNDERSCORE_ID = FieldName.ID.name(); public static final String UNDERSCORE_ID_REF = "$_id"; private final List fields; /** * Creates a new {@link Fields} instance from the given {@link Fields}. 
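The strict/relaxed distinction of the FieldLookupPolicy above drives the lookup in ExposedFieldsAggregationOperationContext#getReference: strict lookups reject names that are not exposed, relaxed lookups pass them through. A minimal standalone sketch of that decision logic (not the Spring Data API, purely illustrative):

import java.util.Map;

class LookupPolicySketch {

	// Resolves a name against the fields exposed by the previous stage:
	// strict -> unknown names fail fast, relaxed -> unknown names pass through unchanged.
	static String resolve(Map<String, String> exposedFields, String name, boolean strict) {

		String target = exposedFields.get(name);
		if (target != null) {
			return target;
		}

		if (strict) {
			throw new IllegalArgumentException(String.format("Invalid reference '%s'", name));
		}

		return name;
	}

	public static void main(String[] args) {

		Map<String, String> exposed = Map.of("total", "$total");

		System.out.println(resolve(exposed, "total", true)); // $total
		System.out.println(resolve(exposed, "unknownField", false)); // unknownField (passed through)
		// resolve(exposed, "unknownField", true) would throw IllegalArgumentException
	}
}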
- * + * * @param fields must not be {@literal null} or empty. * @return */ public static Fields from(Field... fields) { - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(fields, "Fields must not be null"); return new Fields(Arrays.asList(fields)); } /** * Creates a new {@link Fields} instance for {@link Field}s with the given names. - * + * * @param names must not be {@literal null}. * @return */ public static Fields fields(String... names) { - Assert.notNull(names, "Field names must not be null!"); + Assert.notNull(names, "Field names must not be null"); - List fields = new ArrayList(); + List fields = new ArrayList<>(); for (String name : names) { fields.add(field(name)); @@ -76,7 +79,7 @@ public static Fields fields(String... names) { /** * Creates a {@link Field} with the given name. - * + * * @param name must not be {@literal null} or empty. * @return */ @@ -94,25 +97,25 @@ public static Field field(String name) { * @return */ public static Field field(String name, String target) { - Assert.hasText(target, "Target must not be null or empty!"); + Assert.hasText(target, "Target must not be null or empty"); return new AggregationField(name, target); } /** * Creates a new {@link Fields} instance using the given {@link Field}s. - * + * * @param fields must not be {@literal null}. */ private Fields(List fields) { - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(fields, "Fields must not be null"); this.fields = verify(fields); } - private static final List verify(List fields) { + private static List verify(List fields) { - Map reference = new HashMap(); + Map reference = new HashMap<>(); for (Field field : fields) { @@ -131,14 +134,14 @@ private static final List verify(List fields) { private Fields(Fields existing, Field tail) { - this.fields = new ArrayList(existing.fields.size() + 1); + this.fields = new ArrayList<>(existing.fields.size() + 1); this.fields.addAll(existing.fields); this.fields.add(tail); } /** * Creates a new {@link Fields} instance with a new {@link Field} of the given name added. - * + * * @param name must not be {@literal null}. * @return */ @@ -165,6 +168,11 @@ public Fields and(Fields fields) { return result; } + public int size() { + return fields.size(); + } + + @Nullable public Field getField(String name) { for (Field field : fields) { @@ -176,28 +184,33 @@ public Field getField(String name) { return null; } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ @Override public Iterator iterator() { return fields.iterator(); } + /** + * @return + * @since 1.10 + */ + public List asList() { + return Collections.unmodifiableList(fields); + } + /** * Value object to encapsulate a field in an aggregation operation. - * + * * @author Oliver Gierke */ static class AggregationField implements Field { + private final String raw; private final String name; private final String target; /** * Creates an aggregation field with the given {@code name}. - * + * * @see AggregationField#AggregationField(String, String). * @param name must not be {@literal null} or empty */ @@ -210,16 +223,17 @@ public AggregationField(String name) { *
                    * The {@code name} serves as an alias for the actual backing document field denoted by {@code target}. If no target * is set explicitly, the name will be used as target. - * + * * @param name must not be {@literal null} or empty * @param target */ - public AggregationField(String name, String target) { + public AggregationField(String name, @Nullable String target) { - String nameToSet = cleanUp(name); - String targetToSet = cleanUp(target); + raw = name; + String nameToSet = name != null ? cleanUp(name) : null; + String targetToSet = target != null ? cleanUp(target) : null; - Assert.hasText(nameToSet, "AggregationField name must not be null or empty!"); + Assert.hasText(nameToSet, "AggregationField name must not be null or empty"); if (target == null && name.contains(".")) { this.name = nameToSet.substring(nameToSet.indexOf('.') + 1); @@ -230,13 +244,9 @@ public AggregationField(String name, String target) { } } - private static final String cleanUp(String source) { - - if (source == null) { - return source; - } + private static String cleanUp(String source) { - if (Aggregation.SystemVariable.isReferingToSystemVariable(source)) { + if (AggregationVariable.isVariable(source)) { return source; } @@ -244,64 +254,70 @@ private static final String cleanUp(String source) { return dollarIndex == -1 ? source : source.substring(dollarIndex + 1); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getKey() - */ + @Override public String getName() { return name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getAlias() - */ + @Override public String getTarget() { + + if (isLocalVar() || pointsToDBRefId()) { + return this.getRaw(); + } + return StringUtils.hasText(this.target) ? this.target : this.name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#isAliased() - */ @Override public boolean isAliased() { return !getName().equals(getTarget()); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() + @Override + public boolean isInternal() { + return getRaw().endsWith("$$this") || getRaw().endsWith("$$value"); + } + + /** + * @return {@literal true} in case the field name starts with {@code $$}. 
+ * @since 1.10 + */ + public boolean isLocalVar() { + return raw.startsWith("$$") && !raw.startsWith("$$$"); + } + + protected boolean pointsToDBRefId() { // see https://jira.mongodb.org/browse/SERVER-14466 + return raw.endsWith(".$id"); + } + + /** + * @return + * @since 1.10 */ + public String getRaw() { + return raw; + } + @Override public String toString() { return String.format("AggregationField - name: %s, target: %s", name, target); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof AggregationField)) { + if (!(obj instanceof AggregationField field)) { return false; } - AggregationField that = (AggregationField) obj; - - return this.name.equals(that.name) && ObjectUtils.nullSafeEquals(this.target, that.target); + return this.name.equals(field.name) && ObjectUtils.nullSafeEquals(this.target, field.target); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java index c17aeb7fa0..4fdea92dde 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2016 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,11 +33,22 @@ public interface FieldsExposingAggregationOperation extends AggregationOperation */ ExposedFields getFields(); + /** + * @return {@literal true} to conditionally inherit fields from previous operations. + * @since 2.0.6 + */ + default boolean inheritsFields() { + return false; + } + /** * Marker interface for {@link AggregationOperation} that inherits fields from previous operations. */ - static interface InheritsFieldsAggregationOperation extends FieldsExposingAggregationOperation { + interface InheritsFieldsAggregationOperation extends FieldsExposingAggregationOperation { + @Override + default boolean inheritsFields() { + return true; + } } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java index 29afc03f8e..f4a5fb4498 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,116 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.util.StringUtils; /** * Represents a {@code geoNear} aggregation operation. *
* We recommend using the static factory method {@link Aggregation#geoNear(NearQuery, String)} instead of creating * instances of this class directly. - * + * @author Thomas Darimont + * @author Christoph Strobl * @since 1.3 + * @see MongoDB Aggregation Framework: + * $geoNear */ public class GeoNearOperation implements AggregationOperation { private final NearQuery nearQuery; private final String distanceField; + private final @Nullable String indexKey; /** * Creates a new {@link GeoNearOperation} from the given {@link NearQuery} and the given distance field. The * {@code distanceField} defines the output field that contains the calculated distance. - * - * @param query must not be {@literal null}. + * + * @param nearQuery must not be {@literal null}. * @param distanceField must not be {@literal null}. */ public GeoNearOperation(NearQuery nearQuery, String distanceField) { + this(nearQuery, distanceField, null); + } + + /** + * Creates a new {@link GeoNearOperation} from the given {@link NearQuery} and the given distance field. The + * {@code distanceField} defines the output field that contains the calculated distance. + * + * @param nearQuery must not be {@literal null}. + * @param distanceField must not be {@literal null}. + * @param indexKey can be {@literal null}. + * @since 2.1 + */ + private GeoNearOperation(NearQuery nearQuery, String distanceField, @Nullable String indexKey) { - Assert.notNull(nearQuery, "NearQuery must not be null."); - Assert.hasLength(distanceField, "Distance field must not be null or empty."); + Assert.notNull(nearQuery, "NearQuery must not be null"); + Assert.hasLength(distanceField, "Distance field must not be null or empty"); this.nearQuery = nearQuery; this.distanceField = distanceField; + this.indexKey = indexKey; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Optionally specify the geospatial index key to use in the calculation.
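A usage sketch of the index hint described here (the point coordinates, distance, and "location" index key are assumed examples; the useIndex javadoc continues below with the MongoDB 4.0 note):

import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.GeoNearOperation;
import org.springframework.data.mongodb.core.query.NearQuery;

class GeoNearUsageSketch {

	// Builds a $geoNear stage that writes the computed distance to "distance"
	// and hints the geospatial index on the assumed "location" field.
	static GeoNearOperation nearbyPlaces() {

		NearQuery nearQuery = NearQuery.near(new Point(-73.99, 40.73)) //
				.maxDistance(new Distance(5, Metrics.KILOMETERS)) //
				.spherical(true);

		return Aggregation.geoNear(nearQuery, "distance").useIndex("location");
	}
}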
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @param key the geospatial index field to use when calculating the distance. + * @return new instance of {@link GeoNearOperation}. + * @since 2.1 */ + public GeoNearOperation useIndex(String key) { + return new GeoNearOperation(nearQuery, distanceField, key); + } + @Override - public DBObject toDBObject(AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { + + Document command = context.getMappedObject(nearQuery.toDocument()); - BasicDBObject command = (BasicDBObject) context.getMappedObject(nearQuery.toDBObject()); + if (command.containsKey("query")) { + command.replace("query", context.getMappedObject(command.get("query", Document.class))); + } + + command.remove("collation"); command.put("distanceField", distanceField); - return new BasicDBObject("$geoNear", command); + if (StringUtils.hasText(indexKey)) { + command.put("key", indexKey); + } + + return new Document(getOperator(), command); + } + + @Override + public String getOperator() { + return "$geoNear"; + } + + @Override + public List toPipelineStages(AggregationOperationContext context) { + + Document command = toDocument(context); + Number limit = (Number) command.get("$geoNear", Document.class).remove("num"); + + List stages = new ArrayList<>(3); + stages.add(command); + + if (nearQuery.getSkip() != null && nearQuery.getSkip() > 0) { + stages.add(new Document("$skip", nearQuery.getSkip())); + } + + if (limit != null) { + stages.add(new Document("$limit", limit.longValue())); + } + + return stages; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java new file mode 100644 index 0000000000..72a917c599 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java @@ -0,0 +1,396 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * Encapsulates the aggregation framework {@code $graphLookup}-operation.
                    + * Performs a recursive search on a collection, with options for restricting the search by recursion depth and query + * filter.
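A usage sketch built with the builder defined below (the "employees" collection and its "manager"/"name" fields are assumed examples):

import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation;

class GraphLookupUsageSketch {

	// Recursively follows each document's "manager" value to matching "name" values,
	// collecting the traversed documents in the "reportingHierarchy" array.
	static GraphLookupOperation reportingHierarchy() {

		return GraphLookupOperation.builder() //
				.from("employees") //
				.startWith("manager") //
				.connectFrom("manager") //
				.connectTo("name") //
				.maxDepth(5) //
				.as("reportingHierarchy");
	}
}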
                    + * We recommend to use the static factory method {@link Aggregation#graphLookup(String)} instead of creating instances + * of this class directly. + * + * @see https://docs.mongodb.org/manual/reference/aggregation/graphLookup/ + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.10 + */ +public class GraphLookupOperation implements InheritsFieldsAggregationOperation { + + private static final Set> ALLOWED_START_TYPES = Set.of(AggregationExpression.class, String.class, Field.class, Document.class); + + private final String from; + private final List startWith; + private final Field connectFrom; + private final Field connectTo; + private final Field as; + private final @Nullable Long maxDepth; + private final @Nullable Field depthField; + private final @Nullable CriteriaDefinition restrictSearchWithMatch; + + private GraphLookupOperation(String from, List startWith, Field connectFrom, Field connectTo, Field as, + @Nullable Long maxDepth, @Nullable Field depthField, @Nullable CriteriaDefinition restrictSearchWithMatch) { + + this.from = from; + this.startWith = startWith; + this.connectFrom = connectFrom; + this.connectTo = connectTo; + this.as = as; + this.maxDepth = maxDepth; + this.depthField = depthField; + this.restrictSearchWithMatch = restrictSearchWithMatch; + } + + /** + * Creates a new {@link FromBuilder} to build {@link GraphLookupOperation}. + * + * @return a new {@link FromBuilder}. + */ + public static FromBuilder builder() { + return new GraphLookupOperationFromBuilder(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document graphLookup = new Document(); + + graphLookup.put("from", from); + + List mappedStartWith = new ArrayList<>(startWith.size()); + + for (Object startWithElement : startWith) { + + if (startWithElement instanceof AggregationExpression aggregationExpression) { + mappedStartWith.add(aggregationExpression.toDocument(context)); + } else if (startWithElement instanceof Field field) { + mappedStartWith.add(context.getReference(field).toString()); + } else { + mappedStartWith.add(startWithElement); + } + } + + graphLookup.put("startWith", mappedStartWith.size() == 1 ? mappedStartWith.iterator().next() : mappedStartWith); + + graphLookup.put("connectFromField", connectFrom.getTarget()); + graphLookup.put("connectToField", connectTo.getTarget()); + graphLookup.put("as", as.getName()); + + if (maxDepth != null) { + graphLookup.put("maxDepth", maxDepth); + } + + if (depthField != null) { + graphLookup.put("depthField", depthField.getTarget()); + } + + if (restrictSearchWithMatch != null) { + graphLookup.put("restrictSearchWithMatch", context.getMappedObject(restrictSearchWithMatch.getCriteriaObject())); + } + + return new Document(getOperator(), graphLookup); + } + + @Override + public String getOperator() { + return "$graphLookup"; + } + + @Override + public ExposedFields getFields() { + + List fields = new ArrayList<>(2); + fields.add(new ExposedField(as, true)); + if(depthField != null) { + fields.add(new ExposedField(depthField, true)); + } + return ExposedFields.from(fields.toArray(new ExposedField[0])); + } + + /** + * @author Mark Paluch + */ + public interface FromBuilder { + + /** + * Set the {@literal collectionName} to apply the {@code $graphLookup} to. + * + * @param collectionName must not be {@literal null} or empty. + * @return never {@literal null}. 
+ */ + StartWithBuilder from(String collectionName); + } + + /** + * @author Mark Paluch + * @author Christoph Strobl + */ + public interface StartWithBuilder { + + /** + * Set the startWith {@literal fieldReferences} to apply the {@code $graphLookup} to. + * + * @param fieldReferences must not be {@literal null}. + * @return never {@literal null}. + */ + ConnectFromBuilder startWith(String... fieldReferences); + + /** + * Set the startWith {@literal expressions} to apply the {@code $graphLookup} to. + * + * @param expressions must not be {@literal null}. + * @return never {@literal null}. + */ + ConnectFromBuilder startWith(AggregationExpression... expressions); + + /** + * Set the startWith as either {@literal fieldReferences}, {@link Fields}, {@link Document} or + * {@link AggregationExpression} to apply the {@code $graphLookup} to. + * + * @param expressions must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException + */ + ConnectFromBuilder startWith(Object... expressions); + } + + /** + * @author Mark Paluch + */ + public interface ConnectFromBuilder { + + /** + * Set the connectFrom {@literal fieldName} to apply the {@code $graphLookup} to. + * + * @param fieldName must not be {@literal null} or empty. + * @return never {@literal null}. + */ + ConnectToBuilder connectFrom(String fieldName); + } + + /** + * @author Mark Paluch + */ + public interface ConnectToBuilder { + + /** + * Set the connectTo {@literal fieldName} to apply the {@code $graphLookup} to. + * + * @param fieldName must not be {@literal null} or empty. + * @return never {@literal null}. + */ + GraphLookupOperationBuilder connectTo(String fieldName); + } + + /** + * Builder to build the initial {@link GraphLookupOperationBuilder} that configures the initial mandatory set of + * {@link GraphLookupOperation} properties. + * + * @author Mark Paluch + */ + static final class GraphLookupOperationFromBuilder + implements FromBuilder, StartWithBuilder, ConnectFromBuilder, ConnectToBuilder { + + private @Nullable String from; + private @Nullable List startWith; + private @Nullable String connectFrom; + + @Override + public StartWithBuilder from(String collectionName) { + + Assert.hasText(collectionName, "CollectionName must not be null or empty"); + + this.from = collectionName; + return this; + } + + @Override + public ConnectFromBuilder startWith(String... fieldReferences) { + + Assert.notNull(fieldReferences, "FieldReferences must not be null"); + Assert.noNullElements(fieldReferences, "FieldReferences must not contain null elements"); + + List fields = new ArrayList<>(fieldReferences.length); + + for (String fieldReference : fieldReferences) { + fields.add(Fields.field(fieldReference)); + } + + this.startWith = fields; + return this; + } + + @Override + public ConnectFromBuilder startWith(AggregationExpression... expressions) { + + Assert.notNull(expressions, "AggregationExpressions must not be null"); + Assert.noNullElements(expressions, "AggregationExpressions must not contain null elements"); + + this.startWith = Arrays.asList(expressions); + return this; + } + + @Override + public ConnectFromBuilder startWith(Object... expressions) { + + Assert.notNull(expressions, "Expressions must not be null"); + Assert.noNullElements(expressions, "Expressions must not contain null elements"); + + this.startWith = verifyAndPotentiallyTransformStartsWithTypes(expressions); + return this; + } + + private List verifyAndPotentiallyTransformStartsWithTypes(Object... 
expressions) { + + List expressionsToUse = new ArrayList<>(expressions.length); + + for (Object expression : expressions) { + + assertStartWithType(expression); + + if (expression instanceof String stringValue) { + expressionsToUse.add(Fields.field(stringValue)); + } else { + expressionsToUse.add(expression); + } + + } + return expressionsToUse; + } + + private void assertStartWithType(Object expression) { + + for (Class type : ALLOWED_START_TYPES) { + + if (ClassUtils.isAssignable(type, expression.getClass())) { + return; + } + } + + throw new IllegalArgumentException( + String.format("Expression must be any of %s but was %s", ALLOWED_START_TYPES, expression.getClass())); + } + + @Override + public ConnectToBuilder connectFrom(String fieldName) { + + Assert.hasText(fieldName, "ConnectFrom must not be null or empty"); + + this.connectFrom = fieldName; + return this; + } + + @Override + public GraphLookupOperationBuilder connectTo(String fieldName) { + + Assert.hasText(fieldName, "ConnectTo must not be null or empty"); + + return new GraphLookupOperationBuilder(from, startWith, connectFrom, fieldName); + } + } + + /** + * @author Mark Paluch + */ + public static final class GraphLookupOperationBuilder { + + private final String from; + private final List startWith; + private final Field connectFrom; + private final Field connectTo; + private @Nullable Long maxDepth; + private @Nullable Field depthField; + private @Nullable CriteriaDefinition restrictSearchWithMatch; + + private GraphLookupOperationBuilder(String from, List startWith, String connectFrom, + String connectTo) { + + this.from = from; + this.startWith = new ArrayList<>(startWith); + this.connectFrom = Fields.field(connectFrom); + this.connectTo = Fields.field(connectTo); + } + + /** + * Optionally limit the number of recursions. + * + * @param numberOfRecursions must be greater or equal to zero. + * @return this. + */ + public GraphLookupOperationBuilder maxDepth(long numberOfRecursions) { + + Assert.isTrue(numberOfRecursions >= 0, "Max depth must be >= 0"); + + this.maxDepth = numberOfRecursions; + return this; + } + + /** + * Optionally add a depth field {@literal fieldName} to each traversed document in the search path. + * + * @param fieldName must not be {@literal null} or empty. + * @return this. + */ + public GraphLookupOperationBuilder depthField(String fieldName) { + + Assert.hasText(fieldName, "Depth field name must not be null or empty"); + + this.depthField = Fields.field(fieldName); + return this; + } + + /** + * Optionally add a query specifying conditions to the recursive search. + * + * @param criteriaDefinition must not be {@literal null}. + * @return + */ + public GraphLookupOperationBuilder restrict(CriteriaDefinition criteriaDefinition) { + + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); + + this.restrictSearchWithMatch = criteriaDefinition; + return this; + } + + /** + * Set the name of the array field added to each output document and return the final {@link GraphLookupOperation}. + * Contains the documents traversed in the {@literal $graphLookup} stage to reach the document. + * + * @param fieldName must not be {@literal null} or empty. + * @return the final {@link GraphLookupOperation}. 
+ */ + public GraphLookupOperation as(String fieldName) { + + Assert.hasText(fieldName, "As field name must not be null or empty"); + + return new GraphLookupOperation(from, startWith, connectFrom, connectTo, Fields.field(fieldName), maxDepth, + depthField, restrictSearchWithMatch); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java index 4ee8b37ed8..10d58a7682 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,30 +16,31 @@ package org.springframework.data.mongodb.core.aggregation; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Locale; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.ScriptOperators.Accumulator; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; /** * Encapsulates the aggregation framework {@code $group}-operation. *
                    * We recommend to use the static factory method {@link Aggregation#group(Fields)} instead of creating instances of this * class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/group/#stage._S_group + * * @author Sebastian Herold * @author Thomas Darimont * @author Oliver Gierke + * @author Gustavo de Geus + * @author Christoph Strobl + * @author Mark Paluch + * @author Sergey Shcherbakov * @since 1.3 + * @see MongoDB Aggregation Framework: $group */ public class GroupOperation implements FieldsExposingAggregationOperation { @@ -52,7 +53,7 @@ public class GroupOperation implements FieldsExposingAggregationOperation { /** * Creates a new {@link GroupOperation} including the given {@link Fields}. - * + * * @param fields must not be {@literal null}. */ public GroupOperation(Fields fields) { @@ -63,7 +64,7 @@ public GroupOperation(Fields fields) { /** * Creates a new {@link GroupOperation} from the given {@link GroupOperation}. - * + * * @param groupOperation must not be {@literal null}. */ protected GroupOperation(GroupOperation groupOperation) { @@ -72,14 +73,14 @@ protected GroupOperation(GroupOperation groupOperation) { /** * Creates a new {@link GroupOperation} from the given {@link GroupOperation} and the given {@link Operation}s. - * + * * @param groupOperation * @param nextOperations */ private GroupOperation(GroupOperation groupOperation, List nextOperations) { - Assert.notNull(groupOperation, "GroupOperation must not be null!"); - Assert.notNull(nextOperations, "NextOperations must not be null!"); + Assert.notNull(groupOperation, "GroupOperation must not be null"); + Assert.notNull(nextOperations, "NextOperations must not be null"); this.idFields = groupOperation.idFields; this.operations = new ArrayList(nextOperations.size() + 1); @@ -89,17 +90,17 @@ private GroupOperation(GroupOperation groupOperation, List nextOperat /** * Creates a new {@link GroupOperation} from the current one adding the given {@link Operation}. - * + * * @param operation must not be {@literal null}. * @return */ protected GroupOperation and(Operation operation) { - return new GroupOperation(this, Arrays.asList(operation)); + return new GroupOperation(this, List.of(operation)); } /** * Builder for {@link GroupOperation}s on a field. - * + * * @author Thomas Darimont */ public static final class GroupOperationBuilder { @@ -109,14 +110,14 @@ public static final class GroupOperationBuilder { /** * Creates a new {@link GroupOperationBuilder} from the given {@link GroupOperation} and {@link Operation}. - * + * * @param groupOperation * @param operation */ private GroupOperationBuilder(GroupOperation groupOperation, Operation operation) { - Assert.notNull(groupOperation, "GroupOperation must not be null!"); - Assert.notNull(operation, "Operation must not be null!"); + Assert.notNull(groupOperation, "GroupOperation must not be null"); + Assert.notNull(operation, "Operation must not be null"); this.groupOperation = groupOperation; this.operation = operation; @@ -124,7 +125,7 @@ private GroupOperationBuilder(GroupOperation groupOperation, Operation operation /** * Allows to specify an alias for the new-operation operation. - * + * * @param alias * @return */ @@ -137,8 +138,8 @@ public GroupOperation as(String alias) { * Generates an {@link GroupOperationBuilder} for a {@code $sum}-expression. *
                    * Count expressions are emulated via {@code $sum: 1}. - *
                    - * + *
                    + * * @return */ public GroupOperationBuilder count() { @@ -147,7 +148,7 @@ public GroupOperationBuilder count() { /** * Generates an {@link GroupOperationBuilder} for a {@code $sum}-expression for the given field-reference. - * + * * @param reference * @return */ @@ -155,13 +156,28 @@ public GroupOperationBuilder sum(String reference) { return sum(reference, null); } - private GroupOperationBuilder sum(String reference, Object value) { + /** + * Generates an {@link GroupOperationBuilder} for an {@code $sum}-expression for the given + * {@link AggregationExpression}. + * + * @param expr must not be {@literal null}. + * @return new instance of {@link GroupOperationBuilder}. Never {@literal null}. + * @throws IllegalArgumentException when {@code expr} is {@literal null}. + * @since 1.10.8 + */ + public GroupOperationBuilder sum(AggregationExpression expr) { + + Assert.notNull(expr, "Expr must not be null"); + return newBuilder(GroupOps.SUM, null, expr); + } + + private GroupOperationBuilder sum(@Nullable String reference, @Nullable Object value) { return newBuilder(GroupOps.SUM, reference, value); } /** * Generates an {@link GroupOperationBuilder} for an {@code $add_to_set}-expression for the given field-reference. - * + * * @param reference * @return */ @@ -171,7 +187,7 @@ public GroupOperationBuilder addToSet(String reference) { /** * Generates an {@link GroupOperationBuilder} for an {@code $add_to_set}-expression for the given value. - * + * * @param value * @return */ @@ -179,13 +195,13 @@ public GroupOperationBuilder addToSet(Object value) { return addToSet(null, value); } - private GroupOperationBuilder addToSet(String reference, Object value) { + private GroupOperationBuilder addToSet(@Nullable String reference, @Nullable Object value) { return newBuilder(GroupOps.ADD_TO_SET, reference, value); } /** * Generates an {@link GroupOperationBuilder} for an {@code $last}-expression for the given field-reference. - * + * * @param reference * @return */ @@ -194,8 +210,9 @@ public GroupOperationBuilder last(String reference) { } /** - * Generates an {@link GroupOperationBuilder} for an {@code $last}-expression for the given {@link AggregationExpression}. - * + * Generates an {@link GroupOperationBuilder} for an {@code $last}-expression for the given + * {@link AggregationExpression}. + * * @param expr * @return */ @@ -205,7 +222,7 @@ public GroupOperationBuilder last(AggregationExpression expr) { /** * Generates an {@link GroupOperationBuilder} for a {@code $first}-expression for the given field-reference. - * + * * @param reference * @return */ @@ -214,8 +231,9 @@ public GroupOperationBuilder first(String reference) { } /** - * Generates an {@link GroupOperationBuilder} for a {@code $first}-expression for the given {@link AggregationExpression}. - * + * Generates an {@link GroupOperationBuilder} for a {@code $first}-expression for the given + * {@link AggregationExpression}. + * * @param expr * @return */ @@ -225,7 +243,7 @@ public GroupOperationBuilder first(AggregationExpression expr) { /** * Generates an {@link GroupOperationBuilder} for an {@code $avg}-expression for the given field-reference. - * + * * @param reference * @return */ @@ -234,8 +252,9 @@ public GroupOperationBuilder avg(String reference) { } /** - * Generates an {@link GroupOperationBuilder} for an {@code $avg}-expression for the given {@link AggregationExpression}. - * + * Generates an {@link GroupOperationBuilder} for an {@code $avg}-expression for the given + * {@link AggregationExpression}. 
+ * * @param expr * @return */ @@ -245,7 +264,7 @@ public GroupOperationBuilder avg(AggregationExpression expr) { /** * Generates an {@link GroupOperationBuilder} for an {@code $push}-expression for the given field-reference. - * + * * @param reference * @return */ @@ -255,7 +274,7 @@ public GroupOperationBuilder push(String reference) { /** * Generates an {@link GroupOperationBuilder} for an {@code $push}-expression for the given value. - * + * * @param value * @return */ @@ -263,13 +282,13 @@ public GroupOperationBuilder push(Object value) { return push(null, value); } - private GroupOperationBuilder push(String reference, Object value) { + private GroupOperationBuilder push(@Nullable String reference, @Nullable Object value) { return newBuilder(GroupOps.PUSH, reference, value); } /** * Generates an {@link GroupOperationBuilder} for an {@code $min}-expression that for the given field-reference. - * + * * @param reference * @return */ @@ -278,8 +297,9 @@ public GroupOperationBuilder min(String reference) { } /** - * Generates an {@link GroupOperationBuilder} for an {@code $min}-expression that for the given {@link AggregationExpression}. - * + * Generates an {@link GroupOperationBuilder} for an {@code $min}-expression that for the given + * {@link AggregationExpression}. + * * @param expr * @return */ @@ -289,7 +309,7 @@ public GroupOperationBuilder min(AggregationExpression expr) { /** * Generates an {@link GroupOperationBuilder} for an {@code $max}-expression that for the given field-reference. - * + * * @param reference * @return */ @@ -298,8 +318,9 @@ public GroupOperationBuilder max(String reference) { } /** - * Generates an {@link GroupOperationBuilder} for an {@code $max}-expression that for the given {@link AggregationExpression}. - * + * Generates an {@link GroupOperationBuilder} for an {@code $max}-expression that for the given + * {@link AggregationExpression}. + * * @param expr * @return */ @@ -307,14 +328,79 @@ public GroupOperationBuilder max(AggregationExpression expr) { return newBuilder(GroupOps.MAX, null, expr); } - private GroupOperationBuilder newBuilder(Keyword keyword, String reference, Object value) { - return new GroupOperationBuilder(this, new Operation(keyword, null, reference, value)); + /** + * Generates an {@link GroupOperationBuilder} for an {@code $stdDevSamp}-expression that for the given + * field-reference. + * + * @param reference must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public GroupOperationBuilder stdDevSamp(String reference) { + return newBuilder(GroupOps.STD_DEV_SAMP, reference, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields() + /** + * Generates an {@link GroupOperationBuilder} for an {@code $stdDevSamp}-expression that for the given + * {@link AggregationExpression}. + * + * @param expr must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 */ + public GroupOperationBuilder stdDevSamp(AggregationExpression expr) { + return newBuilder(GroupOps.STD_DEV_SAMP, null, expr); + } + + /** + * Generates an {@link GroupOperationBuilder} for an {@code $stdDevPop}-expression that for the given field-reference. + * + * @param reference must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 1.10 + */ + public GroupOperationBuilder stdDevPop(String reference) { + return newBuilder(GroupOps.STD_DEV_POP, reference, null); + } + + /** + * Generates an {@link GroupOperationBuilder} for an {@code $stdDevPop}-expression that for the given + * {@link AggregationExpression}. + * + * @param expr must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public GroupOperationBuilder stdDevPop(AggregationExpression expr) { + return newBuilder(GroupOps.STD_DEV_POP, null, expr); + } + + /** + * Generates an {@link GroupOperationBuilder} for an {@code $accumulator}-expression. + * + * @param accumulator must not be {@literal null}. + * @return never {@literal null}. + * @since 3.2 + */ + public GroupOperationBuilder accumulate(Accumulator accumulator) { + return new GroupOperationBuilder(this, new Operation(accumulator)); + } + + /** + * Adds a computed field to the {@link GroupOperation}. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 4.0 + */ + public GroupOperation and(String fieldName, AggregationExpression expression) { + return new GroupOperationBuilder(this, new Operation(expression)).as(fieldName); + } + + private GroupOperationBuilder newBuilder(Keyword keyword, @Nullable String reference, @Nullable Object value) { + return new GroupOperationBuilder(this, new Operation(keyword, null, reference, value)); + } + @Override public ExposedFields getFields() { @@ -327,14 +413,10 @@ public ExposedFields getFields() { return fields; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public com.mongodb.DBObject toDBObject(AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { - BasicDBObject operationObject = new BasicDBObject(); + Document operationObject = new Document(); if (idFields.exposesNoNonSyntheticFields()) { @@ -347,7 +429,7 @@ public com.mongodb.DBObject toDBObject(AggregationOperationContext context) { } else { - BasicDBObject inner = new BasicDBObject(); + Document inner = new Document(); for (ExposedField field : idFields) { FieldReference reference = context.getReference(field); @@ -358,10 +440,15 @@ public com.mongodb.DBObject toDBObject(AggregationOperationContext context) { } for (Operation operation : operations) { - operationObject.putAll(operation.toDBObject(context)); + operationObject.putAll(operation.toDocument(context)); } - return new BasicDBObject("$group", operationObject); + return new Document(getOperator(), operationObject); + } + + @Override + public String getOperator() { + return "$group"; } interface Keyword { @@ -369,34 +456,35 @@ interface Keyword { String toString(); } - private static enum GroupOps implements Keyword { + private enum GroupOps implements Keyword { - SUM, LAST, FIRST, PUSH, AVG, MIN, MAX, ADD_TO_SET, COUNT; + SUM("$sum"), LAST("$last"), FIRST("$first"), PUSH("$push"), AVG("$avg"), MIN("$min"), MAX("$max"), ADD_TO_SET( + "$addToSet"), STD_DEV_POP("$stdDevPop"), STD_DEV_SAMP("$stdDevSamp"); - @Override - public String toString() { - - String[] parts = name().split("_"); + private final String mongoOperator; - StringBuilder builder = new StringBuilder(); - - for (String part : parts) { - String lowerCase = part.toLowerCase(Locale.US); - builder.append(builder.length() == 0 ? 
lowerCase : StringUtils.capitalize(lowerCase)); - } + GroupOps(String mongoOperator) { + this.mongoOperator = mongoOperator; + } - return "$" + builder.toString(); + @Override + public String toString() { + return mongoOperator; } } static class Operation implements AggregationOperation { - private final Keyword op; - private final String key; - private final String reference; - private final Object value; + private final @Nullable Keyword op; + private final @Nullable String key; + private final @Nullable String reference; + private final @Nullable Object value; + + Operation(AggregationExpression expression) { + this(null, null, null, expression); + } - public Operation(Keyword op, String key, String reference, Object value) { + public Operation(@Nullable Keyword op, @Nullable String key, @Nullable String reference, @Nullable Object value) { this.op = op; this.key = key; @@ -412,22 +500,27 @@ public ExposedField asField() { return new ExposedField(key, true); } - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject(key, new BasicDBObject(op.toString(), getValue(context))); + public Document toDocument(AggregationOperationContext context) { + + Object value = getValue(context); + if(op == null && value instanceof Document) { + return new Document(key, value); + } + return new Document(key, new Document(op.toString(), value)); } public Object getValue(AggregationOperationContext context) { if (reference == null) { - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDbObject(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } return value; } - if (Aggregation.SystemVariable.isReferingToSystemVariable(reference)) { + if (SystemVariable.isReferingToSystemVariable(reference)) { return reference; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java index c25b567328..ca6a2e2754 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; -import org.springframework.util.Assert; +import org.springframework.lang.Nullable; /** * {@link ExposedFieldsAggregationOperationContext} that inherits fields from its parent * {@link AggregationOperationContext}. * * @author Mark Paluch + * @author Christoph Strobl + * @since 1.9 */ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext { @@ -35,21 +37,23 @@ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAg * * @param exposedFields must not be {@literal null}. * @param previousContext must not be {@literal null}. + * @param lookupPolicy must not be {@literal null}. */ public InheritingExposedFieldsAggregationOperationContext(ExposedFields exposedFields, - AggregationOperationContext previousContext) { + AggregationOperationContext previousContext, FieldLookupPolicy lookupPolicy) { + + super(exposedFields, previousContext, lookupPolicy); - super(exposedFields, previousContext); - Assert.notNull(previousContext, "PreviousContext must not be null!"); this.previousContext = previousContext; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#resolveExposedField(org.springframework.data.mongodb.core.aggregation.Field, java.lang.String) - */ @Override - protected FieldReference resolveExposedField(Field field, String name) { + public Document getMappedObject(Document document) { + return previousContext.getMappedObject(document); + } + + @Override + protected FieldReference resolveExposedField(@Nullable Field field, String name) { FieldReference fieldReference = super.resolveExposedField(field, name); if (fieldReference != null) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java index b56a59e01d..e73dba1b9f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,20 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Encapsulates the {@code $limit}-operation. *

                    * We recommend to use the static factory method {@link Aggregation#limit(long)} instead of creating instances of this * class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/limit/ + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl * @since 1.3 + * @see MongoDB Aggregation Framework: $limit */ public class LimitOperation implements AggregationOperation { @@ -40,16 +39,17 @@ public class LimitOperation implements AggregationOperation { */ public LimitOperation(long maxElements) { - Assert.isTrue(maxElements >= 0, "Maximum number of elements must be greater or equal to zero!"); + Assert.isTrue(maxElements >= 0, "Maximum number of elements must be greater or equal to zero"); this.maxElements = maxElements; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject("$limit", maxElements); + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), maxElements); + } + + @Override + public String getOperator() { + return "$limit"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java new file mode 100644 index 0000000000..e277539315 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java @@ -0,0 +1,96 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.util.Assert; + +/** + * Gateway to {@literal literal} aggregation operations. + * + * @author Christoph Strobl + * @since 1.10 + */ +public class LiteralOperators { + + /** + * Take the value referenced by given {@literal value}. + * + * @param value must not be {@literal null}. + * @return + */ + public static LiteralOperatorFactory valueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new LiteralOperatorFactory(value); + } + + /** + * @author Christoph Strobl + */ + public static class LiteralOperatorFactory { + + private final Object value; + + /** + * Creates new {@link LiteralOperatorFactory} for given {@literal value}. + * + * @param value must not be {@literal null}. + */ + public LiteralOperatorFactory(Object value) { + + Assert.notNull(value, "Value must not be null"); + this.value = value; + } + + /** + * Creates new {@link Literal} that returns the associated value without parsing. + * + * @return + */ + public Literal asLiteral() { + return Literal.asLiteral(value); + } + } + + /** + * {@link AggregationExpression} for {@code $literal}. 
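A short sketch of what the {@code $literal} gateway is for: it keeps a {@code $}-prefixed value from being interpreted as a field reference (the field name "rawValue" is hypothetical):

    // projects the literal string "$tmp" instead of resolving a field named "tmp"
    ProjectionOperation projection = Aggregation.project()
        .and(LiteralOperators.valueOf("$tmp").asLiteral()).as("rawValue");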
+ * + * @author Christoph Strobl + */ + public static class Literal extends AbstractAggregationExpression { + + private Literal(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$literal"; + } + + /** + * Creates new {@link Literal}. + * + * @param value must not be {@literal null}. + * @return + */ + public static Literal asLiteral(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Literal(value); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java index 78860073aa..282ffbd9e0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,45 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.function.Supplier; + +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** - * Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the static factory method - * {@link Aggregation#lookup(String, String, String, String)} instead of creating instances of this class directly. + * Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the builder provided via + * {@link #newLookup()} instead of creating instances of this class directly. * * @author Alessio Fachechi * @author Christoph Strobl * @author Mark Paluch - * @see http://docs.mongodb.org/manual/reference/aggregation/lookup/#stage._S_lookup + * @author Sangyong Choi * @since 1.9 + * @see MongoDB Aggregation Framework: + * $lookup */ public class LookupOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation { - private Field from; - private Field localField; - private Field foreignField; - private ExposedField as; + private final String from; + + @Nullable // + private final Field localField; + + @Nullable // + private final Field foreignField; + + @Nullable // + private final Let let; + + @Nullable // + private final AggregationPipeline pipeline; + + private final ExposedField as; /** * Creates a new {@link LookupOperation} for the given {@link Field}s. 
@@ -48,90 +64,193 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe * @param as must not be {@literal null}. */ public LookupOperation(Field from, Field localField, Field foreignField, Field as) { + this(((Supplier) () -> { - Assert.notNull(from, "From must not be null!"); - Assert.notNull(localField, "LocalField must not be null!"); - Assert.notNull(foreignField, "ForeignField must not be null!"); - Assert.notNull(as, "As must not be null!"); + Assert.notNull(from, "From must not be null"); + return from.getTarget(); + }).get(), localField, foreignField, null, null, as); + } + + /** + * Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline + * pipeline}. + * + * @param from must not be {@literal null}. + * @param let must not be {@literal null}. + * @param as must not be {@literal null}. + * @since 4.1 + */ + public LookupOperation(String from, @Nullable Let let, AggregationPipeline pipeline, Field as) { + this(from, null, null, let, pipeline, as); + } + + /** + * Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline + * pipeline}. + * + * @param from must not be {@literal null}. + * @param localField can be {@literal null} if {@literal pipeline} is present. + * @param foreignField can be {@literal null} if {@literal pipeline} is present. + * @param let can be {@literal null} if {@literal localField} and {@literal foreignField} are present. + * @param as must not be {@literal null}. + * @since 4.1 + */ + public LookupOperation(String from, @Nullable Field localField, @Nullable Field foreignField, @Nullable Let let, + @Nullable AggregationPipeline pipeline, Field as) { + + Assert.notNull(from, "From must not be null"); + if (pipeline == null) { + Assert.notNull(localField, "LocalField must not be null"); + Assert.notNull(foreignField, "ForeignField must not be null"); + } else if (localField == null && foreignField == null) { + Assert.notNull(pipeline, "Pipeline must not be null"); + } + Assert.notNull(as, "As must not be null"); this.from = from; this.localField = localField; this.foreignField = foreignField; this.as = new ExposedField(as, true); + this.let = let; + this.pipeline = pipeline; } - private LookupOperation() { - // used by builder - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return ExposedFields.from(as); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { - BasicDBObject lookupObject = new BasicDBObject(); + Document lookupObject = new Document(); + + lookupObject.append("from", from); + if (localField != null) { + lookupObject.append("localField", localField.getTarget()); + } + if (foreignField != null) { + lookupObject.append("foreignField", foreignField.getTarget()); + } + if (let != null) { + lookupObject.append("let", let.toDocument(context).get("$let", Document.class).get("vars")); + } + if (pipeline != null) { + lookupObject.append("pipeline", pipeline.toDocuments(context)); + } - lookupObject.append("from", from.getTarget()); - lookupObject.append("localField", localField.getTarget()); - 
lookupObject.append("foreignField", foreignField.getTarget()); lookupObject.append("as", as.getTarget()); - return new BasicDBObject("$lookup", lookupObject); + return new Document(getOperator(), lookupObject); + } + + @Override + public String getOperator() { + return "$lookup"; } /** * Get a builder that allows creation of {@link LookupOperation}. * - * @return + * @return never {@literal null}. */ public static FromBuilder newLookup() { return new LookupOperationBuilder(); } - public static interface FromBuilder { + public interface FromBuilder { /** * @param name the collection in the same database to perform the join with, must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ LocalFieldBuilder from(String name); } - public static interface LocalFieldBuilder { + public interface LocalFieldBuilder extends PipelineBuilder { /** * @param name the field from the documents input to the {@code $lookup} stage, must not be {@literal null} or * empty. - * @return + * @return never {@literal null}. */ ForeignFieldBuilder localField(String name); } - public static interface ForeignFieldBuilder { + public interface ForeignFieldBuilder { /** * @param name the field from the documents in the {@code from} collection, must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ AsBuilder foreignField(String name); } - public static interface AsBuilder { + /** + * @since 4.1 + * @author Christoph Strobl + */ + public interface LetBuilder { + + /** + * Specifies {@link Let#getVariableNames() variables) that can be used in the + * {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}. + * + * @param let must not be {@literal null}. + * @return never {@literal null}. + * @see PipelineBuilder + */ + PipelineBuilder let(Let let); + + /** + * Specifies {@link Let#getVariableNames() variables) that can be used in the + * {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}. + * + * @param variables must not be {@literal null}. + * @return never {@literal null}. + * @see PipelineBuilder + */ + default PipelineBuilder let(ExpressionVariable... variables) { + return let(Let.just(variables)); + } + } + + /** + * @since 4.1 + * @author Christoph Strobl + */ + public interface PipelineBuilder extends LetBuilder { + + /** + * Specifies the {@link AggregationPipeline pipeline} that determines the resulting documents. + * + * @param pipeline must not be {@literal null}. + * @return never {@literal null}. + */ + AsBuilder pipeline(AggregationPipeline pipeline); + + /** + * Specifies the {@link AggregationPipeline#getOperations() stages} that determine the resulting documents. + * + * @param stages must not be {@literal null} can be empty. + * @return never {@literal null}. + */ + default AsBuilder pipeline(AggregationOperation... stages) { + return pipeline(AggregationPipeline.of(stages)); + } + + /** + * @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty. + * @return new instance of {@link LookupOperation}. + */ + LookupOperation as(String name); + } + + public interface AsBuilder extends PipelineBuilder { /** * @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty. - * @return + * @return new instance of {@link LookupOperation}. 
*/ LookupOperation as(String name); } @@ -145,11 +264,12 @@ public static interface AsBuilder { public static final class LookupOperationBuilder implements FromBuilder, LocalFieldBuilder, ForeignFieldBuilder, AsBuilder { - private final LookupOperation lookupOperation; - - private LookupOperationBuilder() { - this.lookupOperation = new LookupOperation(); - } + private @Nullable String from; + private @Nullable Field localField; + private @Nullable Field foreignField; + private @Nullable ExposedField as; + private @Nullable Let let; + private @Nullable AggregationPipeline pipeline; /** * Creates new builder for {@link LookupOperation}. @@ -163,34 +283,49 @@ public static FromBuilder newBuilder() { @Override public LocalFieldBuilder from(String name) { - Assert.hasText(name, "'From' must not be null or empty!"); - lookupOperation.from = Fields.field(name); + Assert.hasText(name, "'From' must not be null or empty"); + from = name; return this; } @Override - public LookupOperation as(String name) { + public AsBuilder foreignField(String name) { - Assert.hasText(name, "'As' must not be null or empty!"); - lookupOperation.as = new ExposedField(Fields.field(name), true); - return new LookupOperation(lookupOperation.from, lookupOperation.localField, lookupOperation.foreignField, - lookupOperation.as); + Assert.hasText(name, "'ForeignField' must not be null or empty"); + foreignField = Fields.field(name); + return this; } @Override - public AsBuilder foreignField(String name) { + public ForeignFieldBuilder localField(String name) { - Assert.hasText(name, "'ForeignField' must not be null or empty!"); - lookupOperation.foreignField = Fields.field(name); + Assert.hasText(name, "'LocalField' must not be null or empty"); + localField = Fields.field(name); return this; } @Override - public ForeignFieldBuilder localField(String name) { + public PipelineBuilder let(Let let) { + + Assert.notNull(let, "Let must not be null"); + this.let = let; + return this; + } + + @Override + public AsBuilder pipeline(AggregationPipeline pipeline) { - Assert.hasText(name, "'LocalField' must not be null or empty!"); - lookupOperation.localField = Fields.field(name); + Assert.notNull(pipeline, "Pipeline must not be null"); + this.pipeline = pipeline; return this; } + + @Override + public LookupOperation as(String name) { + + Assert.hasText(name, "'As' must not be null or empty"); + as = new ExposedField(Fields.field(name), true); + return new LookupOperation(from, localField, foreignField, let, pipeline, as); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java index eb86fb1e5c..da1dbfc027 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,67 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; + import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Encapsulates the {@code $match}-operation. *
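Both construction paths can be sketched as follows, the classic {@link Criteria} form and the {@link AggregationExpression} form added with 3.3 (field names hypothetical):

    MatchOperation byCriteria = Aggregation.match(Criteria.where("status").is("active"));
    MatchOperation byExpression = Aggregation.match(
        ComparisonOperators.valueOf("price").greaterThan("discountedPrice"));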

                    * We recommend to use the static factory method * {@link Aggregation#match(org.springframework.data.mongodb.core.query.Criteria)} instead of creating instances of this * class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/match/ + * * @author Sebastian Herold * @author Thomas Darimont * @author Oliver Gierke + * @author Divya Srivastava * @since 1.3 + * @see MongoDB Aggregation Framework: + * $match */ public class MatchOperation implements AggregationOperation { private final CriteriaDefinition criteriaDefinition; + private final AggregationExpression expression; /** * Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}. - * + * * @param criteriaDefinition must not be {@literal null}. */ public MatchOperation(CriteriaDefinition criteriaDefinition) { - Assert.notNull(criteriaDefinition, "Criteria must not be null!"); + Assert.notNull(criteriaDefinition, "Criteria must not be null"); + this.criteriaDefinition = criteriaDefinition; + this.expression = null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates a new {@link MatchOperation} for the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @since 3.3 */ + public MatchOperation(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + this.criteriaDefinition = null; + this.expression = expression; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + return new Document(getOperator(), + context.getMappedObject(expression != null ? expression.toDocument() : criteriaDefinition.getCriteriaObject())); + } + @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject("$match", context.getMappedObject(criteriaDefinition.getCriteriaObject())); + public String getOperator() { + return "$match"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MergeOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MergeOperation.java new file mode 100644 index 0000000000..314f83fc7c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MergeOperation.java @@ -0,0 +1,587 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Encapsulates the {@code $merge}-operation. + *

                    + * We recommend to use the {@link MergeOperationBuilder builder} via {@link MergeOperation#builder()} instead of + * creating instances of this class directly. + * + * @see MongoDB Documentation + * @author Christoph Strobl + * @since 3.0 + */ +public class MergeOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation { + + private final MergeOperationTarget into; + private final UniqueMergeId on; + private final @Nullable Let let; + private final @Nullable WhenDocumentsMatch whenMatched; + private final @Nullable WhenDocumentsDontMatch whenNotMatched; + + /** + * Create new instance of {@link MergeOperation}. + * + * @param into the target (collection and database) + * @param on the unique identifier. Can be {@literal null}. + * @param let exposed variables for {@link WhenDocumentsMatch#updateWith(Aggregation)}. Can be {@literal null}. + * @param whenMatched behavior if a result document matches an existing one in the target collection. Can be + * {@literal null}. + * @param whenNotMatched behavior if a result document does not match an existing one in the target collection. Can be + * {@literal null}. + */ + public MergeOperation(MergeOperationTarget into, UniqueMergeId on, @Nullable Let let, + @Nullable WhenDocumentsMatch whenMatched, @Nullable WhenDocumentsDontMatch whenNotMatched) { + + Assert.notNull(into, "Into must not be null Please provide a target collection"); + Assert.notNull(on, "On must not be null Use UniqueMergeId.id() instead"); + + this.into = into; + this.on = on; + this.let = let; + this.whenMatched = whenMatched; + this.whenNotMatched = whenNotMatched; + } + + /** + * Simplified form to apply all default options for {@code $merge} (including writing to a collection in the same + * database). + * + * @param collection the output collection within the same database. + * @return new instance of {@link MergeOperation}. + */ + public static MergeOperation mergeInto(String collection) { + return builder().intoCollection(collection).build(); + } + + /** + * Access the {@link MergeOperationBuilder builder API} to create a new instance of {@link MergeOperation}. + * + * @return new instance of {@link MergeOperationBuilder}. + */ + public static MergeOperationBuilder builder() { + return new MergeOperationBuilder(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (isJustCollection()) { + return new Document(getOperator(), into.collection); + } + + Document $merge = new Document(); + $merge.putAll(into.toDocument(context)); + + if (!on.isJustIdField()) { + $merge.putAll(on.toDocument(context)); + } + + if (let != null) { + $merge.append("let", let.toDocument(context).get("$let", Document.class).get("vars")); + } + + if (whenMatched != null) { + $merge.putAll(whenMatched.toDocument(context)); + } + + if (whenNotMatched != null) { + $merge.putAll(whenNotMatched.toDocument(context)); + } + + return new Document(getOperator(), $merge); + } + + @Override + public String getOperator() { + return "$merge"; + } + + @Override + public ExposedFields getFields() { + + if (let == null) { + return ExposedFields.from(); + } + + return ExposedFields.synthetic(Fields.fields(let.getVariableNames())); + } + + @Override + public boolean inheritsFields() { + return true; + } + + /** + * @return true if nothing more than the collection is specified. 
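A sketch of the simplest form next to a fully specified one (the collection name "monthlyTotals" is hypothetical):

    // renders as { $merge: "monthlyTotals" } because only the collection is set
    MergeOperation simple = MergeOperation.mergeInto("monthlyTotals");

    MergeOperation custom = MergeOperation.builder()
        .intoCollection("monthlyTotals")
        .whenMatched(WhenDocumentsMatch.replaceDocument())
        .whenNotMatched(WhenDocumentsDontMatch.insertNewDocument())
        .build();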
+ */ + private boolean isJustCollection() { + return into.isTargetingSameDatabase() && on.isJustIdField() && let == null && whenMatched == null + && whenNotMatched == null; + } + + /** + * Value object representing the unique id used during the merge operation to identify duplicates in the target + * collection. + * + * @author Christoph Strobl + */ + public static class UniqueMergeId { + + private static final UniqueMergeId ID = new UniqueMergeId(Collections.emptyList()); + + private final Collection uniqueIdentifier; + + private UniqueMergeId(Collection uniqueIdentifier) { + this.uniqueIdentifier = uniqueIdentifier; + } + + public static UniqueMergeId ofIdFields(String... fields) { + + Assert.noNullElements(fields, "Fields must not contain null values"); + + if (ObjectUtils.isEmpty(fields)) { + return id(); + } + + return new UniqueMergeId(Arrays.asList(fields)); + } + + /** + * Merge Documents by using the MongoDB {@literal _id} field. + * + * @return never {@literal null}. + */ + public static UniqueMergeId id() { + return ID; + } + + boolean isJustIdField() { + return this.equals(ID); + } + + Document toDocument(AggregationOperationContext context) { + + List mappedOn = uniqueIdentifier.stream().map(context::getReference).map(FieldReference::getRaw) + .collect(Collectors.toList()); + return new Document("on", mappedOn.size() == 1 ? mappedOn.iterator().next() : mappedOn); + } + } + + /** + * Value Object representing the {@code into} field of a {@code $merge} aggregation stage.
                    + * If not stated explicitly via {@link MergeOperationTarget#inDatabase(String)} the {@literal collection} is created + * in the very same {@literal database}. In this case {@code into} is just a single String holding the collection + * name.
+ *
+ * <pre class="code">
+ *     into: "target-collection-name"
+ * </pre>
+ *
+ * If the collection needs to be in a different database {@code into} will be a {@link Document} like the following
+ *
+ * <pre class="code">
+ * {
+ * 	into: { db: "target-db", coll: "target-collection-name" }
+ * }
+ * </pre>
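A sketch of targeting a collection in a different database (both names hypothetical):

    MergeOperationTarget target = MergeOperationTarget.collection("monthlyTotals")
        .inDatabase("reporting"); // renders into: { db: "reporting", coll: "monthlyTotals" }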
                    + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class MergeOperationTarget { + + private final @Nullable String database; + private final String collection; + + private MergeOperationTarget(@Nullable String database, String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + this.database = database; + this.collection = collection; + } + + /** + * @param collection The output collection results will be stored in. Must not be {@literal null}. + * @return new instance of {@link MergeOperationTarget}. + */ + public static MergeOperationTarget collection(String collection) { + return new MergeOperationTarget(null, collection); + } + + /** + * Optionally specify the target database if different from the source one. + * + * @param database must not be {@literal null}. + * @return new instance of {@link MergeOperationTarget}. + */ + public MergeOperationTarget inDatabase(String database) { + return new MergeOperationTarget(database, collection); + } + + boolean isTargetingSameDatabase() { + return !StringUtils.hasText(database); + } + + Document toDocument(AggregationOperationContext context) { + + return new Document("into", + !StringUtils.hasText(database) ? collection : new Document("db", database).append("coll", collection)); + } + } + + /** + * Value Object specifying how to deal with a result document that matches an existing document in the collection + * based on the fields of the {@code on} property describing the unique identifier. + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class WhenDocumentsMatch { + + private final Object value; + + private WhenDocumentsMatch(Object value) { + this.value = value; + } + + public static WhenDocumentsMatch whenMatchedOf(String value) { + return new WhenDocumentsMatch(value); + } + + /** + * Replace the existing document in the output collection with the matching results document. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch replaceDocument() { + return whenMatchedOf("replace"); + } + + /** + * Keep the existing document in the output collection. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch keepExistingDocument() { + return whenMatchedOf("keepExisting"); + } + + /** + * Merge the matching documents. Please see the MongoDB reference documentation for details. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch mergeDocuments() { + return whenMatchedOf("merge"); + } + + /** + * Stop and fail the aggregation operation. Does not revert already performed changes on previous documents. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch failOnMatch() { + return whenMatchedOf("fail"); + } + + /** + * Use an {@link Aggregation} to update the document in the collection. Please see the MongoDB reference + * documentation for details. + * + * @param aggregation must not be {@literal null}. + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch updateWith(Aggregation aggregation) { + return new WhenDocumentsMatch(aggregation); + } + + /** + * Use an aggregation pipeline to update the document in the collection. Please see the MongoDB reference + * documentation for details. + * + * @param aggregationPipeline must not be {@literal null}. + * @return new instance of {@link WhenDocumentsMatch}. 
+ */ + public static WhenDocumentsMatch updateWith(List<Document> aggregationPipeline) { + return new WhenDocumentsMatch(aggregationPipeline); + } + + Document toDocument(AggregationOperationContext context) { + + if (value instanceof Aggregation aggregation) { + return new Document("whenMatched", aggregation.toPipeline(context)); + } + + return new Document("whenMatched", value); + } + } + + /** + * Value Object specifying how to deal with a result document that does not match an existing document in the collection + * based on the fields of the {@code on} property describing the unique identifier. + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class WhenDocumentsDontMatch { + + private final String value; + + private WhenDocumentsDontMatch(String value) { + + Assert.notNull(value, "Value must not be null"); + + this.value = value; + } + + /** + * Factory method creating {@link WhenDocumentsDontMatch} from a {@code value} literal. + * + * @param value must not be {@literal null}. + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch whenNotMatchedOf(String value) { + return new WhenDocumentsDontMatch(value); + } + + /** + * Insert the document into the output collection. + * + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch insertNewDocument() { + return whenNotMatchedOf("insert"); + } + + /** + * Discard the document - do not insert the document into the output collection. + * + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch discardDocument() { + return whenNotMatchedOf("discard"); + } + + /** + * Stop and fail the aggregation operation. Does not revert already performed changes on previous documents. + * + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch failWhenNotMatch() { + return whenNotMatchedOf("fail"); + } + + public Document toDocument(AggregationOperationContext context) { + return new Document("whenNotMatched", value); + } + } + + /** + * Builder API to construct a {@link MergeOperation}. + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class MergeOperationBuilder { + + private String collection; + private @Nullable String database; + private UniqueMergeId id = UniqueMergeId.id(); + private @Nullable Let let; + private @Nullable WhenDocumentsMatch whenMatched; + private @Nullable WhenDocumentsDontMatch whenNotMatched; + + public MergeOperationBuilder() {} + + /** + * Required output collection name to store results to. + * + * @param collection must not be {@literal null} nor empty. + * @return this. + */ + public MergeOperationBuilder intoCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + this.collection = collection; + return this; + } + + /** + * Optionally define a target database if different from the current one. + * + * @param database must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder inDatabase(String database) { + + this.database = database; + return this; + } + + /** + * Define the target to store results in. + * + * @param into must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder into(MergeOperationTarget into) { + + this.database = into.database; + this.collection = into.collection; + return this; + } + + /** + * Define the target to store results in. + * + * @param target must not be {@literal null}. 
+ * @return this. + */ + public MergeOperationBuilder target(MergeOperationTarget target) { + return into(target); + } + + /** + * Appends a single field or multiple fields that act as a unique identifier for a document. The identifier + * determines if a results document matches an already existing document in the output collection.
                    + * The aggregation results documents must contain the field(s) specified via {@code on}, unless it's the {@code _id} + * field. + * + * @param fields must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder on(String... fields) { + return id(UniqueMergeId.ofIdFields(fields)); + } + + /** + * Set the identifier that determines if a results document matches an already existing document in the output + * collection. + * + * @param id must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder id(UniqueMergeId id) { + + this.id = id; + return this; + } + + /** + * Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update + * aggregation}. + * + * @param let the variable expressions + * @return this. + */ + public MergeOperationBuilder let(Let let) { + + this.let = let; + return this; + } + + /** + * Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update + * aggregation}. + * + * @param let the variable expressions + * @return this. + */ + public MergeOperationBuilder exposeVariablesOf(Let let) { + return let(let); + } + + /** + * The action to take place when documents already exist in the target collection. + * + * @param whenMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenMatched(WhenDocumentsMatch whenMatched) { + + this.whenMatched = whenMatched; + return this; + } + + /** + * The action to take place when documents already exist in the target collection. + * + * @param whenMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenDocumentsMatch(WhenDocumentsMatch whenMatched) { + return whenMatched(whenMatched); + } + + /** + * The {@link Aggregation action} to take place when documents already exist in the target collection. + * + * @param aggregation must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenDocumentsMatchApply(Aggregation aggregation) { + return whenMatched(WhenDocumentsMatch.updateWith(aggregation)); + } + + /** + * The action to take place when documents do not already exist in the target collection. + * + * @param whenNotMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenNotMatched(WhenDocumentsDontMatch whenNotMatched) { + + this.whenNotMatched = whenNotMatched; + return this; + } + + /** + * The action to take place when documents do not already exist in the target collection. + * + * @param whenNotMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenDocumentsDontMatch(WhenDocumentsDontMatch whenNotMatched) { + return whenNotMatched(whenNotMatched); + } + + /** + * @return new instance of {@link MergeOperation}. 
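A sketch of the builder inside a complete pipeline, merging grouped results on {@code _id} (collection and field names hypothetical):

    Aggregation aggregation = Aggregation.newAggregation(
        Aggregation.group("fiscalYear").sum("budget").as("budget"),
        MergeOperation.builder()
            .intoCollection("quarterlyReport")
            .on("_id")
            .whenMatched(WhenDocumentsMatch.mergeDocuments())
            .whenNotMatched(WhenDocumentsDontMatch.insertNewDocument())
            .build());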
+ */ + public MergeOperation build() { + return new MergeOperation(new MergeOperationTarget(database, collection), id, let, whenMatched, whenNotMatched); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java new file mode 100644 index 0000000000..c553a7be02 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java @@ -0,0 +1,101 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collection; + +import org.bson.Document; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExpressionFieldReference; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.util.Assert; + +/** + * {@link AggregationOperationContext} that delegates {@link FieldReference} resolution and mapping to a parent one, but + * assures {@link FieldReference} get converted into {@link ExpressionFieldReference} using {@code $$} to ref an inner + * variable. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 1.10 + */ +class NestedDelegatingExpressionAggregationOperationContext implements AggregationOperationContext { + + private final AggregationOperationContext delegate; + private final Collection inners; + + /** + * Creates new {@link NestedDelegatingExpressionAggregationOperationContext}. + * + * @param referenceContext must not be {@literal null}. + */ + NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext, + Collection inners) { + + Assert.notNull(referenceContext, "Reference context must not be null"); + this.delegate = referenceContext; + this.inners = inners; + } + + @Override + public Document getMappedObject(Document document) { + return delegate.getMappedObject(document); + } + + @Override + public Document getMappedObject(Document document, Class type) { + return delegate.getMappedObject(document, type); + } + + @Override + public FieldReference getReference(Field field) { + + FieldReference reference = delegate.getReference(field); + return isInnerVariableReference(field) ? 
new ExpressionFieldReference(delegate.getReference(field)) : reference; + } + + private boolean isInnerVariableReference(Field field) { + + if (inners.isEmpty()) { + return false; + } + + for (Field inner : inners) { + if (inner.getName().equals(field.getName()) + || (field.getTarget().contains(".") && field.getTarget().startsWith(inner.getName()))) { + return true; + } + } + + return false; + } + + @Override + public FieldReference getReference(String name) { + return new ExpressionFieldReference(delegate.getReference(name)); + } + + @Override + public Fields getFields(Class type) { + return delegate.getFields(type); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ObjectOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ObjectOperators.java new file mode 100644 index 0000000000..25189241b7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ObjectOperators.java @@ -0,0 +1,532 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * Gateway for + * object + * expression operators. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ObjectOperators { + + /** + * Take the value referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ObjectOperatorFactory}. + */ + public static ObjectOperatorFactory valueOf(String fieldReference) { + return new ObjectOperatorFactory(Fields.field(fieldReference)); + } + + /** + * Take the value provided by the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ObjectOperatorFactory}. + */ + public static ObjectOperatorFactory valueOf(AggregationExpression expression) { + return new ObjectOperatorFactory(expression); + } + + /** + * Use the value from the given {@link SystemVariable} as input for the target {@link AggregationExpression expression}. + * + * @param variable the {@link SystemVariable} to use (eg. {@link SystemVariable#ROOT}. + * @return new instance of {@link ObjectOperatorFactory}. + * @since 4.2 + */ + public static ObjectOperatorFactory valueOf(SystemVariable variable) { + return new ObjectOperatorFactory(Fields.field(variable.getName(), variable.getTarget())); + } + + /** + * Get the value of the field with given name from the {@literal $$CURRENT} object. + * Short version for {@code ObjectOperators.valueOf("$$CURRENT").getField(fieldName)}. + * + * @param fieldName the field name. 
+ * @return new instance of {@link AggregationExpression}. + * @since 4.2 + */ + public static AggregationExpression getValueOf(String fieldName) { + return new ObjectOperatorFactory(SystemVariable.CURRENT).getField(fieldName); + } + + /** + * Set the value of the field with given name on the {@literal $$CURRENT} object. + * Short version for {@code ObjectOperators.valueOf($$CURRENT).setField(fieldName).toValue(value)}. + * + * @param fieldName the field name. + * @return new instance of {@link AggregationExpression}. + * @since 4.2 + */ + public static AggregationExpression setValueTo(String fieldName, Object value) { + return new ObjectOperatorFactory(SystemVariable.CURRENT).setField(fieldName).toValue(value); + } + + /** + * @author Christoph Strobl + */ + public static class ObjectOperatorFactory { + + private final Object value; + + /** + * Creates new {@link ObjectOperatorFactory} for given {@literal value}. + * + * @param value must not be {@literal null}. + */ + public ObjectOperatorFactory(Object value) { + + Assert.notNull(value, "Value must not be null"); + + this.value = value; + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and uses + * {@literal $mergeObjects} as an accumulator within the {@literal $group} stage.
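A sketch of the {@literal $$CURRENT} shortcuts above and of {@code merge()} used as a {@code $group} accumulator (field names hypothetical):

    AggregationExpression price = ObjectOperators.getValueOf("price"); // $getField on $$CURRENT
    GroupOperation grouped = Aggregation.group("region")
        .and("mergedInfo", ObjectOperators.valueOf("customerInfo").merge()); // $mergeObjects accumulator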
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects merge() { + return MergeObjects.merge(value); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and combines it with the + * given values (documents or mapped objects) into a single document.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWith(Object... values) { + return merge().mergeWith(values); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and combines it with the + * values of the given {@link Field field references} into a single document.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(String... fieldReferences) { + return merge().mergeWithValuesOf(fieldReferences); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and combines it with the + * result values of the given {@link Aggregation expressions} into a single document.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(AggregationExpression... expression) { + return merge().mergeWithValuesOf(expression); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the associated value and converts it to an + * array of {@link Document documents} that contain two fields {@literal k} and {@literal v} each.
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @since 2.1 + */ + public ObjectToArray toArray() { + return ObjectToArray.toArray(value); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the associated value and obtains the value of the + * field with matching name. + * + * @since 4.0 + */ + public GetField getField(String fieldName) { + return GetField.getField(Fields.field(fieldName)).of(value); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the associated value and obtains the value of the + * field with matching name. + * + * @since 4.0 + */ + public SetField setField(String fieldName) { + return SetField.field(Fields.field(fieldName)).input(value); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the associated value and obtains the value of the + * field with matching name. + * + * @since 4.0 + */ + public AggregationExpression removeField(String fieldName) { + return SetField.field(fieldName).input(value).toValue(SystemVariable.REMOVE); + } + } + + /** + * {@link AggregationExpression} for {@code $mergeObjects} that combines multiple documents into a single document. + *
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/mergeObjects/ + * @since 2.1 + */ + public static class MergeObjects extends AbstractAggregationExpression { + + private MergeObjects(Object value) { + super(value); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes given values and combines them into a single + * document.
                    + * + * @param values must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public static MergeObjects merge(Object... values) { + return new MergeObjects(Arrays.asList(values)); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the given {@link Field field references} and + * combines them into a single document. + * + * @param fieldReferences must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public static MergeObjects mergeValuesOf(String... fieldReferences) { + return merge(Arrays.stream(fieldReferences).map(Fields::field).toArray()); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the result of the given {@link Aggregation + * expressions} and combines them into a single document. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public static MergeObjects mergeValuesOf(AggregationExpression... expressions) { + return merge(expressions); + } + + /** + * Creates new {@link MergeObjects aggregation expression} by adding the given {@link Field field references}. + * + * @param fieldReferences must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(String... fieldReferences) { + return mergeWith(Arrays.stream(fieldReferences).map(Fields::field).toArray()); + } + + /** + * Creates new {@link MergeObjects aggregation expression} by adding the given {@link AggregationExpression + * expressions}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(AggregationExpression... expression) { + return mergeWith(expression); + } + + /** + * Creates new {@link MergeObjects aggregation expression} by adding the given values (documents or mapped objects). + * + * @param values must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWith(Object... values) { + return new MergeObjects(append(Arrays.asList(values))); + } + + @Override + public Document toDocument(Object value, AggregationOperationContext context) { + return super.toDocument(potentiallyExtractSingleValue(value), context); + } + + @SuppressWarnings("unchecked") + private Object potentiallyExtractSingleValue(Object value) { + + if (value instanceof Collection collection && collection.size() == 1) { + return collection.iterator().next(); + } + return value; + } + + @Override + protected String getMongoMethod() { + return "$mergeObjects"; + } + } + + /** + * {@link AggregationExpression} for {@code $objectToArray} that converts a document to an array of {@link Document + * documents} that each contains two fields {@literal k} and {@literal v}.
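A sketch turning a subdocument into its key/value pairs (the field name "attributes" is hypothetical):

    // renders as { $objectToArray: "$attributes" }
    AggregationExpression kv = ObjectOperators.valueOf("attributes").toArray();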
                    + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/objectToArray/ + * @since 2.1 + */ + public static class ObjectToArray extends AbstractAggregationExpression { + + private ObjectToArray(Object value) { + super(value); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the value pointed to by given {@link Field + * fieldReference} and converts it to an array. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ObjectToArray}. + */ + public static ObjectToArray valueOfToArray(String fieldReference) { + return toArray(Fields.field(fieldReference)); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the result value of the given + * {@link AggregationExpression expression} and converts it to an array. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ObjectToArray}. + */ + public static ObjectToArray valueOfToArray(AggregationExpression expression) { + return toArray(expression); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the given value and converts it to an array. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ObjectToArray}. + */ + public static ObjectToArray toArray(Object value) { + return new ObjectToArray(value); + } + + @Override + protected String getMongoMethod() { + return "$objectToArray"; + } + } + + /** + * {@link AggregationExpression} for {@code $getField}. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class GetField extends AbstractAggregationExpression { + + protected GetField(Object value) { + super(value); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given {@code fieldName}. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public static GetField getField(String fieldName) { + return new GetField(Collections.singletonMap("field", fieldName)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given {@link Field}. + * + * @param field must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public static GetField getField(Field field) { + return new GetField(Collections.singletonMap("field", field)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given + * {@code field reference}. + * + * @param fieldRef must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public GetField of(String fieldRef) { + return of(Fields.field(fieldRef)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link GetField}. 
+ */ + public GetField of(AggregationExpression expression) { + return of((Object) expression); + } + + private GetField of(Object fieldRef) { + return new GetField(append("input", fieldRef)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (isArgumentMap() && get("field") instanceof Field field) { + return new GetField(append("field", context.getReference(field).getRaw())).toDocument(context); + } + return super.toDocument(context); + } + + @Override + protected String getMongoMethod() { + return "$getField"; + } + } + + /** + * {@link AggregationExpression} for {@code $setField}. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class SetField extends AbstractAggregationExpression { + + protected SetField(Object value) { + super(value); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@code fieldName}. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public static SetField field(String fieldName) { + return new SetField(Collections.singletonMap("field", fieldName)); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input {@link Field}. + * + * @param field must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public static SetField field(Field field) { + return new SetField(Collections.singletonMap("field", field)); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@code field reference}. + * + * @param fieldRef must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField input(String fieldRef) { + return input(Fields.field(fieldRef)); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField input(AggregationExpression expression) { + return input((Object) expression); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@code field reference}. + * + * @param fieldRef must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + private SetField input(Object fieldRef) { + return new SetField(append("input", fieldRef)); + } + + /** + * Creates new {@link SetField aggregation expression} providing the {@code value} using {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField toValueOf(String fieldReference) { + return toValue(Fields.field(fieldReference)); + } + + /** + * Creates new {@link SetField aggregation expression} providing the {@code value} using + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField toValueOf(AggregationExpression expression) { + return toValue(expression); + } + + /** + * Creates new {@link SetField aggregation expression} providing the {@code value}. + * + * @param value the value to set. + * @return new instance of {@link SetField}.
+ */ + public SetField toValue(Object value) { + return new SetField(append("value", value)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + if(get("field") instanceof Field field) { + return new SetField(append("field", context.getReference(field).getRaw())).toDocument(context); + } + return super.toDocument(context); + } + + @Override + protected String getMongoMethod() { + return "$setField"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java new file mode 100644 index 0000000000..51520f0868 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java @@ -0,0 +1,254 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Encapsulates the {@code $out}-operation. + *

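+ * A minimal usage sketch (stage arguments and the collection name are illustrative only):
+ *
+ * newAggregation(group("customerId").count().as("total"), out("customerTotals"))
+ *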
+ * We recommend using the static factory method {@link Aggregation#out(String)} instead of creating instances of this + * class directly. + * + * @author Nikolay Bogdanov + * @author Christoph Strobl + * @see MongoDB Aggregation Framework: + * $out + */ +public class OutOperation implements AggregationOperation { + + private final @Nullable String databaseName; + private final String collectionName; + private final @Nullable Document uniqueKey; + private final @Nullable OutMode mode; + + /** + * @param outCollectionName Collection name to export the results. Must not be {@literal null}. + */ + public OutOperation(String outCollectionName) { + this(null, outCollectionName, null, null); + } + + /** + * @param databaseName Optional database name the target collection is located in. Can be {@literal null}. + * @param collectionName Collection name to export the results. Must not be {@literal null}. + * @param uniqueKey Optional unique key spec to identify a document in the target collection for replacement or merge. + * @param mode The mode for merging the aggregation pipeline output with the target collection. Can be + * {@literal null}. + * @since 2.2 + */ + private OutOperation(@Nullable String databaseName, String collectionName, @Nullable Document uniqueKey, + @Nullable OutMode mode) { + + Assert.notNull(collectionName, "Collection name must not be null"); + + this.databaseName = databaseName; + this.collectionName = collectionName; + this.uniqueKey = uniqueKey; + this.mode = mode; + } + + /** + * Optionally specify the database of the target collection.
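+ * For example (names illustrative), {@code out("monthlyTotals").in("reporting")} writes the results to the
+ * {@code monthlyTotals} collection of the {@code reporting} database.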
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param database can be {@literal null}. Defaults to the aggregation target database. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation in(@Nullable String database) { + return new OutOperation(database, collectionName, uniqueKey, mode); + } + + /** + * Optionally specify the field that uniquely identifies a document in the target collection.
+ * For convenience, the given {@literal key} can either be a single field name or the JSON representation of a key + * {@link Document}. + * + *

                    +	 *
                    +	 * // {
                    +	 * //    "field-1" : 1
                    +	 * // }
                    +	 * .uniqueKey("field-1")
                    +	 *
                    +	 * // {
                    +	 * //    "field-1" : 1,
                    +	 * //    "field-2" : 1
                    +	 * // }
                    +	 * .uniqueKey("{ 'field-1' : 1, 'field-2' : 1}")
                    +	 * 
+ * + * NOTE: Requires MongoDB 4.2 or later. + * + * @param key can be {@literal null}. Server uses {@literal _id} when {@literal null}. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation uniqueKey(@Nullable String key) { + + Document uniqueKey = key == null ? null : BsonUtils.toDocumentOrElse(key, it -> new Document(it, 1)); + return new OutOperation(databaseName, collectionName, uniqueKey, mode); + } + + /** + * Optionally specify the fields that uniquely identify a document in the target collection.
                    + * + *
                    +	 *
                    +	 * // {
                    +	 * //    "field-1" : 1
                    +	 * //    "field-2" : 1
                    +	 * // }
                    +	 * .uniqueKeyOf(Arrays.asList("field-1", "field-2"))
                    +	 * 
+ * + * NOTE: Requires MongoDB 4.2 or later. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation uniqueKeyOf(Iterable<String> fields) { + + Assert.notNull(fields, "Fields must not be null"); + + Document uniqueKey = new Document(); + fields.forEach(it -> uniqueKey.append(it, 1)); + + return new OutOperation(databaseName, collectionName, uniqueKey, mode); + } + + /** + * Specify how to merge the aggregation output with the target collection.
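+ * For example (collection name illustrative), {@code out("target").mode(OutMode.REPLACE)} replaces matching
+ * documents rather than the entire collection.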
                    + * NOTE: Requires MongoDB 4.2 or later. + * + * @param mode must not be {@literal null}. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation mode(OutMode mode) { + + Assert.notNull(mode, "Mode must not be null"); + return new OutOperation(databaseName, collectionName, uniqueKey, mode); + } + + /** + * Replace the target collection.
                    + * NOTE: Requires MongoDB 4.2 or later. + * + * @return new instance of {@link OutOperation}. + * @see OutMode#REPLACE_COLLECTION + * @since 2.2 + */ + public OutOperation replaceCollection() { + return mode(OutMode.REPLACE_COLLECTION); + } + + /** + * Replace/Upsert documents in the target collection.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @return new instance of {@link OutOperation}. + * @see OutMode#REPLACE + * @since 2.2 + */ + public OutOperation replaceDocuments() { + return mode(OutMode.REPLACE); + } + + /** + * Insert documents into the target collection.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @return new instance of {@link OutOperation}. + * @see OutMode#INSERT + * @since 2.2 + */ + public OutOperation insertDocuments() { + return mode(OutMode.INSERT); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (!requiresMongoDb42Format()) { + return new Document("$out", collectionName); + } + + Assert.state(mode != null, "Mode must not be null"); + + Document $out = new Document("to", collectionName) // + .append("mode", mode.getMongoMode()); + + if (StringUtils.hasText(databaseName)) { + $out.append("db", databaseName); + } + + if (uniqueKey != null) { + $out.append("uniqueKey", uniqueKey); + } + + return new Document(getOperator(), $out); + } + + @Override + public String getOperator() { + return "$out"; + } + + private boolean requiresMongoDb42Format() { + return StringUtils.hasText(databaseName) || mode != null || uniqueKey != null; + } + + /** + * The mode for merging the aggregation pipeline output. + * + * @author Christoph Strobl + * @since 2.2 + */ + public enum OutMode { + + /** + * Write documents to the target collection; errors if a document with the same uniqueKey already exists. + */ + INSERT("insertDocuments"), + + /** + * Update any document in the target collection that has the same uniqueKey. + */ + REPLACE("replaceDocuments"), + + /** + * Replaces the target collection with the output of the aggregation pipeline; the target collection cannot be + * located in a different database. + */ + REPLACE_COLLECTION("replaceCollection"); + + private final String mode; + + OutMode(String mode) { + this.mode = mode; + } + + public String getMongoMode() { + return mode; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java new file mode 100644 index 0000000000..9524171fed --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java @@ -0,0 +1,146 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.lang.Nullable; + +/** + * {@link AggregationOperationContext} implementation prefixing non-command keys on root level with the given prefix. + * Useful when mapping fields to domain specific types while having to prefix keys for query purposes. + *
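+ * For example, with prefix {@code user} a mapped key {@code name} is rendered as {@code user.name}, while command
+ * keys such as {@code $set} remain untouched.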
+ * Fields to be excluded from prefixing may be added to a {@literal denylist}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class PrefixingDelegatingAggregationOperationContext implements AggregationOperationContext { + + private final AggregationOperationContext delegate; + private final String prefix; + private final Set<String> denylist; + + public PrefixingDelegatingAggregationOperationContext(AggregationOperationContext delegate, String prefix) { + this(delegate, prefix, Collections.emptySet()); + } + + public PrefixingDelegatingAggregationOperationContext(AggregationOperationContext delegate, String prefix, + Collection<String> denylist) { + + this.delegate = delegate; + this.prefix = prefix; + this.denylist = new HashSet<>(denylist); + } + + @Override + public Document getMappedObject(Document document) { + return doPrefix(delegate.getMappedObject(document)); + } + + @Override + public Document getMappedObject(Document document, @Nullable Class<?> type) { + return doPrefix(delegate.getMappedObject(document, type)); + } + + @Override + public FieldReference getReference(Field field) { + return delegate.getReference(field); + } + + @Override + public FieldReference getReference(String name) { + return delegate.getReference(name); + } + + @Override + public Fields getFields(Class<?> type) { + return delegate.getFields(type); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } + + @SuppressWarnings("unchecked") + private Document doPrefix(Document source) { + + Document result = new Document(); + for (Map.Entry<String, Object> entry : source.entrySet()) { + + String key = prefixKey(entry.getKey()); + Object value = entry.getValue(); + + if (entry.getValue() instanceof Collection) { + + Collection<?> sourceCollection = (Collection<?>) entry.getValue(); + value = prefixCollection(sourceCollection); + } + + result.append(key, value); + } + return result; + } + + private String prefixKey(String key) { + return (key.startsWith("$") || isDenied(key)) ? key : (prefix + "." + key); + } + + private Object prefixCollection(Collection<?> sourceCollection) { + + List<Object> prefixed = new ArrayList<>(sourceCollection.size()); + + for (Object o : sourceCollection) { + if (o instanceof Document document) { + prefixed.add(doPrefix(document)); + } else { + prefixed.add(o); + } + } + + return prefixed; + } + + private boolean isDenied(String key) { + + if (denylist.contains(key)) { + return true; + } + + if (!key.contains(".")) { + return false; + } + + for (String denied : denylist) { + if (key.startsWith(denied + ".")) { + return true; + } + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java index 68947352af..35db2214f5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,17 +17,21 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Encapsulates the aggregation framework {@code $project}-operation. *

                    @@ -36,19 +40,21 @@ *

* We recommend to use the static factory method {@link Aggregation#project(Fields)} instead of creating instances of * this class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/project/ + * * @author Tobias Trelle * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 + * @see MongoDB Aggregation Framework: + * $project */ public class ProjectionOperation implements FieldsExposingAggregationOperation { private static final List<Projection> NONE = Collections.emptyList(); - private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed. Projections by the mongodb " - + "aggregation framework only support the exclusion of the %s field!"; + private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed; Projections by the mongodb" + + " aggregation framework only support the exclusion of the %s field"; private final List<Projection> projections; @@ -61,24 +67,34 @@ public ProjectionOperation() { /** * Creates a new {@link ProjectionOperation} including the given {@link Fields}. - * + * * @param fields must not be {@literal null}. */ public ProjectionOperation(Fields fields) { this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields)); } + /** + * Creates a new {@link ProjectionOperation} including all top level fields of the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @since 2.2 + */ + public ProjectionOperation(Class<?> type) { + this(NONE, Collections.singletonList(new TypeProjection(type))); + } + /** * Copy constructor to allow building up {@link ProjectionOperation} instances from already existing * {@link Projection}s. - * + * * @param current must not be {@literal null}. * @param projections must not be {@literal null}. */ private ProjectionOperation(List<Projection> current, List<Projection> projections) { - Assert.notNull(current, "Current projections must not be null!"); - Assert.notNull(projections, "Projections must not be null!"); + Assert.notNull(current, "Current projections must not be null"); + Assert.notNull(projections, "Projections must not be null"); this.projections = new ArrayList<Projection>(current.size() + projections.size()); this.projections.addAll(current); @@ -87,31 +103,31 @@ private ProjectionOperation(List<Projection> current, List<Projection> projections) { - List<Projection> projections = this.projections.isEmpty() ? Collections.<Projection> emptyList() : this.projections - .subList(0, this.projections.size() - 1); - return new ProjectionOperation(projections, Arrays.asList(projection)); + List<Projection> projections = this.projections.isEmpty() ? Collections.<Projection> emptyList() + : this.projections.subList(0, this.projections.size() - 1); + return new ProjectionOperation(projections, Collections.singletonList(projection)); } /** * Creates a new {@link ProjectionOperationBuilder} to define a projection for the field with the given name. - * + * * @param name must not be {@literal null} or empty. * @return */ @@ -129,24 +145,19 @@ public ProjectionOperationBuilder and(AggregationExpression expression) { /** * Excludes the given fields from the projection. - * + * * @param fieldNames must not be {@literal null}. * @return */ public ProjectionOperation andExclude(String...
fieldNames) { - for (String fieldName : fieldNames) { - Assert.isTrue(Fields.UNDERSCORE_ID.equals(fieldName), - String.format(EXCLUSION_ERROR, fieldName, Fields.UNDERSCORE_ID)); - } - List excludeProjections = FieldProjection.from(Fields.fields(fieldNames), false); return new ProjectionOperation(this.projections, excludeProjections); } /** * Includes the given fields into the projection. - * + * * @param fieldNames must not be {@literal null}. * @return */ @@ -158,7 +169,7 @@ public ProjectionOperation andInclude(String... fieldNames) { /** * Includes the given fields into the projection. - * + * * @param fields must not be {@literal null}. * @return */ @@ -166,10 +177,48 @@ public ProjectionOperation andInclude(Fields fields) { return new ProjectionOperation(this.projections, FieldProjection.from(fields, true)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() + /** + * Includes the current {@link ProjectionOperation} as an array with given name.
                    + * If you want to specify array values directly use {@link #andArrayOf(Object...)}. + * + * @param name the target property name. + * @return new instance of {@link ProjectionOperation}. + * @since 2.2 */ + public ProjectionOperation asArray(String name) { + + return new ProjectionOperation(Collections.emptyList(), + Collections.singletonList(new ArrayProjection(Fields.field(name), (List) this.projections))); + } + + /** + * Includes the given values ({@link Field field references}, {@link AggregationExpression expression}, plain values) + * as an array.
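+ * For example (field name and values illustrative): {@code project().andArrayOf(Fields.field("price"), 2, 3)}.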
+ * The target property name needs to be set via {@link ArrayProjectionOperationBuilder#as(String)}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link ArrayProjectionOperationBuilder}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + * @since 2.2 + */ + public ArrayProjectionOperationBuilder andArrayOf(Object... values) { + + ArrayProjectionOperationBuilder builder = new ArrayProjectionOperationBuilder(this); + + for (Object value : values) { + + if (value instanceof Field field) { + builder.and(field); + } else if (value instanceof AggregationExpression aggregationExpression) { + builder.and(aggregationExpression); + } else { + builder.and(value); + } + } + + return builder; + } + @Override public ExposedFields getFields() { @@ -180,28 +229,37 @@ public ExposedFields getFields() { fields = fields == null ? ExposedFields.from(field) : fields.and(field); } - return fields; + return fields != null ? fields : ExposedFields.empty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { + public boolean inheritsFields() { - BasicDBObject fieldObject = new BasicDBObject(); + return projections.stream().filter(FieldProjection.class::isInstance) // + .map(FieldProjection.class::cast) // + .anyMatch(FieldProjection::isExcluded); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document fieldObject = new Document(); for (Projection projection : projections) { - fieldObject.putAll(projection.toDBObject(context)); + fieldObject.putAll(projection.toDocument(context)); } - return new BasicDBObject("$project", fieldObject); + return new Document(getOperator(), fieldObject); + } + + @Override + public String getOperator() { + return "$project"; } /** * Base class for {@link ProjectionOperationBuilder}s. - * + * * @author Thomas Darimont */ private static abstract class AbstractProjectionOperationBuilder implements AggregationOperation { @@ -211,40 +269,54 @@ private static abstract class AbstractProjectionOperationBuilder implements Aggr /** * Creates a new {@link AbstractProjectionOperationBuilder} for the given value and {@link ProjectionOperation}. - * + * * @param value must not be {@literal null}. * @param operation must not be {@literal null}. */ public AbstractProjectionOperationBuilder(Object value, ProjectionOperation operation) { - Assert.notNull(value, "value must not be null or empty!"); - Assert.notNull(operation, "ProjectionOperation must not be null!"); + Assert.notNull(value, "value must not be null or empty"); + Assert.notNull(operation, "ProjectionOperation must not be null"); this.value = value; this.operation = operation; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { - return this.operation.toDBObject(context); + public Document toDocument(AggregationOperationContext context) { + return this.operation.toDocument(context); } /** * Returns the {@link ProjectionOperation} to be finally applied with the given alias. - * + * * @param alias will never be {@literal null} or empty.
* @return */ public abstract ProjectionOperation as(String alias); + + /** + * Apply a conditional projection using {@link Cond}. + * + * @param cond must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public abstract ProjectionOperation applyCondition(Cond cond); + + /** + * Apply a conditional value replacement for {@literal null} values using {@link IfNull}. + * + * @param ifNull must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public abstract ProjectionOperation applyCondition(IfNull ifNull); } /** * A {@link ProjectionOperationBuilder} that is used for SpEL expression based projections. - * + * * @author Thomas Darimont */ public static class ExpressionProjectionOperationBuilder extends ProjectionOperationBuilder { @@ -255,10 +327,10 @@ public static class ExpressionProjectionOperationBuilder extends ProjectionOpera /** * Creates a new {@link ExpressionProjectionOperationBuilder} for the given value, {@link ProjectionOperation} and * parameters. - * + * * @param expression must not be {@literal null}. * @param operation must not be {@literal null}. - * @param parameters + * @param parameters must not be {@literal null}. */ public ExpressionProjectionOperationBuilder(String expression, ProjectionOperation operation, Object[] parameters) { @@ -267,9 +339,6 @@ public ExpressionProjectionOperationBuilder(String expression, ProjectionOperati this.params = parameters.clone(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder#project(java.lang.String, java.lang.Object[]) - */ @Override public ProjectionOperationBuilder project(String operation, final Object... values) { @@ -278,7 +347,7 @@ public ProjectionOperationBuilder project(String operation, final Object... valu @Override protected List<Object> getOperationArguments(AggregationOperationContext context) { - List<Object> result = new ArrayList<Object>(values.length + 1); + List<Object> result = new ArrayList<>(values.length + 1); result.add(ExpressionProjection.toMongoExpression(context, ExpressionProjectionOperationBuilder.this.expression, ExpressionProjectionOperationBuilder.this.params)); result.addAll(Arrays.asList(values)); @@ -290,10 +359,6 @@ protected List<Object> getOperationArguments(AggregationOperationContext context return new ProjectionOperationBuilder(value, this.operation.and(operationProjection), operationProjection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.AbstractProjectionOperationBuilder#as(java.lang.String) - */ @Override public ProjectionOperation as(String alias) { @@ -303,7 +368,7 @@ public ProjectionOperation as(String alias) { /** * A {@link Projection} based on a SpEL expression. - * + * * @author Thomas Darimont * @author Oliver Gierke */ @@ -316,7 +381,7 @@ static class ExpressionProjection extends Projection { /** * Creates a new {@link ExpressionProjection} for the given field, SpEL expression and parameters. - * + * * @param field must not be {@literal null}. * @param expression must not be {@literal null} or empty. * @param parameters must not be {@literal null}.
@@ -325,23 +390,20 @@ public ExpressionProjection(Field field, String expression, Object[] parameters) super(field); - Assert.hasText(expression, "Expression must not be null!"); - Assert.notNull(parameters, "Parameters must not be null!"); + Assert.hasText(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); this.expression = expression; this.params = parameters.clone(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject(getExposedField().getName(), toMongoExpression(context, expression, params)); + public Document toDocument(AggregationOperationContext context) { + return new Document(getExposedField().getName(), toMongoExpression(context, expression, params)); } - protected static Object toMongoExpression(AggregationOperationContext context, String expression, Object[] params) { + protected static Object toMongoExpression(AggregationOperationContext context, String expression, + Object[] params) { return TRANSFORMER.transform(expression, context, params); } } @@ -349,27 +411,29 @@ protected static Object toMongoExpression(AggregationOperationContext context, S /** * Builder for {@link ProjectionOperation}s on a field. - * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl */ public static class ProjectionOperationBuilder extends AbstractProjectionOperationBuilder { - private static final String NUMBER_NOT_NULL = "Number must not be null!"; - private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null!"; + private static final String NUMBER_NOT_NULL = "Number must not be null"; + private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null"; - private final String name; - private final OperationProjection previousProjection; + private final @Nullable String name; + private final @Nullable OperationProjection previousProjection; /** * Creates a new {@link ProjectionOperationBuilder} for the field with the given name on top of the given * {@link ProjectionOperation}. - * + * * @param name must not be {@literal null} or empty. * @param operation must not be {@literal null}. * @param previousProjection the previous operation projection, may be {@literal null}. */ - public ProjectionOperationBuilder(String name, ProjectionOperation operation, OperationProjection previousProjection) { + public ProjectionOperationBuilder(String name, ProjectionOperation operation, + @Nullable OperationProjection previousProjection) { super(name, operation); this.name = name; @@ -379,13 +443,13 @@ public ProjectionOperationBuilder(String name, ProjectionOperation operation, Op /** * Creates a new {@link ProjectionOperationBuilder} for the field with the given value on top of the given * {@link ProjectionOperation}. - * + * * @param value * @param operation * @param previousProjection */ protected ProjectionOperationBuilder(Object value, ProjectionOperation operation, - OperationProjection previousProjection) { + @Nullable OperationProjection previousProjection) { super(value, operation); @@ -396,29 +460,29 @@ protected ProjectionOperationBuilder(Object value, ProjectionOperation operation /** * Projects the result of the previous operation onto the current field. 
Will automatically add an exclusion for * {@code _id} as what would be held in it by default will now go into the field just projected into. - * + * * @return */ public ProjectionOperation previousOperation() { return this.operation.andExclude(Fields.UNDERSCORE_ID) // - .and(new PreviousOperationProjection(name)); + .and(new PreviousOperationProjection(getRequiredName())); } /** * Defines a nested field binding for the current field. - * + * * @param fields must not be {@literal null}. * @return */ public ProjectionOperation nested(Fields fields) { - return this.operation.and(new NestedFieldProjection(name, fields)); + return this.operation.and(new NestedFieldProjection(getRequiredName(), fields)); } /** * Allows to specify an alias for the previous projection operation. - * - * @param string + * + * @param alias * @return */ @Override @@ -429,15 +493,29 @@ public ProjectionOperation as(String alias) { } if (value instanceof AggregationExpression) { - return this.operation.and(new ExpressionProjection(Fields.field(alias), (AggregationExpression) value)); + return this.operation.and(new ExpressionProjection(Fields.field(alias, alias), (AggregationExpression) value)); } - return this.operation.and(new FieldProjection(Fields.field(alias, name), null)); + return this.operation.and(new FieldProjection(Fields.field(alias, getRequiredName()), null)); + } + + @Override + public ProjectionOperation applyCondition(Cond cond) { + + Assert.notNull(cond, "ConditionalOperator must not be null"); + return this.operation.and(new ExpressionProjection(Fields.field(getRequiredName()), cond)); + } + + @Override + public ProjectionOperation applyCondition(IfNull ifNull) { + + Assert.notNull(ifNull, "IfNullOperator must not be null"); + return this.operation.and(new ExpressionProjection(Fields.field(getRequiredName()), ifNull)); } /** * Generates an {@code $add} expression that adds the given number to the previously mentioned field. - * + * * @param number * @return */ @@ -449,32 +527,32 @@ public ProjectionOperationBuilder plus(Number number) { /** * Generates an {@code $add} expression that adds the value of the given field to the previously mentioned field. - * + * * @param fieldReference * @return */ public ProjectionOperationBuilder plus(String fieldReference) { - Assert.notNull(fieldReference, "Field reference must not be null!"); + Assert.notNull(fieldReference, "Field reference must not be null"); return project("add", Fields.field(fieldReference)); } /** * Generates an {@code $subtract} expression that subtracts the given number to the previously mentioned field. - * + * * @param number * @return */ public ProjectionOperationBuilder minus(Number number) { - Assert.notNull(number, "Number must not be null!"); + Assert.notNull(number, "Number must not be null"); return project("subtract", number); } /** * Generates an {@code $subtract} expression that subtracts the value of the given field to the previously mentioned * field. - * + * * @param fieldReference * @return */ @@ -484,9 +562,23 @@ public ProjectionOperationBuilder minus(String fieldReference) { return project("subtract", Fields.field(fieldReference)); } + /** + * Generates an {@code $subtract} expression that subtracts the result of the given {@link AggregationExpression} + * from the previously mentioned field. + * + * @param expression must not be {@literal null}. 
+ * @return + * @since 1.10 + */ + public ProjectionOperationBuilder minus(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return project("subtract", expression); + } + /** * Generates an {@code $multiply} expression that multiplies the given number with the previously mentioned field. - * + * * @param number * @return */ @@ -499,7 +591,7 @@ public ProjectionOperationBuilder multiply(Number number) { /** * Generates an {@code $multiply} expression that multiplies the value of the given field with the previously * mentioned field. - * + * * @param fieldReference * @return */ @@ -509,23 +601,37 @@ public ProjectionOperationBuilder multiply(String fieldReference) { return project("multiply", Fields.field(fieldReference)); } + /** + * Generates an {@code $multiply} expression that multiplies the previously mentioned field with the result of the + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return + * @since 1.10 + */ + public ProjectionOperationBuilder multiply(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return project("multiply", expression); + } + /** * Generates an {@code $divide} expression that divides the previously mentioned field by the given number. - * + * * @param number * @return */ public ProjectionOperationBuilder divide(Number number) { Assert.notNull(number, NUMBER_NOT_NULL); - Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!"); + Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero"); return project("divide", number); } /** * Generates an {@code $divide} expression that divides the value of the given field by the previously mentioned * field. - * + * * @param fieldReference * @return */ @@ -535,24 +641,38 @@ public ProjectionOperationBuilder divide(String fieldReference) { return project("divide", Fields.field(fieldReference)); } + /** + * Generates an {@code $divide} expression that divides the value of the previously mentioned field by the result of + * the {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return + * @since 1.10 + */ + public ProjectionOperationBuilder divide(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return project("divide", expression); + } + /** * Generates an {@code $mod} expression that divides the previously mentioned field by the given number and returns * the remainder. - * + * * @param number * @return */ public ProjectionOperationBuilder mod(Number number) { Assert.notNull(number, NUMBER_NOT_NULL); - Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!"); + Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero"); return project("mod", number); } /** * Generates an {@code $mod} expression that divides the value of the given field by the previously mentioned field * and returns the remainder. - * + * * @param fieldReference * @return */ @@ -562,22 +682,607 @@ public ProjectionOperationBuilder mod(String fieldReference) { return project("mod", Fields.field(fieldReference)); } + /** + * Generates an {@code $mod} expression that divides the value of the previously mentioned field by the result of + * the {@link AggregationExpression}. + * + * @param expression must not be {@literal null}.
+ * @return + * @since 1.10 + */ + public ProjectionOperationBuilder mod(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return project("mod", expression); + } + + /** + * Generates a {@code $size} expression that returns the size of the array held by the given field.
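+ * For example (field and alias names illustrative): {@code project().and("tags").size().as("tagCount")}.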
                    + * + * @return never {@literal null}. + * @since 1.7 + */ public ProjectionOperationBuilder size() { return project("size"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Generates a {@code $cmp} expression (compare to) that compares the value of the field to a given value or field. + * + * @param compareValue compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder cmp(Object compareValue) { + return project("cmp", compareValue); + } + + /** + * Generates a {@code $eq} expression (equal) that compares the value of the field to a given value or field. + * + * @param compareValue compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder eq(Object compareValue) { + return project("eq", compareValue); + } + + /** + * Generates a {@code $gt} expression (greater than) that compares the value of the field to a given value or field. + * + * @param compareValue compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder gt(Object compareValue) { + return project("gt", compareValue); + } + + /** + * Generates a {@code $gte} expression (greater than equal) that compares the value of the field to a given value or + * field. + * + * @param compareValue compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder gte(Object compareValue) { + return project("gte", compareValue); + } + + /** + * Generates a {@code $lt} expression (less than) that compares the value of the field to a given value or field. + * + * @param compareValue compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder lt(Object compareValue) { + return project("lt", compareValue); + } + + /** + * Generates a {@code $lte} expression (less than equal) that compares the value of the field to a given value or + * field. + * + * @param compareValue the compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder lte(Object compareValue) { + return project("lte", compareValue); + } + + /** + * Generates a {@code $ne} expression (not equal) that compares the value of the field to a given value or field. + * + * @param compareValue compare value or a {@link Field} object. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder ne(Object compareValue) { + return project("ne", compareValue); + } + + /** + * Generates a {@code $slice} expression that returns a subset of the array held by the given field.
                    + * If {@literal n} is positive, $slice returns up to the first n elements in the array.
                    + * If {@literal n} is negative, $slice returns up to the last n elements in the array. + * + * @param count max number of elements. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder slice(int count) { + return project("slice", count); + } + + /** + * Generates a {@code $slice} expression that returns a subset of the array held by the given field.
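+ * For example (field and alias names illustrative), {@code project().and("comments").slice(5, 10).as("page")}
+ * projects up to five elements starting at offset ten.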
                    + * + * @param count max number of elements. Must not be negative. + * @param offset the offset within the array to start from. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder slice(int count, int offset) { + return project("slice", offset, count); + } + + /** + * Generates a {@code $filter} expression that returns a subset of the array held by the given field. + * + * @param as The variable name for the element in the input array. Must not be {@literal null}. + * @param condition The {@link AggregationExpression} that determines whether to include the element in the + * resulting array. Must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder filter(String as, AggregationExpression condition) { + return this.operation.and(ArrayOperators.Filter.filter(getRequiredName()).as(as).by(condition)); + } + + // SET OPERATORS + + /** + * Generates a {@code $setEquals} expression that compares the previously mentioned field to one or more arrays and + * returns {@literal true} if they have the same distinct elements and {@literal false} otherwise. + * + * @param arrays must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 */ + public ProjectionOperationBuilder equalsArrays(String... arrays) { + + Assert.notEmpty(arrays, "Arrays must not be null or empty"); + return project("setEquals", Fields.fields(arrays)); + } + + /** + * Generates a {@code $setIntersection} expression that takes array of the previously mentioned field and one or + * more arrays and returns an array that contains the elements that appear in every of those. + * + * @param arrays must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder intersectsArrays(String... arrays) { + + Assert.notEmpty(arrays, "Arrays must not be null or empty"); + return project("setIntersection", Fields.fields(arrays)); + } + + /** + * Generates a {@code $setUnion} expression that takes array of the previously mentioned field and one or more + * arrays and returns an array that contains the elements that appear in any of those. + * + * @param arrays must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder unionArrays(String... arrays) { + + Assert.notEmpty(arrays, "Arrays must not be null or empty"); + return project("setUnion", Fields.fields(arrays)); + } + + /** + * Generates a {@code $setDifference} expression that takes array of the previously mentioned field and returns an + * array containing the elements that do not exist in the given {@literal array}. + * + * @param array must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder differenceToArray(String array) { + + Assert.hasText(array, "Array must not be null or empty"); + return project("setDifference", Fields.fields(array)); + } + + /** + * Generates a {@code $setIsSubset} expression that takes array of the previously mentioned field and returns + * {@literal true} if it is a subset of the given {@literal array}. + * + * @param array must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 1.10 + */ + public ProjectionOperationBuilder subsetOfArray(String array) { + + Assert.hasText(array, "Array must not be null or empty"); + return project("setIsSubset", Fields.fields(array)); + } + + /** + * Generates an {@code $anyElementTrue} expression that takes the array of the previously mentioned field and returns + * {@literal true} if any of the elements are {@literal true} and {@literal false} otherwise. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder anyElementInArrayTrue() { + return project("anyElementTrue"); + } + + /** + * Generates an {@code $allElementsTrue} expression that takes the array of the previously mentioned field and returns + * {@literal true} if no element is {@literal false}. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder allElementsInArrayTrue() { + return project("allElementsTrue"); + } + + /** + * Generates a {@code $abs} expression that takes the number of the previously mentioned field and returns the + * absolute value of it. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder absoluteValue() { + return this.operation.and(ArithmeticOperators.Abs.absoluteValueOf(getRequiredName())); + } + + /** + * Generates a {@code $ceil} expression that takes the number of the previously mentioned field and returns the + * smallest integer greater than or equal to the specified number. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder ceil() { + return this.operation.and(ArithmeticOperators.Ceil.ceilValueOf(getRequiredName())); + } + + /** + * Generates a {@code $exp} expression that takes the number of the previously mentioned field and raises Euler's + * number {@literal e} to it. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder exp() { + return this.operation.and(ArithmeticOperators.Exp.expValueOf(getRequiredName())); + } + + /** + * Generates a {@code $floor} expression that takes the number of the previously mentioned field and returns the + * largest integer less than or equal to it. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder floor() { + return this.operation.and(ArithmeticOperators.Floor.floorValueOf(getRequiredName())); + } + + /** + * Generates a {@code $ln} expression that takes the number of the previously mentioned field and calculates the + * natural logarithm (i.e. log base {@literal e}) of it. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder ln() { + return this.operation.and(ArithmeticOperators.Ln.lnValueOf(getRequiredName())); + } + + /** + * Generates a {@code $log} expression that takes the number of the previously mentioned field and calculates the + * log of the associated number in the specified base. + * + * @param baseFieldRef must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder log(String baseFieldRef) { + return this.operation.and(ArithmeticOperators.Log.valueOf(getRequiredName()).log(baseFieldRef)); + } + + /** + * Generates a {@code $log} expression that takes the number of the previously mentioned field and calculates the + * log of the associated number in the specified base. + * + * @param base must not be {@literal null}. + * @return never {@literal null}.
+ * @since 1.10 + */ + public ProjectionOperationBuilder log(Number base) { + return this.operation.and(ArithmeticOperators.Log.valueOf(getRequiredName()).log(base)); + } + + /** + * Generates a {@code $log} expression that takes the number of the previously mentioned field and calculates the + * log of the associated number in the specified base. + * + * @param base must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder log(AggregationExpression base) { + return this.operation.and(ArithmeticOperators.Log.valueOf(getRequiredName()).log(base)); + } + + /** + * Generates a {@code $log10} expression that takes the number of the previously mentioned field and calculates the + * log base 10. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder log10() { + return this.operation.and(ArithmeticOperators.Log10.log10ValueOf(getRequiredName())); + } + + /** + * Generates a {@code $pow} expression that takes the number of the previously mentioned field and raises it to the + * specified exponent. + * + * @param exponentFieldRef must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder pow(String exponentFieldRef) { + return this.operation.and(ArithmeticOperators.Pow.valueOf(getRequiredName()).pow(exponentFieldRef)); + } + + /** + * Generates a {@code $pow} expression that takes the number of the previously mentioned field and raises it to the + * specified exponent. + * + * @param exponent must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder pow(Number exponent) { + return this.operation.and(ArithmeticOperators.Pow.valueOf(getRequiredName()).pow(exponent)); + } + + /** + * Generates a {@code $pow} expression that takes the number of the previously mentioned field and raises it to the + * specified exponent. + * + * @param exponentExpression must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder pow(AggregationExpression exponentExpression) { + return this.operation.and(ArithmeticOperators.Pow.valueOf(getRequiredName()).pow(exponentExpression)); + } + + /** + * Generates a {@code $sqrt} expression that takes the number of the previously mentioned field and calculates the + * square root. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder sqrt() { + return this.operation.and(ArithmeticOperators.Sqrt.sqrtOf(getRequiredName())); + } + + /** + * Takes the number of the previously mentioned field and truncates it to its integer value. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder trunc() { + return this.operation.and(ArithmeticOperators.Trunc.truncValueOf(getRequiredName())); + } + + /** + * Generates a {@code $concat} expression that takes the string representation of the previously mentioned field and + * concatenates the given values to it. + * + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder concat(Object... values) { + return project("concat", values); + } + + /** + * Generates a {@code $substr} expression that takes the string representation of the previously mentioned field and + * returns a substring starting at a specified index position.
+ /**
+ * Generates a {@code $substr} expression that takes the string representation of the previously mentioned field and
+ * returns a substring starting at the specified index position.
+ *
+ * @param start the zero-based start index.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder substring(int start) {
+ 	return substring(start, -1);
+ }
+
+ /**
+ * Generates a {@code $substr} expression that takes the string representation of the previously mentioned field and
+ * returns a substring starting at the specified index position including the specified number of characters.
+ *
+ * @param start the zero-based start index.
+ * @param nrOfChars the number of characters to include.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder substring(int start, int nrOfChars) {
+ 	return project("substr", start, nrOfChars);
+ }
+
+ /**
+ * Generates a {@code $toLower} expression that takes the string representation of the previously mentioned field
+ * and converts it to lower case.
+ *
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder toLower() {
+ 	return this.operation.and(StringOperators.ToLower.lowerValueOf(getRequiredName()));
+ }
+
+ /**
+ * Generates a {@code $toUpper} expression that takes the string representation of the previously mentioned field
+ * and converts it to upper case.
+ *
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder toUpper() {
+ 	return this.operation.and(StringOperators.ToUpper.upperValueOf(getRequiredName()));
+ }
+
+ /**
+ * Generates a {@code $strcasecmp} expression that takes the string representation of the previously mentioned field
+ * and performs a case-insensitive comparison to the given {@literal value}.
+ *
+ * @param value must not be {@literal null}.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder strCaseCmp(String value) {
+ 	return project("strcasecmp", value);
+ }
+
+ /**
+ * Generates a {@code $strcasecmp} expression that takes the string representation of the previously mentioned field
+ * and performs a case-insensitive comparison to the referenced {@literal fieldRef}.
+ *
+ * @param fieldRef must not be {@literal null}.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder strCaseCmpValueOf(String fieldRef) {
+ 	return project("strcasecmp", fieldRef);
+ }
+
+ /**
+ * Generates a {@code $strcasecmp} expression that takes the string representation of the previously mentioned field
+ * and performs a case-insensitive comparison to the result of the given {@link AggregationExpression}.
+ *
+ * @param expression must not be {@literal null}.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder strCaseCmp(AggregationExpression expression) {
+ 	return project("strcasecmp", expression);
+ }
+
+ /**
+ * Generates an {@code $arrayElemAt} expression that takes the array of the previously mentioned field and returns
+ * the element at the specified array {@literal position}.
+ *
+ * @param position the array index.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder arrayElementAt(int position) {
+ 	return project("arrayElemAt", position);
+ }
+
+ /**
+ * Generates a {@code $concatArrays} expression that takes the array of the previously mentioned field and
+ * concatenates it with the arrays from the referenced {@literal fields}.
+ *
+ * @param fields must not be {@literal null}.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder concatArrays(String... fields) {
+ 	return project("concatArrays", Fields.fields(fields));
+ }
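The string and array shortcuts follow the same pattern; a sketch assuming hypothetical fields `title` and `tags`:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;

import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

// Renders roughly as:
// { $project: { upper:    { $toUpper: "$title" },
//               firstTag: { $arrayElemAt: ["$tags", 0] },
//               allTags:  { $concatArrays: ["$tags", "$moreTags"] } } }
ProjectionOperation projection = project()
		.and("title").toUpper().as("upper")
		.and("tags").arrayElementAt(0).as("firstTag")
		.and("tags").concatArrays("moreTags").as("allTags");
```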
+ /**
+ * Generates an {@code $isArray} expression that takes the value of the previously mentioned field and checks
+ * whether it is an array.
+ *
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder isArray() {
+ 	return this.operation.and(ArrayOperators.IsArray.isArray(getRequiredName()));
+ }
+
+ /**
+ * Generates a {@code $literal} expression that takes the value of the previously mentioned field and uses it as a
+ * literal.
+ *
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder asLiteral() {
+ 	return this.operation.and(LiteralOperators.Literal.asLiteral(getRequiredName()));
+ }
+
+ /**
+ * Generates a {@code $dateToString} expression that takes the date representation of the previously mentioned field
+ * and applies the given {@literal format} to it.
+ *
+ * @param format must not be {@literal null}.
+ * @return never {@literal null}.
+ * @since 1.10
+ */
+ public ProjectionOperationBuilder dateAsFormattedString(String format) {
+ 	return this.operation.and(DateOperators.DateToString.dateOf(getRequiredName()).toString(format));
+ }
+
+ /**
+ * Generates a {@code $dateToString} expression that takes the date representation of the previously mentioned field
+ * and formats it using the server default format.
                    + * NOTE: Requires MongoDB 4.0 or later. + * + * @return + * @since 2.1 + */ + public ProjectionOperationBuilder dateAsFormattedString() { + return this.operation.and(DateOperators.DateToString.dateOf(getRequiredName()).defaultFormat()); + } + + /** + * Generates a {@code $let} expression that binds variables for use in the specified expression, and returns the + * result of the expression. + * + * @param valueExpression The {@link AggregationExpression} bound to {@literal variableName}. + * @param variableName The variable name to be used in the {@literal in} {@link AggregationExpression}. + * @param in The {@link AggregationExpression} to evaluate. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder let(AggregationExpression valueExpression, String variableName, + AggregationExpression in) { + return this.operation.and(VariableOperators.Let + .define(ExpressionVariable.newVariable(variableName).forExpression(valueExpression)).andApply(in)); + } + + /** + * Generates a {@code $let} expression that binds variables for use in the specified expression, and returns the + * result of the expression. + * + * @param variables The bound {@link ExpressionVariable}s. + * @param in The {@link AggregationExpression} to evaluate. + * @return never {@literal null}. + * @since 1.10 + */ + public ProjectionOperationBuilder let(Collection variables, AggregationExpression in) { + return this.operation.and(VariableOperators.Let.define(variables).andApply(in)); + } + + private String getRequiredName() { + + Assert.state(name != null, "Projection field name must not be null"); + + return name; + } + @Override - public DBObject toDBObject(AggregationOperationContext context) { - return this.operation.toDBObject(context); + public Document toDocument(AggregationOperationContext context) { + return this.operation.toDocument(context); } /** * Adds a generic projection for the current field. - * + * * @param operation the operation key, e.g. {@code $add}. * @param values the values to be set for the projection operation. * @return @@ -590,7 +1295,7 @@ public ProjectionOperationBuilder project(String operation, Object... values) { /** * A {@link Projection} to pull in the result of the previous operation. - * + * * @author Oliver Gierke */ static class PreviousOperationProjection extends Projection { @@ -599,7 +1304,7 @@ static class PreviousOperationProjection extends Projection { /** * Creates a new {@link PreviousOperationProjection} for the field with the given name. - * + * * @param name must not be {@literal null} or empty. */ public PreviousOperationProjection(String name) { @@ -607,30 +1312,27 @@ public PreviousOperationProjection(String name) { this.name = name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject(name, Fields.UNDERSCORE_ID_REF); + public Document toDocument(AggregationOperationContext context) { + return new Document(name, Fields.UNDERSCORE_ID_REF); } } /** * A {@link FieldProjection} to map a result of a previous {@link AggregationOperation} to a new field. 
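The date and `$let` builders above can be exercised as in the following sketch, where the raw-`Document` lambda stands in for any expression referencing the bound `$$total` variable (all field names hypothetical):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;

import java.util.Arrays;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

// $dateToString with an explicit format; the no-argument variant defers to the
// server default format and therefore needs MongoDB 4.0+.
ProjectionOperation dates = project().and("createdAt").dateAsFormattedString("%Y-%m-%d").as("day");

// $let: bind "total" to price * 1.19, then reference it as $$total in the 'in' expression.
AggregationExpression totalWithTax = ArithmeticOperators.Multiply.valueOf("price").multiplyBy(1.19);
AggregationExpression in = ctx -> new Document("$multiply", Arrays.asList("$$total", 0.9));
ProjectionOperation discounted = project().and("price").let(totalWithTax, "total", in).as("discounted");
```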
- * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch */ static class FieldProjection extends Projection { private final Field field; - private final Object value; + private final @Nullable Object value; /** * Creates a new {@link FieldProjection} for the field of the given name, assigning the given value. - * + * * @param name must not be {@literal null} or empty. * @param value */ @@ -638,9 +1340,9 @@ public FieldProjection(String name, Object value) { this(Fields.field(name), value); } - private FieldProjection(Field field, Object value) { + private FieldProjection(Field field, @Nullable Object value) { - super(field); + super(new ExposedField(field.getName(), true)); this.field = field; this.value = value; @@ -649,7 +1351,7 @@ private FieldProjection(Field field, Object value) { /** * Factory method to easily create {@link FieldProjection}s for the given {@link Fields}. Fields are projected as * references with their given name. A field {@code foo} will be projected as: {@code foo : 1 } . - * + * * @param fields the {@link Fields} to in- or exclude, must not be {@literal null}. * @return */ @@ -659,14 +1361,14 @@ public static List from(Fields fields) { /** * Factory method to easily create {@link FieldProjection}s for the given {@link Fields}. - * + * * @param fields the {@link Fields} to in- or exclude, must not be {@literal null}. * @param value to use for the given field. * @return */ - public static List from(Fields fields, Object value) { + public static List from(Fields fields, @Nullable Object value) { - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(fields, "Fields must not be null"); List projections = new ArrayList(); for (Field field : fields) { @@ -676,13 +1378,16 @@ public static List from(Fields fields, Object value) { return projections; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * @return {@literal true} if this field is excluded. */ + public boolean isExcluded() { + return Boolean.FALSE.equals(value); + } + @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject(field.getName(), renderFieldValue(context)); + public Document toDocument(AggregationOperationContext context) { + return new Document(field.getName(), renderFieldValue(context)); } private Object renderFieldValue(AggregationOperationContext context) { @@ -690,10 +1395,18 @@ private Object renderFieldValue(AggregationOperationContext context) { // implicit reference or explicit include? if (value == null || Boolean.TRUE.equals(value)) { - if (Aggregation.SystemVariable.isReferingToSystemVariable(field.getTarget())) { + if (SystemVariable.isReferingToSystemVariable(field.getTarget())) { return field.getTarget(); } + if (field.getTarget().equals(Fields.UNDERSCORE_ID)) { + try { + return context.getReference(field).getReferenceValue(); + } catch (java.lang.IllegalArgumentException e) { + return Fields.UNDERSCORE_ID_REF; + } + } + // check whether referenced field exists in the context return context.getReference(field).getReferenceValue(); @@ -715,42 +1428,49 @@ static class OperationProjection extends Projection { /** * Creates a new {@link OperationProjection} for the given field. - * + * * @param field the name of the field to add the operation projection for, must not be {@literal null} or empty. 
* @param operation the actual operation key, must not be {@literal null} or empty. * @param values the values to pass into the operation, must not be {@literal null}. */ - public OperationProjection(Field field, String operation, Object[] values) { + OperationProjection(Field field, String operation, Object[] values) { super(field); - Assert.hasText(operation, "Operation must not be null or empty!"); - Assert.notNull(values, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be null or empty"); + Assert.notNull(values, "Values must not be null"); this.field = field; this.operation = operation; this.values = Arrays.asList(values); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { - DBObject inner = new BasicDBObject("$" + operation, getOperationArguments(context)); + Document inner = new Document("$" + operation, getOperationArguments(context)); - return new BasicDBObject(getField().getName(), inner); + return new Document(getField().getName(), inner); } protected List getOperationArguments(AggregationOperationContext context) { - List result = new ArrayList(values.size()); - result.add(context.getReference(getField().getName()).toString()); + List result = new ArrayList<>(values.size()); + result.add(context.getReference(getField()).toString()); for (Object element : values) { - result.add(element instanceof Field ? context.getReference((Field) element).toString() : element); + + if (element instanceof Field field) { + result.add(context.getReference(field).toString()); + } else if (element instanceof Fields fields) { + for (Field field : fields) { + result.add(context.getReference(field).toString()); + } + } else if (element instanceof AggregationExpression aggregationExpression) { + result.add(aggregationExpression.toDocument(context)); + } else { + result.add(element); + } } return result; @@ -758,17 +1478,13 @@ protected List getOperationArguments(AggregationOperationContext context /** * Returns the field that holds the {@link OperationProjection}. - * + * * @return */ protected Field getField() { return field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#getExposedField() - */ @Override public ExposedField getExposedField() { @@ -781,18 +1497,15 @@ public ExposedField getExposedField() { /** * Creates a new instance of this {@link OperationProjection} with the given alias. 
- * + * * @param alias the alias to set * @return */ - public OperationProjection withAlias(String alias) { + OperationProjection withAlias(String alias) { final Field aliasedField = Fields.field(alias, this.field.getName()); return new OperationProjection(aliasedField, operation, values.toArray()) { - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.OperationProjection#getField() - */ @Override protected Field getField() { return aliasedField; @@ -815,33 +1528,29 @@ static class NestedFieldProjection extends Projection { private final String name; private final Fields fields; - public NestedFieldProjection(String name, Fields fields) { + NestedFieldProjection(String name, Fields fields) { super(Fields.field(name)); this.name = name; this.fields = fields; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { - DBObject nestedObject = new BasicDBObject(); + Document nestedObject = new Document(); for (Field field : fields) { nestedObject.put(field.getName(), context.getReference(field.getTarget()).toString()); } - return new BasicDBObject(name, nestedObject); + return new Document(name, nestedObject); } } /** * Extracts the minute from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractMinute() { @@ -850,7 +1559,7 @@ public ProjectionOperationBuilder extractMinute() { /** * Extracts the hour from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractHour() { @@ -859,7 +1568,7 @@ public ProjectionOperationBuilder extractHour() { /** * Extracts the second from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractSecond() { @@ -868,7 +1577,7 @@ public ProjectionOperationBuilder extractSecond() { /** * Extracts the millisecond from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractMillisecond() { @@ -877,7 +1586,7 @@ public ProjectionOperationBuilder extractMillisecond() { /** * Extracts the year from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractYear() { @@ -886,7 +1595,7 @@ public ProjectionOperationBuilder extractYear() { /** * Extracts the month from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractMonth() { @@ -895,7 +1604,7 @@ public ProjectionOperationBuilder extractMonth() { /** * Extracts the week from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractWeek() { @@ -904,7 +1613,7 @@ public ProjectionOperationBuilder extractWeek() { /** * Extracts the dayOfYear from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractDayOfYear() { @@ -913,7 +1622,7 @@ public ProjectionOperationBuilder extractDayOfYear() { /** * Extracts the dayOfMonth from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractDayOfMonth() { @@ -922,7 +1631,7 @@ public ProjectionOperationBuilder extractDayOfMonth() { /** * Extracts the dayOfWeek from a date expression. - * + * * @return */ public ProjectionOperationBuilder extractDayOfWeek() { @@ -932,7 +1641,7 @@ public ProjectionOperationBuilder extractDayOfWeek() { /** * Base class for {@link Projection} implementations. 
- * + * * @author Oliver Gierke */ private static abstract class Projection { @@ -941,18 +1650,18 @@ private static abstract class Projection { /** * Creates new {@link Projection} for the given {@link Field}. - * + * * @param field must not be {@literal null}. */ public Projection(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); } /** * Returns the field exposed by the {@link Projection}. - * + * * @return will never be {@literal null}. */ public ExposedField getExposedField() { @@ -960,13 +1669,13 @@ public ExposedField getExposedField() { } /** - * Renders the current {@link Projection} into a {@link DBObject} based on the given + * Renders the current {@link Projection} into a {@link Document} based on the given * {@link AggregationOperationContext}. - * + * * @param context will never be {@literal null}. * @return */ - public abstract DBObject toDBObject(AggregationOperationContext context); + public abstract Document toDocument(AggregationOperationContext context); } /** @@ -979,7 +1688,7 @@ static class ExpressionProjection extends Projection { /** * Creates a new {@link ExpressionProjection}. - * + * * @param field * @param expression */ @@ -991,8 +1700,171 @@ public ExpressionProjection(Field field, AggregationExpression expression) { } @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject(field.getName(), expression.toDbObject(context)); + public Document toDocument(AggregationOperationContext context) { + return new Document(field.getName(), expression.toDocument(context)); + } + } + + /** + * A {@link Projection} including all top level fields of the given target type mapped to include potentially + * deviating field names. + * + * @since 2.2 + * @author Christoph Strobl + */ + static class TypeProjection extends Projection { + + private final Class type; + + TypeProjection(Class type) { + + super(Fields.field(type.getSimpleName())); + this.type = type; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document projections = new Document(); + + Fields fields = context.getFields(type); + + fields.forEach(it -> projections.append(it.getTarget(), 1)); + return projections; + } + } + + /** + * A {@link Projection} including all top level fields of the given target type mapped to include potentially + * deviating field names. + * + * @since 2.2 + * @author Christoph Strobl + */ + static class FilterProjection extends Projection { + + public static String FILTER_ELEMENT = "filterElement"; + private final Object value; + + FilterProjection(String fieldReference, Object value) { + super(Fields.field(FILTER_ELEMENT + "." + fieldReference)); + this.value = value; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getExposedField().getName(), value); + } + } + + /** + * Builder for {@code array} projections. + * + * @author Christoph Strobl + * @since 2.2 + */ + public static class ArrayProjectionOperationBuilder { + + private ProjectionOperation target; + private final List projections; + + public ArrayProjectionOperationBuilder(ProjectionOperation target) { + + this.target = target; + this.projections = new ArrayList<>(); + } + + /** + * Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one. 
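As a usage sketch for the array projection builder defined here: the public constructor shown above is used directly for illustration (a dedicated entry point on `ProjectionOperation` may exist as well); field names are hypothetical:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;

import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.aggregation.Fields;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ArrayProjectionOperationBuilder;

// Renders roughly as: { $project: { author: 1, summary: ["$title", { $add: ["$copies", 1] }] } }
ProjectionOperation projection = new ArrayProjectionOperationBuilder(project("author"))
		.and(Fields.field("title"))
		.and(ArithmeticOperators.Add.valueOf("copies").add(1))
		.as("summary");
```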
+ * + * @param expression + * @return + */ + public ArrayProjectionOperationBuilder and(AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + + this.projections.add(expression); + return this; + } + + /** + * Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one. + * + * @param field + * @return + */ + public ArrayProjectionOperationBuilder and(Field field) { + + Assert.notNull(field, "Field must not be null"); + + this.projections.add(field); + return this; + } + + /** + * Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one. + * + * @param value + * @return + */ + public ArrayProjectionOperationBuilder and(Object value) { + + this.projections.add(value); + return this; + } + + /** + * Create the {@link ProjectionOperation} for the array property with given {@literal name}. + * + * @param name The target property name. Must not be {@literal null}. + * @return new instance of {@link ArrayProjectionOperationBuilder}. + */ + public ProjectionOperation as(String name) { + + return new ProjectionOperation(target.projections, + Collections.singletonList(new ArrayProjection(Fields.field(name), this.projections))); + } + } + + /** + * @author Christoph Strobl + * @since 2.2 + */ + static class ArrayProjection extends Projection { + + private final Field targetField; + private final List projections; + + public ArrayProjection(Field targetField, List projections) { + + super(targetField); + this.targetField = targetField; + this.projections = projections; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + return new Document(targetField.getName(), + projections.stream().map(it -> toArrayEntry(it, context)).collect(Collectors.toList())); + } + + private Object toArrayEntry(Object projection, AggregationOperationContext ctx) { + + if (projection instanceof Field field) { + return ctx.getReference(field).toString(); + } + + if (projection instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(ctx); + } + + if (projection instanceof FieldProjection fieldProjection) { + return ctx.getReference(fieldProjection.getExposedField().getTarget()).toString(); + } + + return projection; } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RedactOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RedactOperation.java new file mode 100644 index 0000000000..a370016356 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RedactOperation.java @@ -0,0 +1,243 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.util.Assert; + +/** + * {@link RedactOperation} allows to restrict the content of a {@link Document} based on information stored within + * itself. + * + *
                    + * RedactOperation.builder() //
                    + * 		.when(Criteria.where("level").is(5)) //
                    + * 		.thenPrune() //
                    + * 		.otherwiseDescend() //
                    + * 		.build();
                    + * 
                    + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/redact/ + * @since 3.0 + */ +public class RedactOperation implements AggregationOperation { + + /** + * Return fields at the current document level. Exclude embedded ones. + */ + public static final String DESCEND = "$$DESCEND"; + + /** + * Return/Keep all fields at the current document/embedded level. + */ + public static final String KEEP = "$$KEEP"; + + /** + * Exclude all fields at this current document/embedded level. + */ + public static final String PRUNE = "$$PRUNE"; + + private final AggregationExpression condition; + + /** + * Create new {@link RedactOperation}. + * + * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or + * {@literal $$KEEP}. Must not be {@literal null}. + */ + public RedactOperation(AggregationExpression condition) { + + Assert.notNull(condition, "Condition must not be null"); + this.condition = condition; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), condition.toDocument(context)); + } + + @Override + public String getOperator() { + return "$redact"; + } + + /** + * Obtain a new instance of {@link RedactOperationBuilder} to specify condition and outcome of the {@literal $redact} + * operation. + * + * @return new instance of {@link RedactOperationBuilder}. + */ + public static RedactOperationBuilder builder() { + return new RedactOperationBuilder(); + } + + /** + * Builder to create new instance of {@link RedactOperation}. + * + * @author Christoph Strobl + */ + public static class RedactOperationBuilder { + + private Object when; + private Object then; + private Object otherwise; + + private RedactOperationBuilder() { + + } + + /** + * Specify the evaluation condition. + * + * @param criteria must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder when(CriteriaDefinition criteria) { + + this.when = criteria; + return this; + } + + /** + * Specify the evaluation condition. + * + * @param condition must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder when(AggregationExpression condition) { + + this.when = condition; + return this; + } + + /** + * Specify the evaluation condition. + * + * @param condition must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder when(Document condition) { + + this.when = condition; + return this; + } + + /** + * Return fields at the current document level and exclude embedded ones if the condition is met. + * + * @return this. + */ + public RedactOperationBuilder thenDescend() { + return then(DESCEND); + } + + /** + * Return/Keep all fields at the current document/embedded level if the condition is met. + * + * @return this. + */ + public RedactOperationBuilder thenKeep() { + return then(KEEP); + } + + /** + * Exclude all fields at this current document/embedded level if the condition is met. + * + * @return this. + */ + public RedactOperationBuilder thenPrune() { + return then(PRUNE); + } + + /** + * Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP}) + * when the condition is met. + * + * @param then must not be {@literal null}. + * @return this. 
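Putting the builder into a pipeline mirrors the class-level example; `Criteria` comes from `org.springframework.data.mongodb.core.query`:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.RedactOperation;
import org.springframework.data.mongodb.core.query.Criteria;

// Prune documents whose "level" equals 5, otherwise descend into embedded documents.
RedactOperation redact = RedactOperation.builder()
		.when(Criteria.where("level").is(5))
		.thenPrune()
		.otherwiseDescend()
		.build();

Aggregation aggregation = newAggregation(redact);
```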
+ */ + public RedactOperationBuilder then(Object then) { + + this.then = then; + return this; + } + + /** + * Return fields at the current document level and exclude embedded ones if the condition is not met. + * + * @return this. + */ + public RedactOperationBuilder otherwiseDescend() { + return otherwise(DESCEND); + } + + /** + * Return/Keep all fields at the current document/embedded level if the condition is not met. + * + * @return this. + */ + public RedactOperationBuilder otherwiseKeep() { + return otherwise(KEEP); + } + + /** + * Exclude all fields at this current document/embedded level if the condition is not met. + * + * @return this. + */ + public RedactOperationBuilder otherwisePrune() { + return otherwise(PRUNE); + } + + /** + * Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP}) + * when the condition is not met. + * + * @param otherwise must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder otherwise(Object otherwise) { + this.otherwise = otherwise; + return this; + } + + /** + * @return new instance of {@link RedactOperation}. + */ + public RedactOperation build() { + return new RedactOperation(when().then(then).otherwise(otherwise)); + } + + private ThenBuilder when() { + + if (when instanceof CriteriaDefinition criteriaDefinition) { + return ConditionalOperators.Cond.when(criteriaDefinition); + } + if (when instanceof AggregationExpression aggregationExpression) { + return ConditionalOperators.Cond.when(aggregationExpression); + } + if (when instanceof Document document) { + return ConditionalOperators.Cond.when(document); + } + + throw new IllegalArgumentException(String.format( + "Invalid Condition; Expected CriteriaDefinition, AggregationExpression or Document but was %s", when)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RelaxedTypeBasedAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RelaxedTypeBasedAggregationOperationContext.java new file mode 100644 index 0000000000..a27b9fcb45 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RelaxedTypeBasedAggregationOperationContext.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.data.mongodb.core.aggregation;
+
+import org.springframework.data.mapping.context.InvalidPersistentPropertyPath;
+import org.springframework.data.mapping.context.MappingContext;
+import org.springframework.data.mongodb.core.convert.QueryMapper;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+
+/**
+ * A {@link TypeBasedAggregationOperationContext} with less restrictive field reference handling, suppressing
+ * {@link InvalidPersistentPropertyPath} exceptions when resolving mapped field names.
+ *
+ * @author Christoph Strobl
+ * @since 3.0
+ * @deprecated since 4.3.1
+ */
+@Deprecated(since = "4.3.1")
+public class RelaxedTypeBasedAggregationOperationContext extends TypeBasedAggregationOperationContext {
+
+ /**
+ * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and
+ * {@link QueryMapper}.
+ *
+ * @param type must not be {@literal null}.
+ * @param mappingContext must not be {@literal null}.
+ * @param mapper must not be {@literal null}.
+ */
+ public RelaxedTypeBasedAggregationOperationContext(Class<?> type,
+ 		MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext, QueryMapper mapper) {
+ 	super(type, mappingContext, mapper, FieldLookupPolicy.relaxed());
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java
new file mode 100644
index 0000000000..130182a001
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java
@@ -0,0 +1,560 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
                    + * We recommend to use the static factory method {@link Aggregation#replaceRoot(String)} instead of creating instances + * of this class directly. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 1.10 + * @see MongoDB Aggregation + * Framework: $replaceRoot + */ +public class ReplaceRootOperation implements FieldsExposingAggregationOperation { + + private final Replacement replacement; + + /** + * Creates a new {@link ReplaceRootOperation} given the {@link Field} field name. + * + * @param field must not be {@literal null} or empty. + */ + public ReplaceRootOperation(Field field) { + this(new FieldReplacement(field)); + } + + /** + * Creates a new {@link ReplaceRootOperation} given the {@link AggregationExpression} pointing to a document. + * + * @param aggregationExpression must not be {@literal null}. + */ + public ReplaceRootOperation(AggregationExpression aggregationExpression) { + this(new AggregationExpressionReplacement(aggregationExpression)); + } + + /** + * Creates a new {@link ReplaceRootOperation} given the {@link Replacement}. + * + * @param replacement must not be {@literal null}. + */ + public ReplaceRootOperation(Replacement replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + this.replacement = replacement; + } + + /** + * Creates a new {@link ReplaceRootDocumentOperationBuilder}. + * + * @return a new {@link ReplaceRootDocumentOperationBuilder}. + */ + public static ReplaceRootOperationBuilder builder() { + return new ReplaceRootOperationBuilder(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$replaceRoot", new Document("newRoot", getReplacement().toDocumentExpression(context))); + } + + @Override + public String getOperator() { + return "$replaceRoot"; + } + + @Override + public ExposedFields getFields() { + return ExposedFields.from(); + } + + /** + * Obtain the {@link Replacement}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected Replacement getReplacement() { + return replacement; + } + + /** + * Builder for {@link ReplaceRootOperation}. + * + * @author Mark Paluch + */ + public static class ReplaceRootOperationBuilder { + + /** + * Defines a root document replacement based on a {@literal fieldName} that resolves to a document. + * + * @param fieldName must not be {@literal null} or empty. + * @return the final {@link ReplaceRootOperation}. + */ + public ReplaceRootOperation withValueOf(String fieldName) { + return new ReplaceRootOperation(Fields.field(fieldName)); + } + + /** + * Defines a root document replacement based on a {@link AggregationExpression} that resolves to a document. + * + * @param aggregationExpression must not be {@literal null}. + * @return the final {@link ReplaceRootOperation}. + */ + public ReplaceRootOperation withValueOf(AggregationExpression aggregationExpression) { + return new ReplaceRootOperation(aggregationExpression); + } + + /** + * Defines a root document replacement based on a composable document that is empty initially.
                    + * {@link ReplaceRootOperation} can be populated with individual entries and derive its values from other, existing + * documents. + * + * @return the {@link ReplaceRootDocumentOperation}. + */ + public ReplaceRootDocumentOperation withDocument() { + return new ReplaceRootDocumentOperation(); + } + + /** + * Defines a root document replacement based on a composable document given {@literal document}.
                    + * {@link ReplaceRootOperation} can be populated with individual entries and derive its values from other, existing + * documents. + * + * @param document must not be {@literal null}. + * @return the final {@link ReplaceRootOperation}. + */ + public ReplaceRootOperation withDocument(Document document) { + + Assert.notNull(document, "Document must not be null"); + + return new ReplaceRootDocumentOperation().andValuesOf(document); + } + } + + /** + * Encapsulates the aggregation framework {@code $replaceRoot}-operation to result in a composable replacement + * document.
                    + * Instances of {@link ReplaceRootDocumentOperation} yield empty upon construction and can be populated with single + * values and documents. + * + * @author Mark Paluch + */ + public static class ReplaceRootDocumentOperation extends ReplaceRootOperation { + + private final static ReplacementDocument EMPTY = new ReplacementDocument(); + private final ReplacementDocument current; + + /** + * Creates an empty {@link ReplaceRootDocumentOperation}. + */ + public ReplaceRootDocumentOperation() { + this(EMPTY); + } + + private ReplaceRootDocumentOperation(ReplacementDocument replacementDocument) { + super(replacementDocument); + current = replacementDocument; + } + + /** + * Creates an extended {@link ReplaceRootDocumentOperation} that combines {@link ReplacementDocument}s from the + * {@literal currentOperation} and {@literal extension} operation. + * + * @param currentOperation must not be {@literal null}. + * @param extension must not be {@literal null}. + */ + protected ReplaceRootDocumentOperation(ReplaceRootDocumentOperation currentOperation, + ReplacementDocument extension) { + this(currentOperation.current.extendWith(extension)); + } + + /** + * Creates a new {@link ReplaceRootDocumentOperationBuilder} to define a field for the + * {@link AggregationExpression}. + * + * @param aggregationExpression must not be {@literal null}. + * @return the {@link ReplaceRootDocumentOperationBuilder}. + */ + public ReplaceRootDocumentOperationBuilder and(AggregationExpression aggregationExpression) { + return new ReplaceRootDocumentOperationBuilder(this, aggregationExpression); + } + + /** + * Creates a new {@link ReplaceRootDocumentOperationBuilder} to define a field for the {@literal value}. + * + * @param value must not be {@literal null}. + * @return the {@link ReplaceRootDocumentOperationBuilder}. + */ + public ReplaceRootDocumentOperationBuilder andValue(Object value) { + return new ReplaceRootDocumentOperationBuilder(this, value); + } + + /** + * Creates a new {@link ReplaceRootDocumentOperation} that merges all existing replacement values with values from + * {@literal value}. Existing replacement values are overwritten. + * + * @param value must not be {@literal null}. + * @return the {@link ReplaceRootDocumentOperation}. 
+ */ + public ReplaceRootDocumentOperation andValuesOf(Object value) { + return new ReplaceRootDocumentOperation(this, ReplacementDocument.valueOf(value)); + } + } + + /** + * Builder for {@link ReplaceRootDocumentOperation} to populate {@link ReplacementDocument} + * + * @author Mark Paluch + */ + public static class ReplaceRootDocumentOperationBuilder { + + private final ReplaceRootDocumentOperation currentOperation; + private final Object value; + + protected ReplaceRootDocumentOperationBuilder(ReplaceRootDocumentOperation currentOperation, Object value) { + + Assert.notNull(currentOperation, "Current ReplaceRootDocumentOperation must not be null"); + Assert.notNull(value, "Value must not be null"); + + this.currentOperation = currentOperation; + this.value = value; + } + + public ReplaceRootDocumentOperation as(String fieldName) { + + if (value instanceof AggregationExpression aggregationExpression) { + return new ReplaceRootDocumentOperation(currentOperation, + ReplacementDocument.forExpression(fieldName, aggregationExpression)); + } + + return new ReplaceRootDocumentOperation(currentOperation, ReplacementDocument.forSingleValue(fieldName, value)); + } + } + + /** + * Replacement object that results in a replacement document or an expression that results in a document. + * + * @author Mark Paluch + * @author Christoph Strobl + */ + public interface Replacement { + + /** + * Renders the current {@link Replacement} into a its MongoDB representation based on the given + * {@link AggregationOperationContext}. + * + * @param context will never be {@literal null}. + * @return a replacement document or an expression that results in a document. + */ + Object toDocumentExpression(AggregationOperationContext context); + } + + /** + * {@link Replacement} that uses a {@link AggregationExpression} that results in a replacement document. + * + * @author Mark Paluch + */ + private static class AggregationExpressionReplacement implements Replacement { + + private final AggregationExpression aggregationExpression; + + protected AggregationExpressionReplacement(AggregationExpression aggregationExpression) { + + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); + this.aggregationExpression = aggregationExpression; + } + + @Override + public Document toDocumentExpression(AggregationOperationContext context) { + return aggregationExpression.toDocument(context); + } + } + + /** + * {@link Replacement that references a {@link Field} inside the current aggregation pipeline. + * + * @author Mark Paluch + */ + private static class FieldReplacement implements Replacement { + + private final Field field; + + /** + * Creates {@link FieldReplacement} given {@link Field}. + */ + protected FieldReplacement(Field field) { + + Assert.notNull(field, "Field must not be null"); + this.field = field; + } + + @Override + public Object toDocumentExpression(AggregationOperationContext context) { + return context.getReference(field).toString(); + } + } + + /** + * Replacement document consisting of multiple {@link ReplacementContributor}s. + * + * @author Mark Paluch + */ + private static class ReplacementDocument implements Replacement { + + private final Collection replacements; + + /** + * Creates an empty {@link ReplacementDocument}. + */ + protected ReplacementDocument() { + replacements = new ArrayList(); + } + + /** + * Creates a {@link ReplacementDocument} given {@link ReplacementContributor}. + * + * @param contributor must not be {@literal null}. 
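A sketch of both replacement styles defined above, with hypothetical field names:

```java
import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation;

// Promote the embedded "domain" document to the new root:
ReplaceRootOperation byField = ReplaceRootOperation.builder().withValueOf("domain");

// Or compose the replacement document field by field:
ReplaceRootOperation composed = ReplaceRootOperation.builder().withDocument()
		.andValue("fixed").as("type")
		.and(ArithmeticOperators.Add.valueOf("a").add("b")).as("sum");
```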
+ */ + protected ReplacementDocument(ReplacementContributor contributor) { + + Assert.notNull(contributor, "ReplacementContributor must not be null"); + replacements = Collections.singleton(contributor); + } + + private ReplacementDocument(Collection replacements) { + this.replacements = replacements; + } + + /** + * Creates a {@link ReplacementDocument} given a {@literal value}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplacementDocument}. + */ + public static ReplacementDocument valueOf(Object value) { + return new ReplacementDocument(new DocumentContributor(value)); + } + + /** + * Creates a {@link ReplacementDocument} given a single {@literal field} and {@link AggregationExpression}. + * + * @param aggregationExpression must not be {@literal null}. + * @return new instance of {@link ReplacementDocument}. + */ + public static ReplacementDocument forExpression(String field, AggregationExpression aggregationExpression) { + return new ReplacementDocument(new ExpressionFieldContributor(Fields.field(field), aggregationExpression)); + } + + /** + * Creates a {@link ReplacementDocument} given a single {@literal field} and {@literal value}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplacementDocument}. + */ + public static ReplacementDocument forSingleValue(String field, Object value) { + return new ReplacementDocument(new ValueFieldContributor(Fields.field(field), value)); + } + + @Override + public Document toDocumentExpression(AggregationOperationContext context) { + + Document document = new Document(); + + for (ReplacementContributor replacement : replacements) { + document.putAll(replacement.toDocument(context)); + } + + return document; + } + + /** + * Extend a replacement document that merges {@code this} and {@literal replacement} {@link ReplacementContributor}s + * in a new {@link ReplacementDocument}. + * + * @param extension must not be {@literal null}. + * @return the new, extended {@link ReplacementDocument} + */ + public ReplacementDocument extendWith(ReplacementDocument extension) { + + Assert.notNull(extension, "ReplacementDocument must not be null"); + + ReplacementDocument replacementDocument = new ReplacementDocument(); + + List replacements = new ArrayList( + this.replacements.size() + extension.replacements.size()); + + replacements.addAll(this.replacements); + replacements.addAll(extension.replacements); + + return new ReplacementDocument(replacements); + } + } + + /** + * Partial {@link Document} contributor for document replacement. + * + * @author Mark Paluch + */ + private interface ReplacementContributor extends AggregationExpression { + + /** + * Renders the current {@link ReplacementContributor} into a {@link Document} based on the given + * {@link AggregationOperationContext}. + * + * @param context will never be {@literal null}. + * @return never {@literal null}. + */ + @Override + Document toDocument(AggregationOperationContext context); + } + + /** + * {@link ReplacementContributor} to contribute multiple fields based on the input {@literal value}.
                    + * The value object is mapped into a MongoDB {@link Document}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ + private static class DocumentContributor implements ReplacementContributor { + + private final Object value; + + /** + * Creates new {@link Projection} for the given {@link Field}. + * + * @param value must not be {@literal null}. + */ + public DocumentContributor(Object value) { + + Assert.notNull(value, "Value must not be null"); + this.value = value; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document document = new Document("$set", value); + + return (Document) context.getMappedObject(document).get("$set"); + } + } + + /** + * Base class for {@link ReplacementContributor} implementations to contribute a single {@literal field} Typically + * used to construct a composite document that should contain the resulting key-value pair. + * + * @author Mark Paluch + */ + private abstract static class FieldContributorSupport implements ReplacementContributor { + + private final ExposedField field; + + /** + * Creates new {@link FieldContributorSupport} for the given {@link Field}. + * + * @param field must not be {@literal null}. + */ + public FieldContributorSupport(Field field) { + + Assert.notNull(field, "Field must not be null"); + this.field = new ExposedField(field, true); + } + + /** + * @return the {@link ExposedField}. + */ + public ExposedField getField() { + return field; + } + } + + /** + * {@link ReplacementContributor} to contribute a single {@literal field} and {@literal value}. The {@literal value} + * is mapped to a MongoDB {@link Document} and can be a singular value, a list or subdocument. + * + * @author Mark Paluch + */ + private static class ValueFieldContributor extends FieldContributorSupport { + + private final Object value; + + /** + * Creates new {@link Projection} for the given {@link Field}. + * + * @param field must not be {@literal null}. + * @param value must not be {@literal null}. + */ + public ValueFieldContributor(Field field, Object value) { + + super(field); + + Assert.notNull(value, "Value must not be null"); + + this.value = value; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document document = new Document("$set", value); + return new Document(getField().getTarget(), context.getMappedObject(document).get("$set")); + } + } + + /** + * {@link ReplacementContributor} to contribute a single {@literal field} and value based on a + * {@link AggregationExpression}. + * + * @author Mark Paluch + */ + private static class ExpressionFieldContributor extends FieldContributorSupport { + + private final AggregationExpression aggregationExpression; + + /** + * Creates new {@link Projection} for the given {@link Field}. + * + * @param field must not be {@literal null}. + * @param aggregationExpression must not be {@literal null}. 
+ */ + public ExpressionFieldContributor(Field field, AggregationExpression aggregationExpression) { + + super(field); + + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); + + this.aggregationExpression = aggregationExpression; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getField().getTarget(), aggregationExpression.toDocument(context)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperation.java new file mode 100644 index 0000000000..795644ee61 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperation.java @@ -0,0 +1,91 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collection; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $replaceRoot}-operation.
+ * The operation replaces all existing fields including the {@code id} field with {@code $replaceWith}. This way it is
+ * possible to promote an embedded document to the top-level or specify a new document.
+ *
+ * @author Christoph Strobl
+ * @since 3.0
+ * @see MongoDB Aggregation Framework: $replaceWith
+ */
+public class ReplaceWithOperation extends ReplaceRootOperation {
+
+ /**
+ * Creates a new instance of {@link ReplaceWithOperation}.
+ *
+ * @param replacement must not be {@literal null}.
+ */
+ public ReplaceWithOperation(Replacement replacement) {
+ 	super(replacement);
+ }
+
+ /**
+ * Creates a new instance of {@link ReplaceWithOperation}.
+ *
+ * @param value must not be {@literal null}.
+ * @return new instance of {@link ReplaceWithOperation}.
+ */
+ public static ReplaceWithOperation replaceWithValue(Object value) {
+ 	return new ReplaceWithOperation((ctx) -> value);
+ }
+
+ /**
+ * Creates a new instance of {@link ReplaceWithOperation} treating a given {@link String} {@literal value} as a
+ * {@link Field field reference}.
+ *
+ * @param value must not be {@literal null}.
+ * @return new instance of {@link ReplaceWithOperation}.
+ */
+ public static ReplaceWithOperation replaceWithValueOf(Object value) {
+
+ 	Assert.notNull(value, "Value must not be null");
+ 	return new ReplaceWithOperation(ctx -> {
+
+ 		Object target = value instanceof String stringValue ? Fields.field(stringValue) : value;
+ 		return computeValue(target, ctx);
+ 	});
+ }
+
+ private static Object computeValue(Object value, AggregationOperationContext context) {
+
+ 	if (value instanceof Field field) {
+ 		return context.getReference(field).toString();
+ 	}
+ 	if (value instanceof AggregationExpression aggregationExpression) {
+ 		return aggregationExpression.toDocument(context);
+ 	}
+ 	if (value instanceof Collection<?> collection) {
+ 		return collection.stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
+ 	}
+
+ 	return value;
+ }
+
+ @Override
+ public Document toDocument(AggregationOperationContext context) {
+ 	return context.getMappedObject(new Document("$replaceWith", getReplacement().toDocumentExpression(context)));
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java
new file mode 100644
index 0000000000..a366850f8a
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.aggregation;
+
+import org.bson.Document;
+import org.springframework.util.Assert;
+
+/**
+ * Encapsulates the {@code $sample}-operation.
+ *
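Before moving on to `$sample`, the `ReplaceWithOperation` factory methods above can be used as in this sketch (field name hypothetical):

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation;

// Promote an embedded document to the top level:
ReplaceWithOperation promote = ReplaceWithOperation.replaceWithValueOf("embeddedDoc");

// Or replace every document with a fixed value:
ReplaceWithOperation fixed = ReplaceWithOperation.replaceWithValue(new Document("status", "archived"));
```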

                    + * We recommend to use the static factory method {@link Aggregation#sample(long)} instead of creating instances of this + * class directly. + * + * @author Gustavo de Geus + * @since 2.0 + * @see MongoDB Aggregation Framework: + * $sample + */ +public class SampleOperation implements AggregationOperation { + + private final long sampleSize; + + /** + * @param sampleSize number of documents to be randomly selected from its input. + */ + public SampleOperation(long sampleSize) { + + Assert.isTrue(sampleSize > 0, "Sample size must be greater than zero"); + this.sampleSize = sampleSize; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), new Document("size", this.sampleSize)); + } + + @Override + public String getOperator() { + return "$sample"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ScriptOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ScriptOperators.java new file mode 100644 index 0000000000..9eab041e88 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ScriptOperators.java @@ -0,0 +1,587 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.data.mongodb.core.aggregation.ScriptOperators.Accumulator.AccumulatorBuilder; +import org.springframework.data.mongodb.core.aggregation.ScriptOperators.Accumulator.AccumulatorInitBuilder; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * Gateway to {@literal $function} and {@literal $accumulator} aggregation operations. + *
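A pipeline using the `$sample` stage above via the recommended factory method; the match criteria are illustrative only:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.sample;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

// Draw 5 random documents from those matching the criteria.
Aggregation aggregation = newAggregation(match(Criteria.where("active").is(true)), sample(5));
```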
                    + * Using {@link ScriptOperators} as part of the {@link Aggregation} requires MongoDB server to have + * server-side JavaScript execution + * enabled. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.1 + */ +public class ScriptOperators { + + /** + * Create a custom aggregation + * $function in JavaScript. + * + * @param body The function definition. Must not be {@literal null}. + * @return new instance of {@link Function}. + */ + public static Function function(String body) { + return Function.function(body); + } + + /** + * Create a custom $accumulator operator + * in Javascript. + * + * @return new instance of {@link AccumulatorInitBuilder}. + */ + public static AccumulatorInitBuilder accumulatorBuilder() { + return new AccumulatorBuilder(); + } + + /** + * {@link Function} defines a custom aggregation + * $function in JavaScript. + *
+ * + * <pre class="code"> + * { + * $function: { + * body: ..., + * args: ..., + * lang: "js" + * } + * } + * </pre> + *
+ * {@link Function} cannot be used as part of a {@link org.springframework.data.mongodb.core.schema.MongoJsonSchema + * schema} validation query expression.
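+ * <p>
+ * A usage sketch (the {@code name} field and the function body are illustrative):
+ * <pre class="code">
+ * ScriptOperators.function("function(name) { return hex_md5(name) }").args("$name").lang("js");
+ * </pre>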
                    + * NOTE: Server-Side JavaScript + * execution must be + * enabled + * + * @see MongoDB Documentation: + * $function + */ + public static class Function extends AbstractAggregationExpression { + + private Function(Map values) { + super(values); + } + + /** + * Create a new {@link Function} with the given function definition. + * + * @param body must not be {@literal null}. + * @return new instance of {@link Function}. + */ + public static Function function(String body) { + + Assert.notNull(body, "Function body must not be null"); + + Map function = new LinkedHashMap<>(2); + function.put(Fields.BODY.toString(), body); + function.put(Fields.ARGS.toString(), Collections.emptyList()); + function.put(Fields.LANG.toString(), "js"); + + return new Function(function); + } + + /** + * Set the arguments passed to the function body. + * + * @param args the arguments passed to the function body. Leave empty if the function does not take any arguments. + * @return new instance of {@link Function}. + */ + public Function args(Object... args) { + return args(Arrays.asList(args)); + } + + /** + * Set the arguments passed to the function body. + * + * @param args the arguments passed to the function body. Leave empty if the function does not take any arguments. + * @return new instance of {@link Function}. + */ + public Function args(List args) { + + Assert.notNull(args, "Args must not be null Use an empty list instead"); + + return new Function(appendAt(1, Fields.ARGS.toString(), args)); + } + + /** + * The language used in the body. + * + * @param lang must not be {@literal null} nor empty. + * @return new instance of {@link Function}. + */ + public Function lang(String lang) { + + Assert.hasText(lang, "Lang must not be null nor empty; The default would be 'js'"); + + return new Function(appendAt(2, Fields.LANG.toString(), lang)); + } + + @Nullable + List getArgs() { + return get(Fields.ARGS.toString()); + } + + String getBody() { + return get(Fields.BODY.toString()); + } + + String getLang() { + return get(Fields.LANG.toString()); + } + + @Override + protected String getMongoMethod() { + return "$function"; + } + + enum Fields { + + BODY, ARGS, LANG; + + @Override + public String toString() { + return name().toLowerCase(); + } + } + } + + /** + * {@link Accumulator} defines a custom aggregation + * $accumulator operator, + * one that maintains its state (e.g. totals, maximums, minimums, and related data) as documents progress through the + * pipeline, in JavaScript. + *
+ * + * <pre class="code"> + * { + * $accumulator: { + * init: ..., + * initArgs: ..., + * accumulate: ..., + * accumulateArgs: ..., + * merge: ..., + * finalize: ..., + * lang: "js" + * } + * } + * </pre> + *
                    + * {@link Accumulator} can be used as part of {@link GroupOperation $group}, {@link BucketOperation $bucket} and + * {@link BucketAutoOperation $bucketAuto} pipeline stages.
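+ * <p>
+ * A builder sketch (the counting state is illustrative):
+ * <pre class="code">
+ * ScriptOperators.accumulatorBuilder()
+ *     .init("function() { return { count: 0 } }")
+ *     .accumulate("function(state) { state.count += 1; return state; }")
+ *     .merge("function(state1, state2) { return { count: state1.count + state2.count } }")
+ *     .finalize("function(state) { return state.count }");
+ * </pre>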
                    + * NOTE: Server-Side JavaScript + * execution must be + * enabled + * + * @see MongoDB Documentation: + * $accumulator + */ + public static class Accumulator extends AbstractAggregationExpression { + + private Accumulator(Map value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$accumulator"; + } + + enum Fields { + + ACCUMULATE("accumulate"), // + ACCUMULATE_ARGS("accumulateArgs"), // + FINALIZE("finalize"), // + INIT("init"), // + INIT_ARGS("initArgs"), // + LANG("lang"), // + MERGE("merge"); // + + private final String field; + + Fields(String field) { + this.field = field; + } + + @Override + public String toString() { + return field; + } + } + + public interface AccumulatorInitBuilder { + + /** + * Define the {@code init} {@link Function} for the {@link Accumulator accumulators} initial state. The function + * receives its arguments from the {@link Function#args(Object...) initArgs} array expression. + *
                    + * + * function(initArg1, initArg2, ...) { + * ... + * return initialState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + default AccumulatorAccumulateBuilder init(Function function) { + return init(function.getBody()).initArgs(function.getArgs()); + } + + /** + * Define the {@code init} function for the {@link Accumulator accumulators} initial state. The function receives + * its arguments from the {@link AccumulatorInitArgsBuilder#initArgs(Object...)} array expression. + *
                    + * + * function(initArg1, initArg2, ...) { + * ... + * return initialState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + AccumulatorInitArgsBuilder init(String function); + + /** + * The language used in the {@code $accumulator} code. + * + * @param lang must not be {@literal null}. Default is {@literal js}. + * @return this. + */ + AccumulatorInitBuilder lang(String lang); + } + + public interface AccumulatorInitArgsBuilder extends AccumulatorAccumulateBuilder { + + /** + * Define the optional {@code initArgs} for the {@link AccumulatorInitBuilder#init(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + default AccumulatorAccumulateBuilder initArgs(Object... args) { + return initArgs(Arrays.asList(args)); + } + + /** + * Define the optional {@code initArgs} for the {@link AccumulatorInitBuilder#init(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + AccumulatorAccumulateBuilder initArgs(List args); + } + + public interface AccumulatorAccumulateBuilder { + + /** + * Set the {@code accumulate} {@link Function} that updates the state for each document. The functions first + * argument is the current {@code state}, additional arguments can be defined via {@link Function#args(Object...) + * accumulateArgs}. + *
                    + * + * function(state, accumArg1, accumArg2, ...) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + default AccumulatorMergeBuilder accumulate(Function function) { + return accumulate(function.getBody()).accumulateArgs(function.getArgs()); + } + + /** + * Set the {@code accumulate} function that updates the state for each document. The functions first argument is + * the current {@code state}, additional arguments can be defined via + * {@link AccumulatorAccumulateArgsBuilder#accumulateArgs(Object...)}. + *
                    + * + * function(state, accumArg1, accumArg2, ...) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + AccumulatorAccumulateArgsBuilder accumulate(String function); + } + + public interface AccumulatorAccumulateArgsBuilder extends AccumulatorMergeBuilder { + + /** + * Define additional {@code accumulateArgs} for the {@link AccumulatorAccumulateBuilder#accumulate(String)} + * function. + * + * @param args must not be {@literal null}. + * @return this. + */ + default AccumulatorMergeBuilder accumulateArgs(Object... args) { + return accumulateArgs(Arrays.asList(args)); + } + + /** + * Define additional {@code accumulateArgs} for the {@link AccumulatorAccumulateBuilder#accumulate(String)} + * function. + * + * @param args must not be {@literal null}. + * @return this. + */ + AccumulatorMergeBuilder accumulateArgs(List args); + } + + public interface AccumulatorMergeBuilder { + + /** + * Set the {@code merge} function used to merge two internal states.
+ * This might be required when the operation runs on a sharded cluster or when the operator exceeds its + * memory limit. + *
                    + * + * function(state1, state2) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + AccumulatorFinalizeBuilder merge(String function); + } + + public interface AccumulatorFinalizeBuilder { + + /** + * Set the {@code finalize} function used to update the result of the accumulation when all documents have been + * processed. + *
                    + * + * function(state) { + * ... + * return finalState + * } + * + * + * @param function must not be {@literal null}. + * @return new instance of {@link Accumulator}. + */ + Accumulator finalize(String function); + + /** + * Build the {@link Accumulator} object without specifying a {@link #finalize(String) finalize function}. + * + * @return new instance of {@link Accumulator}. + */ + Accumulator build(); + } + + static class AccumulatorBuilder + implements AccumulatorInitBuilder, AccumulatorInitArgsBuilder, AccumulatorAccumulateBuilder, + AccumulatorAccumulateArgsBuilder, AccumulatorMergeBuilder, AccumulatorFinalizeBuilder { + + private List initArgs; + private String initFunction; + private List accumulateArgs; + private String accumulateFunction; + private String mergeFunction; + private String finalizeFunction; + private String lang = "js"; + + /** + * Define the {@code init} function for the {@link Accumulator accumulators} initial state. The function receives + * its arguments from the {@link #initArgs(Object...)} array expression. + *
                    + * + * function(initArg1, initArg2, ...) { + * ... + * return initialState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder init(String function) { + + this.initFunction = function; + return this; + } + + /** + * Define the optional {@code initArgs} for the {@link #init(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder initArgs(List args) { + + Assert.notNull(args, "Args must not be null"); + + this.initArgs = new ArrayList<>(args); + return this; + } + + /** + * Set the {@code accumulate} function that updates the state for each document. The functions first argument is + * the current {@code state}, additional arguments can be defined via {@link #accumulateArgs(Object...)}. + *
                    + * + * function(state, accumArg1, accumArg2, ...) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder accumulate(String function) { + + Assert.notNull(function, "Accumulate function must not be null"); + + this.accumulateFunction = function; + return this; + } + + /** + * Define additional {@code accumulateArgs} for the {@link #accumulate(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder accumulateArgs(List args) { + + Assert.notNull(args, "Args must not be null"); + + this.accumulateArgs = new ArrayList<>(args); + return this; + } + + /** + * Set the {@code merge} function used to merge two internal states.
+ * This might be required when the operation runs on a sharded cluster or when the operator exceeds its + * memory limit. + *
                    + * + * function(state1, state2) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder merge(String function) { + + Assert.notNull(function, "Merge function must not be null"); + + this.mergeFunction = function; + return this; + } + + /** + * The language used in the {@code $accumulator} code. + * + * @param lang must not be {@literal null}. Default is {@literal js}. + * @return this. + */ + public AccumulatorBuilder lang(String lang) { + + Assert.hasText(lang, "Lang must not be null nor empty; The default would be 'js'"); + + this.lang = lang; + return this; + } + + /** + * Set the {@code finalize} function used to update the result of the accumulation when all documents have been + * processed. + *
                    + * + * function(state) { + * ... + * return finalState + * } + * + * + * @param function must not be {@literal null}. + * @return new instance of {@link Accumulator}. + */ + @Override + public Accumulator finalize(String function) { + + Assert.notNull(function, "Finalize function must not be null"); + + this.finalizeFunction = function; + + Map args = createArgumentMap(); + args.put(Fields.FINALIZE.toString(), finalizeFunction); + + return new Accumulator(args); + } + + @Override + public Accumulator build() { + return new Accumulator(createArgumentMap()); + } + + private Map createArgumentMap() { + + Map args = new LinkedHashMap<>(); + args.put(Fields.INIT.toString(), initFunction); + if (!CollectionUtils.isEmpty(initArgs)) { + args.put(Fields.INIT_ARGS.toString(), initArgs); + } + args.put(Fields.ACCUMULATE.toString(), accumulateFunction); + if (!CollectionUtils.isEmpty(accumulateArgs)) { + args.put(Fields.ACCUMULATE_ARGS.toString(), accumulateArgs); + } + args.put(Fields.MERGE.toString(), mergeFunction); + args.put(Fields.LANG.toString(), lang); + + return args; + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SelectionOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SelectionOperators.java new file mode 100644 index 0000000000..9da80c4668 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SelectionOperators.java @@ -0,0 +1,424 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collections; + +import org.springframework.data.domain.Sort; + +/** + * Gateway to {@literal selection operators} such as {@literal $bottom}. + * + * @author Christoph Strobl + * @since 4.0 + */ +public class SelectionOperators { + + /** + * {@link AbstractAggregationExpression} to return the bottom element according to the specified {@link #sortBy(Sort) + * order}. + */ + public static class Bottom extends AbstractAggregationExpression { + + private Bottom(Object value) { + super(value); + } + + /** + * In case a limit value ({@literal n}) is present {@literal $bottomN} is used instead of {@literal $bottom}. + * + * @return + */ + @Override + protected String getMongoMethod() { + return get("n") == null ? "$bottom" : "$bottomN"; + } + + /** + * @return new instance of {@link Bottom}. + */ + public static Bottom bottom() { + return new Bottom(Collections.emptyMap()); + } + + /** + * @param numberOfResults Limits the number of returned elements to the given value. + * @return new instance of {@link Bottom}. + */ + public static Bottom bottom(int numberOfResults) { + return bottom().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link Bottom}. 
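+ * <p>
+ * For example (field names are illustrative), {@code Bottom.bottom(3).output("playerId", "score").sortBy(Sort.by(Direction.DESC, "score"))}
+ * keeps the three bottom-most elements per group.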
+ */ + public Bottom limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Bottom}. + */ + public Bottom limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private Bottom limit(Object value) { + return new Bottom(append("n", value)); + } + + /** + * Define result ordering. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link Bottom}. + */ + public Bottom sortBy(Sort sort) { + return new Bottom(append("sortBy", sort)); + } + + /** + * Define result ordering. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Bottom}. + */ + public Bottom output(Fields out) { + return new Bottom(append("output", out)); + } + + /** + * Define fields included in the output for each element. + * + * @param fieldNames must not be {@literal null}. + * @return new instance of {@link Bottom}. + * @see #output(Fields) + */ + public Bottom output(String... fieldNames) { + return output(Fields.fields(fieldNames)); + } + + /** + * Define expressions building the value included in the output for each element. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Bottom}. + * @see #output(Fields) + */ + public Bottom output(AggregationExpression... out) { + return new Bottom(append("output", Arrays.asList(out))); + } + } + + /** + * {@link AbstractAggregationExpression} to return the top element according to the specified {@link #sortBy(Sort) + * order}. + */ + public static class Top extends AbstractAggregationExpression { + + private Top(Object value) { + super(value); + } + + /** + * In case a limit value ({@literal n}) is present {@literal $topN} is used instead of {@literal $top}. + * + * @return + */ + @Override + protected String getMongoMethod() { + return get("n") == null ? "$top" : "$topN"; + } + + /** + * @return new instance of {@link Top}. + */ + public static Top top() { + return new Top(Collections.emptyMap()); + } + + /** + * @param numberOfResults Limits the number of returned elements to the given value. + * @return new instance of {@link Top}. + */ + public static Top top(int numberOfResults) { + return top().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link Top}. + */ + public Top limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Top}. + */ + public Top limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private Top limit(Object value) { + return new Top(append("n", value)); + } + + /** + * Define result ordering. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link Top}. + */ + public Top sortBy(Sort sort) { + return new Top(append("sortBy", sort)); + } + + /** + * Define result ordering. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Top}. 
+ */ + public Top output(Fields out) { + return new Top(append("output", out)); + } + + /** + * Define fields included in the output for each element. + * + * @param fieldNames must not be {@literal null}. + * @return new instance of {@link Top}. + * @see #output(Fields) + */ + public Top output(String... fieldNames) { + return output(Fields.fields(fieldNames)); + } + + /** + * Define expressions building the value included in the output for each element. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Top}. + * @see #output(Fields) + */ + public Top output(AggregationExpression... out) { + return new Top(append("output", Arrays.asList(out))); + } + } + + /** + * {@link AbstractAggregationExpression} to return the {@literal $firstN} elements. + */ + public static class First extends AbstractAggregationExpression { + + protected First(Object value) { + super(value); + } + + /** + * @return new instance of {@link First}. + */ + public static First first() { + return new First(Collections.emptyMap()); + } + + /** + * @return new instance of {@link First}. + */ + public static First first(int numberOfResults) { + return first().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link First}. + */ + public First limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private First limit(Object value) { + return new First(append("n", value)); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First of(String fieldName) { + return input(fieldName); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First of(AggregationExpression expression) { + return input(expression); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First input(String fieldName) { + return new First(append("input", Fields.field(fieldName))); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First input(AggregationExpression expression) { + return new First(append("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$firstN"; + } + } + + /** + * {@link AbstractAggregationExpression} to return the {@literal $lastN} elements. + */ + public static class Last extends AbstractAggregationExpression { + + protected Last(Object value) { + super(value); + } + + /** + * @return new instance of {@link Last}. + */ + public static Last last() { + return new Last(Collections.emptyMap()); + } + + /** + * @return new instance of {@link Last}. + */ + public static Last last(int numberOfResults) { + return last().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. 
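+ * <p>
+ * For example (the {@code score} field is illustrative), {@code Last.last(3).of("score")} keeps the last three
+ * {@code score} values per group.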
+ * + * @param numberOfResults + * @return new instance of {@link Last}. + */ + public Last limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private Last limit(Object value) { + return new Last(append("n", value)); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last of(String fieldName) { + return input(fieldName); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last of(AggregationExpression expression) { + return input(expression); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last input(String fieldName) { + return new Last(append("input", Fields.field(fieldName))); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last input(AggregationExpression expression) { + return new Last(append("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$lastN"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java new file mode 100644 index 0000000000..7f5c1c7722 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java @@ -0,0 +1,194 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.data.mongodb.core.aggregation.SetOperation.FieldAppender.ValueAppender; +import org.springframework.lang.Nullable; + +/** + * Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + *
                    + * SetOperation.set("totalHomework").toValue("A+").and().set("totalQuiz").toValue("B-")
                    + * 
                    + * + * @author Christoph Strobl + * @since 3.0 + * @see MongoDB Aggregation Framework: + * $set + */ +public class SetOperation extends DocumentEnhancingOperation { + + /** + * Create new instance of {@link SetOperation} adding map keys as exposed fields. + * + * @param source must not be {@literal null}. + */ + private SetOperation(Map source) { + super(source); + } + + /** + * Create new instance of {@link SetOperation} + * + * @param field must not be {@literal null}. + * @param value can be {@literal null}. + */ + public SetOperation(Object field, @Nullable Object value) { + this(Collections.singletonMap(field, value)); + } + + /** + * Define the {@link SetOperation} via {@link FieldAppender}. + * + * @return new instance of {@link FieldAppender}. + */ + public static FieldAppender builder() { + return new FieldAppender(); + } + + /** + * Concatenate another field to set. + * + * @param field must not be {@literal null}. + * @return new instance of {@link ValueAppender}. + */ + public static ValueAppender set(String field) { + return new FieldAppender().set(field); + } + + /** + * Append the value for a specific field to the operation. + * + * @param field the target field to set. + * @param value the value to assign. + * @return new instance of {@link SetOperation}. + */ + public SetOperation set(Object field, Object value) { + + LinkedHashMap target = new LinkedHashMap<>(getValueMap()); + target.put(field, value); + + return new SetOperation(target); + } + + /** + * Concatenate additional fields to set. + * + * @return new instance of {@link FieldAppender}. + */ + public FieldAppender and() { + return new FieldAppender(getValueMap()); + } + + @Override + protected String mongoOperator() { + return "$set"; + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public static class FieldAppender { + + private final Map valueMap; + + private FieldAppender() { + this.valueMap = new LinkedHashMap<>(); + } + + private FieldAppender(Map source) { + this.valueMap = new LinkedHashMap<>(source); + } + + /** + * Define the field to set. + * + * @param field must not be {@literal null}. + * @return new instance of {@link ValueAppender}. + */ + public ValueAppender set(String field) { + + return new ValueAppender() { + + @Override + public SetOperation toValue(Object value) { + + valueMap.put(field, value); + return FieldAppender.this.build(); + } + + @Override + public SetOperation toValueOf(Object value) { + + valueMap.put(field, value instanceof String stringValue ? Fields.fields(stringValue) : value); + return FieldAppender.this.build(); + } + + @Override + public SetOperation withValueOfExpression(String operation, Object... values) { + + valueMap.put(field, new ExpressionProjection(operation, values)); + return FieldAppender.this.build(); + } + }; + } + + private SetOperation build() { + return new SetOperation(valueMap); + } + + /** + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ + public interface ValueAppender { + + /** + * Define the value to assign as is. + * + * @param value can be {@literal null}. + * @return new instance of {@link SetOperation}. + */ + SetOperation toValue(@Nullable Object value); + + /** + * Define the value to assign. Plain {@link String} values are treated as {@link Field field references}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link SetOperation}. + */ + SetOperation toValueOf(Object value); + + /** + * Adds a generic projection for the current field. 
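+ * <p>
+ * For example (field names are illustrative), {@code set("total").withValueOfExpression("$add", "$price", "$tax")}
+ * is expected to render {@code total : { $add : [ "$price", "$tax" ] } }.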
+ * + * @param operation the operation key, e.g. {@code $add}. + * @param values the values to be set for the projection operation. + * @return new instance of {@link SetOperation}. + */ + SetOperation withValueOfExpression(String operation, Object... values); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java new file mode 100644 index 0000000000..094ef7365b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java @@ -0,0 +1,666 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal Set expressions} which perform {@literal set} operation on arrays, treating arrays as sets. + * + * @author Christoph Strobl + * @since 1.10 + */ +public class SetOperators { + + /** + * Take the array referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SetOperatorFactory}. + */ + public static SetOperatorFactory arrayAsSet(String fieldReference) { + return new SetOperatorFactory(fieldReference); + } + + /** + * Take the array resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetOperatorFactory}. + */ + public static SetOperatorFactory arrayAsSet(AggregationExpression expression) { + return new SetOperatorFactory(expression); + } + + /** + * @author Christoph Strobl + */ + public static class SetOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link SetOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public SetOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link SetOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public SetOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that compares the previously mentioned field to one or more arrays and + * returns {@literal true} if they have the same distinct elements and {@literal false} otherwise. 
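+ * <p>
+ * For example (field names are illustrative), {@code SetOperators.arrayAsSet("a").isEqualTo("b")} renders
+ * {@code { $setEquals : [ "$a", "$b" ] } }.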
+ * + * @param arrayReferences must not be {@literal null}. + * @return new instance of {@link SetEquals}. + */ + public SetEquals isEqualTo(String... arrayReferences) { + return createSetEquals().isEqualTo(arrayReferences); + } + + /** + * Creates new {@link AggregationExpression} that compares the previously mentioned field to one or more arrays and + * returns {@literal true} if they have the same distinct elements and {@literal false} otherwise. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link SetEquals}. + */ + public SetEquals isEqualTo(AggregationExpression... expressions) { + return createSetEquals().isEqualTo(expressions); + } + + private SetEquals createSetEquals() { + return usesFieldRef() ? SetEquals.arrayAsSet(fieldReference) : SetEquals.arrayAsSet(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and one or more + * arrays and returns an array that contains the elements that appear in every of those. + * + * @param arrayReferences must not be {@literal null}. + * @return new instance of {@link SetIntersection}. + */ + public SetIntersection intersects(String... arrayReferences) { + return createSetIntersection().intersects(arrayReferences); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and one or more + * arrays and returns an array that contains the elements that appear in every of those. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link SetIntersection}. + */ + public SetIntersection intersects(AggregationExpression... expressions) { + return createSetIntersection().intersects(expressions); + } + + private SetIntersection createSetIntersection() { + return usesFieldRef() ? SetIntersection.arrayAsSet(fieldReference) : SetIntersection.arrayAsSet(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and one or more + * arrays and returns an array that contains the elements that appear in any of those. + * + * @param arrayReferences must not be {@literal null}. + * @return new instance of {@link SetUnion}. + */ + public SetUnion union(String... arrayReferences) { + return createSetUnion().union(arrayReferences); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and one or more + * arrays and returns an array that contains the elements that appear in any of those. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link SetUnion}. + */ + public SetUnion union(AggregationExpression... expressions) { + return createSetUnion().union(expressions); + } + + private SetUnion createSetUnion() { + return usesFieldRef() ? SetUnion.arrayAsSet(fieldReference) : SetUnion.arrayAsSet(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and returns an array + * containing the elements that do not exist in the given {@literal arrayReference}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetDifference}. 
+ */ + public SetDifference differenceTo(String arrayReference) { + return createSetDifference().differenceTo(arrayReference); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and returns an array + * containing the elements that do not exist in the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetDifference}. + */ + public SetDifference differenceTo(AggregationExpression expression) { + return createSetDifference().differenceTo(expression); + } + + private SetDifference createSetDifference() { + return usesFieldRef() ? SetDifference.arrayAsSet(fieldReference) : SetDifference.arrayAsSet(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and returns + * {@literal true} if it is a subset of the given {@literal arrayReference}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetIsSubset}. + */ + public SetIsSubset isSubsetOf(String arrayReference) { + return createSetIsSubset().isSubsetOf(arrayReference); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and returns + * {@literal true} if it is a subset of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetIsSubset}. + */ + public SetIsSubset isSubsetOf(AggregationExpression expression) { + return createSetIsSubset().isSubsetOf(expression); + } + + private SetIsSubset createSetIsSubset() { + return usesFieldRef() ? SetIsSubset.arrayAsSet(fieldReference) : SetIsSubset.arrayAsSet(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and returns + * {@literal true} if any of the elements are {@literal true} and {@literal false} otherwise. + * + * @return new instance of {@link AnyElementTrue}. + */ + public AnyElementTrue anyElementTrue() { + return usesFieldRef() ? AnyElementTrue.arrayAsSet(fieldReference) : AnyElementTrue.arrayAsSet(expression); + } + + /** + * Creates new {@link AggregationExpression} that tkes array of the previously mentioned field and returns + * {@literal true} if no elements is {@literal false}. + * + * @return new instance of {@link AllElementsTrue}. + */ + public AllElementsTrue allElementsTrue() { + return usesFieldRef() ? AllElementsTrue.arrayAsSet(fieldReference) : AllElementsTrue.arrayAsSet(expression); + } + + private boolean usesFieldRef() { + return this.fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $setEquals}. + * + * @author Christoph Strobl + */ + public static class SetEquals extends AbstractAggregationExpression { + + private SetEquals(List arrays) { + super(arrays); + } + + @Override + protected String getMongoMethod() { + return "$setEquals"; + } + + /** + * Create new {@link SetEquals}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetEquals}. + */ + public static SetEquals arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetEquals(asFields(arrayReference)); + } + + /** + * Create new {@link SetEquals}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetEquals}. 
+ */ + public static SetEquals arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetEquals(Collections.singletonList(expression)); + } + + /** + * Creates new {@link java.util.Set} with all previously added arguments appending the given one. + * + * @param arrayReferences must not be {@literal null}. + * @return new instance of {@link SetEquals}. + */ + public SetEquals isEqualTo(String... arrayReferences) { + + Assert.notNull(arrayReferences, "ArrayReferences must not be null"); + return new SetEquals(append(Fields.fields(arrayReferences).asList())); + } + + /** + * Creates new {@link Sum} with all previously added arguments appending the given one. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link SetEquals}. + */ + public SetEquals isEqualTo(AggregationExpression... expressions) { + + Assert.notNull(expressions, "Expressions must not be null"); + return new SetEquals(append(Arrays.asList(expressions))); + } + + /** + * Creates new {@link Sum} with all previously added arguments appending the given one. + * + * @param array must not be {@literal null}. + * @return new instance of {@link SetEquals}. + */ + public SetEquals isEqualTo(Object[] array) { + + Assert.notNull(array, "Array must not be null"); + return new SetEquals(append(array)); + } + } + + /** + * {@link AggregationExpression} for {@code $setIntersection}. + * + * @author Christoph Strobl + */ + public static class SetIntersection extends AbstractAggregationExpression { + + private SetIntersection(List arrays) { + super(arrays); + } + + @Override + protected String getMongoMethod() { + return "$setIntersection"; + } + + /** + * Creates new {@link SetIntersection} + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetIntersection}. + */ + public static SetIntersection arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetIntersection(asFields(arrayReference)); + } + + /** + * Creates new {@link SetIntersection}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetIntersection}. + */ + public static SetIntersection arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetIntersection(Collections.singletonList(expression)); + } + + /** + * Creates new {@link SetIntersection} with all previously added arguments appending the given one. + * + * @param arrayReferences must not be {@literal null}. + * @return new instance of {@link SetIntersection}. + */ + public SetIntersection intersects(String... arrayReferences) { + + Assert.notNull(arrayReferences, "ArrayReferences must not be null"); + return new SetIntersection(append(asFields(arrayReferences))); + } + + /** + * Creates new {@link SetIntersection} with all previously added arguments appending the given one. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link SetIntersection}. + */ + public SetIntersection intersects(AggregationExpression... expressions) { + + Assert.notNull(expressions, "Expressions must not be null"); + return new SetIntersection(append(Arrays.asList(expressions))); + } + } + + /** + * {@link AggregationExpression} for {@code $setUnion}. 
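+ * <p>
+ * For example (field names are illustrative), {@code SetUnion.arrayAsSet("a").union("b", "c")} renders
+ * {@code { $setUnion : [ "$a", "$b", "$c" ] } }.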
+ * + * @author Christoph Strobl + */ + public static class SetUnion extends AbstractAggregationExpression { + + private SetUnion(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$setUnion"; + } + + /** + * Creates new {@link SetUnion}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetUnion}. + */ + public static SetUnion arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetUnion(asFields(arrayReference)); + } + + /** + * Creates new {@link SetUnion}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetUnion}. + */ + public static SetUnion arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetUnion(Collections.singletonList(expression)); + } + + /** + * Creates new {@link SetUnion} with all previously added arguments appending the given one. + * + * @param arrayReferences must not be {@literal null}. + * @return new instance of {@link SetUnion}. + */ + public SetUnion union(String... arrayReferences) { + + Assert.notNull(arrayReferences, "ArrayReferences must not be null"); + return new SetUnion(append(asFields(arrayReferences))); + } + + /** + * Creates new {@link SetUnion} with all previously added arguments appending the given one. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link SetUnion}. + */ + public SetUnion union(AggregationExpression... expressions) { + + Assert.notNull(expressions, "Expressions must not be null"); + return new SetUnion(append(Arrays.asList(expressions))); + } + } + + /** + * {@link AggregationExpression} for {@code $setDifference}. + * + * @author Christoph Strobl + */ + public static class SetDifference extends AbstractAggregationExpression { + + private SetDifference(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$setDifference"; + } + + /** + * Creates new {@link SetDifference}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetDifference}. + */ + public static SetDifference arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetDifference(asFields(arrayReference)); + } + + /** + * Creates new {@link SetDifference}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetDifference}. + */ + public static SetDifference arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetDifference(Collections.singletonList(expression)); + } + + /** + * Creates new {@link SetDifference} with all previously added arguments appending the given one. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetDifference}. + */ + public SetDifference differenceTo(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetDifference(append(Fields.field(arrayReference))); + } + + /** + * Creates new {@link SetDifference} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetDifference}. 
+ */ + public SetDifference differenceTo(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetDifference(append(expression)); + } + } + + /** + * {@link AggregationExpression} for {@code $setIsSubset}. + * + * @author Christoph Strobl + */ + public static class SetIsSubset extends AbstractAggregationExpression { + + private SetIsSubset(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$setIsSubset"; + } + + /** + * Creates new {@link SetIsSubset}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetIsSubset}. + */ + public static SetIsSubset arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetIsSubset(asFields(arrayReference)); + } + + /** + * Creates new {@link SetIsSubset}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetIsSubset}. + */ + public static SetIsSubset arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetIsSubset(Collections.singletonList(expression)); + } + + /** + * Creates new {@link SetIsSubset} with all previously added arguments appending the given one. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link SetIsSubset}. + */ + public SetIsSubset isSubsetOf(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new SetIsSubset(append(Fields.field(arrayReference))); + } + + /** + * Creates new {@link SetIsSubset} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetIsSubset}. + */ + public SetIsSubset isSubsetOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SetIsSubset(append(expression)); + } + } + + /** + * {@link AggregationExpression} for {@code $anyElementTrue}. + * + * @author Christoph Strobl + */ + public static class AnyElementTrue extends AbstractAggregationExpression { + + private AnyElementTrue(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$anyElementTrue"; + } + + /** + * Creates new {@link AnyElementTrue}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link AnyElementTrue}. + */ + public static AnyElementTrue arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new AnyElementTrue(asFields(arrayReference)); + } + + /** + * Creates new {@link AnyElementTrue}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link AnyElementTrue}. + */ + public static AnyElementTrue arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new AnyElementTrue(Collections.singletonList(expression)); + } + + public AnyElementTrue anyElementTrue() { + return this; + } + } + + /** + * {@link AggregationExpression} for {@code $allElementsTrue}. 
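+ * <p>
+ * For example (the {@code flags} field is illustrative), {@code AllElementsTrue.arrayAsSet("flags")} renders
+ * {@code { $allElementsTrue : [ "$flags" ] } } and yields {@literal true} when no element is {@literal false}.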
+ * + * @author Christoph Strobl + */ + public static class AllElementsTrue extends AbstractAggregationExpression { + + private AllElementsTrue(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$allElementsTrue"; + } + + /** + * Creates new {@link AllElementsTrue}. + * + * @param arrayReference must not be {@literal null}. + * @return new instance of {@link AllElementsTrue}. + */ + public static AllElementsTrue arrayAsSet(String arrayReference) { + + Assert.notNull(arrayReference, "ArrayReference must not be null"); + return new AllElementsTrue(asFields(arrayReference)); + } + + /** + * Creates new {@link AllElementsTrue}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link AllElementsTrue}. + */ + public static AllElementsTrue arrayAsSet(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new AllElementsTrue(Collections.singletonList(expression)); + } + + public AllElementsTrue allElementsTrue() { + return this; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java new file mode 100644 index 0000000000..2b8df539e1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -0,0 +1,857 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.data.domain.Sort; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Encapsulates the {@code setWindowFields}-operation. + * + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/ + */ +public class SetWindowFieldsOperation + implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation { + + private static final String CURRENT = "current"; + private static final String UNBOUNDED = "unbounded"; + + private final @Nullable Object partitionBy; + private final @Nullable AggregationOperation sortBy; + private final WindowOutput output; + + /** + * Create a new {@link SetWindowFieldsOperation} with given args. + * + * @param partitionBy The field or {@link AggregationExpression} to group by. + * @param sortBy the {@link SortOperation operation} to sort the documents by in the partition. + * @param output the {@link WindowOutput} containing the fields to add and the rules to calculate their respective + * values. 
+ */ + protected SetWindowFieldsOperation(@Nullable Object partitionBy, @Nullable AggregationOperation sortBy, + WindowOutput output) { + + this.partitionBy = partitionBy; + this.sortBy = sortBy; + this.output = output; + } + + /** + * Obtain a {@link SetWindowFieldsOperationBuilder builder} to create a {@link SetWindowFieldsOperation}. + * + * @return new instance of {@link SetWindowFieldsOperationBuilder}. + */ + public static SetWindowFieldsOperationBuilder builder() { + return new SetWindowFieldsOperationBuilder(); + } + + @Override + public ExposedFields getFields() { + return ExposedFields.synthetic(Fields.from(output.fields.toArray(new Field[0]))); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $setWindowFields = new Document(); + if (partitionBy != null) { + if (partitionBy instanceof AggregationExpression aggregationExpression) { + $setWindowFields.append("partitionBy", aggregationExpression.toDocument(context)); + } else if (partitionBy instanceof Field field) { + $setWindowFields.append("partitionBy", context.getReference(field).toString()); + } else { + $setWindowFields.append("partitionBy", partitionBy); + } + } + + if (sortBy != null) { + $setWindowFields.append("sortBy", sortBy.toDocument(context).get(sortBy.getOperator())); + } + + Document output = new Document(); + for (ComputedField field : this.output.fields) { + + Document fieldOperation = field.getWindowOperator().toDocument(context); + if (field.window != null) { + fieldOperation.put("window", field.window.toDocument(context)); + } + output.append(field.getName(), fieldOperation); + } + $setWindowFields.append("output", output); + + return new Document(getOperator(), $setWindowFields); + } + + @Override + public String getOperator() { + return "$setWindowFields"; + } + + /** + * {@link WindowOutput} defines output of {@literal $setWindowFields} stage by defining the {@link ComputedField + * field(s)} to append to the documents in the output. + */ + public static class WindowOutput { + + private final List fields; + + /** + * Create a new output containing the single given {@link ComputedField field}. + * + * @param outputField must not be {@literal null}. + */ + public WindowOutput(ComputedField outputField) { + + Assert.notNull(outputField, "OutputField must not be null"); + + this.fields = new ArrayList<>(); + this.fields.add(outputField); + } + + /** + * Append the given {@link ComputedField field} to the outptut. + * + * @param field must not be {@literal null}. + * @return this. + */ + public WindowOutput append(ComputedField field) { + + Assert.notNull(field, "Field must not be null"); + + fields.add(field); + return this; + } + + /** + * Append the given {@link AggregationExpression} as a {@link ComputedField field} in a fluent way. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ComputedFieldAppender}. + * @see #append(ComputedField) + */ + public ComputedFieldAppender append(AggregationExpression expression) { + + return new ComputedFieldAppender() { + + @Nullable private Window window; + + @Override + public WindowOutput as(String fieldname) { + + return WindowOutput.this.append(new ComputedField(fieldname, expression, window)); + } + + @Override + public ComputedFieldAppender within(Window window) { + this.window = window; + return this; + } + }; + } + + /** + * Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}. 
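+ * <p>
+ * A fluent sketch (assumes an existing {@code WindowOutput output}; the field name and window bounds are illustrative):
+ * <pre class="code">
+ * output.append(AccumulatorOperators.Sum.sumOf("qty")).within(Windows.documents(-1, 0)).as("runningSum");
+ * </pre>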
+
+		/**
+		 * Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}.
+		 */
+		public interface ComputedFieldAppender {
+
+			/**
+			 * Specify the target field name.
+			 *
+			 * @param fieldname the name of the field to add to the target document.
+			 * @return the {@link WindowOutput} that started the append operation.
+			 */
+			WindowOutput as(String fieldname);
+
+			/**
+			 * Specify the window boundaries.
+			 *
+			 * @param window must not be {@literal null}.
+			 * @return this.
+			 */
+			ComputedFieldAppender within(Window window);
+		}
+	}
+
+	/**
+	 * A {@link Field} that is the result of a computation done via an {@link AggregationExpression}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class ComputedField implements Field {
+
+		private final String name;
+		private final AggregationExpression windowOperator;
+		private final @Nullable Window window;
+
+		/**
+		 * Create a new {@link ComputedField}.
+		 *
+		 * @param name the target field name.
+		 * @param windowOperator the expression to calculate the field value.
+		 */
+		public ComputedField(String name, AggregationExpression windowOperator) {
+			this(name, windowOperator, null);
+		}
+
+		/**
+		 * Create a new {@link ComputedField}.
+		 *
+		 * @param name the target field name.
+		 * @param windowOperator the expression to calculate the field value.
+		 * @param window the boundaries to operate within. Can be {@literal null}.
+		 */
+		public ComputedField(String name, AggregationExpression windowOperator, @Nullable Window window) {
+
+			this.name = name;
+			this.windowOperator = windowOperator;
+			this.window = window;
+		}
+
+		@Override
+		public String getName() {
+			return name;
+		}
+
+		@Override
+		public String getTarget() {
+			return getName();
+		}
+
+		@Override
+		public boolean isAliased() {
+			return false;
+		}
+
+		public AggregationExpression getWindowOperator() {
+			return windowOperator;
+		}
+
+		@Nullable
+		public Window getWindow() {
+			return window;
+		}
+	}
+
+	/**
+	 * Quick access to {@link DocumentWindow document} and {@link RangeWindow range} {@link Window windows}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public interface Windows {
+
+		/**
+		 * Create a document window relative to the position of the current document.
+		 *
+		 * @param lower an integer for a position relative to the current document, {@literal current} or
+		 *          {@literal unbounded}.
+		 * @param upper an integer for a position relative to the current document, {@literal current} or
+		 *          {@literal unbounded}.
+		 * @return new instance of {@link DocumentWindow}.
+		 */
+		static DocumentWindow documents(Object lower, Object upper) {
+			return new DocumentWindow(lower, upper);
+		}
+
+		/**
+		 * Create a range window based on the sort expression.
+		 *
+		 * @param lower a numeric value to add to the sort-by field value of the current document, {@literal current} or
+		 *          {@literal unbounded}.
+		 * @param upper a numeric value to add to the sort-by field value of the current document, {@literal current} or
+		 *          {@literal unbounded}.
+		 * @param unit the unit of measure; {@literal null} defaults to {@link WindowUnits#DEFAULT}.
+		 * @return new instance of {@link RangeWindow}.
+		 */
+		static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) {
+			return new RangeWindow(lower, upper, unit == null ? WindowUnits.DEFAULT : unit);
+		}
+
+		/**
+		 * Create a range window based on the {@link Sort sort value} of the current document via a fluent API.
+		 *
+		 * @return new instance of {@link RangeWindowBuilder}.
+		 */
+		static RangeWindowBuilder range() {
+			return new RangeWindowBuilder();
+		}
+
+		/**
+		 * Create a document window relative to the position of the current document via a fluent API.
+		 *
+		 * @return new instance of {@link DocumentWindowBuilder}.
+		 */
+		static DocumentWindowBuilder documents() {
+			return new DocumentWindowBuilder();
+		}
+	}
+
+	/**
+	 * A {@link Window} to be used for {@link ComputedField#getWindow() ComputedField}.
+	 */
+	public interface Window {
+
+		/**
+		 * The lower (inclusive) boundary.
+		 *
+		 * @return the lower boundary value.
+		 */
+		Object getLower();
+
+		/**
+		 * The upper (inclusive) boundary.
+		 *
+		 * @return the upper boundary value.
+		 */
+		Object getUpper();
+
+		/**
+		 * Obtain the document representation of the window in a default {@link AggregationOperationContext context}.
+		 *
+		 * @return never {@literal null}.
+		 */
+		default Document toDocument() {
+			return toDocument(Aggregation.DEFAULT_CONTEXT);
+		}
+
+		/**
+		 * Obtain the document representation of the window in the given {@link AggregationOperationContext context}.
+		 *
+		 * @return never {@literal null}.
+		 */
+		Document toDocument(AggregationOperationContext ctx);
+	}
+
+	/**
+	 * Builder API for a {@link RangeWindow}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class RangeWindowBuilder {
+
+		private @Nullable Object lower;
+		private @Nullable Object upper;
+		private @Nullable WindowUnit unit;
+
+		/**
+		 * The lower (inclusive) range limit based on the sortBy field.
+		 *
+		 * @param lower e.g. {@literal current} or {@literal unbounded}.
+		 * @return this.
+		 */
+		public RangeWindowBuilder from(String lower) {
+
+			this.lower = lower;
+			return this;
+		}
+
+		/**
+		 * The upper (inclusive) range limit based on the sortBy field.
+		 *
+		 * @param upper e.g. {@literal current} or {@literal unbounded}.
+		 * @return this.
+		 */
+		public RangeWindowBuilder to(String upper) {
+
+			this.upper = upper;
+			return this;
+		}
+
+		/**
+		 * The lower (inclusive) range limit value to add to the sort-by field value of the current document. Use a
+		 * negative integer for a position before the current document, a positive integer for a position after it.
+		 * {@code 0} is the current document position.
+		 *
+		 * @param lower the numeric offset.
+		 * @return this.
+		 */
+		public RangeWindowBuilder from(Number lower) {
+
+			this.lower = lower;
+			return this;
+		}
+
+		/**
+		 * The upper (inclusive) range limit value to add to the sort-by field value of the current document. Use a
+		 * negative integer for a position before the current document, a positive integer for a position after it.
+		 * {@code 0} is the current document position.
+		 *
+		 * @param upper the numeric offset.
+		 * @return this.
+		 */
+		public RangeWindowBuilder to(Number upper) {
+
+			this.upper = upper;
+			return this;
+		}
+
+		/**
+		 * Use {@literal current} as {@link #from(String) lower} limit.
+		 *
+		 * @return this.
+		 */
+		public RangeWindowBuilder fromCurrent() {
+			return from(CURRENT);
+		}
+
+		/**
+		 * Use {@literal unbounded} as {@link #from(String) lower} limit.
+		 *
+		 * @return this.
+		 */
+		public RangeWindowBuilder fromUnbounded() {
+			return from(UNBOUNDED);
+		}
+
+		/**
+		 * Use {@literal current} as {@link #to(String) upper} limit.
+		 *
+		 * @return this.
+		 */
+		public RangeWindowBuilder toCurrent() {
+			return to(CURRENT);
+		}
+
+		/**
+		 * Use {@literal unbounded} as {@link #to(String) upper} limit.
+		 *
+		 * @return this.
+		 */
+		public RangeWindowBuilder toUnbounded() {
+			return to(UNBOUNDED);
+		}
+
+		/**
+		 * Set the {@link WindowUnit unit} of measure for the given {@link Window}.
+		 *
+		 * @param windowUnit must not be {@literal null}. Can be one of {@link WindowUnits}.
+		 * @return this.
+		 */
+		public RangeWindowBuilder unit(WindowUnit windowUnit) {
+
+			Assert.notNull(windowUnit, "WindowUnit must not be null");
+			this.unit = windowUnit;
+			return this;
+		}
+
+		/**
+		 * Build the {@link RangeWindow}.
+		 *
+		 * @return new instance of {@link RangeWindow}.
+		 */
+		public RangeWindow build() {
+
+			Assert.notNull(lower, "Lower bound must not be null");
+			Assert.notNull(upper, "Upper bound must not be null");
+			Assert.notNull(unit, "WindowUnit must not be null");
+
+			return new RangeWindow(lower, upper, unit);
+		}
+	}
+
+	/**
+	 * Builder API for a {@link DocumentWindow}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class DocumentWindowBuilder {
+
+		private @Nullable Object lower;
+		private @Nullable Object upper;
+
+		/**
+		 * The lower (inclusive) range limit based on the current document. Use a negative integer for a position before
+		 * the current document, a positive integer for a position after it. {@code 0} is the current document position.
+		 *
+		 * @param lower the numeric offset.
+		 * @return this.
+		 */
+		public DocumentWindowBuilder from(Number lower) {
+
+			this.lower = lower;
+			return this;
+		}
+
+		public DocumentWindowBuilder fromCurrent() {
+			return from(CURRENT);
+		}
+
+		public DocumentWindowBuilder fromUnbounded() {
+			return from(UNBOUNDED);
+		}
+
+		public DocumentWindowBuilder to(String upper) {
+
+			this.upper = upper;
+			return this;
+		}
+
+		/**
+		 * The lower (inclusive) range limit based on the current document.
+		 *
+		 * @param lower e.g. {@literal current} or {@literal unbounded}.
+		 * @return this.
+		 */
+		public DocumentWindowBuilder from(String lower) {
+
+			this.lower = lower;
+			return this;
+		}
+
+		/**
+		 * The upper (inclusive) range limit based on the current document. Use a negative integer for a position before
+		 * the current document, a positive integer for a position after it. {@code 0} is the current document position.
+		 *
+		 * @param upper the numeric offset.
+		 * @return this.
+		 */
+		public DocumentWindowBuilder to(Number upper) {
+
+			this.upper = upper;
+			return this;
+		}
+
+		public DocumentWindowBuilder toCurrent() {
+			return to(CURRENT);
+		}
+
+		public DocumentWindowBuilder toUnbounded() {
+			return to(UNBOUNDED);
+		}
+
+		public DocumentWindow build() {
+
+			Assert.notNull(lower, "Lower bound must not be null");
+			Assert.notNull(upper, "Upper bound must not be null");
+
+			return new DocumentWindow(lower, upper);
+		}
+	}
+
+	/**
+	 * Common base class for {@link Window} implementations.
+	 *
+	 * @author Christoph Strobl
+	 */
+	static abstract class WindowImpl implements Window {
+
+		private final Object lower;
+		private final Object upper;
+
+		protected WindowImpl(Object lower, Object upper) {
+			this.lower = lower;
+			this.upper = upper;
+		}
+
+		@Override
+		public Object getLower() {
+			return lower;
+		}
+
+		@Override
+		public Object getUpper() {
+			return upper;
+		}
+	}
+
+	/**
+	 * {@link Window} implementation based on the current document.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class DocumentWindow extends WindowImpl {
+
+		DocumentWindow(Object lower, Object upper) {
+			super(lower, upper);
+		}
+
+		@Override
+		public Document toDocument(AggregationOperationContext ctx) {
+			return new Document("documents", Arrays.asList(getLower(), getUpper()));
+		}
+	}
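+	// A quick sketch relating both window types to their rendered form (API taken from the builders above):
+	//
+	//   Windows.documents().from(-1).toCurrent().build();                  // { documents: [ -1, "current" ] }
+	//   Windows.range().from(-10).to(10).unit(WindowUnits.SECOND).build(); // { range: [ -10, 10 ], unit: "second" }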
+	/**
+	 * {@link Window} implementation based on the sort fields.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class RangeWindow extends WindowImpl {
+
+		private final WindowUnit unit;
+
+		protected RangeWindow(Object lower, Object upper, WindowUnit unit) {
+
+			super(lower, upper);
+			this.unit = unit;
+		}
+
+		@Override
+		public Document toDocument(AggregationOperationContext ctx) {
+
+			Document range = new Document("range", new Object[] { getLower(), getUpper() });
+			if (unit != null && !WindowUnits.DEFAULT.equals(unit)) {
+				range.append("unit", unit.name().toLowerCase());
+			}
+			return range;
+		}
+	}
+
+	/**
+	 * The actual time unit to apply to a {@link Window}.
+	 */
+	public interface WindowUnit {
+
+		String name();
+
+		/**
+		 * Converts the given time unit into a {@link WindowUnit}. Supported units are: days, hours, minutes, seconds, and
+		 * milliseconds.
+		 *
+		 * @param timeUnit the time unit to convert, must not be {@literal null}.
+		 * @return the corresponding {@link WindowUnit}.
+		 * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
+		 */
+		static WindowUnit from(TimeUnit timeUnit) {
+
+			Assert.notNull(timeUnit, "TimeUnit must not be null");
+
+			return switch (timeUnit) {
+				case DAYS -> WindowUnits.DAY;
+				case HOURS -> WindowUnits.HOUR;
+				case MINUTES -> WindowUnits.MINUTE;
+				case SECONDS -> WindowUnits.SECOND;
+				case MILLISECONDS -> WindowUnits.MILLISECOND;
+				default -> throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", timeUnit));
+			};
+		}
+
+		/**
+		 * Converts the given chrono unit into a {@link WindowUnit}. Supported units are: years, weeks, months, days,
+		 * hours, minutes, seconds, and millis.
+		 *
+		 * @param chronoUnit the chrono unit to convert, must not be {@literal null}.
+		 * @return the corresponding {@link WindowUnit}.
+		 * @throws IllegalArgumentException if the {@link ChronoUnit} is {@literal null} or not supported for conversion.
+		 */
+		static WindowUnit from(ChronoUnit chronoUnit) {
+
+			return switch (chronoUnit) {
+				case YEARS -> WindowUnits.YEAR;
+				case WEEKS -> WindowUnits.WEEK;
+				case MONTHS -> WindowUnits.MONTH;
+				case DAYS -> WindowUnits.DAY;
+				case HOURS -> WindowUnits.HOUR;
+				case MINUTES -> WindowUnits.MINUTE;
+				case SECONDS -> WindowUnits.SECOND;
+				case MILLIS -> WindowUnits.MILLISECOND;
+				default -> throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", chronoUnit));
+			};
+		}
+	}
+
+	/**
+	 * Quick access to available {@link WindowUnit units}.
+	 */
+	public enum WindowUnits implements WindowUnit {
+		DEFAULT, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND
+	}
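+	// A conversion sketch (illustration only) for the WindowUnit converters above:
+	//
+	//   WindowUnit.from(TimeUnit.MINUTES);  // WindowUnits.MINUTE
+	//   WindowUnit.from(ChronoUnit.WEEKS);  // WindowUnits.WEEK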
+	/**
+	 * A fluent builder to create a {@link SetWindowFieldsOperation}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class SetWindowFieldsOperationBuilder {
+
+		private Object partitionBy;
+		private SortOperation sortOperation;
+		private WindowOutput output;
+
+		/**
+		 * Specify the field to group by.
+		 *
+		 * @param fieldName must not be {@literal null} or empty.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder partitionByField(String fieldName) {
+
+			Assert.hasText(fieldName, "Field name must not be empty or null");
+			return partitionBy(Fields.field("$" + fieldName, fieldName));
+		}
+
+		/**
+		 * Specify the {@link AggregationExpression expression} to group by.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder partitionByExpression(AggregationExpression expression) {
+			return partitionBy(expression);
+		}
+
+		/**
+		 * Sort {@link Sort.Direction#ASC ascending} by the given fields.
+		 *
+		 * @param fields must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder sortBy(String... fields) {
+			return sortBy(Sort.by(fields));
+		}
+
+		/**
+		 * Set the sort order.
+		 *
+		 * @param sort must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder sortBy(Sort sort) {
+			return sortBy(new SortOperation(sort));
+		}
+
+		/**
+		 * Set the {@link SortOperation} to use.
+		 *
+		 * @param sort must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) {
+
+			Assert.notNull(sort, "SortOperation must not be null");
+
+			this.sortOperation = sort;
+			return this;
+		}
+
+		/**
+		 * Define the actual output computation.
+		 *
+		 * @param output must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder output(WindowOutput output) {
+
+			Assert.notNull(output, "WindowOutput must not be null");
+
+			this.output = output;
+			return this;
+		}
+
+		/**
+		 * Add a field capturing the result of the given {@link AggregationExpression expression} to the output.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link WindowChoice}.
+		 */
+		public WindowChoice output(AggregationExpression expression) {
+
+			return new WindowChoice() {
+
+				@Nullable private Window window;
+
+				@Override
+				public As within(Window window) {
+
+					Assert.notNull(window, "Window must not be null");
+
+					this.window = window;
+					return this;
+				}
+
+				@Override
+				public SetWindowFieldsOperationBuilder as(String targetFieldName) {
+
+					Assert.hasText(targetFieldName, "Target field name must not be empty or null");
+
+					ComputedField computedField = new ComputedField(targetFieldName, expression, window);
+
+					if (SetWindowFieldsOperationBuilder.this.output == null) {
+						SetWindowFieldsOperationBuilder.this.output = new WindowOutput(computedField);
+					} else {
+						SetWindowFieldsOperationBuilder.this.output.append(computedField);
+					}
+
+					return SetWindowFieldsOperationBuilder.this;
+				}
+			};
+		}
+
+		/**
+		 * Interface to capture the field name used to hold the computation result.
+		 */
+		public interface As {
+
+			/**
+			 * Define the target field name to hold the computation result.
+			 *
+			 * @param targetFieldName must not be {@literal null} or empty.
+			 * @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance.
+			 */
+			SetWindowFieldsOperationBuilder as(String targetFieldName);
+		}
+
+		/**
+		 * Interface to capture an optional {@link Window} applicable to the field computation.
+		 */
+		public interface WindowChoice extends As {
+
+			/**
+			 * Specify calculation boundaries.
+			 *
+			 * @param window must not be {@literal null}.
+			 * @return never {@literal null}.
+			 */
+			As within(Window window);
+		}
+
+		/**
+		 * Partition by a value that translates to a valid MongoDB expression.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder partitionBy(Object value) {
+
+			Assert.notNull(value, "Partition By must not be null");
+
+			partitionBy = value;
+			return this;
+		}
+
+		/**
+		 * Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments.
+		 *
+		 * @return new instance of {@link SetWindowFieldsOperation}.
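+		 * <p>
+		 * A complete usage sketch (illustration only; assumes {@code state}, {@code date} and {@code qty} fields and the
+		 * {@code AccumulatorOperators} gateway from this module):
+		 *
+		 * <pre>
+		 * SetWindowFieldsOperation.builder() //
+		 * 		.partitionByField("state") //
+		 * 		.sortBy(Sort.by(Sort.Direction.ASC, "date")) //
+		 * 		.output(AccumulatorOperators.valueOf("qty").sum()) //
+		 * 		.within(Windows.documents().fromUnbounded().toCurrent().build()) //
+		 * 		.as("cumulativeQty") //
+		 * 		.build();
+		 * </pre>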
+ */ + public SetWindowFieldsOperation build() { + return new SetWindowFieldsOperation(partitionBy, sortOperation, output); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java index 67d598134e..4d5de23087 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,21 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Encapsulates the aggregation framework {@code $skip}-operation. *

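+ * For example (sketch): {@code Aggregation.skip(20L)} renders the stage {@code { $skip : 20 }}.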
                    - * We recommend to use the static factory method {@link Aggregation#skip(int)} instead of creating instances of this + * We recommend to use the static factory method {@link Aggregation#skip(long)} instead of creating instances of this * class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/skip/ + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl * @since 1.3 + * @see MongoDB Aggregation Framework: + * $skip */ public class SkipOperation implements AggregationOperation { @@ -37,21 +37,22 @@ public class SkipOperation implements AggregationOperation { /** * Creates a new {@link SkipOperation} skipping the given number of elements. - * - * @param skipCount number of documents to skip. + * + * @param skipCount number of documents to skip, must not be less than zero. */ public SkipOperation(long skipCount) { - Assert.isTrue(skipCount >= 0, "Skip count must not be negative!"); + Assert.isTrue(skipCount >= 0, "Skip count must not be negative"); this.skipCount = skipCount; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject("$skip", skipCount); + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), skipCount); + } + + @Override + public String getOperator() { + return "$skip"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java new file mode 100644 index 0000000000..ffc0aa0654 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java @@ -0,0 +1,80 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $sortByCount}-operation. + *
+ * The {@code $sortByCount} stage is typically used with {@link Aggregation} and {@code $facet}. It groups incoming
+ * documents based on the value of a specified expression and computes the count of documents in each distinct group.
+ * {@link SortByCountOperation} is equivalent to {@code { $group: { _id: <expression>, count: { $sum: 1 } } }, { $sort:
+ * { count: -1 } }}.
+ *
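+ * For example (sketch, assuming a {@code country} field):
+ *
+ * <pre>
+ * Aggregation.sortByCount("country"); // renders { $sortByCount : "$country" }
+ * </pre>
+ *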
                    + * We recommend to use the static factory method {@link Aggregation#sortByCount(String)} instead of creating instances + * of this class directly. + * + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/ + * @author Jérôme Guyon + * @author Mark Paluch + * @since 2.1 + */ +public class SortByCountOperation implements AggregationOperation { + + private final @Nullable Field groupByField; + private final @Nullable AggregationExpression groupByExpression; + + /** + * Creates a new {@link SortByCountOperation} given a {@link Field group-by field}. + * + * @param groupByField must not be {@literal null}. + */ + public SortByCountOperation(Field groupByField) { + + Assert.notNull(groupByField, "Group by field must not be null"); + + this.groupByField = groupByField; + this.groupByExpression = null; + } + + /** + * Creates a new {@link SortByCountOperation} given a {@link AggregationExpression group-by expression}. + * + * @param groupByExpression must not be {@literal null}. + */ + public SortByCountOperation(AggregationExpression groupByExpression) { + + Assert.notNull(groupByExpression, "Group by expression must not be null"); + + this.groupByExpression = groupByExpression; + this.groupByField = null; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + return new Document(getOperator(), groupByExpression == null ? context.getReference(groupByField).toString() + : groupByExpression.toDocument(context)); + } + + @Override + public String getOperator() { + return "$sortByCount"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java index 0b6f6dee2e..b8c6096f1e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,26 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Encapsulates the aggregation framework {@code $sort}-operation. *

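+ * For example (sketch): {@code Aggregation.sort(Sort.by(Direction.DESC, "age"))} renders {@code { $sort : { age : -1 } }}.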
                    * We recommend to use the static factory method {@link Aggregation#sort(Direction, String...)} instead of creating * instances of this class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/sort/#pipe._S_sort + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 + * @see MongoDB Aggregation Framework: + * $sort */ public class SortOperation implements AggregationOperation { @@ -41,31 +42,27 @@ public class SortOperation implements AggregationOperation { /** * Creates a new {@link SortOperation} for the given {@link Sort} instance. - * + * * @param sort must not be {@literal null}. */ public SortOperation(Sort sort) { - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(sort, "Sort must not be null"); this.sort = sort; } public SortOperation and(Direction direction, String... fields) { - return and(new Sort(direction, fields)); + return and(Sort.by(direction, fields)); } public SortOperation and(Sort sort) { return new SortOperation(this.sort.and(sort)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public DBObject toDBObject(AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { - BasicDBObject object = new BasicDBObject(); + Document object = new Document(); for (Order order : sort) { @@ -74,6 +71,11 @@ public DBObject toDBObject(AggregationOperationContext context) { object.put(reference.getRaw(), order.isAscending() ? 1 : -1); } - return new BasicDBObject("$sort", object); + return new Document(getOperator(), object); + } + + @Override + public String getOperator() { + return "$sort"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java index 58dc08b364..3119e2729c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,39 +15,44 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.springframework.data.mongodb.util.DBObjectUtils.*; - import java.util.ArrayList; -import java.util.Collections; +import java.util.Arrays; import java.util.List; +import org.bson.Document; import org.springframework.core.GenericTypeResolver; import org.springframework.data.mongodb.core.spel.ExpressionNode; import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport; import org.springframework.data.mongodb.core.spel.LiteralNode; import org.springframework.data.mongodb.core.spel.MethodReferenceNode; +import org.springframework.data.mongodb.core.spel.MethodReferenceNode.AggregationMethodReference; +import org.springframework.data.mongodb.core.spel.MethodReferenceNode.AggregationMethodReference.ArgumentType; +import org.springframework.data.mongodb.core.spel.NotOperatorNode; import org.springframework.data.mongodb.core.spel.OperatorNode; import org.springframework.expression.spel.ExpressionState; import org.springframework.expression.spel.SpelNode; import org.springframework.expression.spel.SpelParserConfiguration; import org.springframework.expression.spel.ast.CompoundExpression; +import org.springframework.expression.spel.ast.ConstructorReference; import org.springframework.expression.spel.ast.Indexer; import org.springframework.expression.spel.ast.InlineList; +import org.springframework.expression.spel.ast.InlineMap; +import org.springframework.expression.spel.ast.OperatorNot; import org.springframework.expression.spel.ast.PropertyOrFieldReference; import org.springframework.expression.spel.standard.SpelExpression; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.NumberUtils; - -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.util.ObjectUtils; /** * Renders the AST of a SpEL expression as a MongoDB Aggregation Framework projection expression. - * + * * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch */ class SpelExpressionTransformer implements AggregationExpressionTransformer { @@ -59,18 +64,12 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer { /** * Creates a new {@link SpelExpressionTransformer}. 
*/ - public SpelExpressionTransformer() { - - List> conversions = new ArrayList>(); - conversions.add(new OperatorNodeConversion(this)); - conversions.add(new LiteralNodeConversion(this)); - conversions.add(new IndexerNodeConversion(this)); - conversions.add(new InlineListNodeConversion(this)); - conversions.add(new PropertyOrFieldReferenceNodeConversion(this)); - conversions.add(new CompoundExpressionNodeConversion(this)); - conversions.add(new MethodReferenceNodeConversion(this)); - - this.conversions = Collections.unmodifiableList(conversions); + SpelExpressionTransformer() { + this.conversions = List.of(new OperatorNodeConversion(this), new LiteralNodeConversion(this), + new IndexerNodeConversion(this), new InlineListNodeConversion(this), + new PropertyOrFieldReferenceNodeConversion(this), new CompoundExpressionNodeConversion(this), + new MethodReferenceNodeConversion(this), new NotOperatorNodeConversion(this), + new ValueRetrievingNodeConversion(this)); } /** @@ -78,7 +77,7 @@ public SpelExpressionTransformer() { * {@link AggregationOperationContext} {@code context}. *

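+ * For example (sketch): {@code transform("netPrice + [0]", context, 1)} yields {@code { "$add" : [ "$netPrice", 1 ] }}.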
                    * Exposes the given @{code params} as [0] ... [n]. - * + * * @param expression must not be {@literal null} * @param context must not be {@literal null} * @param params must not be {@literal null} @@ -86,21 +85,17 @@ public SpelExpressionTransformer() { */ public Object transform(String expression, AggregationOperationContext context, Object... params) { - Assert.notNull(expression, "Expression must not be null!"); - Assert.notNull(context, "AggregationOperationContext must not be null!"); - Assert.notNull(params, "Parameters must not be null!"); + Assert.notNull(expression, "Expression must not be null"); + Assert.notNull(context, "AggregationOperationContext must not be null"); + Assert.notNull(params, "Parameters must not be null"); SpelExpression spelExpression = (SpelExpression) PARSER.parseExpression(expression); ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG); ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state); - return transform(new AggregationExpressionTransformationContext(node, null, null, context)); + return transform(new AggregationExpressionTransformationContext<>(node, null, null, context)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionTransformer#transform(org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport) - */ public Object transform(AggregationExpressionTransformationContext context) { return lookupConversionFor(context.getCurrentNode()).convert(context); } @@ -108,7 +103,7 @@ public Object transform(AggregationExpressionTransformationContext lookupConversionFor(ExpressionN } throw new IllegalArgumentException("Unsupported Element: " + node + " Type: " + node.getClass() - + " You probably have a syntax error in your SpEL expression!"); + + " You probably have a syntax error in your SpEL expression"); } /** * Abstract base class for {@link SpelNode} to (Db)-object conversions. - * + * * @author Thomas Darimont * @author Oliver Gierke */ - private static abstract class ExpressionNodeConversion implements - AggregationExpressionTransformer { + private static abstract class ExpressionNodeConversion + implements AggregationExpressionTransformer { private final AggregationExpressionTransformer transformer; private final Class nodeType; /** * Creates a new {@link ExpressionNodeConversion}. - * + * * @param transformer must not be {@literal null}. */ @SuppressWarnings("unchecked") public ExpressionNodeConversion(AggregationExpressionTransformer transformer) { - Assert.notNull(transformer, "Transformer must not be null!"); + Assert.notNull(transformer, "Transformer must not be null"); this.nodeType = (Class) GenericTypeResolver.resolveTypeArgument(this.getClass(), ExpressionNodeConversion.class); @@ -155,7 +150,7 @@ public ExpressionNodeConversion(AggregationExpressionTransformer transformer) { /** * Returns whether the current conversion supports the given {@link ExpressionNode}. By default we will match the * node type against the genric type the subclass types the type parameter to. - * + * * @param node will never be {@literal null}. * @return true if {@literal this} conversion can be applied to the given {@code node}. */ @@ -165,15 +160,15 @@ protected boolean supports(ExpressionNode node) { /** * Triggers the transformation for the given {@link ExpressionNode} and the given current context. - * + * * @param node must not be {@literal null}. * @param context must not be {@literal null}. 
* @return */ protected Object transform(ExpressionNode node, AggregationExpressionTransformationContext context) { - Assert.notNull(node, "ExpressionNode must not be null!"); - Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!"); + Assert.notNull(node, "ExpressionNode must not be null"); + Assert.notNull(context, "AggregationExpressionTransformationContext must not be null"); return transform(node, context.getParentNode(), null, context); } @@ -181,27 +176,23 @@ protected Object transform(ExpressionNode node, AggregationExpressionTransformat /** * Triggers the transformation with the given new {@link ExpressionNode}, new parent node, the current operation and * the previous context. - * + * * @param node must not be {@literal null}. - * @param parent - * @param operation + * @param parent may be {@literal null}. + * @param operation may be {@literal null}. * @param context must not be {@literal null}. * @return */ - protected Object transform(ExpressionNode node, ExpressionNode parent, DBObject operation, + protected Object transform(ExpressionNode node, @Nullable ExpressionNode parent, @Nullable Document operation, AggregationExpressionTransformationContext context) { - Assert.notNull(node, "ExpressionNode must not be null!"); - Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!"); + Assert.notNull(node, "ExpressionNode must not be null"); + Assert.notNull(context, "AggregationExpressionTransformationContext must not be null"); return transform(new AggregationExpressionTransformationContext(node, parent, operation, context.getAggregationContext())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#transform(org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext) - */ @Override public Object transform(AggregationExpressionTransformationContext context) { return transformer.transform(context); @@ -209,7 +200,7 @@ public Object transform(AggregationExpressionTransformationContext { @@ -227,54 +218,62 @@ public OperatorNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { OperatorNode currentNode = context.getCurrentNode(); + Document operationObject = createOperationObjectAndAddToPreviousArgumentsIfNecessary(context, currentNode); + + if (currentNode.isLogicalOperator()) { + + for (ExpressionNode expressionNode : currentNode) { + transform(expressionNode, currentNode, operationObject, context); + } + + return operationObject; + } - DBObject operationObject = createOperationObjectAndAddToPreviousArgumentsIfNecessary(context, currentNode); Object leftResult = transform(currentNode.getLeft(), currentNode, operationObject, context); if (currentNode.isUnaryMinus()) { return convertUnaryMinusOp(context, leftResult); } - // we deliberately ignore the RHS result - transform(currentNode.getRight(), currentNode, operationObject, context); + if (!currentNode.isUnaryOperator()) { + // we deliberately ignore the RHS result + transform(currentNode.getRight(), currentNode, operationObject, context); + } 
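+			// e.g. (sketch): "a > b" renders { "$gt" : [ "$a", "$b" ] }, while a chain like "1 + 2 + 3" collapses
+			// into a single { "$add" : [ 1, 2, 3 ] } via createOperationObjectAndAddToPreviousArgumentsIfNecessary.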
return operationObject; } - private DBObject createOperationObjectAndAddToPreviousArgumentsIfNecessary( + private Document createOperationObjectAndAddToPreviousArgumentsIfNecessary( AggregationExpressionTransformationContext context, OperatorNode currentNode) { - DBObject nextDbObject = new BasicDBObject(currentNode.getMongoOperator(), new BasicDBList()); + Document nextDocument = new Document(currentNode.getMongoOperator(), new ArrayList<>()); if (!context.hasPreviousOperation()) { - return nextDbObject; + return nextDocument; } if (context.parentIsSameOperation()) { // same operator applied in a row e.g. 1 + 2 + 3 carry on with the operation and render as $add: [1, 2 ,3] - nextDbObject = context.getPreviousOperationObject(); + nextDocument = context.getPreviousOperationObject(); } else if (!currentNode.isUnaryOperator()) { // different operator -> add context object for next level to list if arguments of previous expression - context.addToPreviousOperation(nextDbObject); + context.addToPreviousOperation(nextDocument); } - return nextDbObject; + return nextDocument; } - private Object convertUnaryMinusOp(ExpressionTransformationContextSupport context, Object leftResult) { + private Object convertUnaryMinusOp(ExpressionTransformationContextSupport context, + @Nullable Object leftResult) { Object result = leftResult instanceof Number ? leftResult - : new BasicDBObject("$multiply", dbList(-1, leftResult)); + : new Document("$multiply", Arrays.asList(-1, leftResult)); if (leftResult != null && context.hasPreviousOperation()) { context.addToPreviousOperation(result); @@ -283,41 +282,29 @@ private Object convertUnaryMinusOp(ExpressionTransformationContextSupport { - public IndexerNodeConversion(AggregationExpressionTransformer transformer) { + IndexerNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { return context.addToPreviousOrReturn(context.getCurrentNode().getValue()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode) - */ @Override protected boolean supports(ExpressionNode node) { return node.isOfType(Indexer.class); @@ -326,19 +313,16 @@ protected boolean supports(ExpressionNode node) { /** * A {@link ExpressionNodeConversion} that converts in-line list expressions. 
- * + * * @author Thomas Darimont */ private static class InlineListNodeConversion extends ExpressionNodeConversion { - public InlineListNodeConversion(AggregationExpressionTransformer transformer) { + InlineListNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ + @Nullable @Override protected Object convert(AggregationExpressionTransformationContext context) { @@ -352,10 +336,6 @@ protected Object convert(AggregationExpressionTransformationContext { - public PropertyOrFieldReferenceNodeConversion(AggregationExpressionTransformer transformer) { + PropertyOrFieldReferenceNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#convert(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionTransformationContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { @@ -385,10 +361,6 @@ protected Object convert(AggregationExpressionTransformationContext { - public LiteralNodeConversion(AggregationExpressionTransformer transformer) { + LiteralNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override @SuppressWarnings("unchecked") protected Object convert(AggregationExpressionTransformationContext context) { @@ -432,10 +400,6 @@ protected Object convert(AggregationExpressionTransformationContext return value; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(org.springframework.expression.spel.SpelNode) - */ @Override protected boolean supports(ExpressionNode node) { return node.isLiteral(); @@ -444,50 +408,66 @@ protected boolean supports(ExpressionNode node) { /** * A {@link ExpressionNodeConversion} that converts method reference expressions. 
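+ * For example (sketch): the SpEL method reference {@code concat(a, b)} is rendered as
+ * {@code { "$concat" : [ "$a", "$b" ] }}.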
- * + * * @author Thomas Darimont * @author Oliver Gierke */ private static class MethodReferenceNodeConversion extends ExpressionNodeConversion { - public MethodReferenceNodeConversion(AggregationExpressionTransformer transformer) { + MethodReferenceNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { MethodReferenceNode node = context.getCurrentNode(); - List args = new ArrayList(); + AggregationMethodReference methodReference = node.getMethodReference(); - for (ExpressionNode childNode : node) { - args.add(transform(childNode, context)); + Assert.state(methodReference != null, "Cannot resolve current node to AggregationMethodReference"); + + Object args = null; + + if (ObjectUtils.nullSafeEquals(methodReference.getArgumentType(), ArgumentType.SINGLE)) { + args = transform(node.getChild(0), context); + } else if (ObjectUtils.nullSafeEquals(methodReference.getArgumentType(), ArgumentType.MAP)) { + + Document dbo = new Document(); + + int i = 0; + for (ExpressionNode child : node) { + dbo.put(methodReference.getArgumentMap()[i++], transform(child, context)); + } + args = dbo; + } else if (ObjectUtils.nullSafeEquals(methodReference.getArgumentType(), ArgumentType.EMPTY_DOCUMENT)) { + args = new Document(); + } else { + + List argList = new ArrayList<>(); + + for (ExpressionNode childNode : node) { + argList.add(transform(childNode, context)); + } + + args = argList; } - return context.addToPreviousOrReturn(new BasicDBObject(node.getMethodName(), dbList(args.toArray()))); + return context.addToPreviousOrReturn(new Document(methodReference.getMongoOperator(), args)); } } /** * A {@link ExpressionNodeConversion} that converts method compound expressions. - * + * * @author Thomas Darimont * @author Oliver Gierke */ private static class CompoundExpressionNodeConversion extends ExpressionNodeConversion { - public CompoundExpressionNodeConversion(AggregationExpressionTransformer transformer) { + CompoundExpressionNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { @@ -501,13 +481,72 @@ protected Object convert(AggregationExpressionTransformationContext { + + /** + * Creates a new {@link ExpressionNodeConversion}. + * + * @param transformer must not be {@literal null}. 
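+ * For example (sketch): this conversion renders the SpEL negation {@code !(qty > 250)} as
+ * {@code { "$not" : [ { "$gt" : [ "$qty", 250 ] } ] }}.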
+ */ + NotOperatorNodeConversion(AggregationExpressionTransformer transformer) { + super(transformer); + } + + @Override + protected Object convert(AggregationExpressionTransformationContext context) { + + NotOperatorNode node = context.getCurrentNode(); + List args = new ArrayList<>(); + + for (ExpressionNode childNode : node) { + args.add(transform(childNode, context)); + } + + return context.addToPreviousOrReturn(new Document(node.getMongoOperator(), args)); + } + + @Override + protected boolean supports(ExpressionNode node) { + return node.isOfType(OperatorNot.class); + } + } + + /** + * @author Christoph Strobl + * @since 1.10 + */ + static class ValueRetrievingNodeConversion extends ExpressionNodeConversion { + + /** + * Creates a new {@link ExpressionNodeConversion}. + * + * @param transformer must not be {@literal null}. + */ + ValueRetrievingNodeConversion(AggregationExpressionTransformer transformer) { + super(transformer); + } + + @Override + protected Object convert(AggregationExpressionTransformationContext context) { + + Object value = context.getCurrentNode().getValue(); + return ObjectUtils.isArray(value) ? Arrays.asList(ObjectUtils.toObjectArray(value)) : value; + } + + @Override + protected boolean supports(ExpressionNode node) { + return node.isOfType(InlineMap.class) || node.isOfType(InlineList.class) + || node.isOfType(ConstructorReference.class); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java new file mode 100644 index 0000000000..9788497601 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java @@ -0,0 +1,2427 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +import org.springframework.data.domain.Range; +import org.springframework.data.mongodb.util.RegexFlags; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal String} aggregation operations. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + * @since 1.10 + */ +public class StringOperators { + + /** + * Take the array referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StringOperatorFactory}. + */ + public static StringOperatorFactory valueOf(String fieldReference) { + return new StringOperatorFactory(fieldReference); + } + + /** + * Take the array referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StringOperatorFactory}. 
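+ * <p>
+ * For example (sketch): {@code StringOperators.valueOf("firstname").concat(" ").concatValueOf("lastname")} renders
+ * {@code { $concat : [ "$firstname", " ", "$lastname" ] }}.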
+ */ + public static StringOperatorFactory valueOf(AggregationExpression fieldReference) { + return new StringOperatorFactory(fieldReference); + } + + /** + * @author Christoph Strobl + */ + public static class StringOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link StringOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public StringOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link StringOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public StringOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and concats the value + * of the referenced field to it. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public Concat concatValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createConcat().concatValueOf(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and concats the result + * of the given {@link AggregationExpression} to it. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public Concat concatValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createConcat().concatValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and concats given + * {@literal value} to it. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public Concat concat(String value) { + + Assert.notNull(value, "Value must not be null"); + return createConcat().concat(value); + } + + private Concat createConcat() { + return usesFieldRef() ? Concat.valueOf(fieldReference) : Concat.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and returns a substring + * starting at a specified index position. + * + * @param start start index number (including) + * @return new instance of {@link Substr}. + */ + public Substr substring(int start) { + return substring(start, -1); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and returns a substring + * starting at a specified index position including the specified number of characters. + * + * @param start start index number (including) + * @param nrOfChars number of characters. + * @return new instance of {@link Substr}. + */ + public Substr substring(int start, int nrOfChars) { + return createSubstr().substring(start, nrOfChars); + } + + private Substr createSubstr() { + return usesFieldRef() ? Substr.valueOf(fieldReference) : Substr.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and lowers it. 
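+	 * For example (sketch): {@code StringOperators.valueOf("name").toLower()} renders {@code { $toLower : "$name" }}.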
+ * + * @return new instance of {@link ToLower}. + */ + public ToLower toLower() { + return usesFieldRef() ? ToLower.lowerValueOf(fieldReference) : ToLower.lowerValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and uppers it. + * + * @return new instance of {@link ToUpper}. + */ + public ToUpper toUpper() { + return usesFieldRef() ? ToUpper.upperValueOf(fieldReference) : ToUpper.upperValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and performs + * case-insensitive comparison to the given {@literal value}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link StrCaseCmp}. + */ + public StrCaseCmp strCaseCmp(String value) { + + Assert.notNull(value, "Value must not be null"); + return createStrCaseCmp().strcasecmp(value); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and performs + * case-insensitive comparison to the referenced {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StrCaseCmp}. + */ + public StrCaseCmp strCaseCmpValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createStrCaseCmp().strcasecmpValueOf(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and performs + * case-insensitive comparison to the result of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StrCaseCmp}. + */ + public StrCaseCmp strCaseCmpValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createStrCaseCmp().strcasecmpValueOf(expression); + } + + private StrCaseCmp createStrCaseCmp() { + return usesFieldRef() ? StrCaseCmp.valueOf(fieldReference) : StrCaseCmp.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and searches a string + * for an occurrence of a given {@literal substring} and returns the UTF-8 byte index (zero-based) of the first + * occurrence. + * + * @param substring must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. + */ + public IndexOfBytes indexOf(String substring) { + + Assert.notNull(substring, "Substring must not be null"); + return createIndexOfBytesSubstringBuilder().indexOf(substring); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and searches a string + * for an occurrence of a substring contained in the given {@literal field reference} and returns the UTF-8 byte + * index (zero-based) of the first occurrence. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. + */ + public IndexOfBytes indexOf(Field fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createIndexOfBytesSubstringBuilder().indexOf(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and searches a string + * for an occurrence of a substring resulting from the given {@link AggregationExpression} and returns the UTF-8 + * byte index (zero-based) of the first occurrence. 
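+	 * For example (sketch): {@code StringOperators.valueOf("item").indexOf("B")} renders
+	 * {@code { $indexOfBytes : [ "$item", "B" ] }}.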
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. + */ + public IndexOfBytes indexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createIndexOfBytesSubstringBuilder().indexOf(expression); + } + + private IndexOfBytes.SubstringBuilder createIndexOfBytesSubstringBuilder() { + return usesFieldRef() ? IndexOfBytes.valueOf(fieldReference) : IndexOfBytes.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and searches a string + * for an occurrence of a given {@literal substring} and returns the UTF-8 code point index (zero-based) of the + * first occurrence. + * + * @param substring must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public IndexOfCP indexOfCP(String substring) { + + Assert.notNull(substring, "Substring must not be null"); + return createIndexOfCPSubstringBuilder().indexOf(substring); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and searches a string + * for an occurrence of a substring contained in the given {@literal field reference} and returns the UTF-8 code + * point index (zero-based) of the first occurrence. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public IndexOfCP indexOfCP(Field fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createIndexOfCPSubstringBuilder().indexOf(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and searches a string + * for an occurrence of a substring resulting from the given {@link AggregationExpression} and returns the UTF-8 + * code point index (zero-based) of the first occurrence. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public IndexOfCP indexOfCP(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createIndexOfCPSubstringBuilder().indexOf(expression); + } + + private IndexOfCP.SubstringBuilder createIndexOfCPSubstringBuilder() { + return usesFieldRef() ? IndexOfCP.valueOf(fieldReference) : IndexOfCP.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated string representation into an array of + * substrings based on the given delimiter. + * + * @param delimiter must not be {@literal null}. + * @return new instance of {@link Split}. + */ + public Split split(String delimiter) { + return createSplit().split(delimiter); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated string representation into an array of + * substrings based on the value of the referenced field as delimiter. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Split}. + */ + public Split split(Field fieldReference) { + return createSplit().split(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that divides the associated string representation into an array of + * substrings based on a delimiter resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Split}.
+ */ + public Split split(AggregationExpression expression) { + return createSplit().split(expression); + } + + private Split createSplit() { + return usesFieldRef() ? Split.valueOf(fieldReference) : Split.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the number of UTF-8 bytes in the associated string + * representation. + * + * @return new instance of {@link StrLenBytes}. + */ + public StrLenBytes length() { + return usesFieldRef() ? StrLenBytes.stringLengthOf(fieldReference) : StrLenBytes.stringLengthOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that returns the number of UTF-8 code points in the associated string + * representation. + * + * @return new instance of {@link StrLenCP}. + */ + public StrLenCP lengthCP() { + return usesFieldRef() ? StrLenCP.stringLengthOfCP(fieldReference) : StrLenCP.stringLengthOfCP(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and returns a substring + * starting at a specified code point index position. + * + * @param codePointStart start code point index (inclusive). + * @return new instance of {@link SubstrCP}. + */ + public SubstrCP substringCP(int codePointStart) { + return substringCP(codePointStart, -1); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and returns a substring + * starting at a specified code point index position including the specified number of code points. + * + * @param codePointStart start code point index (inclusive). + * @param nrOfCodePoints number of code points. + * @return new instance of {@link SubstrCP}. + */ + public SubstrCP substringCP(int codePointStart, int nrOfCodePoints) { + return createSubstrCP().substringCP(codePointStart, nrOfCodePoints); + } + + private SubstrCP createSubstrCP() { + return usesFieldRef() ? SubstrCP.valueOf(fieldReference) : SubstrCP.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims whitespaces + * from the beginning and end.
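+ * <p> + * A minimal usage sketch (field name {@code "name"} is a placeholder): + * <pre> + * StringOperators.valueOf("name").trim(); // renders { $trim : { input : "$name" } } + * </pre>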
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link Trim}. + * @since 2.1 + */ + public Trim trim() { + return createTrim(); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the given + * character sequence from the beginning and end.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link Trim}. + * @since 2.1 + */ + public Trim trim(String chars) { + return trim().chars(chars); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the character + * sequence resulting from the given {@link AggregationExpression} from the beginning and end.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trim}. + * @since 2.1 + */ + public Trim trim(AggregationExpression expression) { + return trim().charsOf(expression); + } + + private Trim createTrim() { + return usesFieldRef() ? Trim.valueOf(fieldReference) : Trim.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims whitespaces + * from the beginning.
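+ * <p> + * A minimal usage sketch (field name {@code "name"} is a placeholder): + * <pre> + * StringOperators.valueOf("name").ltrim(); // renders { $ltrim : { input : "$name" } } + * </pre>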
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link LTrim}. + * @since 2.1 + */ + public LTrim ltrim() { + return createLTrim(); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the given + * character sequence from the beginning.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link LTrim}. + * @since 2.1 + */ + public LTrim ltrim(String chars) { + return ltrim().chars(chars); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the character + * sequence resulting from the given {@link AggregationExpression} from the beginning.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LTrim}. + * @since 2.1 + */ + public LTrim ltrim(AggregationExpression expression) { + return ltrim().charsOf(expression); + } + + private LTrim createLTrim() { + return usesFieldRef() ? LTrim.valueOf(fieldReference) : LTrim.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims whitespaces + * from the end.
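+ * <p> + * A minimal usage sketch (field name {@code "name"} is a placeholder): + * <pre> + * StringOperators.valueOf("name").rtrim(); // renders { $rtrim : { input : "$name" } } + * </pre>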
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RTrim}. + * @since 2.1 + */ + public RTrim rtrim() { + return createRTrim(); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the given + * character sequence from the end.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link RTrim}. + * @since 2.1 + */ + public RTrim rtrim(String chars) { + return rtrim().chars(chars); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the character + * sequence resulting from the given {@link AggregationExpression} from the end.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RTrim}. + * @since 2.1 + */ + public RTrim rtrim(AggregationExpression expression) { + return rtrim().charsOf(expression); + } + + private RTrim createRTrim() { + return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the given + * regular expression and returns information about the first match.
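+ * <p> + * A minimal usage sketch (field name {@code "comment"} and pattern {@code "^[Bb]"} are placeholders): + * <pre> + * StringOperators.valueOf("comment").regexFind("^[Bb]"); // renders { $regexFind : { input : "$comment", regex : "^[Bb]" } } + * </pre>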
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(String regex) { + return createRegexFind().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the regular + * expression resulting from the given {@link AggregationExpression} and returns information about the first match.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(AggregationExpression expression) { + return createRegexFind().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the {@link Pattern}, applies the regular expression with + * the options specified in the argument and returns information about the first match. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(Pattern pattern) { + return createRegexFind().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the regular + * expression with the options specified in the argument and returns information about the first match. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(String regex, String options) { + return createRegexFind().regex(regex).options(options); + } + + private RegexFind createRegexFind() { + return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the given + * regular expression and returns information about all matches.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(String regex) { + return createRegexFindAll().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the regular + * expression resulting from the given {@link AggregationExpression} and returns information about all matches.
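+ * <p> + * A minimal usage sketch (field name {@code "comment"} and pattern {@code "^[Bb]"} are placeholders): + * <pre> + * StringOperators.valueOf("comment").regexFindAll("^[Bb]"); // renders { $regexFindAll : { input : "$comment", regex : "^[Bb]" } } + * </pre>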
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(AggregationExpression expression) { + return createRegexFindAll().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes a {@link Pattern}, applies the regular expression with + * the options specified in the argument and returns information about all matches. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(Pattern pattern) { + return createRegexFindAll().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the regular + * expression with the options specified in the argument and returns information about all matches. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(String regex, String options) { + return createRegexFindAll().regex(regex).options(options); + } + + private RegexFindAll createRegexFindAll() { + return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the given + * regular expression and returns whether a match is found.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(String regex) { + return createRegexMatch().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the regular + * expression resulting from the given {@link AggregationExpression} and returns whether a match is found.
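+ * <p> + * A minimal usage sketch (field name {@code "comment"} and pattern {@code "^[Bb]"} are placeholders): + * <pre> + * StringOperators.valueOf("comment").regexMatch("^[Bb]"); // renders { $regexMatch : { input : "$comment", regex : "^[Bb]" } } + * </pre>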
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(AggregationExpression expression) { + return createRegexMatch().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes a {@link Pattern}, applies the regular expression with + * the options specified in the argument and returns whether a match is found. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(Pattern pattern) { + return createRegexMatch().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation, applies the regular + * expression with the options specified in the argument and returns whether a match is found. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(String regex, String options) { + return createRegexMatch().regex(regex).options(options); + } + + private RegexMatch createRegexMatch() { + return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces the first + * occurrence of the search string with the given replacement. + * + * @param search the string to search for, must not be {@literal null}. + * @param replacement the string to substitute, must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + * @since 3.4 + */ + public ReplaceOne replaceOne(String search, String replacement) { + return createReplaceOne().find(search).replacement(replacement); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces the first + * occurrence of the search string computed by the given {@link AggregationExpression} with the given replacement. + * + * @param search the expression evaluating to the string to search for, must not be {@literal null}. + * @param replacement the string to substitute, must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + * @since 3.4 + */ + public ReplaceOne replaceOne(AggregationExpression search, String replacement) { + return createReplaceOne().findValueOf(search).replacement(replacement); + } + + private ReplaceOne createReplaceOne() { + return usesFieldRef() ? ReplaceOne.valueOf(fieldReference) : ReplaceOne.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces all + * occurrences of the search string with the given replacement. + * + * @param search the string to search for, must not be {@literal null}. + * @param replacement the string to substitute, must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + * @since 3.4 + */ + public ReplaceAll replaceAll(String search, String replacement) { + return createReplaceAll().find(search).replacement(replacement); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces all + * occurrences of the search string computed by the given {@link AggregationExpression} with the given replacement. + * + * @param search the expression evaluating to the string to search for, must not be {@literal null}. + * @param replacement the string to substitute, must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + * @since 3.4 + */ + public ReplaceAll replaceAll(AggregationExpression search, String replacement) { + return createReplaceAll().findValueOf(search).replacement(replacement); + } + + private ReplaceAll createReplaceAll() { + return usesFieldRef() ?
ReplaceAll.valueOf(fieldReference) : ReplaceAll.valueOf(expression); + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $concat}. + * + * @author Christoph Strobl + */ + public static class Concat extends AbstractAggregationExpression { + + private Concat(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$concat"; + } + + /** + * Creates new {@link Concat}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public static Concat valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Concat(asFields(fieldReference)); + } + + /** + * Creates new {@link Concat}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public static Concat valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Concat(Collections.singletonList(expression)); + } + + /** + * Creates new {@link Concat}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public static Concat stringValue(String value) { + + Assert.notNull(value, "Value must not be null"); + return new Concat(Collections.singletonList(value)); + } + + /** + * Concatenate the value of the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public Concat concatValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Concat(append(Fields.field(fieldReference))); + } + + /** + * Concatenate the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public Concat concatValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Concat(append(expression)); + } + + /** + * Concatenate the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Concat}. + */ + public Concat concat(String value) { + return new Concat(append(value)); + } + } + + /** + * {@link AggregationExpression} for {@code $substr}. + * + * @author Christoph Strobl + */ + public static class Substr extends AbstractAggregationExpression { + + private Substr(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$substr"; + } + + /** + * Creates new {@link Substr}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Substr}. + */ + public static Substr valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Substr(asFields(fieldReference)); + } + + /** + * Creates new {@link Substr}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Substr}. + */ + public static Substr valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Substr(Collections.singletonList(expression)); + } + + /** + * @param start start index (inclusive) + * @return new instance of {@link Substr}.
+ */ + public Substr substring(int start) { + return substring(start, -1); + } + + /** + * @param start start index (inclusive) + * @param nrOfChars number of characters. + * @return new instance of {@link Substr}. + */ + public Substr substring(int start, int nrOfChars) { + return new Substr(append(Arrays.asList(start, nrOfChars))); + } + } + + /** + * {@link AggregationExpression} for {@code $toLower}. + * + * @author Christoph Strobl + */ + public static class ToLower extends AbstractAggregationExpression { + + private ToLower(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$toLower"; + } + + /** + * Creates new {@link ToLower}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ToLower}. + */ + public static ToLower lowerValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ToLower(Fields.field(fieldReference)); + } + + /** + * Creates new {@link ToLower}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ToLower}. + */ + public static ToLower lowerValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ToLower(Collections.singletonList(expression)); + } + + /** + * Creates new {@link ToLower}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToLower}. + */ + public static ToLower lower(String value) { + + Assert.notNull(value, "Value must not be null"); + return new ToLower(value); + } + } + + /** + * {@link AggregationExpression} for {@code $toUpper}. + * + * @author Christoph Strobl + */ + public static class ToUpper extends AbstractAggregationExpression { + + private ToUpper(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$toUpper"; + } + + /** + * Creates new {@link ToUpper}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ToUpper}. + */ + public static ToUpper upperValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ToUpper(Fields.field(fieldReference)); + } + + /** + * Creates new {@link ToUpper}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ToUpper}. + */ + public static ToUpper upperValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ToUpper(Collections.singletonList(expression)); + } + + /** + * Creates new {@link ToUpper}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToUpper}. + */ + public static ToUpper upper(String value) { + + Assert.notNull(value, "Value must not be null"); + return new ToUpper(value); + } + } + + /** + * {@link AggregationExpression} for {@code $strcasecmp}. + * + * @author Christoph Strobl + */ + public static class StrCaseCmp extends AbstractAggregationExpression { + + private StrCaseCmp(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$strcasecmp"; + } + + /** + * Creates new {@link StrCaseCmp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StrCaseCmp}. + */ + public static StrCaseCmp valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new StrCaseCmp(asFields(fieldReference)); + } + + /** + * Creates new {@link StrCaseCmp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StrCaseCmp}.
+ */ + public static StrCaseCmp valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StrCaseCmp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link StrCaseCmp}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link StrCaseCmp}. + */ + public static StrCaseCmp stringValue(String value) { + + Assert.notNull(value, "Value must not be null"); + return new StrCaseCmp(Collections.singletonList(value)); + } + + public StrCaseCmp strcasecmp(String value) { + return new StrCaseCmp(append(value)); + } + + public StrCaseCmp strcasecmpValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new StrCaseCmp(append(Fields.field(fieldReference))); + } + + public StrCaseCmp strcasecmpValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StrCaseCmp(append(expression)); + } + } + + /** + * {@link AggregationExpression} for {@code $indexOfBytes}. + * + * @author Christoph Strobl + */ + public static class IndexOfBytes extends AbstractAggregationExpression { + + private IndexOfBytes(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$indexOfBytes"; + } + + /** + * Start creating a new {@link IndexOfBytes}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SubstringBuilder}. + */ + public static SubstringBuilder valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new SubstringBuilder(Fields.field(fieldReference)); + } + + /** + * Start creating a new {@link IndexOfBytes}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SubstringBuilder}. + */ + public static SubstringBuilder valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SubstringBuilder(expression); + } + + /** + * Optionally define the substring search start and end position. + * + * @param range must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. + */ + public IndexOfBytes within(Range range) { + return new IndexOfBytes(append(AggregationUtils.toRangeValues(range))); + } + + public static class SubstringBuilder { + + private final Object stringExpression; + + private SubstringBuilder(Object stringExpression) { + this.stringExpression = stringExpression; + } + + /** + * Creates a new {@link IndexOfBytes} given {@literal substring}. + * + * @param substring must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. + */ + public IndexOfBytes indexOf(String substring) { + return new IndexOfBytes(Arrays.asList(stringExpression, substring)); + } + + /** + * Creates a new {@link IndexOfBytes} given {@link AggregationExpression} that resolves to the substring. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. + */ + public IndexOfBytes indexOf(AggregationExpression expression) { + return new IndexOfBytes(Arrays.asList(stringExpression, expression)); + } + + /** + * Creates a new {@link IndexOfBytes} given {@link Field} that resolves to the substring. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IndexOfBytes}. 
+ */ + public IndexOfBytes indexOf(Field fieldReference) { + return new IndexOfBytes(Arrays.asList(stringExpression, fieldReference)); + } + } + } + + /** + * {@link AggregationExpression} for {@code $indexOfCP}. + * + * @author Christoph Strobl + */ + public static class IndexOfCP extends AbstractAggregationExpression { + + private IndexOfCP(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$indexOfCP"; + } + + /** + * Start creating a new {@link IndexOfCP}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public static SubstringBuilder valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new SubstringBuilder(Fields.field(fieldReference)); + } + + /** + * Start creating a new {@link IndexOfCP}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public static SubstringBuilder valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SubstringBuilder(expression); + } + + /** + * Optionally define the substring search start and end position. + * + * @param range must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public IndexOfCP within(Range range) { + return new IndexOfCP(append(AggregationUtils.toRangeValues(range))); + } + + public static class SubstringBuilder { + + private final Object stringExpression; + + private SubstringBuilder(Object stringExpression) { + this.stringExpression = stringExpression; + } + + /** + * Creates a new {@link IndexOfCP} given {@literal substring}. + * + * @param substring must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public IndexOfCP indexOf(String substring) { + return new IndexOfCP(Arrays.asList(stringExpression, substring)); + } + + /** + * Creates a new {@link IndexOfCP} given {@link AggregationExpression} that resolves to the substring. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IndexOfCP}. + */ + public IndexOfCP indexOf(AggregationExpression expression) { + return new IndexOfCP(Arrays.asList(stringExpression, expression)); + } + + /** + * Creates a new {@link IndexOfCP} given {@link Field} that resolves to the substring. + * + * @param fieldReference must not be {@literal null}. + * @return + */ + public IndexOfCP indexOf(Field fieldReference) { + return new IndexOfCP(Arrays.asList(stringExpression, fieldReference)); + } + } + } + + /** + * {@link AggregationExpression} for {@code $split}. + * + * @author Christoph Strobl + */ + public static class Split extends AbstractAggregationExpression { + + private Split(List values) { + super(values); + } + + @Override + protected String getMongoMethod() { + return "$split"; + } + + /** + * Start creating a new {@link Split}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Split}. + */ + public static Split valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Split(asFields(fieldReference)); + } + + /** + * Start creating a new {@link Split}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Split}. 
+ */ + public static Split valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Split(Collections.singletonList(expression)); + } + + /** + * Use given {@link String} as delimiter. + * + * @param delimiter must not be {@literal null}. + * @return new instance of {@link Split}. + */ + public Split split(String delimiter) { + + Assert.notNull(delimiter, "Delimiter must not be null"); + return new Split(append(delimiter)); + } + + /** + * Use value of referenced field as delimiter. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Split}. + */ + public Split split(Field fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Split(append(fieldReference)); + } + + /** + * Use value resulting from {@link AggregationExpression} as delimiter. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Split}. + */ + public Split split(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Split(append(expression)); + } + } + + /** + * {@link AggregationExpression} for {@code $strLenBytes}. + * + * @author Christoph Strobl + */ + public static class StrLenBytes extends AbstractAggregationExpression { + + private StrLenBytes(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$strLenBytes"; + } + + /** + * Creates new {@link StrLenBytes}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StrLenBytes}. + */ + public static StrLenBytes stringLengthOf(String fieldReference) { + return new StrLenBytes(Fields.field(fieldReference)); + } + + /** + * Creates new {@link StrLenBytes}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StrLenBytes}. + */ + public static StrLenBytes stringLengthOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StrLenBytes(expression); + } + } + + /** + * {@link AggregationExpression} for {@code $strLenCP}. + * + * @author Christoph Strobl + */ + public static class StrLenCP extends AbstractAggregationExpression { + + private StrLenCP(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$strLenCP"; + } + + /** + * Creates new {@link StrLenCP}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link StrLenCP}. + */ + public static StrLenCP stringLengthOfCP(String fieldReference) { + return new StrLenCP(Fields.field(fieldReference)); + } + + /** + * Creates new {@link StrLenCP}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link StrLenCP}. + */ + public static StrLenCP stringLengthOfCP(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new StrLenCP(expression); + } + } + + /** + * {@link AggregationExpression} for {@code $substrCP}. + * + * @author Christoph Strobl + */ + public static class SubstrCP extends AbstractAggregationExpression { + + private SubstrCP(List value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$substrCP"; + } + + /** + * Creates new {@link SubstrCP}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SubstrCP}. 
+ */ + public static SubstrCP valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new SubstrCP(asFields(fieldReference)); + } + + /** + * Creates new {@link SubstrCP}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SubstrCP}. + */ + public static SubstrCP valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new SubstrCP(Collections.singletonList(expression)); + } + + public SubstrCP substringCP(int start) { + return substringCP(start, -1); + } + + public SubstrCP substringCP(int start, int nrOfChars) { + return new SubstrCP(append(Arrays.asList(start, nrOfChars))); + } + } + + /** + * {@link AggregationExpression} for {@code $trim} which removes whitespace or the specified characters from the + * beginning and end of a string.
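+ * <p> + * A minimal usage sketch (field name {@code "name"} and character {@code "*"} are placeholders): + * <pre> + * Trim.valueOf("name").chars("*"); // renders { $trim : { input : "$name", chars : "*" } } + * </pre>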
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class Trim extends AbstractAggregationExpression { + + private Trim(Object value) { + super(value); + } + + /** + * Creates new {@link Trim} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public static Trim valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Trim(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Trim} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public static Trim valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Trim(Collections.singletonMap("input", expression)); + } + + /** + * Optionally specify the character(s) to trim from the beginning and end. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public Trim chars(String chars) { + + Assert.notNull(chars, "Chars must not be null"); + return new Trim(append("chars", chars)); + } + + /** + * Optionally specify the reference to the {@link Field field} holding the character values to trim from the + * beginning and end. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public Trim charsOf(String fieldReference) { + return new Trim(append("chars", Fields.field(fieldReference))); + } + + /** + * Optionally specify the {@link AggregationExpression} evaluating to the character sequence to trim from the + * beginning and end. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public Trim charsOf(AggregationExpression expression) { + return new Trim(append("chars", expression)); + } + + /** + * Remove whitespace or the specified characters from the beginning of a string.
+ * + * @return new instance of {@link LTrim}. + */ + public LTrim left() { + return new LTrim(argumentMap()); + } + + /** + * Remove whitespace or the specified characters from the end of a string.
+ * + * @return new instance of {@link RTrim}. + */ + public RTrim right() { + return new RTrim(argumentMap()); + } + + @Override + protected String getMongoMethod() { + return "$trim"; + } + } + + /** + * {@link AggregationExpression} for {@code $ltrim} which removes whitespace or the specified characters from the + * beginning of a string.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class LTrim extends AbstractAggregationExpression { + + private LTrim(Object value) { + super(value); + } + + /** + * Creates new {@link LTrim} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public static LTrim valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new LTrim(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link LTrim} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public static LTrim valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new LTrim(Collections.singletonMap("input", expression)); + } + + /** + * Optionally specify the character(s) to trim from the beginning. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public LTrim chars(String chars) { + + Assert.notNull(chars, "Chars must not be null"); + return new LTrim(append("chars", chars)); + } + + /** + * Optionally specify the reference to the {@link Field field} holding the character values to trim from the + * beginning. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public LTrim charsOf(String fieldReference) { + return new LTrim(append("chars", Fields.field(fieldReference))); + } + + /** + * Optionally specify the {@link AggregationExpression} evaluating to the character sequence to trim from the + * beginning. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public LTrim charsOf(AggregationExpression expression) { + return new LTrim(append("chars", expression)); + } + + @Override + protected String getMongoMethod() { + return "$ltrim"; + } + } + + /** + * {@link AggregationExpression} for {@code $rtrim} which removes whitespace or the specified characters from the end + * of a string.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class RTrim extends AbstractAggregationExpression { + + private RTrim(Object value) { + super(value); + } + + /** + * Creates new {@link RTrim} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public static RTrim valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new RTrim(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RTrim} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public static RTrim valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new RTrim(Collections.singletonMap("input", expression)); + } + + /** + * Optionally specify the character(s) to trim from the end. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public RTrim chars(String chars) { + + Assert.notNull(chars, "Chars must not be null"); + return new RTrim(append("chars", chars)); + } + + /** + * Optionally specify the reference to the {@link Field field} holding the character values to trim from the end. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public RTrim charsOf(String fieldReference) { + return new RTrim(append("chars", Fields.field(fieldReference))); + } + + /** + * Optionally specify the {@link AggregationExpression} evaluating to the character sequence to trim from the end. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public RTrim charsOf(AggregationExpression expression) { + return new RTrim(append("chars", expression)); + } + + @Override + protected String getMongoMethod() { + return "$rtrim"; + } + } + + /** + * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and + * returns information on the first matched substring.
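+ * <p> + * A minimal usage sketch (field name {@code "comment"} and pattern {@code "^[Bb]"} are placeholders): + * <pre> + * RegexFind.valueOf("comment").regex("^[Bb]"); // renders { $regexFind : { input : "$comment", regex : "^[Bb]" } } + * </pre>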
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class RegexFind extends AbstractAggregationExpression { + + protected RegexFind(Object value) { + super(value); + } + + /** + * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFind(Collections.singletonMap("input", expression)); + } + + /** + * Optionally specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind options(String options) { + + Assert.notNull(options, "Options must not be null"); + + return new RegexFind(append("options", options)); + } + + /** + * Optionally specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind optionsOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexFind(append("options", Fields.field(fieldReference))); + } + + /** + * Optionally specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind optionsOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFind(append("options", expression)); + } + + /** + * Specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regex(String regex) { + + Assert.notNull(regex, "Regex must not be null"); + + return new RegexFind(append("regex", regex)); + } + + /** + * Applies the given {@link Pattern} to populate the {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFind(regex); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}.
+ */ + public RegexFind regexOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new RegexFind(append("regex", Fields.field(fieldReference))); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFind(append("regex", expression)); + } + + @Override + protected String getMongoMethod() { + return "$regexFind"; + } + } + + /** + * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and + * returns information on all the matched substrings.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class RegexFindAll extends AbstractAggregationExpression { + + protected RegexFindAll(Object value) { + super(value); + } + + /** + * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as + * {@literal input} value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFindAll(Collections.singletonMap("input", expression)); + } + + /** + * Optionally specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll options(String options) { + + Assert.notNull(options, "Options must not be null"); + + return new RegexFindAll(append("options", options)); + } + + /** + * Optionally specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll optionsOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexFindAll(append("options", Fields.field(fieldReference))); + } + + /** + * Optionally specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll optionsOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFindAll(append("options", expression)); + } + + /** + * Applies the given {@link Pattern} to populate the {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFindAll(regex); + } + + /** + * Specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regex(String regex) { + + Assert.notNull(regex, "Regex must not be null"); + + return new RegexFindAll(append("regex", regex)); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}.
+ */ + public RegexFindAll regexOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new RegexFindAll(append("regex", Fields.field(fieldReference))); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFindAll(append("regex", expression)); + } + + @Override + protected String getMongoMethod() { + return "$regexFindAll"; + } + } + + /** + * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and + * returns a boolean that indicates if a match is found or not.
+ * NOTE: Requires MongoDB 4.2 or later. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class RegexMatch extends AbstractAggregationExpression { + + protected RegexMatch(Object value) { + super(value); + } + + /** + * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexMatch(Collections.singletonMap("input", expression)); + } + + /** + * Optionally specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch options(String options) { + + Assert.notNull(options, "Options must not be null"); + + return new RegexMatch(append("options", options)); + } + + /** + * Optionally specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch optionsOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexMatch(append("options", Fields.field(fieldReference))); + } + + /** + * Optionally specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch optionsOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexMatch(append("options", expression)); + } + + /** + * Applies the given {@link Pattern} to populate the {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexMatch(regex); + } + + /** + * Specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regex(String regex) { + + Assert.notNull(regex, "Regex must not be null"); + + return new RegexMatch(append("regex", regex)); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}.
+ */ + public RegexMatch regexOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexMatch(append("regex", Fields.field(fieldReference))); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexMatch(append("regex", expression)); + } + + @Override + protected String getMongoMethod() { + return "$regexMatch"; + } + } + + /** + * {@link AggregationExpression} for {@code $replaceOne} which replaces the first instance of a search string in an + * input string with a replacement string.
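+ * <p> + * A minimal usage sketch (field name {@code "item"} and the strings {@code "blue"}/{@code "red"} are placeholders): + * <pre> + * ReplaceOne.valueOf("item").find("blue").replacement("red"); // renders { $replaceOne : { input : "$item", find : "blue", replacement : "red" } } + * </pre>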
+ * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class ReplaceOne extends AbstractAggregationExpression { + + protected ReplaceOne(Object value) { + super(value); + } + + /** + * Creates new {@link ReplaceOne} using the given {@literal value} as {@literal input}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceOne value(String value) { + + Assert.notNull(value, "Value must not be null"); + + return new ReplaceOne(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link ReplaceOne} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceOne valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceOne(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link ReplaceOne} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceOne valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceOne(Collections.singletonMap("input", expression)); + } + + /** + * The string to use to replace the first matched instance of {@code find} in input. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne replacement(String replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ReplaceOne(append("replacement", replacement)); + } + + /** + * Specifies the reference to the {@link Field field} holding the string to use to replace the first matched + * instance of {@code find} in input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne replacementOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceOne(append("replacement", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression} evaluating to the string to use to replace the first matched instance + * of {@code find} in {@code input}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne replacementOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceOne(append("replacement", expression)); + } + + /** + * The string to search for within the given input field. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne find(String value) { + + Assert.notNull(value, "Search string must not be null"); + + return new ReplaceOne(append("find", value)); + } + + /** + * Specify the reference to the {@link Field field} holding the string to search for within the given input field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceOne}.
+ */
+ public ReplaceOne findValueOf(String fieldReference) {
+
+ Assert.notNull(fieldReference, "fieldReference must not be null");
+
+ return new ReplaceOne(append("find", fieldReference));
+ }
+
+ /**
+ * Specify the {@link AggregationExpression} evaluating to the string to search for within the given input
+ * field.
+ *
+ * @param expression must not be {@literal null}.
+ * @return new instance of {@link ReplaceOne}.
+ */
+ public ReplaceOne findValueOf(AggregationExpression expression) {
+
+ Assert.notNull(expression, "Expression must not be null");
+
+ return new ReplaceOne(append("find", expression));
+ }
+
+ @Override
+ protected String getMongoMethod() {
+ return "$replaceOne";
+ }
+ }
+
+ /**
+ * {@link AggregationExpression} for {@code $replaceAll} which replaces all instances of a search string in an input
+ * string with a replacement string.
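+ * <p>
+ * A usage sketch built from the factory methods below; the {@code item} field name and the find/replacement values
+ * are illustrative only:
+ *
+ * <pre class="code">
+ * // renders { $replaceAll: { input: "$item", find: "blue", replacement: "red" } }
+ * ReplaceAll.valueOf("item").find("blue").replacement("red");
+ * </pre>
+ *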
                    + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class ReplaceAll extends AbstractAggregationExpression { + + protected ReplaceAll(Object value) { + super(value); + } + + /** + * Creates new {@link ReplaceAll} using the given as {@literal input}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceAll value(String value) { + + Assert.notNull(value, "Value must not be null"); + + return new ReplaceAll(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link ReplaceAll} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public static ReplaceAll valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceAll(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link ReplaceAll} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public static ReplaceAll valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceAll(Collections.singletonMap("input", expression)); + } + + /** + * The string to use to replace the first matched instance of {@code find} in input. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll replacement(String replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ReplaceAll(append("replacement", replacement)); + } + + /** + * Specifies the reference to the {@link Field field} holding the string to use to replace the first matched + * instance of {@code find} in input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll replacementValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceAll(append("replacement", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression} evaluating to the string to use to replace the first matched instance + * of {@code find} in input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll replacementValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceAll(append("replacement", expression)); + } + + /** + * The string to search for within the given input field. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll find(String value) { + + Assert.notNull(value, "Search string must not be null"); + + return new ReplaceAll(append("find", value)); + } + + /** + * Specify the reference to the {@link Field field} holding the string to search for within the given input field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. 
+ */ + public ReplaceAll findValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new ReplaceAll(append("find", fieldReference)); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the string to search for within the given input field. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll findValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceAll(append("find", expression)); + } + + @Override + protected String getMongoMethod() { + return "$replaceAll"; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SystemVariable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SystemVariable.java new file mode 100644 index 0000000000..1fcf87d2a0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SystemVariable.java @@ -0,0 +1,129 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.lang.Nullable; + +/** + * Describes the system variables available in MongoDB aggregation framework pipeline expressions. + * + * @author Thomas Darimont + * @author Christoph Strobl + * @see Aggregation Variables. + */ +public enum SystemVariable implements AggregationVariable { + + /** + * Variable for the current datetime. + * + * @since 4.0 + */ + NOW, + + /** + * Variable for the current timestamp. + * + * @since 4.0 + */ + CLUSTER_TIME, + + /** + * Variable that references the root document. + */ + ROOT, + + /** + * Variable that references the start of the field path being processed. + */ + CURRENT, + + /** + * Variable that evaluates to a missing value. + */ + REMOVE, + + /** + * One of the allowed results of a {@literal $redact} expression + * + * @since 4.0 + */ + DESCEND, + + /** + * One of the allowed results of a {@literal $redact} expression + * + * @since 4.0 + */ + PRUNE, + /** + * One of the allowed results of a {@literal $redact} expression + * + * @since 4.0 + */ + KEEP, + + /** + * A variable that stores the metadata results of an Atlas Search query. + * + * @since 4.0 + */ + SEARCH_META; + + /** + * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false} + * otherwise. + * + * @param fieldRef may be {@literal null}. + * @return {@literal true} if the given field refers to a {@link SystemVariable}. + */ + public static boolean isReferingToSystemVariable(@Nullable String fieldRef) { + + String candidate = variableNameFrom(fieldRef); + if (candidate == null) { + return false; + } + + candidate = candidate.startsWith(PREFIX) ? 
candidate.substring(2) : candidate; + for (SystemVariable value : values()) { + if (value.name().equals(candidate)) { + return true; + } + } + + return false; + } + + @Override + public String toString() { + return PREFIX.concat(name()); + } + + @Override + public String getTarget() { + return toString(); + } + + @Nullable + static String variableNameFrom(@Nullable String fieldRef) { + + if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) { + return null; + } + + int indexOfFirstDot = fieldRef.indexOf('.'); + return indexOfFirstDot == -1 ? fieldRef : fieldRef.substring(2, indexOfFirstDot); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java index c800c419c6..f30ebf394b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,23 +17,32 @@ import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import org.springframework.data.mapping.PropertyPath; +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentPropertyPath; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.PersistentPropertyPath; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.DBObject; - /** * {@link AggregationOperationContext} aware of a particular type and a {@link MappingContext} to potentially translate * property references into document field names. 
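+ * <p>
+ * A construction sketch; {@code Person} is an illustrative domain type, and the {@code mappingContext} and
+ * {@code queryMapper} instances are assumed to be preconfigured:
+ *
+ * <pre class="code">
+ * AggregationOperationContext context = new TypeBasedAggregationOperationContext(Person.class, mappingContext,
+ * 		queryMapper, FieldLookupPolicy.strict());
+ * </pre>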
- * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 */ public class TypeBasedAggregationOperationContext implements AggregationOperationContext { @@ -41,63 +50,152 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio private final Class type; private final MappingContext, MongoPersistentProperty> mappingContext; private final QueryMapper mapper; + private final Lazy> entity; + private final FieldLookupPolicy lookupPolicy; /** * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and * {@link QueryMapper}. - * + * * @param type must not be {@literal null}. * @param mappingContext must not be {@literal null}. * @param mapper must not be {@literal null}. */ public TypeBasedAggregationOperationContext(Class type, MappingContext, MongoPersistentProperty> mappingContext, QueryMapper mapper) { + this(type, mappingContext, mapper, FieldLookupPolicy.strict()); + } + + /** + * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and + * {@link QueryMapper}. + * + * @param type must not be {@literal null}. + * @param mappingContext must not be {@literal null}. + * @param mapper must not be {@literal null}. + * @param lookupPolicy must not be {@literal null}. + * @since 4.3.1 + */ + public TypeBasedAggregationOperationContext(Class type, + MappingContext, MongoPersistentProperty> mappingContext, QueryMapper mapper, + FieldLookupPolicy lookupPolicy) { - Assert.notNull(type, "Type must not be null!"); - Assert.notNull(mappingContext, "MappingContext must not be null!"); - Assert.notNull(mapper, "QueryMapper must not be null!"); + Assert.notNull(type, "Type must not be null"); + Assert.notNull(mappingContext, "MappingContext must not be null"); + Assert.notNull(mapper, "QueryMapper must not be null"); + Assert.notNull(lookupPolicy, "FieldLookupPolicy must not be null"); this.type = type; this.mappingContext = mappingContext; this.mapper = mapper; + this.entity = Lazy.of(() -> mappingContext.getPersistentEntity(type)); + this.lookupPolicy = lookupPolicy; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject) - */ @Override - public DBObject getMappedObject(DBObject dbObject) { - return mapper.getMappedObject(dbObject, mappingContext.getPersistentEntity(type)); + public Document getMappedObject(Document document) { + return getMappedObject(document, type); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field) - */ @Override - public FieldReference getReference(Field field) { + public Document getMappedObject(Document document, @Nullable Class type) { + return mapper.getMappedObject(document, type != null ? 
mappingContext.getPersistentEntity(type) : null); + } - PropertyPath.from(field.getTarget(), type); + @Override + public FieldReference getReference(Field field) { return getReferenceFor(field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return getReferenceFor(field(name)); } - private FieldReference getReferenceFor(Field field) { + @Override + public Fields getFields(Class type) { + + Assert.notNull(type, "Type must not be null"); + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(type); + + if (entity == null) { + return AggregationOperationContext.super.getFields(type); + } + + List fields = new ArrayList<>(); + + for (MongoPersistentProperty property : entity) { + fields.add(Fields.field(property.getName(), property.getFieldName())); + } + + return Fields.from(fields.toArray(new Field[0])); + } + + @Override + @Deprecated(since = "4.3.1", forRemoval = true) + public AggregationOperationContext continueOnMissingFieldReference() { + return continueOnMissingFieldReference(type); + } + + /** + * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for + * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that + * are not present in one of the previous stages or the input source, throughout the pipeline. + * + * @param type The domain type to map fields to. + * @return a more relaxed {@link AggregationOperationContext}. + * @since 3.1 + * @see RelaxedTypeBasedAggregationOperationContext + */ + public AggregationOperationContext continueOnMissingFieldReference(Class type) { + return new TypeBasedAggregationOperationContext(type, mappingContext, mapper, FieldLookupPolicy.relaxed()); + } + + @Override + public AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } - PersistentPropertyPath propertyPath = mappingContext.getPersistentPropertyPath( - field.getTarget(), type); - Field mappedField = field(propertyPath.getLeafProperty().getName(), + @Override + public AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + + protected FieldReference getReferenceFor(Field field) { + + try { + return doGetFieldReference(field); + } catch (MappingException e) { + + if (lookupPolicy.isStrict()) { + throw e; + } + + return new DirectFieldReference(new ExposedField(field, true)); + } + } + + private DirectFieldReference doGetFieldReference(Field field) { + + if (entity.getNullable() == null || AggregationVariable.isVariable(field)) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + PersistentPropertyPath propertyPath = mappingContext + .getPersistentPropertyPath(field.getTarget(), type); + Field mappedField = field(field.getName(), propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)); - return new FieldReference(new ExposedField(mappedField, true)); + return new DirectFieldReference(new ExposedField(mappedField, true)); + } + + public Class getType() { + return type; + } + + @Override + public CodecRegistry getCodecRegistry() { + return this.mapper.getConverter().getCodecRegistry(); } } diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java index 0841de225c..432a0c6c6b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,7 @@ /** * A {@code TypedAggregation} is a special {@link Aggregation} that holds information of the input aggregation type. - * + * * @author Thomas Darimont * @author Oliver Gierke */ @@ -31,7 +31,7 @@ public class TypedAggregation extends Aggregation { /** * Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s. - * + * * @param inputType must not be {@literal null}. * @param operations must not be {@literal null} or empty. */ @@ -41,7 +41,7 @@ public TypedAggregation(Class inputType, AggregationOperation... operations) /** * Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s. - * + * * @param inputType must not be {@literal null}. * @param operations must not be {@literal null} or empty. */ @@ -52,7 +52,7 @@ public TypedAggregation(Class inputType, List operation /** * Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s and the given * {@link AggregationOptions}. - * + * * @param inputType must not be {@literal null}. * @param operations must not be {@literal null} or empty. * @param options must not be {@literal null}. @@ -61,26 +61,22 @@ public TypedAggregation(Class inputType, List operation super(operations, options); - Assert.notNull(inputType, "Input type must not be null!"); + Assert.notNull(inputType, "Input type must not be null"); this.inputType = inputType; } /** * Returns the input type for the {@link Aggregation}. - * + * * @return the inputType will never be {@literal null}. 
*/ public Class getInputType() { return inputType; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Aggregation#withOptions(org.springframework.data.mongodb.core.aggregation.AggregationOptions) - */ public TypedAggregation withOptions(AggregationOptions options) { - Assert.notNull(options, "AggregationOptions must not be null."); - return new TypedAggregation(inputType, operations, options); + Assert.notNull(options, "AggregationOptions must not be null"); + return new TypedAggregation(inputType, pipeline.getOperations(), options); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperation.java new file mode 100644 index 0000000000..057ada12d5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperation.java @@ -0,0 +1,156 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * The $unionWith aggregation + * stage (available since MongoDB 4.4) performs a union of two collections by combining pipeline results, potentially + * containing duplicates, into a single result set that is handed over to the next stage.
+ * In order to remove duplicates it is possible to append a {@link GroupOperation} right after
+ * {@link UnionWithOperation}.
+ *
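+ * A usage sketch; the {@code archive} collection and the projected {@code name} field are illustrative only:
+ *
+ * <pre class="code">
+ * UnionWithOperation.unionWith("archive").pipeline(Aggregation.project("name"));
+ * </pre>
+ *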
                    + * If the {@link UnionWithOperation} uses a + * pipeline + * to process documents, field names within the pipeline will be treated as is. In order to map domain type property + * names to actual field names (considering potential {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations) make sure the enclosing aggregation is a {@link TypedAggregation} and provide the target type for the + * {@code $unionWith} stage via {@link #mapFieldsTo(Class)}. + * + * @author Christoph Strobl + * @see Aggregation Pipeline Stage: + * $unionWith + * @since 3.1 + */ +public class UnionWithOperation implements AggregationOperation { + + private final String collection; + + private final @Nullable AggregationPipeline pipeline; + + private final @Nullable Class domainType; + + public UnionWithOperation(String collection, @Nullable AggregationPipeline pipeline, @Nullable Class domainType) { + + Assert.notNull(collection, "Collection must not be null"); + + this.collection = collection; + this.pipeline = pipeline; + this.domainType = domainType; + } + + /** + * Set the name of the collection from which pipeline results should be included in the result set.
                    + * The collection name is used to set the {@code coll} parameter of {@code $unionWith}. + * + * @param collection the MongoDB collection name. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public static UnionWithOperation unionWith(String collection) { + return new UnionWithOperation(collection, null, null); + } + + /** + * Set the {@link AggregationPipeline} to apply to the specified collection. The pipeline corresponds to the optional + * {@code pipeline} field of the {@code $unionWith} aggregation stage and is used to compute the documents going into + * the result set. + * + * @param pipeline the {@link AggregationPipeline} that computes the documents. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation pipeline(AggregationPipeline pipeline) { + return new UnionWithOperation(collection, pipeline, domainType); + } + + /** + * Set the aggregation pipeline stages to apply to the specified collection. The pipeline corresponds to the optional + * {@code pipeline} field of the {@code $unionWith} aggregation stage and is used to compute the documents going into + * the result set. + * + * @param aggregationStages the aggregation pipeline stages that compute the documents. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation pipeline(List aggregationStages) { + return new UnionWithOperation(collection, new AggregationPipeline(aggregationStages), domainType); + } + + /** + * Set the aggregation pipeline stages to apply to the specified collection. The pipeline corresponds to the optional + * {@code pipeline} field of the {@code $unionWith} aggregation stage and is used to compute the documents going into + * the result set. + * + * @param aggregationStages the aggregation pipeline stages that compute the documents. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation pipeline(AggregationOperation... aggregationStages) { + return new UnionWithOperation(collection, new AggregationPipeline(Arrays.asList(aggregationStages)), domainType); + } + + /** + * Set domain type used for field name mapping of property references used by the {@link AggregationPipeline}. + * Remember to also use a {@link TypedAggregation} in the outer pipeline.
                    + * If not set, field names used within {@link AggregationOperation pipeline operations} are taken as is. + * + * @param domainType the domain type to map field names used in pipeline operations to. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation mapFieldsTo(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + return new UnionWithOperation(collection, pipeline, domainType); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $unionWith = new Document("coll", collection); + if (pipeline == null || pipeline.isEmpty()) { + return new Document(getOperator(), $unionWith); + } + + $unionWith.append("pipeline", pipeline.toDocuments(computeContext(context))); + return new Document(getOperator(), $unionWith); + } + + private AggregationOperationContext computeContext(AggregationOperationContext source) { + + if (source instanceof TypeBasedAggregationOperationContext aggregationOperationContext) { + return aggregationOperationContext.continueOnMissingFieldReference(domainType != null ? domainType : Object.class); + } + + if (source instanceof ExposedFieldsAggregationOperationContext aggregationOperationContext) { + return computeContext(aggregationOperationContext.getRootContext()); + } + + return source; + } + + @Override + public String getOperator() { + return "$unionWith"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnsetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnsetOperation.java new file mode 100644 index 0000000000..ff765c37f7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnsetOperation.java @@ -0,0 +1,141 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * Removes fields from documents. + * + * @author Christoph Strobl + * @since 3.0 + * @see MongoDB Aggregation Framework: + * $unset + */ +public class UnsetOperation implements InheritsFieldsAggregationOperation { + + private final Collection fields; + + /** + * Create new instance of {@link UnsetOperation}. + * + * @param fields must not be {@literal null}. 
+ */ + public UnsetOperation(Collection fields) { + + Assert.notNull(fields, "Fields must not be null"); + Assert.noNullElements(fields, "Fields must not contain null values"); + + this.fields = fields; + } + + /** + * Create new instance of {@link UnsetOperation}. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link UnsetOperation}. + */ + public static UnsetOperation unset(String... fields) { + return new UnsetOperation(Arrays.asList(fields)); + } + + /** + * Also unset the given fields. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link UnsetOperation}. + */ + public UnsetOperation and(String... fields) { + + List target = new ArrayList<>(this.fields); + CollectionUtils.mergeArrayIntoCollection(fields, target); + return new UnsetOperation(target); + } + + /** + * Also unset the given fields. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link UnsetOperation}. + */ + public UnsetOperation and(Field... fields) { + + List target = new ArrayList<>(this.fields); + CollectionUtils.mergeArrayIntoCollection(fields, target); + return new UnsetOperation(target); + } + + @Override + public ExposedFields getFields() { + return ExposedFields.from(); + } + + Collection removedFieldNames() { + + List fieldNames = new ArrayList<>(fields.size()); + for (Object it : fields) { + if (it instanceof Field field) { + fieldNames.add(field.getName()); + } else { + fieldNames.add(it.toString()); + } + } + return fieldNames; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (fields.size() == 1) { + return new Document(getOperator(), computeFieldName(fields.iterator().next(), context)); + } + + return new Document(getOperator(), + fields.stream().map(it -> computeFieldName(it, context)).collect(Collectors.toList())); + } + + @Override + public String getOperator() { + return "$unset"; + } + + private Object computeFieldName(Object field, AggregationOperationContext context) { + + if (field instanceof Field fieldObject) { + return context.getReference(fieldObject).getRaw(); + } + + if (field instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (field instanceof String stringValue) { + return context.getReference(stringValue).getRaw(); + } + + return field; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java index 883cb8a8c4..d59ae01b12 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,44 +15,230 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Encapsulates the aggregation framework {@code $unwind}-operation. *
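 * <p>
 * A usage sketch of the fluent builder defined below; the {@code tags} path is illustrative only:
 *
 * <pre class="code">
 * UnwindOperation unwind = UnwindOperation.newUnwind().path("tags").noArrayIndex().preserveNullAndEmptyArrays();
 * </pre>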

                    * We recommend to use the static factory method {@link Aggregation#unwind(String)} instead of creating instances of * this class directly. - * - * @see http://docs.mongodb.org/manual/reference/aggregation/unwind/#pipe._S_unwind + * * @author Thomas Darimont * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl * @since 1.3 + * @see MongoDB Aggregation Framework: + * $unwind */ -public class UnwindOperation implements AggregationOperation { +public class UnwindOperation + implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation { private final ExposedField field; + private final @Nullable ExposedField arrayIndex; + private final boolean preserveNullAndEmptyArrays; /** * Creates a new {@link UnwindOperation} for the given {@link Field}. - * + * * @param field must not be {@literal null}. */ public UnwindOperation(Field field) { + this(new ExposedField(field, true), false); + } + + /** + * Creates a new {@link UnwindOperation} using Mongo 3.2 syntax. + * + * @param field must not be {@literal null}. + * @param preserveNullAndEmptyArrays {@literal true} to output the document if path is {@literal null}, missing or + * array is empty. + * @since 1.10 + */ + public UnwindOperation(Field field, boolean preserveNullAndEmptyArrays) { + Assert.notNull(field, "Field must not be null"); - Assert.notNull(field); this.field = new ExposedField(field, true); + this.arrayIndex = null; + this.preserveNullAndEmptyArrays = preserveNullAndEmptyArrays; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates a new {@link UnwindOperation} using Mongo 3.2 syntax. + * + * @param field must not be {@literal null}. + * @param arrayIndex optional field name to expose the field array index, must not be {@literal null}. + * @param preserveNullAndEmptyArrays {@literal true} to output the document if path is {@literal null}, missing or + * array is empty. + * @since 1.10 */ + public UnwindOperation(Field field, Field arrayIndex, boolean preserveNullAndEmptyArrays) { + + Assert.notNull(field, "Field must not be null"); + Assert.notNull(arrayIndex, "ArrayIndex must not be null"); + + this.field = new ExposedField(field, true); + this.arrayIndex = new ExposedField(arrayIndex, true); + this.preserveNullAndEmptyArrays = preserveNullAndEmptyArrays; + } + @Override - public DBObject toDBObject(AggregationOperationContext context) { - return new BasicDBObject("$unwind", context.getReference(field).toString()); + public Document toDocument(AggregationOperationContext context) { + + String path = context.getReference(field).toString(); + + if (!preserveNullAndEmptyArrays && arrayIndex == null) { + return new Document(getOperator(), path); + } + + Document unwindArgs = new Document(); + unwindArgs.put("path", path); + if (arrayIndex != null) { + unwindArgs.put("includeArrayIndex", arrayIndex.getName()); + } + unwindArgs.put("preserveNullAndEmptyArrays", preserveNullAndEmptyArrays); + + return new Document(getOperator(), unwindArgs); + } + + @Override + public String getOperator() { + return "$unwind"; + } + + @Override + public ExposedFields getFields() { + return arrayIndex != null ? ExposedFields.from(arrayIndex) : ExposedFields.from(); + } + + /** + * Get a builder that allows creation of {@link LookupOperation}. 
+ * + * @return + * @since 1.10 + */ + public static PathBuilder newUnwind() { + return UnwindOperationBuilder.newBuilder(); + } + + /** + * @author Mark Paluch + * @since 1.10 + */ + public interface PathBuilder { + + /** + * @param path the path to unwind, must not be {@literal null} or empty. + * @return + */ + IndexBuilder path(String path); + } + + /** + * @author Mark Paluch + * @since 1.10 + */ + public interface IndexBuilder { + + /** + * Exposes the array index as {@code field}. + * + * @param field field name to expose the field array index, must not be {@literal null} or empty. + * @return + */ + EmptyArraysBuilder arrayIndex(String field); + + /** + * Do not expose the array index. + * + * @return + */ + EmptyArraysBuilder noArrayIndex(); + } + + public interface EmptyArraysBuilder { + + /** + * Output documents if the array is null or empty. + * + * @return + */ + UnwindOperation preserveNullAndEmptyArrays(); + + /** + * Do not output documents if the array is null or empty. + * + * @return + */ + UnwindOperation skipNullAndEmptyArrays(); + } + + /** + * Builder for fluent {@link UnwindOperation} creation. + * + * @author Mark Paluch + * @since 1.10 + */ + public static final class UnwindOperationBuilder implements PathBuilder, IndexBuilder, EmptyArraysBuilder { + + private @Nullable Field field; + private @Nullable Field arrayIndex; + + private UnwindOperationBuilder() {} + + /** + * Creates new builder for {@link UnwindOperation}. + * + * @return never {@literal null}. + */ + public static PathBuilder newBuilder() { + return new UnwindOperationBuilder(); + } + + @Override + public UnwindOperation preserveNullAndEmptyArrays() { + + if (arrayIndex != null) { + return new UnwindOperation(field, arrayIndex, true); + } + + return new UnwindOperation(field, true); + } + + @Override + public UnwindOperation skipNullAndEmptyArrays() { + + if (arrayIndex != null) { + return new UnwindOperation(field, arrayIndex, false); + } + + return new UnwindOperation(field, false); + } + + @Override + public EmptyArraysBuilder arrayIndex(String field) { + + Assert.hasText(field, "'ArrayIndex' must not be null or empty"); + arrayIndex = Fields.field(field); + return this; + } + + @Override + public EmptyArraysBuilder noArrayIndex() { + + arrayIndex = null; + return this; + } + + @Override + public UnwindOperationBuilder path(String path) { + + Assert.hasText(path, "'Path' must not be null or empty"); + field = Fields.field(path); + return this; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java new file mode 100644 index 0000000000..8e676c72bc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java @@ -0,0 +1,402 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal variable} aggregation operations. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 1.10 + */ +public class VariableOperators { + + /** + * Starts building new {@link Map} that applies an {@link AggregationExpression} to each item of a referenced array + * and returns an array with the applied results. + * + * @param fieldReference must not be {@literal null}. + * @return + */ + public static Map.AsBuilder mapItemsOf(String fieldReference) { + return Map.itemsOf(fieldReference); + } + + /** + * Starts building new {@link Map} that applies an {@link AggregationExpression} to each item of a referenced array + * and returns an array with the applied results. + * + * @param expression must not be {@literal null}. + * @return + */ + public static Map.AsBuilder mapItemsOf(AggregationExpression expression) { + return Map.itemsOf(expression); + } + + /** + * Start creating new {@link Let} that allows definition of {@link ExpressionVariable} that can be used within a + * nested {@link AggregationExpression}. + * + * @param variables must not be {@literal null}. + * @return + */ + public static Let.LetBuilder define(ExpressionVariable... variables) { + return Let.define(variables); + } + + /** + * Start creating new {@link Let} that allows definition of {@link ExpressionVariable} that can be used within a + * nested {@link AggregationExpression}. + * + * @param variables must not be {@literal null}. + * @return + */ + public static Let.LetBuilder define(Collection variables) { + return Let.define(variables); + } + + /** + * {@link AggregationExpression} for {@code $map}. + */ + public static class Map implements AggregationExpression { + + private Object sourceArray; + private String itemVariableName; + private AggregationExpression functionToApply; + + private Map(Object sourceArray, String itemVariableName, AggregationExpression functionToApply) { + + Assert.notNull(sourceArray, "SourceArray must not be null"); + Assert.notNull(itemVariableName, "ItemVariableName must not be null"); + Assert.notNull(functionToApply, "FunctionToApply must not be null"); + + this.sourceArray = sourceArray; + this.itemVariableName = itemVariableName; + this.functionToApply = functionToApply; + } + + /** + * Starts building new {@link Map} that applies an {@link AggregationExpression} to each item of a referenced array + * and returns an array with the applied results. + * + * @param fieldReference must not be {@literal null}. 
+ * @return + */ + public static AsBuilder itemsOf(final String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new AsBuilder() { + + @Override + public FunctionBuilder as(final String variableName) { + + Assert.notNull(variableName, "VariableName must not be null"); + + return new FunctionBuilder() { + + @Override + public Map andApply(final AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + return new Map(Fields.field(fieldReference), variableName, expression); + } + }; + } + + }; + } + + /** + * Starts building new {@link Map} that applies an {@link AggregationExpression} to each item of a referenced array + * and returns an array with the applied results. + * + * @param source must not be {@literal null}. + * @return + */ + public static AsBuilder itemsOf(final AggregationExpression source) { + + Assert.notNull(source, "AggregationExpression must not be null"); + + return new AsBuilder() { + + @Override + public FunctionBuilder as(final String variableName) { + + Assert.notNull(variableName, "VariableName must not be null"); + + return new FunctionBuilder() { + + @Override + public Map andApply(final AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + return new Map(source, variableName, expression); + } + }; + } + }; + } + + @Override + public Document toDocument(final AggregationOperationContext context) { + return toMap(ExposedFields.synthetic(Fields.fields(itemVariableName)), context); + } + + private Document toMap(ExposedFields exposedFields, AggregationOperationContext context) { + + Document map = new Document(); + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); + + Document input; + if (sourceArray instanceof Field field) { + input = new Document("input", context.getReference(field).toString()); + } else { + input = new Document("input", ((AggregationExpression) sourceArray).toDocument(context)); + } + + map.putAll(context.getMappedObject(input)); + map.put("as", itemVariableName); + map.put("in", + functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext, + Collections.singleton(Fields.field(itemVariableName))))); + + return new Document("$map", map); + } + + public interface AsBuilder { + + /** + * Define the {@literal variableName} for addressing items within the array. + * + * @param variableName must not be {@literal null}. + * @return + */ + FunctionBuilder as(String variableName); + } + + public interface FunctionBuilder { + + /** + * Creates new {@link Map} that applies the given {@link AggregationExpression} to each item of the referenced + * array and returns an array with the applied results. + * + * @param expression must not be {@literal null}. + * @return + */ + Map andApply(AggregationExpression expression); + } + } + + /** + * {@link AggregationExpression} for {@code $let} that binds {@link AggregationExpression} to variables for use in the + * specified {@code in} expression, and returns the result of the expression. 
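+ * <p>
+ * A usage sketch; the variable name, the referenced field and the {@code inExpression} to apply are illustrative
+ * only:
+ *
+ * <pre class="code">
+ * VariableOperators.define(ExpressionVariable.newVariable("total").forField("price")).andApply(inExpression);
+ * </pre>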
+ * + * @author Christoph Strobl + * @since 1.10 + */ + public static class Let implements AggregationExpression { + + private final List vars; + + @Nullable // + private final AggregationExpression expression; + + private Let(List vars, @Nullable AggregationExpression expression) { + + this.vars = vars; + this.expression = expression; + } + + /** + * Create a new {@link Let} holding just the given {@literal variables}. + * + * @param variables must not be {@literal null}. + * @return new instance of {@link Let}. + * @since 4.1 + */ + public static Let just(ExpressionVariable... variables) { + return new Let(List.of(variables), null); + } + + /** + * Start creating new {@link Let} by defining the variables for {@code $vars}. + * + * @param variables must not be {@literal null}. + * @return + */ + public static LetBuilder define(Collection variables) { + + Assert.notNull(variables, "Variables must not be null"); + + return new LetBuilder() { + + @Override + public Let andApply(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Let(new ArrayList(variables), expression); + } + }; + } + + /** + * Start creating new {@link Let} by defining the variables for {@code $vars}. + * + * @param variables must not be {@literal null}. + * @return + */ + public static LetBuilder define(ExpressionVariable... variables) { + + Assert.notNull(variables, "Variables must not be null"); + return define(List.of(variables)); + } + + public interface LetBuilder { + + /** + * Define the {@link AggregationExpression} to evaluate. + * + * @param expression must not be {@literal null}. + * @return + */ + Let andApply(AggregationExpression expression); + + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context); + } + + String[] getVariableNames() { + + String[] varNames = new String[this.vars.size()]; + for (int i = 0; i < this.vars.size(); i++) { + varNames[i] = this.vars.get(i).variableName; + } + + return varNames; + } + + private Document toLet(ExposedFields exposedFields, AggregationOperationContext context) { + + Document letExpression = new Document(); + Document mappedVars = new Document(); + + for (ExpressionVariable var : this.vars) { + mappedVars.putAll(getMappedVariable(var, context)); + } + + letExpression.put("vars", mappedVars); + if (expression != null) { + + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); + letExpression.put("in", getMappedIn(operationContext)); + } + + return new Document("$let", letExpression); + } + + private Document getMappedVariable(ExpressionVariable var, AggregationOperationContext context) { + + if (var.expression instanceof AggregationExpression expression) { + return new Document(var.variableName, expression.toDocument(context)); + } + if (var.expression instanceof Field field) { + return new Document(var.variableName, context.getReference(field).toString()); + } + return new Document(var.variableName, var.expression); + } + + private Object getMappedIn(AggregationOperationContext context) { + return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context, + this.vars.stream().map(var -> Fields.field(var.variableName)).collect(Collectors.toList()))); + } + + /** + * @author Christoph Strobl + */ + public static class ExpressionVariable { + + private final @Nullable String variableName; + private final @Nullable Object expression; + + 
/** + * Creates new {@link ExpressionVariable}. + * + * @param variableName can be {@literal null}. + * @param expression can be {@literal null}. + */ + private ExpressionVariable(@Nullable String variableName, @Nullable Object expression) { + + this.variableName = variableName; + this.expression = expression; + } + + /** + * Create a new {@link ExpressionVariable} with given name. + * + * @param variableName must not be {@literal null}. + * @return never {@literal null}. + */ + public static ExpressionVariable newVariable(String variableName) { + + Assert.notNull(variableName, "VariableName must not be null"); + return new ExpressionVariable(variableName, null); + } + + /** + * Create a new {@link ExpressionVariable} with current name and given {@literal expression}. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + */ + public ExpressionVariable forExpression(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ExpressionVariable(variableName, expression); + } + + public ExpressionVariable forField(String fieldRef) { + return new ExpressionVariable(variableName, Fields.field(fieldRef)); + } + + /** + * Create a new {@link ExpressionVariable} with current name and given {@literal expressionObject}. + * + * @param expressionObject must not be {@literal null}. + * @return never {@literal null}. + */ + public ExpressionVariable forExpression(Document expressionObject) { + + Assert.notNull(expressionObject, "Expression must not be null"); + return new ExpressionVariable(variableName, expressionObject); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperation.java new file mode 100644 index 0000000000..bcc5fbd7bc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperation.java @@ -0,0 +1,519 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import org.bson.BinaryVector; +import org.bson.Document; + +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.mapping.MongoVector; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.lang.Contract; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Performs a semantic search on data in your Atlas cluster. This stage is only available for Atlas Vector Search. 
+ * Vector data must be less than or equal to 4096 dimensions in width. + *

+ * <h3>Limitations</h3>
+ * You cannot use this stage together with:
+ * <ul>
+ * <li>{@link org.springframework.data.mongodb.core.aggregation.LookupOperation Lookup} stages</li>
+ * <li>{@link org.springframework.data.mongodb.core.aggregation.FacetOperation Facet} stage</li>
+ * </ul>
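+ * <p>
+ * A usage sketch; the index name, path and vector values are illustrative, and the {@code path}, {@code vector} and
+ * {@code limit} builder steps are assumed from the fluent {@code PathContributor} API returned by
+ * {@link #search(String)}:
+ *
+ * <pre class="code">
+ * VectorSearchOperation.search("embedding-index").path("plotEmbedding").vector(-0.0016d, 0.03d, 0.01d).limit(10)
+ * 		.numCandidates(150).searchType(SearchType.ANN);
+ * </pre>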
                    + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public class VectorSearchOperation implements AggregationOperation { + + private final SearchType searchType; + private final @Nullable CriteriaDefinition filter; + private final String indexName; + private final Limit limit; + private final @Nullable Integer numCandidates; + private final QueryPaths path; + private final Vector vector; + private final String score; + private final Consumer scoreCriteria; + + private VectorSearchOperation(SearchType searchType, @Nullable CriteriaDefinition filter, String indexName, + Limit limit, @Nullable Integer numCandidates, QueryPaths path, Vector vector, @Nullable String searchScore, + Consumer scoreCriteria) { + + this.searchType = searchType; + this.filter = filter; + this.indexName = indexName; + this.limit = limit; + this.numCandidates = numCandidates; + this.path = path; + this.vector = vector; + this.score = searchScore; + this.scoreCriteria = scoreCriteria; + } + + VectorSearchOperation(String indexName, QueryPaths path, Limit limit, Vector vector) { + this(SearchType.DEFAULT, null, indexName, limit, null, path, vector, null, null); + } + + /** + * Entrypoint to build a {@link VectorSearchOperation} starting from the {@code index} name to search. Atlas Vector + * Search doesn't return results if you misspell the index name or if the specified index doesn't already exist on the + * cluster. + * + * @param index must not be {@literal null} or empty. + * @return new instance of {@link VectorSearchOperation.PathContributor}. + */ + public static PathContributor search(String index) { + return new VectorSearchBuilder().index(index); + } + + /** + * Configure the search type to use. {@link SearchType#ENN} leads to an exact search while {@link SearchType#ANN} uses + * {@code exact=false}. + * + * @param searchType must not be null. + * @return a new {@link VectorSearchOperation} with {@link SearchType} applied. + */ + @Contract("_ -> new") + public VectorSearchOperation searchType(SearchType searchType) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, score, + scoreCriteria); + } + + /** + * Criteria expression that compares an indexed field with a boolean, date, objectId, number (not decimals), string, + * or UUID to use as a pre-filter. + *

+ * <p>
+ * Atlas Vector Search supports only the filters for the following MQL match expressions:
+ * <ul>
+ * <li>$gt</li>
+ * <li>$lt</li>
+ * <li>$gte</li>
+ * <li>$lte</li>
+ * <li>$eq</li>
+ * <li>$ne</li>
+ * <li>$in</li>
+ * <li>$nin</li>
+ * <li>$nor</li>
+ * <li>$not</li>
+ * <li>$and</li>
+ * <li>$or</li>
+ * </ul>
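+ * <p>
+ * For example, a sketch of a pre-filter on an assumed {@code year} field, given an existing {@code searchOperation}
+ * (both names are illustrative):
+ *
+ * <pre class="code">
+ * searchOperation.filter(Criteria.where("year").gte(2015).lt(2020));
+ * </pre>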
                    + * + * @param filter must not be null. + * @return a new {@link VectorSearchOperation} with {@link CriteriaDefinition} applied. + */ + @Contract("_ -> new") + public VectorSearchOperation filter(CriteriaDefinition filter) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, score, + scoreCriteria); + } + + /** + * Criteria expression that compares an indexed field with a boolean, date, objectId, number (not decimals), string, + * or UUID to use as a pre-filter. + *

+ * <p>
+ * Atlas Vector Search supports only the filters for the following MQL match expressions:
+ * <ul>
+ * <li>$gt</li>
+ * <li>$lt</li>
+ * <li>$gte</li>
+ * <li>$lte</li>
+ * <li>$eq</li>
+ * <li>$ne</li>
+ * <li>$in</li>
+ * <li>$nin</li>
+ * <li>$nor</li>
+ * <li>$not</li>
+ * <li>$and</li>
+ * <li>$or</li>
+ * </ul>
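+ * <p>
+ * For example, a sketch using a raw {@link Document} filter on an assumed {@code year} field, given an existing
+ * {@code searchOperation} (both names are illustrative):
+ *
+ * <pre class="code">
+ * searchOperation.filter(new Document("year", new Document("$gte", 2015)));
+ * </pre>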
+ *
+ * @param filter must not be null.
+ * @return a new {@link VectorSearchOperation} with {@link CriteriaDefinition} applied.
+ */
+ @Contract("_ -> new")
+ public VectorSearchOperation filter(Document filter) {
+
+ 	return filter(new CriteriaDefinition() {
+ 		@Override
+ 		public Document getCriteriaObject() {
+ 			return filter;
+ 		}
+
+ 		@Nullable
+ 		@Override
+ 		public String getKey() {
+ 			return null;
+ 		}
+ 	});
+ }
+
+ /**
+ * Number of nearest neighbors to use during the search. Value must be less than or equal to (<=) {@code 10000}. You
+ * can't specify a number less than the number of documents to return (limit). This field is required if
+ * {@link #searchType(SearchType)} is {@link SearchType#ANN} or {@link SearchType#DEFAULT}.
+ *
+ * @param numCandidates number of nearest neighbors to use during the search.
+ * @return a new {@link VectorSearchOperation} with {@code numCandidates} applied.
+ */
+ @Contract("_ -> new")
+ public VectorSearchOperation numCandidates(int numCandidates) {
+ 	return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, score,
+ 			scoreCriteria);
+ }
+
+ /**
+ * Add an {@link AddFieldsOperation} stage including the search score using {@code score} as field name.
+ *
+ * @return a new {@link VectorSearchOperation} with the search score applied.
+ * @see #withSearchScore(String)
+ */
+ @Contract("-> new")
+ public VectorSearchOperation withSearchScore() {
+ 	return withSearchScore("score");
+ }
+
+ /**
+ * Add an {@link AddFieldsOperation} stage including the search score using {@code scoreFieldName} as field name.
+ *
+ * @param scoreFieldName name of the score field.
+ * @return a new {@link VectorSearchOperation} with {@code scoreFieldName} applied.
+ * @see #withSearchScore()
+ */
+ @Contract("_ -> new")
+ public VectorSearchOperation withSearchScore(String scoreFieldName) {
+ 	return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, scoreFieldName,
+ 			scoreCriteria);
+ }
+
+ /**
+ * Add a {@link MatchOperation} stage targeting the score field name. Implies that the score field is present by
+ * either reusing a previous {@link AddFieldsOperation} from {@link #withSearchScore()} or
+ * {@link #withSearchScore(String)} or by adding a new {@link AddFieldsOperation} stage.
+ *
+ * @return a new {@link VectorSearchOperation} with the search score filter applied.
+ */
+ @Contract("_ -> new")
+ public VectorSearchOperation withFilterByScore(Consumer<Criteria> score) {
+ 	return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector,
+ 			StringUtils.hasText(this.score) ? this.score : "score", score);
+ }
+
+ @Override
+ public Document toDocument(AggregationOperationContext context) {
+
+ 	Document $vectorSearch = new Document();
+
+ 	if (searchType != null && !searchType.equals(SearchType.DEFAULT)) {
+ 		$vectorSearch.append("exact", searchType.equals(SearchType.ENN));
+ 	}
+
+ 	if (filter != null) {
+ 		$vectorSearch.append("filter", context.getMappedObject(filter.getCriteriaObject()));
+ 	}
+
+ 	$vectorSearch.append("index", indexName);
+ 	$vectorSearch.append("limit", limit.max());
+
+ 	if (numCandidates != null) {
+ 		$vectorSearch.append("numCandidates", numCandidates);
+ 	}
+
+ 	Object path = this.path.getPathObject();
+
+ 	if (path instanceof String pathFieldName) {
+ 		Document mappedObject = context.getMappedObject(new Document(pathFieldName, 1));
+ 		path = mappedObject.keySet().iterator().next();
+ 	}
+
+ 	Object source = vector.getSource();
+
+ 	if (source instanceof float[]) {
+ 		source = vector.toDoubleArray();
+ 	}
+
+ 	if (source instanceof double[] ds) {
+ 		source = Arrays.stream(ds).boxed().collect(Collectors.toList());
+ 	}
+
+ 	$vectorSearch.append("path", path);
+ 	$vectorSearch.append("queryVector", source);
+
+ 	return new Document(getOperator(), $vectorSearch);
+ }
+
+ @Override
+ public List<Document> toPipelineStages(AggregationOperationContext context) {
+
+ 	if (!StringUtils.hasText(score)) {
+ 		return List.of(toDocument(context));
+ 	}
+
+ 	AddFieldsOperation $vectorSearchScore = Aggregation.addFields().addField(score)
+ 			.withValueOfExpression("{\"$meta\":\"vectorSearchScore\"}").build();
+
+ 	if (scoreCriteria == null) {
+ 		return List.of(toDocument(context), $vectorSearchScore.toDocument(context));
+ 	}
+
+ 	Criteria criteria = Criteria.where(score);
+ 	scoreCriteria.accept(criteria);
+ 	MatchOperation $filterByScore = Aggregation.match(criteria);
+
+ 	return List.of(toDocument(context), $vectorSearchScore.toDocument(context), $filterByScore.toDocument(context));
+ }
+
+ @Override
+ public String getOperator() {
+ 	return "$vectorSearch";
+ }
+
+ /**
+ * Builder helper to create a {@link VectorSearchOperation}.
+ */
+ private static class VectorSearchBuilder implements PathContributor, VectorContributor, LimitContributor {
+
+ 	String index;
+ 	QueryPath<String> paths;
+ 	Vector vector;
+
+ 	PathContributor index(String index) {
+ 		this.index = index;
+ 		return this;
+ 	}
+
+ 	@Override
+ 	public VectorContributor path(String path) {
+
+ 		this.paths = QueryPath.path(path);
+ 		return this;
+ 	}
+
+ 	@Override
+ 	public VectorSearchOperation limit(Limit limit) {
+ 		return new VectorSearchOperation(index, QueryPaths.of(paths), limit, vector);
+ 	}
+
+ 	@Override
+ 	public LimitContributor vector(Vector vector) {
+ 		this.vector = vector;
+ 		return this;
+ 	}
+ }
+
+ /**
+ * Search type, ANN as approximation or ENN for exact search.
+ */
+ public enum SearchType {
+
+ 	/** MongoDB Server default (value will be omitted) */
+ 	DEFAULT,
+ 	/** Approximate Nearest Neighbour */
+ 	ANN,
+ 	/** Exact Nearest Neighbour */
+ 	ENN
+ }
+
+ /**
+ * Value object capturing query paths.
+ */
+ public static class QueryPaths {
+
+ 	private final Set<QueryPath<?>> paths;
+
+ 	private QueryPaths(Set<QueryPath<?>> paths) {
+ 		this.paths = paths;
+ 	}
+
+ 	/**
+ 	 * Factory method to create {@link QueryPaths} from a single {@link QueryPath}.
+ 	 *
+ 	 * @param path must not be {@literal null}.
+ 	 * @return a new {@link QueryPaths} instance.
+ 	 */
+ 	public static QueryPaths of(QueryPath<?> path) {
+ 		return new QueryPaths(Set.of(path));
+ 	}
+
+ 	Object getPathObject() {
+
+ 		if (paths.size() == 1) {
+ 			return paths.iterator().next().value();
+ 		}
+ 		return paths.stream().map(QueryPath::value).collect(Collectors.toList());
+ 	}
+ }
+
+ /**
+ * Interface describing a query path contract. Query paths might be simple field names, wildcard paths, or
+ * multi-paths.
+ *
+ * @param <T> the path value type.
+ */
+ public interface QueryPath<T> {
+
+ 	T value();
+
+ 	static QueryPath<String> path(String field) {
+ 		return new SimplePath(field);
+ 	}
+ }
+
+ public static class SimplePath implements QueryPath<String> {
+
+ 	String name;
+
+ 	public SimplePath(String name) {
+ 		this.name = name;
+ 	}
+
+ 	@Override
+ 	public String value() {
+ 		return name;
+ 	}
+ }
+
+ /**
+ * Fluent API to configure a path on the VectorSearchOperation builder.
+ */
+ public interface PathContributor {
+
+ 	/**
+ 	 * Name of the indexed vector type field to search.
+ 	 *
+ 	 * @param path name of the search path.
+ 	 * @return the {@link VectorContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	VectorContributor path(String path);
+ }
+
+ /**
+ * Fluent API to configure a vector on the VectorSearchOperation builder.
+ */
+ public interface VectorContributor {
+
+ 	/**
+ 	 * Array of float numbers that represent the query vector. The number type must match the indexed field value type.
+ 	 * Otherwise, Atlas Vector Search doesn't return any results or errors.
+ 	 *
+ 	 * @param vector the query vector.
+ 	 * @return the {@link LimitContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	default LimitContributor vector(float... vector) {
+ 		return vector(Vector.of(vector));
+ 	}
+
+ 	/**
+ 	 * Array of byte numbers that represent the query vector. The number type must match the indexed field value type.
+ 	 * Otherwise, Atlas Vector Search doesn't return any results or errors.
+ 	 *
+ 	 * @param vector the query vector.
+ 	 * @return the {@link LimitContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	default LimitContributor vector(byte[] vector) {
+ 		return vector(BinaryVector.int8Vector(vector));
+ 	}
+
+ 	/**
+ 	 * Array of double numbers that represent the query vector. The number type must match the indexed field value type.
+ 	 * Otherwise, Atlas Vector Search doesn't return any results or errors.
+ 	 *
+ 	 * @param vector the query vector.
+ 	 * @return the {@link LimitContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	default LimitContributor vector(double... vector) {
+ 		return vector(Vector.of(vector));
+ 	}
+
+ 	/**
+ 	 * List of numbers that represent the query vector. The number type must match the indexed field value type.
+ 	 * Otherwise, Atlas Vector Search doesn't return any results or errors.
+ 	 *
+ 	 * @param vector the query vector.
+ 	 * @return the {@link LimitContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	default LimitContributor vector(List<? extends Number> vector) {
+ 		return vector(Vector.of(vector));
+ 	}
+
+ 	/**
+ 	 * Binary vector (BSON BinData vector subtype float32, or BSON BinData vector subtype int1 or int8 type) that
+ 	 * represents the query vector. The number type must match the indexed field value type. Otherwise, Atlas Vector
+ 	 * Search doesn't return any results or errors.
+ 	 *
+ 	 * @param vector the query vector.
+ 	 * @return the {@link LimitContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	default LimitContributor vector(BinaryVector vector) {
+ 		return vector(MongoVector.of(vector));
+ 	}
+
+ 	/**
+ 	 * The query vector. The number type must match the indexed field value type. Otherwise, Atlas Vector Search doesn't
+ 	 * return any results or errors.
+ 	 *
+ 	 * @param vector the query vector.
+ 	 * @return the {@link LimitContributor} continuing the fluent call.
+ 	 */
+ 	@Contract("_ -> this")
+ 	LimitContributor vector(Vector vector);
+ }
+
+ /**
+ * Fluent API to configure a limit on the VectorSearchOperation builder.
+ */ + public interface LimitContributor { + + /** + * Number (of type int only) of documents to return in the results. This value can't exceed the value of + * numCandidates if you specify numCandidates. + * + * @param limit + * @return + */ + @Contract("_ -> this") + default VectorSearchOperation limit(int limit) { + return limit(Limit.of(limit)); + } + + /** + * Number (of type int only) of documents to return in the results. This value can't exceed the value of + * numCandidates if you specify numCandidates. + * + * @param limit + * @return + */ + @Contract("_ -> this") + VectorSearchOperation limit(Limit limit); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/package-info.java index a098ec0228..0e30b8b855 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/package-info.java @@ -1,5 +1,8 @@ /** * Support for the MongoDB aggregation framework. + * * @since 1.3 */ -package org.springframework.data.mongodb.core.aggregation; \ No newline at end of file +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.aggregation; + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/Collation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/Collation.java new file mode 100644 index 0000000000..2ce2d7ed46 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/Collation.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@link Collation} allows to define the rules used for language-specific string comparison. + * + * @see https://www.mongodb.com/docs/manual/reference/collation/ + * @author Christoph Strobl + * @since 4.0 + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +public @interface Collation { + + /** + * The actual collation definition in JSON format or a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression} resolving to either a JSON + * String or a {@link org.bson.Document}. The keys of the JSON document are configuration options for the collation. + * + * @return an empty {@link String} by default. 
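+ * <p>
+ * A sketch of a possible declaration (the locale value is illustrative, not a default):
+ *
+ * <pre class="code">
+ * &#64;Collation("{ 'locale' : 'en_US' }")
+ * class Person { }
+ * </pre>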
+ */ + String value() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/package-info.java new file mode 100644 index 0000000000..3e08dc1014 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/package-info.java @@ -0,0 +1,6 @@ +/** + * Core Spring Data MongoDB annotations not limited to a special use case (like Query,...). + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.annotation; + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java index b2726f9603..7a01677939 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,55 +16,65 @@ package org.springframework.data.mongodb.core.convert; import java.math.BigInteger; +import java.util.Date; +import org.bson.types.Code; import org.bson.types.ObjectId; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.core.convert.support.GenericConversionService; -import org.springframework.data.convert.EntityInstantiators; +import org.springframework.data.convert.ConverterBuilder; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mapping.model.EntityInstantiators; import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToBigIntegerConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToObjectIdConverter; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** * Base class for {@link MongoConverter} implementations. Sets up a {@link GenericConversionService} and populates basic * converters. Allows registering {@link CustomConversions}. 
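 * <p>
 * A sketch of wiring custom conversions into a concrete subclass; {@code converter} stands for some
 * {@code AbstractMongoConverter} instance and {@code myConverter} for an assumed application-provided
 * {@link org.springframework.core.convert.converter.Converter}:
 *
 * <pre class="code">
 * converter.setCustomConversions(new MongoCustomConversions(java.util.List.of(myConverter)));
 * converter.afterPropertiesSet();
 * </pre>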
- * + * * @author Jon Brisbin * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ public abstract class AbstractMongoConverter implements MongoConverter, InitializingBean { protected final GenericConversionService conversionService; - protected CustomConversions conversions = new CustomConversions(); + protected CustomConversions conversions = new MongoCustomConversions(); protected EntityInstantiators instantiators = new EntityInstantiators(); /** * Creates a new {@link AbstractMongoConverter} using the given {@link GenericConversionService}. - * - * @param conversionService + * + * @param conversionService can be {@literal null} and defaults to {@link DefaultConversionService}. */ - public AbstractMongoConverter(GenericConversionService conversionService) { + public AbstractMongoConverter(@Nullable GenericConversionService conversionService) { this.conversionService = conversionService == null ? new DefaultConversionService() : conversionService; } /** * Registers the given custom conversions with the converter. - * - * @param conversions + * + * @param conversions must not be {@literal null}. */ public void setCustomConversions(CustomConversions conversions) { + + Assert.notNull(conversions, "Conversions must not be null"); this.conversions = conversions; } /** * Registers {@link EntityInstantiators} to customize entity instantiation. - * - * @param instantiators + * + * @param instantiators can be {@literal null}. Uses default {@link EntityInstantiators} if so. */ - public void setInstantiators(EntityInstantiators instantiators) { + public void setInstantiators(@Nullable EntityInstantiators instantiators) { this.instantiators = instantiators == null ? new EntityInstantiators() : instantiators; } @@ -86,28 +96,31 @@ private void initializeConverters() { conversionService.addConverter(BigIntegerToObjectIdConverter.INSTANCE); } - conversions.registerConvertersIn(conversionService); - } + if (!conversionService.canConvert(Date.class, Long.class)) { + conversionService + .addConverter(ConverterBuilder.writing(Date.class, Long.class, Date::getTime).getWritingConverter()); + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object) - */ - public Object convertToMongoType(Object obj) { - return convertToMongoType(obj, null); + if (!conversionService.canConvert(Long.class, Date.class)) { + conversionService.addConverter(ConverterBuilder.reading(Long.class, Date.class, Date::new).getReadingConverter()); + } + + if (!conversionService.canConvert(ObjectId.class, Date.class)) { + + conversionService.addConverter(ConverterBuilder + .reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp())).getReadingConverter()); + } + + conversionService + .addConverter(ConverterBuilder.reading(Code.class, String.class, Code::getCode).getReadingConverter()); + conversions.registerConvertersIn(conversionService); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.convert.MongoConverter#getConversionService() - */ + @Override public ConversionService getConversionService() { return conversionService; } - /* (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ public void afterPropertiesSet() { initializeConverters(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ConverterRegistration.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ConverterRegistration.java deleted file mode 100644 index 3365c2359f..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ConverterRegistration.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import org.springframework.core.convert.converter.GenericConverter.ConvertiblePair; -import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; -import org.springframework.util.Assert; - -/** - * Conversion registration information. - * - * @author Oliver Gierke - */ -class ConverterRegistration { - - private final ConvertiblePair convertiblePair; - private final boolean reading; - private final boolean writing; - - /** - * Creates a new {@link ConverterRegistration}. - * - * @param convertiblePair must not be {@literal null}. - * @param isReading whether to force to consider the converter for reading. - * @param isWritingwhether to force to consider the converter for reading. - */ - public ConverterRegistration(ConvertiblePair convertiblePair, boolean isReading, boolean isWriting) { - - Assert.notNull(convertiblePair); - - this.convertiblePair = convertiblePair; - this.reading = isReading; - this.writing = isWriting; - } - - /** - * Creates a new {@link ConverterRegistration} from the given source and target type and read/write flags. - * - * @param source the source type to be converted from, must not be {@literal null}. - * @param target the target type to be converted to, must not be {@literal null}. - * @param isReading whether to force to consider the converter for reading. - * @param isWriting whether to force to consider the converter for writing. - */ - public ConverterRegistration(Class source, Class target, boolean isReading, boolean isWriting) { - this(new ConvertiblePair(source, target), isReading, isWriting); - } - - /** - * Returns whether the converter shall be used for writing. - * - * @return - */ - public boolean isWriting() { - return writing == true || (!reading && isSimpleTargetType()); - } - - /** - * Returns whether the converter shall be used for reading. - * - * @return - */ - public boolean isReading() { - return reading == true || (!writing && isSimpleSourceType()); - } - - /** - * Returns the actual conversion pair. - * - * @return - */ - public ConvertiblePair getConvertiblePair() { - return convertiblePair; - } - - /** - * Returns whether the source type is a Mongo simple one. - * - * @return - */ - public boolean isSimpleSourceType() { - return isMongoBasicType(convertiblePair.getSourceType()); - } - - /** - * Returns whether the target type is a Mongo simple one. - * - * @return - */ - public boolean isSimpleTargetType() { - return isMongoBasicType(convertiblePair.getTargetType()); - } - - /** - * Returns whether the given type is a type that Mongo can handle basically. 
- * - * @param type - * @return - */ - private static boolean isMongoBasicType(Class type) { - return MongoSimpleTypes.HOLDER.isSimpleType(type); - } -} \ No newline at end of file diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/CustomConversions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/CustomConversions.java deleted file mode 100644 index 14054d84cd..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/CustomConversions.java +++ /dev/null @@ -1,420 +0,0 @@ -/* - * Copyright 2011-2016 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.core.GenericTypeResolver; -import org.springframework.core.convert.TypeDescriptor; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.convert.converter.ConverterFactory; -import org.springframework.core.convert.converter.GenericConverter; -import org.springframework.core.convert.converter.GenericConverter.ConvertiblePair; -import org.springframework.core.convert.support.GenericConversionService; -import org.springframework.data.convert.JodaTimeConverters; -import org.springframework.data.convert.Jsr310Converters; -import org.springframework.data.convert.ReadingConverter; -import org.springframework.data.convert.ThreeTenBackPortConverters; -import org.springframework.data.convert.WritingConverter; -import org.springframework.data.mapping.model.SimpleTypeHolder; -import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; -import org.springframework.data.util.CacheValue; -import org.springframework.util.Assert; - -/** - * Value object to capture custom conversion. That is essentially a {@link List} of converters and some additional logic - * around them. The converters are pretty much builds up two sets of types which Mongo basic types {@see #MONGO_TYPES} - * can be converted into and from. These types will be considered simple ones (which means they neither need deeper - * inspection nor nested conversion. Thus the {@link CustomConversions} also act as factory for {@link SimpleTypeHolder} - * . - * - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -public class CustomConversions { - - private static final Logger LOG = LoggerFactory.getLogger(CustomConversions.class); - private static final String READ_CONVERTER_NOT_SIMPLE = "Registering converter from %s to %s as reading converter although it doesn't convert from a Mongo supported type! 
You might wanna check you annotation setup at the converter implementation."; - private static final String WRITE_CONVERTER_NOT_SIMPLE = "Registering converter from %s to %s as writing converter although it doesn't convert to a Mongo supported type! You might wanna check you annotation setup at the converter implementation."; - - private final Set readingPairs; - private final Set writingPairs; - private final Set> customSimpleTypes; - private final SimpleTypeHolder simpleTypeHolder; - - private final List converters; - - private final Map>> customReadTargetTypes; - private final Map>> customWriteTargetTypes; - private final Map, CacheValue>> rawWriteTargetTypes; - - /** - * Creates an empty {@link CustomConversions} object. - */ - CustomConversions() { - this(new ArrayList()); - } - - /** - * Creates a new {@link CustomConversions} instance registering the given converters. - * - * @param converters - */ - public CustomConversions(List converters) { - - Assert.notNull(converters); - - this.readingPairs = new LinkedHashSet(); - this.writingPairs = new LinkedHashSet(); - this.customSimpleTypes = new HashSet>(); - this.customReadTargetTypes = new ConcurrentHashMap>>(); - this.customWriteTargetTypes = new ConcurrentHashMap>>(); - this.rawWriteTargetTypes = new ConcurrentHashMap, CacheValue>>(); - - List toRegister = new ArrayList(); - - // Add user provided converters to make sure they can override the defaults - toRegister.addAll(converters); - toRegister.add(CustomToStringConverter.INSTANCE); - toRegister.addAll(MongoConverters.getConvertersToRegister()); - toRegister.addAll(JodaTimeConverters.getConvertersToRegister()); - toRegister.addAll(GeoConverters.getConvertersToRegister()); - toRegister.addAll(Jsr310Converters.getConvertersToRegister()); - toRegister.addAll(ThreeTenBackPortConverters.getConvertersToRegister()); - - for (Object c : toRegister) { - registerConversion(c); - } - - Collections.reverse(toRegister); - - this.converters = Collections.unmodifiableList(toRegister); - this.simpleTypeHolder = new SimpleTypeHolder(customSimpleTypes, MongoSimpleTypes.HOLDER); - } - - /** - * Returns the underlying {@link SimpleTypeHolder}. - * - * @return - */ - public SimpleTypeHolder getSimpleTypeHolder() { - return simpleTypeHolder; - } - - /** - * Returns whether the given type is considered to be simple. That means it's either a general simple type or we have - * a writing {@link Converter} registered for a particular type. - * - * @see SimpleTypeHolder#isSimpleType(Class) - * @param type - * @return - */ - public boolean isSimpleType(Class type) { - return simpleTypeHolder.isSimpleType(type); - } - - /** - * Populates the given {@link GenericConversionService} with the convertes registered. - * - * @param conversionService - */ - public void registerConvertersIn(GenericConversionService conversionService) { - - for (Object converter : converters) { - - boolean added = false; - - if (converter instanceof Converter) { - conversionService.addConverter((Converter) converter); - added = true; - } - - if (converter instanceof ConverterFactory) { - conversionService.addConverterFactory((ConverterFactory) converter); - added = true; - } - - if (converter instanceof GenericConverter) { - conversionService.addConverter((GenericConverter) converter); - added = true; - } - - if (!added) { - throw new IllegalArgumentException( - "Given set contains element that is neither Converter nor ConverterFactory!"); - } - } - } - - /** - * Registers a conversion for the given converter. 
Inspects either generics of {@link Converter} and - * {@link ConverterFactory} or the {@link ConvertiblePair}s returned by a {@link GenericConverter}. - * - * @param converter - */ - private void registerConversion(Object converter) { - - Class type = converter.getClass(); - boolean isWriting = type.isAnnotationPresent(WritingConverter.class); - boolean isReading = type.isAnnotationPresent(ReadingConverter.class); - - if (converter instanceof GenericConverter) { - GenericConverter genericConverter = (GenericConverter) converter; - for (ConvertiblePair pair : genericConverter.getConvertibleTypes()) { - register(new ConverterRegistration(pair, isReading, isWriting)); - } - } else if (converter instanceof ConverterFactory) { - - Class[] arguments = GenericTypeResolver.resolveTypeArguments(converter.getClass(), ConverterFactory.class); - register(new ConverterRegistration(arguments[0], arguments[1], isReading, isWriting)); - } else if (converter instanceof Converter) { - Class[] arguments = GenericTypeResolver.resolveTypeArguments(converter.getClass(), Converter.class); - register(new ConverterRegistration(arguments[0], arguments[1], isReading, isWriting)); - } else { - throw new IllegalArgumentException("Unsupported Converter type!"); - } - } - - /** - * Registers the given {@link ConvertiblePair} as reading or writing pair depending on the type sides being basic - * Mongo types. - * - * @param pair - */ - private void register(ConverterRegistration converterRegistration) { - - ConvertiblePair pair = converterRegistration.getConvertiblePair(); - - if (converterRegistration.isReading()) { - - readingPairs.add(pair); - - if (LOG.isWarnEnabled() && !converterRegistration.isSimpleSourceType()) { - LOG.warn(String.format(READ_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType())); - } - } - - if (converterRegistration.isWriting()) { - - writingPairs.add(pair); - customSimpleTypes.add(pair.getSourceType()); - - if (LOG.isWarnEnabled() && !converterRegistration.isSimpleTargetType()) { - LOG.warn(String.format(WRITE_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType())); - } - } - } - - /** - * Returns the target type to convert to in case we have a custom conversion registered to convert the given source - * type into a Mongo native one. - * - * @param sourceType must not be {@literal null} - * @return - */ - public Class getCustomWriteTarget(final Class sourceType) { - - return getOrCreateAndCache(sourceType, rawWriteTargetTypes, new Producer() { - - @Override - public Class get() { - return getCustomTarget(sourceType, null, writingPairs); - } - }); - } - - /** - * Returns the target type we can readTargetWriteLocl an inject of the given source type to. The returned type might - * be a subclass of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply - * return the first target type matching or {@literal null} if no conversion can be found. 
- * - * @param sourceType must not be {@literal null} - * @param requestedTargetType - * @return - */ - public Class getCustomWriteTarget(final Class sourceType, final Class requestedTargetType) { - - if (requestedTargetType == null) { - return getCustomWriteTarget(sourceType); - } - - return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customWriteTargetTypes, - new Producer() { - - @Override - public Class get() { - return getCustomTarget(sourceType, requestedTargetType, writingPairs); - } - }); - } - - /** - * Returns whether we have a custom conversion registered to readTargetWriteLocl into a Mongo native type. The - * returned type might be a subclass of the given expected type though. - * - * @param sourceType must not be {@literal null} - * @return - */ - public boolean hasCustomWriteTarget(Class sourceType) { - return hasCustomWriteTarget(sourceType, null); - } - - /** - * Returns whether we have a custom conversion registered to readTargetWriteLocl an object of the given source type - * into an object of the given Mongo native target type. - * - * @param sourceType must not be {@literal null}. - * @param requestedTargetType - * @return - */ - public boolean hasCustomWriteTarget(Class sourceType, Class requestedTargetType) { - return getCustomWriteTarget(sourceType, requestedTargetType) != null; - } - - /** - * Returns whether we have a custom conversion registered to readTargetReadLock the given source into the given target - * type. - * - * @param sourceType must not be {@literal null} - * @param requestedTargetType must not be {@literal null} - * @return - */ - public boolean hasCustomReadTarget(Class sourceType, Class requestedTargetType) { - return getCustomReadTarget(sourceType, requestedTargetType) != null; - } - - /** - * Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the - * returned {@link Class} could be an assignable type to the given {@code requestedTargetType}. - * - * @param sourceType must not be {@literal null}. - * @param requestedTargetType can be {@literal null}. - * @return - */ - private Class getCustomReadTarget(final Class sourceType, final Class requestedTargetType) { - - if (requestedTargetType == null) { - return null; - } - - return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customReadTargetTypes, - new Producer() { - - @Override - public Class get() { - return getCustomTarget(sourceType, requestedTargetType, readingPairs); - } - }); - } - - /** - * Inspects the given {@link ConvertiblePair}s for ones that have a source compatible type as source. Additionally - * checks assignability of the target type if one is given. - * - * @param sourceType must not be {@literal null}. - * @param requestedTargetType can be {@literal null}. - * @param pairs must not be {@literal null}. 
- * @return - */ - private static Class getCustomTarget(Class sourceType, Class requestedTargetType, - Collection pairs) { - - Assert.notNull(sourceType); - Assert.notNull(pairs); - - if (requestedTargetType != null && pairs.contains(new ConvertiblePair(sourceType, requestedTargetType))) { - return requestedTargetType; - } - - for (ConvertiblePair typePair : pairs) { - if (typePair.getSourceType().isAssignableFrom(sourceType)) { - Class targetType = typePair.getTargetType(); - if (requestedTargetType == null || targetType.isAssignableFrom(requestedTargetType)) { - return targetType; - } - } - } - - return null; - } - - /** - * Will try to find a value for the given key in the given cache or produce one using the given {@link Producer} and - * store it in the cache. - * - * @param key the key to lookup a potentially existing value, must not be {@literal null}. - * @param cache the cache to find the value in, must not be {@literal null}. - * @param producer the {@link Producer} to create values to cache, must not be {@literal null}. - * @return - */ - private static Class getOrCreateAndCache(T key, Map>> cache, Producer producer) { - - CacheValue> cacheValue = cache.get(key); - - if (cacheValue != null) { - return cacheValue.getValue(); - } - - Class type = producer.get(); - cache.put(key, CacheValue.> ofNullable(type)); - - return type; - } - - private interface Producer { - - Class get(); - } - - @WritingConverter - private enum CustomToStringConverter implements GenericConverter { - - INSTANCE; - - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.GenericConverter#getConvertibleTypes() - */ - public Set getConvertibleTypes() { - - ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class); - ConvertiblePair booleanToString = new ConvertiblePair(Character.class, String.class); - - return new HashSet(Arrays.asList(localeToString, booleanToString)); - } - - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.GenericConverter#convert(java.lang.Object, org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor) - */ - public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) { - return source.toString(); - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DBObjectAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DBObjectAccessor.java deleted file mode 100644 index a3d9cc9b1c..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DBObjectAccessor.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright 2013-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.convert; - -import java.util.Arrays; -import java.util.Iterator; -import java.util.Map; - -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.util.Assert; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - -/** - * Wrapper value object for a {@link BasicDBObject} to be able to access raw values by {@link MongoPersistentProperty} - * references. The accessors will transparently resolve nested document values that a {@link MongoPersistentProperty} - * might refer to through a path expression in field names. - * - * @author Oliver Gierke - */ -class DBObjectAccessor { - - private final BasicDBObject dbObject; - - /** - * Creates a new {@link DBObjectAccessor} for the given {@link DBObject}. - * - * @param dbObject must be a {@link BasicDBObject} effectively, must not be {@literal null}. - */ - public DBObjectAccessor(DBObject dbObject) { - - Assert.notNull(dbObject, "DBObject must not be null!"); - Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!"); - - this.dbObject = (BasicDBObject) dbObject; - } - - /** - * Puts the given value into the backing {@link DBObject} based on the coordinates defined through the given - * {@link MongoPersistentProperty}. By default this will be the plain field name. But field names might also consist - * of path traversals so we might need to create intermediate {@link BasicDBObject}s. - * - * @param prop must not be {@literal null}. - * @param value - */ - public void put(MongoPersistentProperty prop, Object value) { - - Assert.notNull(prop, "MongoPersistentProperty must not be null!"); - String fieldName = prop.getFieldName(); - - if (!fieldName.contains(".")) { - dbObject.put(fieldName, value); - return; - } - - Iterator parts = Arrays.asList(fieldName.split("\\.")).iterator(); - DBObject dbObject = this.dbObject; - - while (parts.hasNext()) { - - String part = parts.next(); - - if (parts.hasNext()) { - dbObject = getOrCreateNestedDbObject(part, dbObject); - } else { - dbObject.put(part, value); - } - } - } - - /** - * Returns the value the given {@link MongoPersistentProperty} refers to. By default this will be a direct field but - * the method will also transparently resolve nested values the {@link MongoPersistentProperty} might refer to through - * a path expression in the field name metadata. - * - * @param property must not be {@literal null}. - * @return - */ - public Object get(MongoPersistentProperty property) { - - String fieldName = property.getFieldName(); - - if (!fieldName.contains(".")) { - return this.dbObject.get(fieldName); - } - - Iterator parts = Arrays.asList(fieldName.split("\\.")).iterator(); - Map source = this.dbObject; - Object result = null; - - while (source != null && parts.hasNext()) { - - result = source.get(parts.next()); - - if (parts.hasNext()) { - source = getAsMap(result); - } - } - - return result; - } - - /** - * Returns the given source object as map, i.e. {@link BasicDBObject}s and maps as is or {@literal null} otherwise. - * - * @param source can be {@literal null}. 
- * @return - */ - @SuppressWarnings("unchecked") - private static Map getAsMap(Object source) { - - if (source instanceof BasicDBObject) { - return (BasicDBObject) source; - } - - if (source instanceof Map) { - return (Map) source; - } - - return null; - } - - /** - * Returns the {@link DBObject} which either already exists in the given source under the given key, or creates a new - * nested one, registers it with the source and returns it. - * - * @param key must not be {@literal null} or empty. - * @param source must not be {@literal null}. - * @return - */ - private static DBObject getOrCreateNestedDbObject(String key, DBObject source) { - - Object existing = source.get(key); - - if (existing instanceof BasicDBObject) { - return (BasicDBObject) existing; - } - - DBObject nested = new BasicDBObject(); - source.put(key, nested); - - return nested; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java index a5612d8ab8..40afbb8c10 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,13 +16,15 @@ package org.springframework.data.mongodb.core.convert; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; import com.mongodb.DBRef; /** * @author Oliver Gierke + * @author Mark Paluch */ public interface DbRefProxyHandler { - Object populateId(MongoPersistentProperty property, DBRef source, Object proxy); + Object populateId(MongoPersistentProperty property, @Nullable DBRef source, Object proxy); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java index a43b742c6d..0235694030 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,80 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.List; + +import org.bson.Document; +import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; -import com.mongodb.DBObject; import com.mongodb.DBRef; /** * Used to resolve associations annotated with {@link org.springframework.data.mongodb.core.mapping.DBRef}. - * + * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch * @since 1.4 */ -public interface DbRefResolver { +public interface DbRefResolver extends ReferenceResolver { /** * Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method * might return a proxy object for the {@link DBRef} or resolve it immediately. In both cases the * {@link DbRefResolverCallback} will be used to obtain the actual backing object. - * + * * @param property will never be {@literal null}. * @param dbref the {@link DBRef} to resolve. * @param callback will never be {@literal null}. - * @return + * @return can be {@literal null}. */ - Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + @Nullable + Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler proxyHandler); /** * Creates a {@link DBRef} instance for the given {@link org.springframework.data.mongodb.core.mapping.DBRef} * annotation, {@link MongoPersistentEntity} and id. - * + * * @param annotation will never be {@literal null}. * @param entity will never be {@literal null}. * @param id will never be {@literal null}. - * @return + * @return new instance of {@link DBRef}. */ - DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, MongoPersistentEntity entity, - Object id); + default DBRef createDbRef(@Nullable org.springframework.data.mongodb.core.mapping.DBRef annotation, + MongoPersistentEntity entity, Object id) { + + if (annotation != null && StringUtils.hasText(annotation.db())) { + return new DBRef(annotation.db(), entity.getCollection(), id); + } + + return new DBRef(entity.getCollection(), id); + } /** * Actually loads the {@link DBRef} from the datasource. - * + * * @param dbRef must not be {@literal null}. - * @return + * @return can be {@literal null}. * @since 1.7 */ - DBObject fetch(DBRef dbRef); + @Nullable + Document fetch(DBRef dbRef); + + /** + * Loads a given {@link List} of {@link DBRef}s from the datasource in one batch. The resulting {@link List} of + * {@link Document} will reflect the ordering of the {@link DBRef} passed in.
                    + * The {@link DBRef} elements in the list must not reference different collections. + * + * @param dbRefs must not be {@literal null}. + * @return never {@literal null}. + * @throws InvalidDataAccessApiUsageException in case not all {@link DBRef} target the same collection. + * @since 1.10 + */ + List bulkFetch(List dbRefs); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java index a33264f91e..bf6b882375 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,7 @@ /** * Callback interface to be used in conjunction with {@link DbRefResolver}. - * + * * @author Thomas Darimont * @author Oliver Gierke */ @@ -27,7 +27,7 @@ public interface DbRefResolverCallback { /** * Resolve the final object for the given {@link MongoPersistentProperty}. - * + * * @param property will never be {@literal null}. * @return */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java index 13759ed888..22b1ce7981 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,62 +15,60 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.function.Function; + +import org.bson.Document; + import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator; -import org.springframework.data.mapping.model.SpELContext; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; import com.mongodb.DBRef; /** * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ class DefaultDbRefProxyHandler implements DbRefProxyHandler { - private final SpELContext spELContext; private final MappingContext, MongoPersistentProperty> mappingContext; private final ValueResolver resolver; + private final Function evaluatorFactory; /** - * @param spELContext must not be {@literal null}. - * @param conversionService must not be {@literal null}. * @param mappingContext must not be {@literal null}. + * @param resolver must not be {@literal null}. */ - public DefaultDbRefProxyHandler(SpELContext spELContext, - MappingContext, MongoPersistentProperty> mappingContext, ValueResolver resolver) { + public DefaultDbRefProxyHandler(MappingContext, MongoPersistentProperty> mappingContext, + ValueResolver resolver, Function evaluatorFactory) { - this.spELContext = spELContext; this.mappingContext = mappingContext; this.resolver = resolver; + this.evaluatorFactory = evaluatorFactory; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefProxyHandler#populateId(com.mongodb.DBRef, java.lang.Object) - */ @Override - public Object populateId(MongoPersistentProperty property, DBRef source, Object proxy) { + public Object populateId(MongoPersistentProperty property, @Nullable DBRef source, Object proxy) { if (source == null) { return proxy; } - MongoPersistentEntity entity = mappingContext.getPersistentEntity(property); - MongoPersistentProperty idProperty = entity.getIdProperty(); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(property); + MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); if (idProperty.usePropertyAccess()) { return proxy; } - SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(proxy, spELContext); + ValueExpressionEvaluator evaluator = evaluatorFactory.apply(proxy); PersistentPropertyAccessor accessor = entity.getPropertyAccessor(proxy); - DBObject object = new BasicDBObject(idProperty.getFieldName(), source.getId()); + Document object = new Document(idProperty.getFieldName(), source.getId()); ObjectPath objectPath = ObjectPath.ROOT.push(proxy, entity, null); accessor.setProperty(idProperty, resolver.getValueInternal(idProperty, object, evaluator, objectPath)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java index 7e543e857b..de66c3ea94 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,73 +15,68 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.springframework.util.ReflectionUtils.*; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.lang.reflect.Method; - -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.cglib.proxy.Callback; -import org.springframework.cglib.proxy.Enhancer; -import org.springframework.cglib.proxy.Factory; -import org.springframework.cglib.proxy.MethodProxy; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.LazyLoadingException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.objenesis.ObjenesisStd; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import org.springframework.util.ReflectionUtils; +import org.springframework.util.StringUtils; -import com.mongodb.DBObject; import com.mongodb.DBRef; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; /** * A {@link DbRefResolver} that resolves {@link org.springframework.data.mongodb.core.mapping.DBRef}s by delegating to a * {@link DbRefResolverCallback} than is able to generate lazy loading proxies. 
- * + * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch * @since 1.4 */ -public class DefaultDbRefResolver implements DbRefResolver { +public class DefaultDbRefResolver extends DefaultReferenceResolver implements DbRefResolver, ReferenceResolver { + + private static final Log LOGGER = LogFactory.getLog(DefaultDbRefResolver.class); - private final MongoDbFactory mongoDbFactory; - private final PersistenceExceptionTranslator exceptionTranslator; - private final ObjenesisStd objenesis; + private final MongoDatabaseFactory mongoDbFactory; /** - * Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}. - * + * Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}. + * * @param mongoDbFactory must not be {@literal null}. */ - public DefaultDbRefResolver(MongoDbFactory mongoDbFactory) { + public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) { + + super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory), mongoDbFactory.getExceptionTranslator()); - Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null"); this.mongoDbFactory = mongoDbFactory; - this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); - this.objenesis = new ObjenesisStd(true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#resolveDbRef(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.convert.DbRefResolverCallback) - */ @Override - public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler handler) { - Assert.notNull(property, "Property must not be null!"); - Assert.notNull(callback, "Callback must not be null!"); + Assert.notNull(property, "Property must not be null"); + Assert.notNull(callback, "Callback must not be null"); + Assert.notNull(handler, "Handler must not be null"); if (isLazyDbRef(property)) { return createLazyLoadingProxy(property, dbref, callback, handler); @@ -90,80 +85,75 @@ public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefR return callback.resolve(property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#created(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.Object) - */ @Override - public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, - MongoPersistentEntity entity, Object id) { - return new DBRef(entity.getCollection(), id); + public Document fetch(DBRef dbRef) { + return getReferenceLoader().fetchOne( + DocumentReferenceQuery.forSingleDocument(Filters.eq(FieldName.ID.name(), dbRef.getId())), + ReferenceCollection.fromDBRef(dbRef)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#fetch(com.mongodb.DBRef) - */ @Override - public DBObject fetch(DBRef dbRef) { - return ReflectiveDBRefResolver.fetch(mongoDbFactory, dbRef); - } + public List bulkFetch(List refs) { - /** - * Creates a proxy for the given {@link MongoPersistentProperty} using the given {@link DbRefResolverCallback} to - * eventually resolve the value of the property. 
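For orientation, the reworked fetch(DBRef) above delegates to the ReferenceLoader with a single-document _id query. A minimal sketch of the equivalent plain-driver lookup, assuming a MongoDatabaseFactory named factory (illustrative only, not the actual implementation):

    import org.bson.Document;
    import org.springframework.data.mongodb.MongoDatabaseFactory;
    import com.mongodb.DBRef;
    import com.mongodb.client.MongoCollection;
    import com.mongodb.client.model.Filters;

    class DbRefFetchSketch {

        static Document fetchOne(MongoDatabaseFactory factory, DBRef ref) {
            MongoCollection<Document> collection = factory.getMongoDatabase()
                    .getCollection(ref.getCollectionName(), Document.class); // resolve the referenced collection
            return collection.find(Filters.eq("_id", ref.getId())).first(); // single _id lookup, null if absent
        }
    }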
- * - * @param property must not be {@literal null}. - * @param dbref can be {@literal null}. - * @param callback must not be {@literal null}. - * @return - */ - private Object createLazyLoadingProxy(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, - DbRefProxyHandler handler) { + Assert.notNull(mongoDbFactory, "Factory must not be null"); + Assert.notNull(refs, "DBRef to fetch must not be null"); - Class propertyType = property.getType(); - LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback); + if (refs.isEmpty()) { + return Collections.emptyList(); + } - if (!propertyType.isInterface()) { + String collection = refs.iterator().next().getCollectionName(); + List ids = new ArrayList<>(refs.size()); - Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType)); - factory.setCallbacks(new Callback[] { interceptor }); + for (DBRef ref : refs) { - return handler.populateId(property, dbref, factory); + if (!collection.equals(ref.getCollectionName())) { + throw new InvalidDataAccessApiUsageException( + "DBRefs must all target the same collection for bulk fetch operation"); + } + + ids.add(ref.getId()); } - ProxyFactory proxyFactory = new ProxyFactory(); + DBRef databaseSource = refs.iterator().next(); + MongoCollection mongoCollection = getCollection(databaseSource); - for (Class type : propertyType.getInterfaces()) { - proxyFactory.addInterface(type); + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Bulk fetching DBRefs %s from %s.%s", ids, + StringUtils.hasText(databaseSource.getDatabaseName()) ? databaseSource.getDatabaseName() + : mongoCollection.getNamespace().getDatabaseName(), + databaseSource.getCollectionName())); } - proxyFactory.addInterface(LazyLoadingProxy.class); - proxyFactory.addInterface(propertyType); - proxyFactory.addAdvice(interceptor); + List result = mongoCollection // + .find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) // + .into(new ArrayList<>(ids.size())); - return handler.populateId(property, dbref, proxyFactory.getProxy()); + return ids.stream() // + .flatMap(id -> documentWithId(id, result)) // + .collect(Collectors.toList()); } /** - * Returns the CGLib enhanced type for the given source type. - * - * @param type + * Creates a proxy for the given {@link MongoPersistentProperty} using the given {@link DbRefResolverCallback} to + * eventually resolve the value of the property. + * + * @param property must not be {@literal null}. + * @param dbref can be {@literal null}. + * @param callback must not be {@literal null}. * @return */ - private Class getEnhancedTypeFor(Class type) { + private Object createLazyLoadingProxy(MongoPersistentProperty property, @Nullable DBRef dbref, + DbRefResolverCallback callback, DbRefProxyHandler handler) { - Enhancer enhancer = new Enhancer(); - enhancer.setSuperclass(type); - enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class); - enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class }); + Object lazyLoadingProxy = getProxyFactory().createLazyLoadingProxy(property, callback, dbref); - return enhancer.createClass(); + return handler.populateId(property, dbref, lazyLoadingProxy); } /** * Returns whether the property shall be resolved lazily. - * + * * @param property must not be {@literal null}. 
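Because the $in query above returns documents in unspecified order, bulkFetch restores the order of the incoming refs afterwards (see documentWithId below). A hedged usage sketch, where factory, firstId and secondId are assumed to exist:

    DbRefResolver resolver = new DefaultDbRefResolver(factory); // factory: a MongoDatabaseFactory
    List<Document> persons = resolver.bulkFetch(List.of( // one $in round trip instead of n queries
            new DBRef("persons", firstId),
            new DBRef("persons", secondId))); // refs must all target the same collection
    // persons.get(0) corresponds to firstId, persons.get(1) to secondId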
* @return */ @@ -172,226 +162,35 @@ private boolean isLazyDbRef(MongoPersistentProperty property) { } /** - * A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a - * {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is - * guaranteed to be performed only once. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @author Christoph Strobl + * Returns document with the given identifier from the given list of {@link Document}s. + * + * @param identifier + * @param documents + * @return */ - static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, - Serializable { - - private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD; - - private final DbRefResolverCallback callback; - private final MongoPersistentProperty property; - private final PersistenceExceptionTranslator exceptionTranslator; - - private volatile boolean resolved; - private Object result; - private DBRef dbref; - - static { - try { - INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); - TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); - FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize"); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - /** - * Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty}, - * {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}. - * - * @param property must not be {@literal null}. - * @param dbref can be {@literal null}. - * @param callback must not be {@literal null}. - */ - public LazyLoadingInterceptor(MongoPersistentProperty property, DBRef dbref, - PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) { - - Assert.notNull(property, "Property must not be null!"); - Assert.notNull(exceptionTranslator, "Exception translator must not be null!"); - Assert.notNull(callback, "Callback must not be null!"); - - this.dbref = dbref; - this.callback = callback; - this.exceptionTranslator = exceptionTranslator; - this.property = property; - } - - /* - * (non-Javadoc) - * @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation) - */ - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); - } - - /* - * (non-Javadoc) - * @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy) - */ - @Override - public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable { - - if (INITIALIZE_METHOD.equals(method)) { - return ensureResolved(); - } - - if (TO_DBREF_METHOD.equals(method)) { - return this.dbref; - } - - if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) { - - if (ReflectionUtils.isToStringMethod(method)) { - return proxyToString(proxy); - } - - if (ReflectionUtils.isEqualsMethod(method)) { - return proxyEquals(proxy, args[0]); - } + private static Stream documentWithId(Object identifier, Collection documents) { - if (ReflectionUtils.isHashCodeMethod(method)) { - return proxyHashCode(proxy); - } - - // DATAMONGO-1076 - finalize methods should not trigger proxy initialization - if (FINALIZE_METHOD.equals(method)) { - return 
null; - } - } - - Object target = ensureResolved(); - - if (target == null) { - return null; - } - - return method.invoke(target, args); - } - - /** - * Returns a to string representation for the given {@code proxy}. - * - * @param proxy - * @return - */ - private String proxyToString(Object proxy) { - - StringBuilder description = new StringBuilder(); - if (dbref != null) { - description.append(dbref.getCollectionName()); - description.append(":"); - description.append(dbref.getId()); - } else { - description.append(System.identityHashCode(proxy)); - } - description.append("$").append(LazyLoadingProxy.class.getSimpleName()); - - return description.toString(); - } - - /** - * Returns the hashcode for the given {@code proxy}. - * - * @param proxy - * @return - */ - private int proxyHashCode(Object proxy) { - return proxyToString(proxy).hashCode(); - } - - /** - * Performs an equality check for the given {@code proxy}. - * - * @param proxy - * @param that - * @return - */ - private boolean proxyEquals(Object proxy, Object that) { - - if (!(that instanceof LazyLoadingProxy)) { - return false; - } - - if (that == proxy) { - return true; - } - - return proxyToString(proxy).equals(that.toString()); - } - - /** - * Will trigger the resolution if the proxy is not resolved already or return a previously resolved result. - * - * @return - */ - private Object ensureResolved() { - - if (!resolved) { - this.result = resolve(); - this.resolved = true; - } - - return this.result; - } - - /** - * Callback method for serialization. - * - * @param out - * @throws IOException - */ - private void writeObject(ObjectOutputStream out) throws IOException { - - ensureResolved(); - out.writeObject(this.result); - } - - /** - * Callback method for deserialization. - * - * @param in - * @throws IOException - */ - private void readObject(ObjectInputStream in) throws IOException { - - try { - this.resolved = true; - this.result = in.readObject(); - } catch (ClassNotFoundException e) { - throw new LazyLoadingException("Could not deserialize result", e); - } - } - - /** - * Resolves the proxy into its backing object. - * - * @return - */ - private synchronized Object resolve() { - - if (!resolved) { - - try { + return documents.stream() // + .filter(it -> it.get(BasicMongoPersistentProperty.ID_FIELD_NAME).equals(identifier)) // + .limit(1); + } - return callback.resolve(property); + /** + * Customization hook for obtaining the {@link MongoCollection} for a given {@link DBRef}. + * + * @param dbref must not be {@literal null}. + * @return the {@link MongoCollection} the given {@link DBRef} points to. 
+ * @since 2.1 + */ + protected MongoCollection getCollection(DBRef dbref) { - } catch (RuntimeException ex) { + return MongoDatabaseUtils.getDatabase(dbref.getDatabaseName(), mongoDbFactory) + .getCollection(dbref.getCollectionName(), Document.class); + } - DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex); - throw new LazyLoadingException("Unable to lazily resolve DBRef!", translatedException); - } - } + protected MongoCollection getCollection(ReferenceCollection context) { - return result; - } + return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), + Document.class); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java index d4785f1906..82e5c9d0eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,35 @@ */ package org.springframework.data.mongodb.core.convert; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import com.mongodb.DBObject; - /** * Default implementation of {@link DbRefResolverCallback}. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ class DefaultDbRefResolverCallback implements DbRefResolverCallback { - private final DBObject surroundingObject; + private final Bson surroundingObject; private final ObjectPath path; private final ValueResolver resolver; - private final SpELExpressionEvaluator evaluator; + private final ValueExpressionEvaluator evaluator; /** - * Creates a new {@link DefaultDbRefResolverCallback} using the given {@link DBObject}, {@link ObjectPath}, - * {@link ValueResolver} and {@link SpELExpressionEvaluator}. - * + * Creates a new {@link DefaultDbRefResolverCallback} using the given {@link Document}, {@link ObjectPath}, + * {@link ValueResolver} and {@link ValueExpressionEvaluator}. + * * @param surroundingObject must not be {@literal null}. * @param path must not be {@literal null}. * @param evaluator must not be {@literal null}. * @param resolver must not be {@literal null}. 
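Stepping back to getCollection(DBRef), the customization hook introduced above: a sketch of a possible override that routes refs carrying no database name to a dedicated database (the "archive" routing is purely hypothetical):

    class RoutingDbRefResolver extends DefaultDbRefResolver {

        RoutingDbRefResolver(MongoDatabaseFactory factory) {
            super(factory);
        }

        @Override
        protected MongoCollection<Document> getCollection(DBRef dbref) {

            if (dbref.getDatabaseName() != null) { // already fully qualified
                return super.getCollection(dbref);
            }
            return super.getCollection(new DBRef("archive", dbref.getCollectionName(), dbref.getId()));
        }
    }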
*/ - public DefaultDbRefResolverCallback(DBObject surroundingObject, ObjectPath path, SpELExpressionEvaluator evaluator, + DefaultDbRefResolverCallback(Bson surroundingObject, ObjectPath path, ValueExpressionEvaluator evaluator, ValueResolver resolver) { this.surroundingObject = surroundingObject; @@ -50,10 +52,6 @@ public DefaultDbRefResolverCallback(DBObject surroundingObject, ObjectPath path, this.evaluator = evaluator; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolverCallback#resolve(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty) - */ @Override public Object resolve(MongoPersistentProperty property) { return resolver.getValueInternal(property, surroundingObject, evaluator, path); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java index 15e97380a1..2c2b52afd5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,62 +15,105 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.UnaryOperator; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.convert.DefaultTypeMapper; import org.springframework.data.convert.SimpleTypeInformationMapper; import org.springframework.data.convert.TypeAliasAccessor; import org.springframework.data.convert.TypeInformationMapper; +import org.springframework.data.mapping.Alias; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.util.ClassTypeInformation; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; import com.mongodb.DBObject; /** * Default implementation of {@link MongoTypeMapper} allowing configuration of the key to lookup and store type - * information in {@link DBObject}. The key defaults to {@link #DEFAULT_TYPE_KEY}. Actual type-to-{@link String} - * conversion and back is done in {@link #getTypeString(TypeInformation)} or {@link #getTypeInformation(String)} - * respectively. - * + * information in {@link Document}. The key defaults to {@link #DEFAULT_TYPE_KEY}. 
+ * * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch */ -public class DefaultMongoTypeMapper extends DefaultTypeMapper implements MongoTypeMapper { +public class DefaultMongoTypeMapper extends DefaultTypeMapper implements MongoTypeMapper { public static final String DEFAULT_TYPE_KEY = "_class"; @SuppressWarnings("rawtypes") // - private static final TypeInformation LIST_TYPE_INFO = ClassTypeInformation.from(List.class); + private static final TypeInformation LIST_TYPE_INFO = TypeInformation.of(List.class); @SuppressWarnings("rawtypes") // - private static final TypeInformation MAP_TYPE_INFO = ClassTypeInformation.from(Map.class); + private static final TypeInformation MAP_TYPE_INFO = TypeInformation.MAP; - private final TypeAliasAccessor accessor; - private final String typeKey; + private final TypeAliasAccessor accessor; + private final @Nullable String typeKey; + private UnaryOperator> writeTarget = UnaryOperator.identity(); + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code _class}. + */ public DefaultMongoTypeMapper() { this(DEFAULT_TYPE_KEY); } - public DefaultMongoTypeMapper(String typeKey) { - this(typeKey, Arrays.asList(new SimpleTypeInformationMapper())); + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + */ + public DefaultMongoTypeMapper(@Nullable String typeKey) { + this(typeKey, Collections.singletonList(new SimpleTypeInformationMapper())); } - public DefaultMongoTypeMapper(String typeKey, MappingContext, ?> mappingContext) { - this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext, - Arrays.asList(new SimpleTypeInformationMapper())); + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappingContext the mapping context. + */ + public DefaultMongoTypeMapper(@Nullable String typeKey, + MappingContext, ?> mappingContext) { + this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext, + Collections.singletonList(new SimpleTypeInformationMapper())); } - public DefaultMongoTypeMapper(String typeKey, List mappers) { - this(typeKey, new DBObjectTypeAliasAccessor(typeKey), null, mappers); + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses + * {@link UnaryOperator} to apply {@link CustomConversions}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappingContext the mapping context to look up types using type hints. + * @see MappingMongoConverter#getWriteTarget(Class) + */ + public DefaultMongoTypeMapper(@Nullable String typeKey, + MappingContext, ?> mappingContext, UnaryOperator> writeTarget) { + this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext, + Collections.singletonList(new SimpleTypeInformationMapper())); + this.writeTarget = writeTarget; } - private DefaultMongoTypeMapper(String typeKey, TypeAliasAccessor accessor, + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses + * {@link TypeInformationMapper} to map type hints. + * + * @param typeKey name of the field to read and write type hints. 
Can be {@literal null} to disable type hints. + * @param mappers must not be {@literal null}. + */ + public DefaultMongoTypeMapper(@Nullable String typeKey, List mappers) { + this(typeKey, new DocumentTypeAliasAccessor(typeKey), null, mappers); + } + + private DefaultMongoTypeMapper(@Nullable String typeKey, TypeAliasAccessor accessor, MappingContext, ?> mappingContext, List mappers) { @@ -80,22 +123,14 @@ private DefaultMongoTypeMapper(String typeKey, TypeAliasAccessor acces this.accessor = accessor; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#isTypeKey(java.lang.String) - */ public boolean isTypeKey(String key) { - return typeKey == null ? false : typeKey.equals(key); + return typeKey != null && typeKey.equals(key); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#writeTypeRestrictions(java.util.Set) - */ @Override - public void writeTypeRestrictions(DBObject result, Set> restrictedTypes) { + public void writeTypeRestrictions(Document result, @Nullable Set> restrictedTypes) { - if (restrictedTypes == null || restrictedTypes.isEmpty()) { + if (ObjectUtils.isEmpty(restrictedTypes)) { return; } @@ -103,57 +138,64 @@ public void writeTypeRestrictions(DBObject result, Set> restrictedTypes for (Class restrictedType : restrictedTypes) { - Object typeAlias = getAliasFor(ClassTypeInformation.from(restrictedType)); + Alias typeAlias = getAliasFor(TypeInformation.of(restrictedType)); - if (typeAlias != null) { - restrictedMappedTypes.add(typeAlias); + if (!ObjectUtils.nullSafeEquals(Alias.NONE, typeAlias) && typeAlias.isPresent()) { + restrictedMappedTypes.add(typeAlias.getValue()); } } - accessor.writeTypeTo(result, new BasicDBObject("$in", restrictedMappedTypes)); + accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes)); } - /* (non-Javadoc) - * @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object) - */ @Override - protected TypeInformation getFallbackTypeFor(DBObject source) { + public Class getWriteTargetTypeFor(Class source) { + return writeTarget.apply(source); + } + + @Override + protected TypeInformation getFallbackTypeFor(Bson source) { return source instanceof BasicDBList ? LIST_TYPE_INFO : MAP_TYPE_INFO; } /** - * {@link TypeAliasAccessor} to store aliases in a {@link DBObject}. - * + * {@link TypeAliasAccessor} to store aliases in a {@link Document}. 
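Since typeKey is now nullable, type hints become optional. Two common configuration sketches, where converter stands for an already configured MappingMongoConverter:

    converter.setTypeMapper(new DefaultMongoTypeMapper(null));    // write no _class hint at all
    converter.setTypeMapper(new DefaultMongoTypeMapper("_type")); // or store the hint under a custom key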
+ * + * @author Oliver Gierke */ - public static final class DBObjectTypeAliasAccessor implements TypeAliasAccessor<DBObject> { + public static final class DocumentTypeAliasAccessor implements TypeAliasAccessor<Bson> { - private final String typeKey; + private final @Nullable String typeKey; - public DBObjectTypeAliasAccessor(String typeKey) { + public DocumentTypeAliasAccessor(@Nullable String typeKey) { this.typeKey = typeKey; } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.TypeAliasAccessor#readAliasFrom(java.lang.Object) - */ - public Object readAliasFrom(DBObject source) { + @Override + public Alias readAliasFrom(Bson source) { + + if (source instanceof List) { + return Alias.NONE; + } - if (source instanceof BasicDBList) { - return null; + if (source instanceof Document document) { + return Alias.ofNullable(document.get(typeKey)); + } else if (source instanceof DBObject dbObject) { + return Alias.ofNullable(dbObject.get(typeKey)); } - return source.get(typeKey); + throw new IllegalArgumentException("Cannot read alias from " + source.getClass()); } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.TypeAliasAccessor#writeTypeTo(java.lang.Object, java.lang.Object) - */ - public void writeTypeTo(DBObject sink, Object alias) { + public void writeTypeTo(Bson sink, Object alias) { + if (typeKey != null) { - sink.put(typeKey, alias); + + if (sink instanceof Document document) { + document.put(typeKey, alias); + } else if (sink instanceof DBObject dbObject) { + dbObject.put(typeKey, alias); + } } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java new file mode 100644 index 0000000000..a7b3d6f21f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -0,0 +1,115 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*; + +import java.util.Collections; + +import org.bson.Document; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.util.Assert; + +/** + * {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity + * associations expressed via a {@link MongoPersistentProperty persistent property}. Creates {@link LazyLoadingProxy + * proxies} for associations that should be lazily loaded.
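A minimal mapping that ends up in the lazy branch of this resolver; Book and Publisher are invented for illustration:

    import org.springframework.data.annotation.Id;
    import org.springframework.data.mongodb.core.mapping.DocumentReference;

    class Publisher {
        @Id String id;
    }

    class Book {
        @Id String id;
        @DocumentReference(lazy = true) // resolved through a LazyLoadingProxy on first access
        Publisher publisher;
    }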
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Anton Buzdalkin + * @since 3.3 + */ +public class DefaultReferenceResolver implements ReferenceResolver { + + private final ReferenceLoader referenceLoader; + private final LazyLoadingProxyFactory proxyFactory; + + private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx); + private final LookupFunction singleValueLookupFunction = (filter, ctx) -> { + Document target = getReferenceLoader().fetchOne(filter, ctx); + return target == null ? Collections.emptyList() : Collections.singleton(target); + }; + + /** + * Create a new instance of {@link DefaultReferenceResolver}. + * + * @param referenceLoader must not be {@literal null}. + * @param exceptionTranslator must not be {@literal null}. + */ + public DefaultReferenceResolver(ReferenceLoader referenceLoader, PersistenceExceptionTranslator exceptionTranslator) { + + Assert.notNull(referenceLoader, "ReferenceLoader must not be null"); + Assert.notNull(exceptionTranslator, "ExceptionTranslator must not be null"); + + this.referenceLoader = referenceLoader; + this.proxyFactory = new LazyLoadingProxyFactory(exceptionTranslator); + } + + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + + LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction + : singleValueLookupFunction; + + if (isLazyReference(property)) { + return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader); + } + + return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader); + } + + /** + * Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily. + * + * @param property + * @return return {@literal true} if the defined association is lazy. + * @see DBRef#lazy() + * @see DocumentReference#lazy() + */ + protected boolean isLazyReference(MongoPersistentProperty property) { + + if (property.isDocumentReference()) { + return property.getDocumentReference().lazy(); + } + + return property.getDBRef() != null && property.getDBRef().lazy(); + } + + /** + * The {@link ReferenceLoader} executing the lookup. + * + * @return never {@literal null}. + */ + protected ReferenceLoader getReferenceLoader() { + return referenceLoader; + } + + LazyLoadingProxyFactory getProxyFactory() { + return proxyFactory; + } + + private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) { + return proxyFactory.createLazyLoadingProxy(property, + it -> referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader), + source instanceof DocumentReferenceSource documentSource ? documentSource.getTargetSource() : source); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java new file mode 100644 index 0000000000..c795add9c8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java @@ -0,0 +1,180 @@ +/* + * Copyright 2013-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.Arrays; +import java.util.Iterator; +import java.util.Map; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.DBObject; + +/** + * Wrapper value object for a {@link Document} to be able to access raw values by {@link MongoPersistentProperty} + * references. The accessors will transparently resolve nested document values that a {@link MongoPersistentProperty} + * might refer to through a path expression in field names. + * + * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + */ +class DocumentAccessor { + + private final Bson document; + + /** + * Creates a new {@link DocumentAccessor} for the given {@link Document}. + * + * @param document must be a {@link Document} effectively, must not be {@literal null}. + */ + public DocumentAccessor(Bson document) { + + Assert.notNull(document, "Document must not be null"); + + if (!(document instanceof Document) && !(document instanceof DBObject)) { + Assert.isInstanceOf(Document.class, document, "Given Bson must be a Document or DBObject"); + } + + this.document = document; + } + + /** + * @return the underlying {@link Bson document}. + * @since 2.1 + */ + Bson getDocument() { + return this.document; + } + + /** + * Copies all of the mappings from the given {@link Document} to the underlying target {@link Document}. These + * mappings will replace any mappings that the target document had for any of the keys currently in the specified map. + * + * @param source + */ + public void putAll(Document source) { + + Map target = BsonUtils.asMap(document); + + target.putAll(source); + } + + /** + * Puts the given value into the backing {@link Document} based on the coordinates defined through the given + * {@link MongoPersistentProperty}. By default this will be the plain field name. But field names might also consist + * of path traversals so we might need to create intermediate {@link Document}s. + * + * @param prop must not be {@literal null}. + * @param value can be {@literal null}. 
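To make the put(...) path traversal concrete: writing through a property whose mapped field name contains a dot materialises the intermediate documents on demand. In this sketch, cityProperty is an assumed MongoPersistentProperty mapped to the field name "address.city":

    DocumentAccessor accessor = new DocumentAccessor(new Document());
    accessor.put(cityProperty, "Zurich");
    // the backing document now reads { "address" : { "city" : "Zurich" } }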
+ */ + public void put(MongoPersistentProperty prop, @Nullable Object value) { + + Assert.notNull(prop, "MongoPersistentProperty must not be null"); + + if (value == null && !prop.writeNullValues()) { + return; + } + + Iterator parts = Arrays.asList(prop.getMongoField().getName().parts()).iterator(); + Bson document = this.document; + + while (parts.hasNext()) { + + String part = parts.next(); + + if (parts.hasNext()) { + document = getOrCreateNestedDocument(part, document); + } else { + BsonUtils.addToMap(document, part, value); + } + } + } + + /** + * Returns the value the given {@link MongoPersistentProperty} refers to. By default this will be a direct field but + * the method will also transparently resolve nested values the {@link MongoPersistentProperty} might refer to through + * a path expression in the field name metadata. + * + * @param property must not be {@literal null}. + * @return can be {@literal null}. + */ + @Nullable + public Object get(MongoPersistentProperty property) { + return BsonUtils.resolveValue(document, getFieldName(property)); + } + + /** + * Returns the raw identifier for the given {@link MongoPersistentEntity} or the value of the default identifier + * field. + * + * @param entity must not be {@literal null}. + * @return + */ + @Nullable + public Object getRawId(MongoPersistentEntity entity) { + return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.get(document, FieldName.ID.name()); + } + + /** + * Returns whether the underlying {@link Document} has a value ({@literal null} or non-{@literal null}) for the given + * {@link MongoPersistentProperty}. + * + * @param property must not be {@literal null}. + * @return {@literal true} if no non {@literal null} value present. + */ + @SuppressWarnings("unchecked") + public boolean hasValue(MongoPersistentProperty property) { + + Assert.notNull(property, "Property must not be null"); + + return BsonUtils.hasValue(document, getFieldName(property)); + } + + FieldName getFieldName(MongoPersistentProperty prop) { + return prop.getMongoField().getName(); + } + + /** + * Returns the {@link Document} which either already exists in the given source under the given key, or creates a new + * nested one, registers it with the source and returns it. + * + * @param key must not be {@literal null} or empty. + * @param source must not be {@literal null}. + * @return + */ + private static Document getOrCreateNestedDocument(String key, Bson source) { + + Object existing = BsonUtils.asMap(source).get(key); + + if (existing instanceof Document document) { + return document; + } + + Document nested = new Document(); + BsonUtils.addToMap(source, key, nested); + + return nested; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java new file mode 100644 index 0000000000..8429584a6f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -0,0 +1,263 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.WeakHashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.springframework.core.convert.ConversionService; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Reference; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPathAccessor; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; + +/** + * Internal API to construct {@link DocumentPointer} for a given property. Considers {@link LazyLoadingProxy}, + * registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter}, + * simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query. + * + * @author Christoph Strobl + * @since 3.3 + */ +class DocumentPointerFactory { + + private final ConversionService conversionService; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final Map cache; + + /** + * A {@link Pattern} matching quoted and unquoted variants (with/out whitespaces) of + * {'_id' : ?#{#target} }. + */ + private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt) + "['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id" + "?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces + "['\"]?\\?#\\{#target\\}['\"]?" 
+ // leading to the potentially quoted ?#{#target} expression + "\\s*}"); // some optional whitespaces and document close + + DocumentPointerFactory(ConversionService conversionService, + MappingContext, MongoPersistentProperty> mappingContext) { + + this.conversionService = conversionService; + this.mappingContext = mappingContext; + this.cache = new WeakHashMap<>(); + } + + DocumentPointer computePointer( + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentProperty property, Object value, Class typeHint) { + + if (value instanceof LazyLoadingProxy proxy) { + return proxy::getSource; + } + + if (conversionService.canConvert(typeHint, DocumentPointer.class)) { + return conversionService.convert(value, DocumentPointer.class); + } + + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(property.getAssociationTargetType()); + + if (usesDefaultLookup(property)) { + + MongoPersistentProperty idProperty = persistentEntity.getIdProperty(); + Object idValue = persistentEntity.getIdentifierAccessor(value).getIdentifier(); + + if (idProperty.hasExplicitWriteTarget() + && conversionService.canConvert(idValue.getClass(), idProperty.getFieldType())) { + return () -> conversionService.convert(idValue, idProperty.getFieldType()); + } + + if (idValue instanceof String stringValue && ObjectId.isValid((String) idValue)) { + return () -> new ObjectId(stringValue); + } + + return () -> idValue; + } + + MongoPersistentEntity valueEntity = mappingContext.getPersistentEntity(value.getClass()); + PersistentPropertyAccessor propertyAccessor; + if (valueEntity == null) { + propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value); + } else { + propertyAccessor = valueEntity.getPropertyPathAccessor(value); + } + + return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from) + .getDocumentPointer(mappingContext, persistentEntity, propertyAccessor); + } + + private boolean usesDefaultLookup(MongoPersistentProperty property) { + + if (property.isDocumentReference()) { + return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches(); + } + + Reference atReference = property.findAnnotation(Reference.class); + if (atReference != null) { + return true; + } + + throw new IllegalStateException(String.format("%s does not seem to define a Reference", property)); + } + + /** + * Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and + * inverting it. + * + * <pre class="code">
                    +	 * // source
+	 * { 'firstname' : ?#{fn}, 'lastname' : ?#{ln} }
                    +	 *
                    +	 * // target
                    +	 * { 'fn' : ..., 'ln' : ... }
+	 * </pre>
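A hedged mapping example for such a non-default lookup; Book and Author are invented types. On write, the declared lookup is inverted into the stored pointer { 'fn' : ..., 'ln' : ... }; on read it is expanded back into the query:

    class Book {
        @DocumentReference(lookup = "{ 'firstname' : ?#{fn}, 'lastname' : ?#{ln} }")
        Author author;
    }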
+ * + * The actual pointer is then computed via + * {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from + * the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions + * from the source. + */ + static class LinkageDocument { + + static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?<fieldName>[\\w\\d\\.\\-)]*)\\}"); + static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?<index>\\d*)_###"); + + private final String lookup; + private final org.bson.Document documentPointer; + private final Map placeholderMap; + private final boolean isSimpleTargetPointer; + + static LinkageDocument from(String lookup) { + return new LinkageDocument(lookup); + } + + private LinkageDocument(String lookup) { + + this.lookup = lookup; + this.placeholderMap = new LinkedHashMap<>(); + + int index = 0; + Matcher matcher = EXPRESSION_PATTERN.matcher(lookup); + String targetLookup = lookup; + + while (matcher.find()) { + + String expression = matcher.group(); + String fieldName = matcher.group("fieldName").replace("target.", ""); + + String placeholder = placeholder(index); + placeholderMap.put(placeholder, fieldName); + targetLookup = targetLookup.replace(expression, "'" + placeholder + "'"); + index++; + } + + this.documentPointer = org.bson.Document.parse(targetLookup); + this.isSimpleTargetPointer = placeholderMap.size() == 1 && placeholderMap.containsValue("target") + && lookup.contains("#target"); + } + + private String placeholder(int index) { + return "###_" + index + "_###"; + } + + private boolean isPlaceholder(String key) { + return PLACEHOLDER_PATTERN.matcher(key).matches(); + } + + DocumentPointer getDocumentPointer( + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, persistentEntity, + propertyAccessor); + } + + Object updatePlaceholders(org.bson.Document source, org.bson.Document target, + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + + for (Entry entry : source.entrySet()) { + + if (entry.getKey().startsWith("$")) { + throw new InvalidDataAccessApiUsageException(String.format( + "Cannot derive document pointer from lookup '%s' using query operator (%s); Please consider registering a custom converter", + lookup, entry.getKey())); + } + + if (entry.getValue() instanceof Document document) { + + MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey()); + if (persistentProperty != null && persistentProperty.isEntity()) { + + MongoPersistentEntity nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType()); + target.put(entry.getKey(), updatePlaceholders(document, new Document(), mappingContext, + nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty)))); + } else { + target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext, + persistentEntity, propertyAccessor)); + } + continue; + } + + if (placeholderMap.containsKey(entry.getValue())) { + + String attribute = placeholderMap.get(entry.getValue()); + if (attribute.contains(".")) { + attribute = attribute.substring(attribute.lastIndexOf('.') + 1); + } + + String fieldName = 
entry.getKey().equals(FieldName.ID.name()) ? "id" : entry.getKey(); + if (!fieldName.contains(".")) { + + Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName)); + target.put(attribute, targetValue); + continue; + } + + PersistentPropertyPathAccessor propertyPathAccessor = persistentEntity + .getPropertyPathAccessor(propertyAccessor.getBean()); + PersistentPropertyPath path = mappingContext + .getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation())); + Object targetValue = propertyPathAccessor.getProperty(path); + target.put(attribute, targetValue); + continue; + } + + target.put(entry.getKey(), entry.getValue()); + } + + if (target.size() == 1 && isSimpleTargetPointer) { + return target.values().iterator().next(); + } + + return target; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DBObjectPropertyAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java similarity index 56% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DBObjectPropertyAccessor.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java index 0a18d29b5d..ea5ce01b44 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DBObjectPropertyAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,47 +17,40 @@ import java.util.Map; +import org.bson.Document; import org.springframework.context.expression.MapAccessor; import org.springframework.expression.EvaluationContext; import org.springframework.expression.PropertyAccessor; import org.springframework.expression.TypedValue; - -import com.mongodb.DBObject; +import org.springframework.lang.Nullable; /** - * {@link PropertyAccessor} to allow entity based field access to {@link DBObject}s. - * + * {@link PropertyAccessor} to allow entity based field access to {@link Document}s. 
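The accessor plugs into a SpEL EvaluationContext so that expressions can address Document fields directly. A rough sketch of the wiring (it mirrors what the mapping layer does internally; the accessor is package-private, so this only works from within the package):

    import org.springframework.expression.spel.standard.SpelExpressionParser;
    import org.springframework.expression.spel.support.StandardEvaluationContext;

    StandardEvaluationContext context = new StandardEvaluationContext(new Document("name", "spring"));
    context.addPropertyAccessor(DocumentPropertyAccessor.INSTANCE);

    Object name = new SpelExpressionParser().parseExpression("name").getValue(context); // "spring"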
+ * * @author Oliver Gierke + * @author Christoph Strobl */ -class DBObjectPropertyAccessor extends MapAccessor { +class DocumentPropertyAccessor extends MapAccessor { - static final MapAccessor INSTANCE = new DBObjectPropertyAccessor(); + static final MapAccessor INSTANCE = new DocumentPropertyAccessor(); - /* - * (non-Javadoc) - * @see org.springframework.context.expression.MapAccessor#getSpecificTargetClasses() - */ @Override public Class[] getSpecificTargetClasses() { - return new Class[] { DBObject.class }; + return new Class[] { Document.class }; } - /* - * (non-Javadoc) - * @see org.springframework.context.expression.MapAccessor#canRead(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String) - */ @Override - public boolean canRead(EvaluationContext context, Object target, String name) { + public boolean canRead(EvaluationContext context, @Nullable Object target, String name) { return true; } - /* - * (non-Javadoc) - * @see org.springframework.context.expression.MapAccessor#read(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String) - */ @Override @SuppressWarnings("unchecked") - public TypedValue read(EvaluationContext context, Object target, String name) { + public TypedValue read(EvaluationContext context, @Nullable Object target, String name) { + + if (target == null) { + return TypedValue.NULL; + } Map source = (Map) target; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java new file mode 100644 index 0000000000..bf21781058 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java @@ -0,0 +1,84 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.lang.Nullable; + +/** + * The source object to resolve document references upon. Encapsulates the actual source and the reference specific + * values. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentReferenceSource { + + private final Object self; + + private final @Nullable Object targetSource; + + /** + * Create a new instance of {@link DocumentReferenceSource}. + * + * @param self the entire wrapper object holding references. Must not be {@literal null}. + * @param targetSource the reference value source. + */ + DocumentReferenceSource(Object self, @Nullable Object targetSource) { + + this.self = self; + this.targetSource = targetSource; + } + + /** + * Get the outer document. + * + * @return never {@literal null}. + */ + public Object getSelf() { + return self; + } + + /** + * Get the actual (property specific) reference value. + * + * @return can be {@literal null}. 
+ */ + @Nullable + public Object getTargetSource() { + return targetSource; + } + + /** + * Dereference a {@code targetSource} if it is a {@link DocumentReferenceSource} or return {@code source} otherwise. + * + * @param source + * @return + */ + @Nullable + static Object getTargetSource(Object source) { + return source instanceof DocumentReferenceSource referenceSource ? referenceSource.getTargetSource() : source; + } + + /** + * Dereference a {@code self} object if it is a {@link DocumentReferenceSource} or return {@code self} otherwise. + * + * @param self + * @return + */ + static Object getSelf(Object self) { + return self instanceof DocumentReferenceSource referenceSource ? referenceSource.getSelf() : self; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java index bcbdef5e24..2bca260b79 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,15 @@ */ package org.springframework.data.mongodb.core.convert; +import java.text.Collator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.bson.Document; import org.springframework.core.convert.converter.Converter; import org.springframework.data.convert.ReadingConverter; @@ -41,22 +46,42 @@ import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.query.GeoCommand; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import com.mongodb.Function; /** * Wrapper class to contain useful geo structure converters for the usage with Mongo. 
- * + * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Thiago Diniz da Silveira * @since 1.5 */ +@SuppressWarnings("ConstantConditions") abstract class GeoConverters { + private final static Map>> converters; + + static { + + Collator caseInsensitive = Collator.getInstance(); + caseInsensitive.setStrength(Collator.PRIMARY); + + Map>> geoConverters = new TreeMap<>(caseInsensitive); + geoConverters.put("point", DocumentToGeoJsonPointConverter.INSTANCE::convert); + geoConverters.put("multipoint", DocumentToGeoJsonMultiPointConverter.INSTANCE::convert); + geoConverters.put("linestring", DocumentToGeoJsonLineStringConverter.INSTANCE::convert); + geoConverters.put("multilinestring", DocumentToGeoJsonMultiLineStringConverter.INSTANCE::convert); + geoConverters.put("polygon", DocumentToGeoJsonPolygonConverter.INSTANCE::convert); + geoConverters.put("multipolygon", DocumentToGeoJsonMultiPolygonConverter.INSTANCE::convert); + geoConverters.put("geometrycollection", DocumentToGeoJsonGeometryCollectionConverter.INSTANCE::convert); + + converters = geoConverters; + } + /** * Private constructor to prevent instantiation. */ @@ -64,52 +89,48 @@ private GeoConverters() {} /** * Returns the geo converters to be registered. - * - * @return + * + * @return never {@literal null}. */ - @SuppressWarnings("unchecked") public static Collection getConvertersToRegister() { return Arrays.asList( // - BoxToDbObjectConverter.INSTANCE // - , PolygonToDbObjectConverter.INSTANCE // - , CircleToDbObjectConverter.INSTANCE // - , SphereToDbObjectConverter.INSTANCE // - , DbObjectToBoxConverter.INSTANCE // - , DbObjectToPolygonConverter.INSTANCE // - , DbObjectToCircleConverter.INSTANCE // - , DbObjectToSphereConverter.INSTANCE // - , DbObjectToPointConverter.INSTANCE // - , PointToDbObjectConverter.INSTANCE // - , GeoCommandToDbObjectConverter.INSTANCE // - , GeoJsonToDbObjectConverter.INSTANCE // - , GeoJsonPointToDbObjectConverter.INSTANCE // - , GeoJsonPolygonToDbObjectConverter.INSTANCE // - , DbObjectToGeoJsonPointConverter.INSTANCE // - , DbObjectToGeoJsonPolygonConverter.INSTANCE // - , DbObjectToGeoJsonLineStringConverter.INSTANCE // - , DbObjectToGeoJsonMultiLineStringConverter.INSTANCE // - , DbObjectToGeoJsonMultiPointConverter.INSTANCE // - , DbObjectToGeoJsonMultiPolygonConverter.INSTANCE // - , DbObjectToGeoJsonGeometryCollectionConverter.INSTANCE); + BoxToDocumentConverter.INSTANCE // + , PolygonToDocumentConverter.INSTANCE // + , CircleToDocumentConverter.INSTANCE // + , SphereToDocumentConverter.INSTANCE // + , DocumentToBoxConverter.INSTANCE // + , DocumentToPolygonConverter.INSTANCE // + , DocumentToCircleConverter.INSTANCE // + , DocumentToSphereConverter.INSTANCE // + , DocumentToPointConverter.INSTANCE // + , PointToDocumentConverter.INSTANCE // + , GeoCommandToDocumentConverter.INSTANCE // + , GeoJsonToDocumentConverter.INSTANCE // + , GeoJsonPointToDocumentConverter.INSTANCE // + , GeoJsonPolygonToDocumentConverter.INSTANCE // + , DocumentToGeoJsonPointConverter.INSTANCE // + , DocumentToGeoJsonPolygonConverter.INSTANCE // + , DocumentToGeoJsonLineStringConverter.INSTANCE // + , DocumentToGeoJsonMultiLineStringConverter.INSTANCE // + , DocumentToGeoJsonMultiPointConverter.INSTANCE // + , DocumentToGeoJsonMultiPolygonConverter.INSTANCE // + , DocumentToGeoJsonGeometryCollectionConverter.INSTANCE // + , DocumentToGeoJsonConverter.INSTANCE); } /** * Converts a {@link List} of {@link Double}s into a {@link Point}. 
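The Collator at PRIMARY strength makes the converter lookup above ignore case (and accent) differences, so GeoJSON type values like "Point", "point" or "POINT" all dispatch to the same converter. A self-contained sketch of the same trick:

    import java.text.Collator;
    import java.util.Map;
    import java.util.TreeMap;

    Collator caseInsensitive = Collator.getInstance();
    caseInsensitive.setStrength(Collator.PRIMARY);

    Map<String, String> lookup = new TreeMap<>(caseInsensitive);
    lookup.put("point", "handles Point");

    lookup.get("POINT"); // still found: PRIMARY strength ignores case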
- * + * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DbObjectToPointConverter implements Converter { + enum DocumentToPointConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public Point convert(DBObject source) { + public Point convert(Document source) { if (source == null) { return null; @@ -117,115 +138,99 @@ public Point convert(DBObject source) { Assert.isTrue(source.keySet().size() == 2, "Source must contain 2 elements"); - if (source.containsField("type")) { - return DbObjectToGeoJsonPointConverter.INSTANCE.convert(source); + if (source.containsKey("type")) { + return DocumentToGeoJsonPointConverter.INSTANCE.convert(source); } - return new Point((Double) source.get("x"), (Double) source.get("y")); + return new Point(toPrimitiveDoubleValue(source.get("x")), toPrimitiveDoubleValue(source.get("y"))); } } /** * Converts a {@link Point} into a {@link List} of {@link Double}s. - * + * * @author Thomas Darimont * @since 1.5 */ - static enum PointToDbObjectConverter implements Converter { + enum PointToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(Point source) { - return source == null ? null : new BasicDBObject("x", source.getX()).append("y", source.getY()); + public Document convert(Point source) { + return source == null ? null : new Document("x", source.getX()).append("y", source.getY()); } } /** - * Converts a {@link Box} into a {@link BasicDBList}. - * + * Converts a {@link Box} into a {@link Document}. + * * @author Thomas Darimont * @since 1.5 */ @WritingConverter - static enum BoxToDbObjectConverter implements Converter { + enum BoxToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(Box source) { + public Document convert(Box source) { if (source == null) { return null; } - BasicDBObject result = new BasicDBObject(); - result.put("first", PointToDbObjectConverter.INSTANCE.convert(source.getFirst())); - result.put("second", PointToDbObjectConverter.INSTANCE.convert(source.getSecond())); + Document result = new Document(); + result.put("first", PointToDocumentConverter.INSTANCE.convert(source.getFirst())); + result.put("second", PointToDocumentConverter.INSTANCE.convert(source.getSecond())); return result; } } /** - * Converts a {@link BasicDBList} into a {@link Box}. - * + * Converts a {@link Document} into a {@link Box}. 
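The two Point converters above agree on the document shape { "x" : <double>, "y" : <double> }. A sketch of the round trip they implement, using org.bson.Document and org.springframework.data.geo.Point:

    Document source = new Document("x", 40.73).append("y", -73.99);

    // Reading: numeric leniency mirrors toPrimitiveDoubleValue(...), any Number works.
    Point point = new Point(((Number) source.get("x")).doubleValue(),
            ((Number) source.get("y")).doubleValue());

    // Writing: the exact shape produced by PointToDocumentConverter.
    Document written = new Document("x", point.getX()).append("y", point.getY());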
+ * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DbObjectToBoxConverter implements Converter { + enum DocumentToBoxConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public Box convert(DBObject source) { + public Box convert(Document source) { if (source == null) { return null; } - Point first = DbObjectToPointConverter.INSTANCE.convert((DBObject) source.get("first")); - Point second = DbObjectToPointConverter.INSTANCE.convert((DBObject) source.get("second")); + Point first = DocumentToPointConverter.INSTANCE.convert((Document) source.get("first")); + Point second = DocumentToPointConverter.INSTANCE.convert((Document) source.get("second")); return new Box(first, second); } } /** - * Converts a {@link Circle} into a {@link BasicDBList}. - * + * Converts a {@link Circle} into a {@link Document}. + * * @author Thomas Darimont * @since 1.5 */ - static enum CircleToDbObjectConverter implements Converter { + enum CircleToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(Circle source) { + public Document convert(Circle source) { if (source == null) { return null; } - DBObject result = new BasicDBObject(); - result.put("center", PointToDbObjectConverter.INSTANCE.convert(source.getCenter())); + Document result = new Document(); + result.put("center", PointToDocumentConverter.INSTANCE.convert(source.getCenter())); result.put("radius", source.getRadius().getNormalizedValue()); result.put("metric", source.getRadius().getMetric().toString()); return result; @@ -233,70 +238,62 @@ public DBObject convert(Circle source) { } /** - * Converts a {@link DBObject} into a {@link Circle}. - * + * Converts a {@link Document} into a {@link Circle}. + * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DbObjectToCircleConverter implements Converter { + enum DocumentToCircleConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public Circle convert(DBObject source) { + public Circle convert(Document source) { if (source == null) { return null; } - DBObject center = (DBObject) source.get("center"); - Double radius = (Double) source.get("radius"); + Document center = (Document) source.get("center"); + Number radius = (Number) source.get("radius"); - Distance distance = new Distance(radius); + Assert.notNull(center, "Center must not be null"); + Assert.notNull(radius, "Radius must not be null"); - if (source.containsField("metric")) { + Distance distance = new Distance(toPrimitiveDoubleValue(radius)); + + if (source.containsKey("metric")) { String metricString = (String) source.get("metric"); - Assert.notNull(metricString, "Metric must not be null!"); + Assert.notNull(metricString, "Metric must not be null"); distance = distance.in(Metrics.valueOf(metricString)); } - Assert.notNull(center, "Center must not be null!"); - Assert.notNull(radius, "Radius must not be null!"); - - return new Circle(DbObjectToPointConverter.INSTANCE.convert(center), distance); + return new Circle(DocumentToPointConverter.INSTANCE.convert(center), distance); } } /** - * Converts a {@link Sphere} into a {@link BasicDBList}. - * + * Converts a {@link Sphere} into a {@link Document}. 
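Circle (and, as the next converters show, Sphere) uses one document layout: a nested center point, the radius in normalized form, and the metric name. A sketch of the shape the circle converters above read and write (geo types from org.springframework.data.geo):

    Circle circle = new Circle(new Point(0, 0), new Distance(10, Metrics.KILOMETERS));

    Document document = new Document()
            .append("center", new Document("x", 0d).append("y", 0d))
            .append("radius", circle.getRadius().getNormalizedValue()) // normalized value, not the literal 10
            .append("metric", circle.getRadius().getMetric().toString()); // e.g. "KILOMETERS"

    // Reading now accepts any Number for the radius (int, long, double alike),
    // since the value runs through toPrimitiveDoubleValue(...).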
+ * * @author Thomas Darimont * @since 1.5 */ - static enum SphereToDbObjectConverter implements Converter { + enum SphereToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(Sphere source) { + public Document convert(Sphere source) { if (source == null) { return null; } - DBObject result = new BasicDBObject(); - result.put("center", PointToDbObjectConverter.INSTANCE.convert(source.getCenter())); + Document result = new Document(); + result.put("center", PointToDocumentConverter.INSTANCE.convert(source.getCenter())); result.put("radius", source.getRadius().getNormalizedValue()); result.put("metric", source.getRadius().getMetric().toString()); return result; @@ -304,111 +301,99 @@ public DBObject convert(Sphere source) { } /** - * Converts a {@link BasicDBList} into a {@link Sphere}. - * + * Converts a {@link Document} into a {@link Sphere}. + * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DbObjectToSphereConverter implements Converter { + enum DocumentToSphereConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public Sphere convert(DBObject source) { + public Sphere convert(Document source) { if (source == null) { return null; } - DBObject center = (DBObject) source.get("center"); - Double radius = (Double) source.get("radius"); + Document center = (Document) source.get("center"); + Number radius = (Number) source.get("radius"); - Distance distance = new Distance(radius); + Assert.notNull(center, "Center must not be null"); + Assert.notNull(radius, "Radius must not be null"); - if (source.containsField("metric")) { + Distance distance = new Distance(toPrimitiveDoubleValue(radius)); + + if (source.containsKey("metric")) { String metricString = (String) source.get("metric"); - Assert.notNull(metricString, "Metric must not be null!"); + Assert.notNull(metricString, "Metric must not be null"); distance = distance.in(Metrics.valueOf(metricString)); } - Assert.notNull(center, "Center must not be null!"); - Assert.notNull(radius, "Radius must not be null!"); - - return new Sphere(DbObjectToPointConverter.INSTANCE.convert(center), distance); + return new Sphere(DocumentToPointConverter.INSTANCE.convert(center), distance); } } /** - * Converts a {@link Polygon} into a {@link BasicDBList}. - * + * Converts a {@link Polygon} into a {@link Document}. + * * @author Thomas Darimont * @since 1.5 */ - static enum PolygonToDbObjectConverter implements Converter { + enum PolygonToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(Polygon source) { + public Document convert(Polygon source) { if (source == null) { return null; } List points = source.getPoints(); - List pointTuples = new ArrayList(points.size()); + List pointTuples = new ArrayList<>(points.size()); for (Point point : points) { - pointTuples.add(PointToDbObjectConverter.INSTANCE.convert(point)); + pointTuples.add(PointToDocumentConverter.INSTANCE.convert(point)); } - DBObject result = new BasicDBObject(); + Document result = new Document(); result.put("points", pointTuples); return result; } } /** - * Converts a {@link BasicDBList} into a {@link Polygon}. 
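Polygons use a "points" array of the same x/y pairs; a sketch of the input and output shape of the polygon converters above:

    Document polygon = new Document("points", List.of(
            new Document("x", 0d).append("y", 0d),
            new Document("x", 4d).append("y", 0d),
            new Document("x", 4d).append("y", 3d),
            new Document("x", 0d).append("y", 0d)));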
- * + * Converts a {@link Document} into a {@link Polygon}. + * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DbObjectToPolygonConverter implements Converter { + enum DocumentToPolygonConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override @SuppressWarnings({ "unchecked" }) - public Polygon convert(DBObject source) { + public Polygon convert(Document source) { if (source == null) { return null; } - List points = (List) source.get("points"); - List newPoints = new ArrayList(points.size()); + List points = (List) source.get("points"); + List newPoints = new ArrayList<>(points.size()); - for (DBObject element : points) { + for (Document element : points) { - Assert.notNull(element, "Point elements of polygon must not be null!"); - newPoints.add(DbObjectToPointConverter.INSTANCE.convert(element)); + Assert.notNull(element, "Point elements of polygon must not be null"); + newPoints.add(DocumentToPointConverter.INSTANCE.convert(element)); } return new Polygon(newPoints); @@ -416,63 +401,56 @@ public Polygon convert(DBObject source) { } /** - * Converts a {@link Sphere} into a {@link BasicDBList}. - * + * Converts a {@link Sphere} into a {@link Document}. + * * @author Thomas Darimont * @since 1.5 */ - static enum GeoCommandToDbObjectConverter implements Converter { + enum GeoCommandToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override @SuppressWarnings("rawtypes") - public DBObject convert(GeoCommand source) { + public Document convert(GeoCommand source) { if (source == null) { return null; } - BasicDBList argument = new BasicDBList(); + List argument = new ArrayList<>(2); Shape shape = source.getShape(); - if (shape instanceof GeoJson) { - return GeoJsonToDbObjectConverter.INSTANCE.convert((GeoJson) shape); + if (shape instanceof GeoJson geoJson) { + return GeoJsonToDocumentConverter.INSTANCE.convert(geoJson); } - if (shape instanceof Box) { - - argument.add(toList(((Box) shape).getFirst())); - argument.add(toList(((Box) shape).getSecond())); + if (shape instanceof Box box) { - } else if (shape instanceof Circle) { + argument.add(toList(box.getFirst())); + argument.add(toList(box.getSecond())); - argument.add(toList(((Circle) shape).getCenter())); - argument.add(((Circle) shape).getRadius().getNormalizedValue()); + } else if (shape instanceof Circle circle) { - } else if (shape instanceof Circle) { + argument.add(toList(circle.getCenter())); + argument.add(circle.getRadius().getNormalizedValue()); - argument.add(toList(((Circle) shape).getCenter())); - argument.add(((Circle) shape).getRadius()); + } else if (shape instanceof Polygon polygon) { - } else if (shape instanceof Polygon) { - - for (Point point : ((Polygon) shape).getPoints()) { + List points = polygon.getPoints(); + argument = new ArrayList<>(points.size()); + for (Point point : points) { argument.add(toList(point)); } - } else if (shape instanceof Sphere) { + } else if (shape instanceof Sphere sphere) { - argument.add(toList(((Sphere) shape).getCenter())); - argument.add(((Sphere) shape).getRadius().getNormalizedValue()); + argument.add(toList(sphere.getCenter())); + argument.add(sphere.getRadius().getNormalizedValue()); } - return new BasicDBObject(source.getCommand(), argument); + return new Document(source.getCommand(), argument); } } @@ -480,60 +458,55 @@ public 
DBObject convert(GeoCommand source) { * @author Christoph Strobl * @since 1.7 */ - @SuppressWarnings("rawtypes") - static enum GeoJsonToDbObjectConverter implements Converter { + enum GeoJsonToDocumentConverter implements Converter, Document> { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(GeoJson source) { + public Document convert(GeoJson source) { if (source == null) { return null; } - DBObject dbo = new BasicDBObject("type", source.getType()); + Document dbo = new Document("type", source.getType()); - if (source instanceof GeoJsonGeometryCollection) { + if (source instanceof GeoJsonGeometryCollection collection) { - BasicDBList dbl = new BasicDBList(); + List dbl = new ArrayList<>(); - for (GeoJson geometry : ((GeoJsonGeometryCollection) source).getCoordinates()) { + for (GeoJson geometry : collection.getCoordinates()) { dbl.add(convert(geometry)); } dbo.put("geometries", dbl); } else { - dbo.put("coordinates", convertIfNecessarry(source.getCoordinates())); + dbo.put("coordinates", convertIfNecessary(source.getCoordinates())); } return dbo; } - private Object convertIfNecessarry(Object candidate) { + private Object convertIfNecessary(Object candidate) { - if (candidate instanceof GeoJson) { - return convertIfNecessarry(((GeoJson) candidate).getCoordinates()); + if (candidate instanceof GeoJson geoJson) { + return convertIfNecessary(geoJson.getCoordinates()); } - if (candidate instanceof Iterable) { + if (candidate instanceof Iterable iterable) { - BasicDBList dbl = new BasicDBList(); + List dbl = new ArrayList<>(); - for (Object element : (Iterable) candidate) { - dbl.add(convertIfNecessarry(element)); + for (Object element : iterable) { + dbl.add(convertIfNecessary(element)); } return dbl; } - if (candidate instanceof Point) { - return toList((Point) candidate); + if (candidate instanceof Point point) { + return toList(point); } return candidate; @@ -544,17 +517,13 @@ private Object convertIfNecessarry(Object candidate) { * @author Christoph Strobl * @since 1.7 */ - static enum GeoJsonPointToDbObjectConverter implements Converter { + enum GeoJsonPointToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(GeoJsonPoint source) { - return GeoJsonToDbObjectConverter.INSTANCE.convert(source); + public Document convert(GeoJsonPoint source) { + return GeoJsonToDocumentConverter.INSTANCE.convert(source); } } @@ -562,17 +531,13 @@ public DBObject convert(GeoJsonPoint source) { * @author Christoph Strobl * @since 1.7 */ - static enum GeoJsonPolygonToDbObjectConverter implements Converter { + enum GeoJsonPolygonToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public DBObject convert(GeoJsonPolygon source) { - return GeoJsonToDbObjectConverter.INSTANCE.convert(source); + public Document convert(GeoJsonPolygon source) { + return GeoJsonToDocumentConverter.INSTANCE.convert(source); } } @@ -580,27 +545,23 @@ public DBObject convert(GeoJsonPolygon source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonPointConverter implements Converter { + enum DocumentToGeoJsonPointConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see 
org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override @SuppressWarnings("unchecked") - public GeoJsonPoint convert(DBObject source) { + public GeoJsonPoint convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Point"), - String.format("Cannot convert type '%s' to Point.", source.get("type"))); + String.format("Cannot convert type '%s' to Point", source.get("type"))); - List dbl = (List) source.get("coordinates"); - return new GeoJsonPoint(dbl.get(0).doubleValue(), dbl.get(1).doubleValue()); + List dbl = (List) source.get("coordinates"); + return new GeoJsonPoint(toPrimitiveDoubleValue(dbl.get(0)), toPrimitiveDoubleValue(dbl.get(1))); } } @@ -608,25 +569,21 @@ public GeoJsonPoint convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonPolygonConverter implements Converter { + enum DocumentToGeoJsonPolygonConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public GeoJsonPolygon convert(DBObject source) { + public GeoJsonPolygon convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Polygon"), - String.format("Cannot convert type '%s' to Polygon.", source.get("type"))); + String.format("Cannot convert type '%s' to Polygon", source.get("type"))); - return toGeoJsonPolygon((BasicDBList) source.get("coordinates")); + return toGeoJsonPolygon((List) source.get("coordinates")); } } @@ -634,29 +591,25 @@ public GeoJsonPolygon convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonMultiPolygonConverter implements Converter { + enum DocumentToGeoJsonMultiPolygonConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public GeoJsonMultiPolygon convert(DBObject source) { + public GeoJsonMultiPolygon convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiPolygon"), - String.format("Cannot convert type '%s' to MultiPolygon.", source.get("type"))); + String.format("Cannot convert type '%s' to MultiPolygon", source.get("type"))); - BasicDBList dbl = (BasicDBList) source.get("coordinates"); - List polygones = new ArrayList(); + List dbl = (List) source.get("coordinates"); + List polygones = new ArrayList<>(); for (Object polygon : dbl) { - polygones.add(toGeoJsonPolygon((BasicDBList) polygon)); + polygones.add(toGeoJsonPolygon((List) polygon)); } return new GeoJsonMultiPolygon(polygones); @@ -667,25 +620,21 @@ public GeoJsonMultiPolygon convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonLineStringConverter implements Converter { + enum DocumentToGeoJsonLineStringConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public GeoJsonLineString convert(DBObject source) { + public GeoJsonLineString convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "LineString"), - String.format("Cannot convert type '%s' to LineString.", source.get("type"))); + String.format("Cannot convert type '%s' to LineString", 
source.get("type"))); - BasicDBList cords = (BasicDBList) source.get("coordinates"); + List cords = (List) source.get("coordinates"); return new GeoJsonLineString(toListOfPoint(cords)); } @@ -695,25 +644,21 @@ public GeoJsonLineString convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonMultiPointConverter implements Converter { + enum DocumentToGeoJsonMultiPointConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public GeoJsonMultiPoint convert(DBObject source) { + public GeoJsonMultiPoint convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiPoint"), - String.format("Cannot convert type '%s' to MultiPoint.", source.get("type"))); + String.format("Cannot convert type '%s' to MultiPoint", source.get("type"))); - BasicDBList cords = (BasicDBList) source.get("coordinates"); + List cords = (List) source.get("coordinates"); return new GeoJsonMultiPoint(toListOfPoint(cords)); } @@ -723,29 +668,25 @@ public GeoJsonMultiPoint convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonMultiLineStringConverter implements Converter { + enum DocumentToGeoJsonMultiLineStringConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public GeoJsonMultiLineString convert(DBObject source) { + public GeoJsonMultiLineString convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiLineString"), - String.format("Cannot convert type '%s' to MultiLineString.", source.get("type"))); + String.format("Cannot convert type '%s' to MultiLineString", source.get("type"))); - List lines = new ArrayList(); - BasicDBList cords = (BasicDBList) source.get("coordinates"); + List lines = new ArrayList<>(); + List cords = (List) source.get("coordinates"); for (Object line : cords) { - lines.add(new GeoJsonLineString(toListOfPoint((BasicDBList) line))); + lines.add(new GeoJsonLineString(toListOfPoint((List) line))); } return new GeoJsonMultiLineString(lines); } @@ -755,60 +696,27 @@ public GeoJsonMultiLineString convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - static enum DbObjectToGeoJsonGeometryCollectionConverter implements Converter { + enum DocumentToGeoJsonGeometryCollectionConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @SuppressWarnings("rawtypes") @Override - public GeoJsonGeometryCollection convert(DBObject source) { + public GeoJsonGeometryCollection convert(Document source) { if (source == null) { return null; } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "GeometryCollection"), - String.format("Cannot convert type '%s' to GeometryCollection.", source.get("type"))); + String.format("Cannot convert type '%s' to GeometryCollection", source.get("type"))); - List> geometries = new ArrayList>(); + List> geometries = new ArrayList<>(); for (Object o : (List) source.get("geometries")) { - geometries.add(convertGeometries((DBObject) o)); - } - return new GeoJsonGeometryCollection(geometries); - - } - - private static GeoJson convertGeometries(DBObject source) { - - Object type = source.get("type"); - if 
(ObjectUtils.nullSafeEquals(type, "Point")) { - return DbObjectToGeoJsonPointConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "MultiPoint")) { - return DbObjectToGeoJsonMultiPointConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "LineString")) { - return DbObjectToGeoJsonLineStringConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "MultiLineString")) { - return DbObjectToGeoJsonMultiLineStringConverter.INSTANCE.convert(source); + geometries.add(toGenericGeoJson((Document) o)); } - if (ObjectUtils.nullSafeEquals(type, "Polygon")) { - return DbObjectToGeoJsonPolygonConverter.INSTANCE.convert(source); - } - if (ObjectUtils.nullSafeEquals(type, "MultiPolygon")) { - return DbObjectToGeoJsonMultiPolygonConverter.INSTANCE.convert(source); - } - - throw new IllegalArgumentException(String.format("Cannot convert unknown GeoJson type %s", type)); + return new GeoJsonGeometryCollection(geometries); } } @@ -817,36 +725,78 @@ static List toList(Point point) { } /** - * Converts a coordinate pairs nested in in {@link BasicDBList} into {@link GeoJsonPoint}s. - * - * @param listOfCoordinatePairs - * @return + * Converts coordinate pairs nested in a {@link List} into {@link GeoJsonPoint}s. + * + * @param listOfCoordinatePairs must not be {@literal null}. + * @return never {@literal null}. * @since 1.7 */ @SuppressWarnings("unchecked") - static List toListOfPoint(BasicDBList listOfCoordinatePairs) { + static List toListOfPoint(List listOfCoordinatePairs) { - List points = new ArrayList(); + List points = new ArrayList<>(listOfCoordinatePairs.size()); for (Object point : listOfCoordinatePairs) { Assert.isInstanceOf(List.class, point); - List coordinatesList = (List) point; + List coordinatesList = (List) point; - points.add(new GeoJsonPoint(coordinatesList.get(0).doubleValue(), coordinatesList.get(1).doubleValue())); + points.add(new GeoJsonPoint(toPrimitiveDoubleValue(coordinatesList.get(0)), + toPrimitiveDoubleValue(coordinatesList.get(1)))); } return points; } /** - * Converts a coordinate pairs nested in in {@link BasicDBList} into {@link GeoJsonPolygon}. - * - * @param dbList - * @return + * Converts coordinate pairs nested in a {@link List} into {@link GeoJsonPolygon}. + * + * @param dbList must not be {@literal null}. + * @return never {@literal null}. * @since 1.7 */ - static GeoJsonPolygon toGeoJsonPolygon(BasicDBList dbList) { - return new GeoJsonPolygon(toListOfPoint((BasicDBList) dbList.get(0))); + static GeoJsonPolygon toGeoJsonPolygon(List dbList) { + + GeoJsonPolygon polygon = new GeoJsonPolygon(toListOfPoint((List) dbList.get(0))); + return dbList.size() > 1 ? polygon.withInnerRing(toListOfPoint((List) dbList.get(1))) : polygon; + } + + /** + * Converter implementation transforming a {@link Document} into a concrete {@link GeoJson} based on the embedded + * {@literal type} information.
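toGeoJsonPolygon(...) above consumes the standard GeoJSON nesting, where the first coordinate array is the outer ring and an optional second array becomes the inner ring. A sketch of both input shapes, built via org.bson.Document#parse:

    // Outer ring only.
    Document simple = Document.parse(
            "{ 'type' : 'Polygon', 'coordinates' : [ [ [0,0], [0,10], [10,10], [10,0], [0,0] ] ] }");

    // Outer ring plus one hole; the second array is handed to withInnerRing(...).
    Document withHole = Document.parse(
            "{ 'type' : 'Polygon', 'coordinates' : [ [ [0,0], [0,10], [10,10], [10,0], [0,0] ],"
                    + " [ [2,2], [2,8], [8,8], [8,2], [2,2] ] ] }");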
+ * + * @since 2.1 + * @author Christoph Strobl + */ + @ReadingConverter + enum DocumentToGeoJsonConverter implements Converter<Document, GeoJson<?>> { + INSTANCE; + + @Override + public GeoJson<?> convert(Document source) { + return toGenericGeoJson(source); + } + } + + private static GeoJson<?> toGenericGeoJson(Document source) { + + String type = source.get("type", String.class); + + if (type != null) { + + Function<Document, GeoJson<?>> converter = converters.get(type); + + if (converter != null) { + return converter.apply(source); + } + } + + throw new IllegalArgumentException(String.format("No converter found capable of converting GeoJson type %s", type)); + } + + private static double toPrimitiveDoubleValue(Object value) { + + Assert.isInstanceOf(Number.class, value, "Argument must be a Number"); + return NumberUtils.convertNumberToTargetClass((Number) value, Double.class); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java new file mode 100644 index 0000000000..0afba67025 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.bson.Document; + +/** + * {@link JsonSchemaMapper} allows mapping a given {@link Document} containing a {@literal $jsonSchema} to the fields of + * a given domain type. The mapping considers {@link org.springframework.data.mongodb.core.mapping.Field} annotations + * and other Spring Data specifics. + * + * @author Christoph Strobl + * @since 2.1 + */ +public interface JsonSchemaMapper { + + /** + * Map the {@literal required} and {@literal properties} fields of the given {@link Document} containing the + * {@literal $jsonSchema} against the given domain type. <br />
                    + * The source document remains untouched, fields that do not require mapping are simply copied over to the mapped + * instance. + * + * @param jsonSchema the {@link Document} holding the raw schema representation. Must not be {@literal null}. + * @param type the target type to map against. Must not be {@literal null}. + * @return a new {@link Document} containing the mapped {@literal $jsonSchema} never {@literal null}. + */ + Document mapSchema(Document jsonSchema, Class type); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java index 76e352d9c6..77aac55813 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,24 @@ */ package org.springframework.data.mongodb.core.convert; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; +import org.springframework.lang.Nullable; import com.mongodb.DBRef; /** - * Allows direct interaction with the underlying {@link LazyLoadingInterceptor}. - * + * Allows direct interaction with the underlying {@code LazyLoadingInterceptor}. + * * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch * @since 1.5 + * @see LazyLoadingProxyFactory */ public interface LazyLoadingProxy { /** * Initializes the proxy and returns the wrapped value. - * + * * @return * @since 1.5 */ @@ -38,9 +40,21 @@ public interface LazyLoadingProxy { /** * Returns the {@link DBRef} represented by this {@link LazyLoadingProxy}, may be null. - * + * * @return * @since 1.5 */ + @Nullable DBRef toDBRef(); + + /** + * Returns the raw {@literal source} object that defines the reference. + * + * @return can be {@literal null}. + * @since 3.3 + */ + @Nullable + default Object getSource() { + return toDBRef(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java new file mode 100644 index 0000000000..76539ea431 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java @@ -0,0 +1,386 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.springframework.util.ReflectionUtils.*; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.lang.reflect.Method; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Supplier; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.cglib.core.SpringNamingPolicy; +import org.springframework.cglib.proxy.Callback; +import org.springframework.cglib.proxy.Enhancer; +import org.springframework.cglib.proxy.Factory; +import org.springframework.cglib.proxy.MethodProxy; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.LazyLoadingException; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lock; +import org.springframework.data.util.Lock.AcquiredLock; +import org.springframework.lang.Nullable; +import org.springframework.objenesis.SpringObjenesis; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.DBRef; + +/** + * {@link ProxyFactory} to create a proxy for {@link MongoPersistentProperty#getType()} to resolve a reference lazily. + * NOTE: This class is intended for internal usage only. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +public final class LazyLoadingProxyFactory { + + private static final Log LOGGER = LogFactory.getLog(LazyLoadingProxyFactory.class); + + private final SpringObjenesis objenesis; + + private final PersistenceExceptionTranslator exceptionTranslator; + + private LazyLoadingProxyFactory() { + this(ex -> null); + } + + public LazyLoadingProxyFactory(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + this.objenesis = new SpringObjenesis(null); + } + + /** + * Predict the proxy target type. This will advise the infrastructure to resolve as many pieces as possible in a + * potential AOT scenario without necessarily resolving the entire object. + * + * @param propertyType the type to proxy. + * @param interceptor the interceptor to be added. + * @return the proxy type.
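Construction only requires a PersistenceExceptionTranslator; the private no-argument constructor above wires the same no-op translator shown here. A minimal sketch:

    // A translator returning null signals "not translatable"; resolve() then
    // falls back to wrapping the original RuntimeException.
    PersistenceExceptionTranslator translator = ex -> null;
    LazyLoadingProxyFactory proxyFactory = new LazyLoadingProxyFactory(translator);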
+ * @since 4.0 + */ + public static Class resolveProxyType(Class propertyType, Supplier interceptor) { + + LazyLoadingProxyFactory factory = new LazyLoadingProxyFactory(); + + if (!propertyType.isInterface()) { + return factory.getEnhancedTypeFor(propertyType); + } + + return factory.prepareProxyFactory(propertyType, interceptor) + .getProxyClass(LazyLoadingProxy.class.getClassLoader()); + } + + /** + * Create the {@link ProxyFactory} for the given type, already adding required additional interfaces. + * + * @param targetType the type to proxy. + * @return the prepared {@link ProxyFactory}. + * @since 4.0.5 + */ + public static ProxyFactory prepareFactory(Class targetType) { + + ProxyFactory proxyFactory = new ProxyFactory(); + + for (Class type : targetType.getInterfaces()) { + proxyFactory.addInterface(type); + } + + proxyFactory.addInterface(LazyLoadingProxy.class); + proxyFactory.addInterface(targetType); + + return proxyFactory; + } + + private ProxyFactory prepareProxyFactory(Class propertyType, Supplier interceptor) { + + ProxyFactory proxyFactory = prepareFactory(propertyType); + proxyFactory.addAdvice(interceptor.get()); + + return proxyFactory; + } + + public Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback, + Object source) { + + Class propertyType = property.getType(); + LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, callback, source, exceptionTranslator); + + if (!propertyType.isInterface()) { + + Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType)); + factory.setCallbacks(new Callback[] { interceptor }); + + return factory; + } + + return prepareProxyFactory(propertyType, + () -> new LazyLoadingInterceptor(property, callback, source, exceptionTranslator)) + .getProxy(LazyLoadingProxy.class.getClassLoader()); + } + + /** + * Returns the CGLib enhanced type for the given source type. + * + * @param type + * @return + */ + private Class getEnhancedTypeFor(Class type) { + + Enhancer enhancer = new Enhancer(); + enhancer.setSuperclass(type); + enhancer.setCallbackType(LazyLoadingInterceptor.class); + enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class }); + enhancer.setNamingPolicy(SpringNamingPolicy.INSTANCE); + enhancer.setAttemptLoad(true); + + return enhancer.createClass(); + } + + public static class LazyLoadingInterceptor + implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable { + + private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD; + + static { + try { + INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); + TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); + FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize"); + GET_SOURCE_METHOD = LazyLoadingProxy.class.getMethod("getSource"); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private final ReadWriteLock rwLock = new ReentrantReadWriteLock(); + private final Lock readLock = Lock.of(rwLock.readLock()); + private final Lock writeLock = Lock.of(rwLock.writeLock()); + + private final MongoPersistentProperty property; + private final DbRefResolverCallback callback; + private final Object source; + private final PersistenceExceptionTranslator exceptionTranslator; + private volatile boolean resolved; + private @Nullable Object result; + + /** + * @return a {@link LazyLoadingInterceptor} that just continues with the invocation. 
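Whichever branch of createLazyLoadingProxy(...) applies (a CGLib subclass for concrete classes, a Spring ProxyFactory proxy for interfaces), the result always implements LazyLoadingProxy. A sketch of a consumer; the lazy property and its accessor are hypothetical:

    Object value = book.getPublisher(); // hypothetical lazily resolved @DBRef property

    if (value instanceof LazyLoadingProxy proxy) {

        DBRef dbRef = proxy.toDBRef();     // null unless the reference source is a DBRef
        Object raw = proxy.getSource();    // the raw source object backing the reference
        Object target = proxy.getTarget(); // triggers resolution through the callback
    }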
+ * @since 4.0 + */ + public static LazyLoadingInterceptor none() { + + return new LazyLoadingInterceptor(null, null, null, null) { + @Nullable + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); + } + + @Nullable + @Override + public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable { + + ReflectionUtils.makeAccessible(method); + return method.invoke(o, args); + } + }; + } + + public LazyLoadingInterceptor(MongoPersistentProperty property, DbRefResolverCallback callback, Object source, + PersistenceExceptionTranslator exceptionTranslator) { + + this.property = property; + this.callback = callback; + this.source = source; + this.exceptionTranslator = exceptionTranslator; + } + + @Nullable + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); + } + + @Nullable + @Override + public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable { + + if (INITIALIZE_METHOD.equals(method)) { + return ensureResolved(); + } + + if (TO_DBREF_METHOD.equals(method)) { + return source instanceof DBRef ? source : null; + } + + if (GET_SOURCE_METHOD.equals(method)) { + return source; + } + + if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) { + + if (ReflectionUtils.isToStringMethod(method)) { + return proxyToString(source); + } + + if (ReflectionUtils.isEqualsMethod(method)) { + return proxyEquals(o, args[0]); + } + + if (ReflectionUtils.isHashCodeMethod(method)) { + return proxyHashCode(); + } + + // DATAMONGO-1076 - finalize methods should not trigger proxy initialization + if (FINALIZE_METHOD.equals(method)) { + return null; + } + } + + Object target = ensureResolved(); + + if (target == null) { + return null; + } + + ReflectionUtils.makeAccessible(method); + + return method.invoke(target, args); + } + + @Nullable + private Object ensureResolved() { + + if (!resolved) { + this.result = resolve(); + this.resolved = true; + } + + return this.result; + } + + private String proxyToString(@Nullable Object source) { + + StringBuilder description = new StringBuilder(); + if (source != null) { + if (source instanceof DBRef dbRef) { + description.append(dbRef.getCollectionName()); + description.append(":"); + description.append(dbRef.getId()); + } else { + description.append(source); + } + } else { + description.append(0); + } + description.append("$").append(LazyLoadingProxy.class.getSimpleName()); + + return description.toString(); + } + + private boolean proxyEquals(@Nullable Object proxy, Object that) { + + if (!(that instanceof LazyLoadingProxy)) { + return false; + } + + if (that == proxy) { + return true; + } + + return proxyToString(proxy).equals(that.toString()); + } + + private int proxyHashCode() { + return proxyToString(source).hashCode(); + } + + /** + * Callback method for serialization. + * + * @param out + * @throws IOException + */ + private void writeObject(ObjectOutputStream out) throws IOException { + + ensureResolved(); + out.writeObject(this.result); + } + + /** + * Callback method for deserialization. 
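Note how intercept(...) above answers toString(), equals(...), hashCode() and finalize() itself, so none of them forces resolution. A sketch of the observable behavior; lazyValue stands for a hypothetical proxied property value:

    // Logging the proxy is safe; no database round trip happens here. The format
    // follows proxyToString(...): "<collection>:<id>$LazyLoadingProxy".
    String description = lazyValue.toString();

    // Only getTarget() (or any business method) actually resolves the value.
    Object target = ((LazyLoadingProxy) lazyValue).getTarget();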
+ * + * @param in + * @throws IOException + */ + private void readObject(ObjectInputStream in) throws IOException { + + try { + this.resolved = true; + this.result = in.readObject(); + } catch (ClassNotFoundException e) { + throw new LazyLoadingException("Could not deserialize result", e); + } + } + + @Nullable + private Object resolve() { + + try (AcquiredLock l = readLock.lock()) { + if (resolved) { + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Accessing already resolved lazy loading property %s.%s", + property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName())); + } + return result; + } + } + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Resolving lazy loading property %s.%s", + property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName())); + } + + try { + return writeLock.execute(() -> callback.resolve(property)); + } catch (RuntimeException ex) { + + DataAccessException translatedException = exceptionTranslator.translateExceptionIfPossible(ex); + + if (translatedException instanceof ClientSessionException) { + throw new LazyLoadingException("Unable to lazily resolve DBRef; Invalid session state", ex); + } + + throw new LazyLoadingException("Unable to lazily resolve DBRef", + translatedException != null ? translatedException : ex); + } + } + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index d8530dcc46..864cc1c3e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,50 +15,92 @@ */ package org.springframework.data.mongodb.core.convert; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.function.BiPredicate; +import java.util.stream.Collectors; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.bson.json.JsonReader; +import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.core.CollectionFactory; -import org.springframework.core.convert.ConversionException; import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.data.convert.EntityInstantiator; +import org.springframework.core.env.Environment; +import org.springframework.core.env.EnvironmentCapable; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.annotation.Reference; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.PropertyValueConverter; import org.springframework.data.convert.TypeMapper; +import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.mapping.Association; -import org.springframework.data.mapping.AssociationHandler; +import org.springframework.data.mapping.InstanceCreatorMetadata; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.Parameter; +import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PreferredConstructor.Parameter; -import org.springframework.data.mapping.PropertyHandler; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.CachingValueExpressionEvaluatorFactory; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.model.EntityInstantiator; import org.springframework.data.mapping.model.ParameterValueProvider; import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider; import org.springframework.data.mapping.model.PropertyValueProvider; import 
org.springframework.data.mapping.model.SpELContext; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionParameterValueProvider; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.mapping.Unwrapped.OnEmpty; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; +import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.util.Predicates; import org.springframework.data.util.TypeInformation; import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; @@ -67,35 +109,73 @@ /** * {@link MongoConverter} that uses a {@link MappingContext} to do sophisticated mapping of domain objects to - * {@link DBObject}. - * + * {@link Document}. 
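Both directions of that mapping surface through the MongoConverter API. A sketch of the round trip, where converter is a fully configured MappingMongoConverter and Person a mapped domain type (both assumed):

    Document sink = new Document();
    converter.write(person, sink); // entity -> Document, type hints included

    Person read = converter.read(Person.class, sink); // Document -> entity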
+ * * @author Oliver Gierke * @author Jon Brisbin * @author Patrik Wasik * @author Thomas Darimont * @author Christoph Strobl + * @author Jordi Llach + * @author Mark Paluch + * @author Roman Puchkovskiy + * @author Heesu Jung + * @author Divya Srivastava + * @author Julia Lee */ -public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware, ValueResolver { +public class MappingMongoConverter extends AbstractMongoConverter + implements ApplicationContextAware, EnvironmentCapable { + + private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s; Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions; Parent object was: %4$s"; + private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter"; + + private static final BiPredicate, MongoPersistentProperty> PROPERTY_FILTER = (e, + property) -> { + + if (e.isIdProperty(property)) { + return false; + } + + if (e.isCreatorArgument(property)) { + return false; + } + + if (!property.isReadable()) { + return false; + } + return true; + }; - private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. Parent object was: %4$s"; + public static final TypeInformation BSON = TypeInformation.of(Bson.class); - protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class); + protected static final Log LOGGER = LogFactory.getLog(MappingMongoConverter.class); protected final MappingContext, MongoPersistentProperty> mappingContext; - protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser(); protected final QueryMapper idMapper; protected final DbRefResolver dbRefResolver; + protected final DefaultDbRefProxyHandler dbRefProxyHandler; + protected final ReferenceLookupDelegate referenceLookupDelegate; - protected ApplicationContext applicationContext; + protected @Nullable ApplicationContext applicationContext; + protected @Nullable Environment environment; protected MongoTypeMapper typeMapper; - protected String mapKeyDotReplacement = null; + protected @Nullable String mapKeyDotReplacement = null; + protected @Nullable CodecRegistryProvider codecRegistryProvider; + private MongoTypeMapper defaultTypeMapper; private SpELContext spELContext; + private @Nullable EntityCallbacks entityCallbacks; + private final SpelExpressionParser expressionParser = new SpelExpressionParser(); + private final DocumentPointerFactory documentPointerFactory; + private final SpelAwareProxyProjectionFactory projectionFactory = new SpelAwareProxyProjectionFactory( + expressionParser); + private final CachingValueExpressionEvaluatorFactory expressionEvaluatorFactory = new CachingValueExpressionEvaluatorFactory( + expressionParser, this, o -> spELContext.getEvaluationContext(o)); /** * Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}. - * - * @param mongoDbFactory must not be {@literal null}. + * + * @param dbRefResolver must not be {@literal null}. * @param mappingContext must not be {@literal null}. 
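The PROPERTY_FILTER above decides which properties get populated while reading: the id and constructor arguments are handled separately, and non-readable properties are skipped. An equivalent compact form of the same predicate:

    BiPredicate<MongoPersistentEntity<?>, MongoPersistentProperty> filter = (entity, property) ->
            !entity.isIdProperty(property)             // the id is populated separately
                    && !entity.isCreatorArgument(property) // already consumed by the constructor/factory
                    && property.isReadable();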
*/ public MappingMongoConverter(DbRefResolver dbRefResolver, @@ -103,546 +183,1020 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, super(new DefaultConversionService()); - Assert.notNull(dbRefResolver, "DbRefResolver must not be null!"); - Assert.notNull(mappingContext, "MappingContext must not be null!"); + Assert.notNull(dbRefResolver, "DbRefResolver must not be null"); + Assert.notNull(mappingContext, "MappingContext must not be null"); this.dbRefResolver = dbRefResolver; + this.mappingContext = mappingContext; - this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext); + this.defaultTypeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext, + this::getWriteTarget); this.idMapper = new QueryMapper(this); - this.spELContext = new SpELContext(DBObjectPropertyAccessor.INSTANCE); + this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE); + this.dbRefProxyHandler = new DefaultDbRefProxyHandler(mappingContext, (prop, bson, evaluator, path) -> { + + ConversionContext context = getConversionContext(path); + return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); + }, expressionEvaluatorFactory::create); + + this.referenceLookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext); + this.documentPointerFactory = new DocumentPointerFactory(conversionService, mappingContext); + } + + /** + * Creates a new {@link ConversionContext} given {@link ObjectPath}. + * + * @param path the current {@link ObjectPath}, must not be {@literal null}. + * @return the {@link ConversionContext}. + */ + protected ConversionContext getConversionContext(ObjectPath path) { + + Assert.notNull(path, "ObjectPath must not be null"); + + return new DefaultConversionContext(this, conversions, path, this::readDocument, this::readCollectionOrArray, + this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead); } /** - * Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}. - * + * Creates a new {@link MappingMongoConverter} given the new {@link MongoDatabaseFactory} and {@link MappingContext}. + * * @deprecated use the constructor taking a {@link DbRefResolver} instead. * @param mongoDbFactory must not be {@literal null}. * @param mappingContext must not be {@literal null}. */ @Deprecated - public MappingMongoConverter(MongoDbFactory mongoDbFactory, + public MappingMongoConverter(MongoDatabaseFactory mongoDbFactory, MappingContext, MongoPersistentProperty> mappingContext) { this(new DefaultDbRefResolver(mongoDbFactory), mappingContext); + setCodecRegistryProvider(mongoDbFactory); } /** - * Configures the {@link MongoTypeMapper} to be used to add type information to {@link DBObject}s created by the - * converter and how to lookup type information from {@link DBObject}s when reading them. Uses a + * Configures the {@link MongoTypeMapper} to be used to add type information to {@link Document}s created by the + * converter and how to lookup type information from {@link Document}s when reading them. Uses a * {@link DefaultMongoTypeMapper} by default. Setting this to {@literal null} will reset the {@link TypeMapper} to the * default one. - * - * @param typeMapper the typeMapper to set + * + * @param typeMapper the typeMapper to set. Can be {@literal null}. */ - public void setTypeMapper(MongoTypeMapper typeMapper) { - this.typeMapper = typeMapper == null - ? 
new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext) : typeMapper; + public void setTypeMapper(@Nullable MongoTypeMapper typeMapper) { + this.typeMapper = typeMapper; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoConverter#getTypeMapper() - */ @Override public MongoTypeMapper getTypeMapper() { - return this.typeMapper; + return this.typeMapper == null ? this.defaultTypeMapper : this.typeMapper; + } + + @Override + public ProjectionFactory getProjectionFactory() { + return projectionFactory; + } + + @Override + public CustomConversions getCustomConversions() { + return conversions; } /** - * Configure the characters dots potentially contained in a {@link Map} shall be replaced with. By default we don't do - * any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire + * Configure the characters dots potentially contained in a {@link Map} shall be replaced with. By default, we don't + * do any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire * object to fail. If further customization of the translation is needed, have a look at * {@link #potentiallyEscapeMapKey(String)} as well as {@link #potentiallyUnescapeMapKey(String)}. - * - * @param mapKeyDotReplacement the mapKeyDotReplacement to set + *
<p>
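/*
 * A minimal wiring sketch for the constructor shown above, assuming a MongoDatabaseFactory is at hand;
 * the databaseFactory parameter is a placeholder. MongoDatabaseFactory also acts as a
 * CodecRegistryProvider, which is why it can be handed to setCodecRegistryProvider(..) directly.
 */
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class ConverterWiring {

	static MappingMongoConverter create(MongoDatabaseFactory databaseFactory) {

		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.afterPropertiesSet();

		// Preferred constructor: DbRefResolver plus MappingContext, as introduced above.
		MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(databaseFactory),
				mappingContext);
		converter.setCodecRegistryProvider(databaseFactory);
		converter.afterPropertiesSet();

		return converter;
	}
}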
                    + * {@code mapKeyDotReplacement} is used as-is during replacement operations without further processing (i.e. regex or + * normalization). + * + * @param mapKeyDotReplacement the mapKeyDotReplacement to set. Can be {@literal null}. */ - public void setMapKeyDotReplacement(String mapKeyDotReplacement) { + public void setMapKeyDotReplacement(@Nullable String mapKeyDotReplacement) { this.mapKeyDotReplacement = mapKeyDotReplacement; } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.EntityConverter#getMappingContext() + /** + * If {@link #preserveMapKeys(boolean) preserve} is set to {@literal true} the conversion will treat map keys + * containing dot ({@literal .}) characters as is. + * + * @since 4.2 + * @see #setMapKeyDotReplacement(String) + */ + public void preserveMapKeys(boolean preserve) { + setMapKeyDotReplacement(preserve ? "." : null); + } + + /** + * Configure a {@link CodecRegistryProvider} that provides native MongoDB {@link org.bson.codecs.Codec codecs} for + * reading values. + * + * @param codecRegistryProvider can be {@literal null}. + * @since 2.2 */ + public void setCodecRegistryProvider(@Nullable CodecRegistryProvider codecRegistryProvider) { + this.codecRegistryProvider = codecRegistryProvider; + } + + @Override public MappingContext, MongoPersistentProperty> getMappingContext() { return mappingContext; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) - */ + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; + this.environment = applicationContext.getEnvironment(); this.spELContext = new SpELContext(this.spELContext, applicationContext); + this.projectionFactory.setBeanFactory(applicationContext); + this.projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); + + if (entityCallbacks == null) { + setEntityCallbacks(EntityCallbacks.create(applicationContext)); + } + + ClassLoader classLoader = applicationContext.getClassLoader(); + if (this.defaultTypeMapper instanceof BeanClassLoaderAware beanClassLoaderAware && classLoader != null) { + beanClassLoaderAware.setBeanClassLoader(classLoader); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoReader#read(java.lang.Class, com.mongodb.DBObject) + @Override + public Environment getEnvironment() { + + if (environment == null) { + environment = new StandardEnvironment(); + } + return environment; + } + + /** + * Set the {@link EntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link AfterConvertCallback}. + *
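/*
 * A sketch of the callback hook referenced above: an AfterConvertCallback runs once the converter has
 * materialized an entity. The AuditingCallback name and its pass-through body are illustrative; inside an
 * ApplicationContext the callbacks are discovered automatically by setApplicationContext(..) above.
 */
import org.bson.Document;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;

class AuditingCallback implements AfterConvertCallback<Object> {

	@Override
	public Object onAfterConvert(Object entity, Document document, String collection) {
		return entity; // inspect or replace the freshly converted entity here
	}
}

class CallbackWiring {

	static void apply(MappingMongoConverter converter) {
		converter.setEntityCallbacks(EntityCallbacks.create(new AuditingCallback()));
	}
}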
                    + * Overrides potentially existing {@link EntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 3.0 */ - public S read(Class clazz, final DBObject dbo) { - return read(ClassTypeInformation.from(clazz), dbo); + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; } - protected S read(TypeInformation type, DBObject dbo) { - return read(type, dbo, ObjectPath.ROOT); + @Override + public R project(EntityProjection projection, Bson bson) { + + if (!projection.isProjection()) { // backed by real object + + TypeInformation typeToRead = projection.getMappedType().getType().isInterface() ? projection.getDomainType() + : projection.getMappedType(); + return (R) read(typeToRead, bson); + } + + ProjectingConversionContext context = new ProjectingConversionContext(this, conversions, ObjectPath.ROOT, + this::readCollectionOrArray, this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead, + projection); + + return doReadProjection(context, bson, projection); } @SuppressWarnings("unchecked") - private S read(TypeInformation type, DBObject dbo, ObjectPath path) { + private R doReadProjection(ConversionContext context, Bson bson, EntityProjection projection) { - if (null == dbo) { - return null; - } + MongoPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(projection.getActualDomainType()); + TypeInformation mappedType = projection.getActualMappedType(); + MongoPersistentEntity mappedEntity = (MongoPersistentEntity) getMappingContext() + .getPersistentEntity(mappedType); + ValueExpressionEvaluator evaluator = expressionEvaluatorFactory.create(bson); - TypeInformation typeToUse = typeMapper.readType(dbo, type); - Class rawType = typeToUse.getType(); + boolean isInterfaceProjection = mappedType.getType().isInterface(); + if (isInterfaceProjection) { - if (conversions.hasCustomReadTarget(dbo.getClass(), rawType)) { - return conversionService.convert(dbo, rawType); + PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(mappedEntity); + DocumentAccessor documentAccessor = new DocumentAccessor(bson); + PersistentPropertyAccessor accessor = new MapPersistentPropertyAccessor(); + + PersistentPropertyAccessor convertingAccessor = PropertyTranslatingPropertyAccessor + .create(new ConvertingPropertyAccessor<>(accessor, conversionService), propertyTranslator); + MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(context, documentAccessor, + evaluator, spELContext); + + readProperties(context, entity, convertingAccessor, documentAccessor, valueProvider, evaluator, + (mongoPersistentProperties, mongoPersistentProperty) -> true); + return (R) projectionFactory.createProjection(mappedType.getType(), accessor.getBean()); } - if (DBObject.class.isAssignableFrom(rawType)) { - return (S) dbo; + // DTO projection + if (mappedEntity == null) { + throw new MappingException(String.format("No mapping metadata found for %s", mappedType.getType().getName())); } - if (typeToUse.isCollectionLike() && dbo instanceof BasicDBList) { - return (S) readCollectionOrArray(typeToUse, (BasicDBList) dbo, path); + // create target instance, merge metadata from underlying DTO type + PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(entity, + 
Predicates.negate(MongoPersistentProperty::hasExplicitFieldName)); + DocumentAccessor documentAccessor = new DocumentAccessor(bson) { + @Override + FieldName getFieldName(MongoPersistentProperty prop) { + return propertyTranslator.translate(prop).getMongoField().getName(); + } + }; + + InstanceCreatorMetadata instanceCreatorMetadata = mappedEntity + .getInstanceCreatorMetadata(); + ParameterValueProvider provider = instanceCreatorMetadata != null + && instanceCreatorMetadata.hasParameters() + ? getParameterProvider(context, mappedEntity, documentAccessor, evaluator) + : NoOpParameterValueProvider.INSTANCE; + + EntityInstantiator instantiator = instantiators.getInstantiatorFor(mappedEntity); + R instance = instantiator.createInstance(mappedEntity, provider); + PersistentPropertyAccessor accessor = mappedEntity.getPropertyAccessor(instance); + + populateProperties(context, mappedEntity, documentAccessor, evaluator, instance); + + return accessor.getBean(); + } + + private Object doReadOrProject(ConversionContext context, Bson source, TypeInformation typeHint, + EntityProjection typeDescriptor) { + + if (typeDescriptor.isProjection()) { + return doReadProjection(context, BsonUtils.asDocument(source), typeDescriptor); } - if (typeToUse.isMap()) { - return (S) readMap(typeToUse, dbo, path); + return readDocument(context, source, typeHint); + } + + static class MapPersistentPropertyAccessor implements PersistentPropertyAccessor> { + + Map map = new LinkedHashMap<>(); + + @Override + public void setProperty(PersistentProperty persistentProperty, Object o) { + map.put(persistentProperty.getName(), o); } - if (dbo instanceof BasicDBList) { - throw new MappingException(String.format(INCOMPATIBLE_TYPES, dbo, BasicDBList.class, typeToUse.getType(), path)); + @Override + public Object getProperty(PersistentProperty persistentProperty) { + return map.get(persistentProperty.getName()); } - // Retrieve persistent entity info - MongoPersistentEntity persistentEntity = (MongoPersistentEntity) mappingContext - .getPersistentEntity(typeToUse); - if (persistentEntity == null) { - throw new MappingException("No mapping metadata found for " + rawType.getName()); + @Override + public Map getBean() { + return map; } + } - return read(persistentEntity, dbo, path); + @Override + public S read(Class clazz, Bson bson) { + return read(TypeInformation.of(clazz), bson); } - private ParameterValueProvider getParameterProvider(MongoPersistentEntity entity, - DBObject source, DefaultSpELExpressionEvaluator evaluator, ObjectPath path) { + protected S read(TypeInformation type, Bson bson) { + return readDocument(getConversionContext(ObjectPath.ROOT), bson, type); + } - MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, path); - PersistentEntityParameterValueProvider parameterProvider = new PersistentEntityParameterValueProvider( - entity, provider, path.getCurrentObject()); + /** + * Conversion method to materialize an object from a {@link Bson document}. Can be overridden by subclasses. + * + * @param context must not be {@literal null} + * @param bson must not be {@literal null} + * @param typeHint the {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted object, will never be {@literal null}. 
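/*
 * A rough illustration of the interface-projection branch above: closed projections are served by a proxy
 * over the values collected in the MapPersistentPropertyAccessor. PersonSummary and the map content are
 * made up; SpelAwareProxyProjectionFactory is the factory type the converter itself holds.
 */
import java.util.Map;

import org.springframework.data.projection.SpelAwareProxyProjectionFactory;

interface PersonSummary {
	String getFirstname();
}

class ProjectionSketch {

	static PersonSummary summary() {
		SpelAwareProxyProjectionFactory factory = new SpelAwareProxyProjectionFactory();
		return factory.createProjection(PersonSummary.class, Map.of("firstname", "Walter"));
	}
}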
+ * @since 3.2 + */ + @SuppressWarnings("unchecked") + protected S readDocument(ConversionContext context, Bson bson, + TypeInformation typeHint) { - return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, - path); - } + Document document = bson instanceof BasicDBObject dbObject ? new Document(dbObject) : (Document) bson; + TypeInformation typeToRead = getTypeMapper().readType(document, typeHint); + Class rawType = typeToRead.getType(); - private S read(final MongoPersistentEntity entity, final DBObject dbo, final ObjectPath path) { + if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { + return doConvert(bson, rawType, typeHint.getType()); + } - final DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(dbo, spELContext); + if (Document.class.isAssignableFrom(rawType)) { + return (S) bson; + } - ParameterValueProvider provider = getParameterProvider(entity, dbo, evaluator, path); - EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity); - S instance = instantiator.createInstance(entity, provider); + if (DBObject.class.isAssignableFrom(rawType)) { - final PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor(entity.getPropertyAccessor(instance), - conversionService); + if (bson instanceof DBObject) { + return (S) bson; + } - final MongoPersistentProperty idProperty = entity.getIdProperty(); - final S result = instance; + if (bson instanceof Document doc) { + return (S) new BasicDBObject(doc); + } - // make sure id property is set before all other properties - Object idValue = null; + return (S) bson; + } - if (idProperty != null) { - idValue = getValueInternal(idProperty, dbo, evaluator, path); - accessor.setProperty(idProperty, idValue); + if (typeToRead.isMap()) { + return context.convert(bson, typeToRead); } - final ObjectPath currentPath = path.push(result, entity, - idValue != null ? dbo.get(idProperty.getFieldName()) : null); + if (BSON.isAssignableFrom(typeHint)) { + return (S) bson; + } - // Set properties not already set in the constructor - entity.doWithProperties(new PropertyHandler() { - public void doWithPersistentProperty(MongoPersistentProperty prop) { + MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToRead); - // we skip the id property since it was already set - if (idProperty != null && idProperty.equals(prop)) { - return; - } + if (entity == null) { - if (!dbo.containsField(prop.getFieldName()) || entity.isConstructorArgument(prop)) { - return; - } + if (codecRegistryProvider != null) { - accessor.setProperty(prop, getValueInternal(prop, dbo, evaluator, currentPath)); + Optional> codec = codecRegistryProvider.getCodecFor(rawType); + if (codec.isPresent()) { + return codec.get().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()); + } } - }); - - // Handle associations - entity.doWithAssociations(new AssociationHandler() { - public void doWithAssociation(Association association) { - final MongoPersistentProperty property = association.getInverse(); - Object value = dbo.get(property.getFieldName()); - - if (value == null || entity.isConstructorArgument(property)) { - return; - } + throw new MappingException(String.format(INVALID_TYPE_TO_READ, document, rawType)); + } - DBRef dbref = value instanceof DBRef ? 
(DBRef) value : null; + return read(context, (MongoPersistentEntity) entity, document); + } - DbRefProxyHandler handler = new DefaultDbRefProxyHandler(spELContext, mappingContext, - MappingMongoConverter.this); - DbRefResolverCallback callback = new DefaultDbRefResolverCallback(dbo, currentPath, evaluator, - MappingMongoConverter.this); + private ParameterValueProvider getParameterProvider(ConversionContext context, + MongoPersistentEntity entity, DocumentAccessor source, ValueExpressionEvaluator evaluator) { - accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); - } - }); + AssociationAwareMongoDbPropertyValueProvider provider = new AssociationAwareMongoDbPropertyValueProvider(context, + source, evaluator); + PersistentEntityParameterValueProvider parameterProvider = new PersistentEntityParameterValueProvider<>( + entity, provider, context.getPath().getCurrentObject()); - return result; + return new ConverterAwareValueExpressionParameterValueProvider(context, evaluator, conversionService, + parameterProvider); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoWriter#toDBRef(java.lang.Object, org.springframework.data.mongodb.core.mapping.MongoPersistentProperty) - */ - public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { + class EvaluatingDocumentAccessor extends DocumentAccessor implements ValueExpressionEvaluator { - org.springframework.data.mongodb.core.mapping.DBRef annotation = null; + /** + * Creates a new {@link DocumentAccessor} for the given {@link Document}. + * + * @param document must be a {@link Document} effectively, must not be {@literal null}. + */ + public EvaluatingDocumentAccessor(Bson document) { + super(document); + } - if (referingProperty != null) { - annotation = referingProperty.getDBRef(); - Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!"); + @Override + public T evaluate(String expression) { + return expressionEvaluatorFactory.create(getDocument()).evaluate(expression); } + } - // @see DATAMONGO-913 - if (object instanceof LazyLoadingProxy) { - return ((LazyLoadingProxy) object).toDBRef(); + private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { + + S existing = context.findContextualEntity(entity, bson); + if (existing != null) { + return existing; } - return createDBRef(object, referingProperty); + EvaluatingDocumentAccessor documentAccessor = new EvaluatingDocumentAccessor(bson); + InstanceCreatorMetadata instanceCreatorMetadata = entity.getInstanceCreatorMetadata(); + + ParameterValueProvider provider = instanceCreatorMetadata != null + && instanceCreatorMetadata.hasParameters() + ? getParameterProvider(context, entity, documentAccessor, documentAccessor) + : NoOpParameterValueProvider.INSTANCE; + + EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity); + S instance = instantiator.createInstance(entity, provider); + + return populateProperties(context, entity, documentAccessor, documentAccessor, instance); } - /** - * Root entry method into write conversion. Adds a type discriminator to the {@link DBObject}. Shouldn't be called for - * nested conversions. 
- * - * @see org.springframework.data.mongodb.core.core.convert.MongoWriter#write(java.lang.Object, com.mongodb.DBObject) - */ - public void write(final Object obj, final DBObject dbo) { + private S populateProperties(ConversionContext context, MongoPersistentEntity entity, + DocumentAccessor documentAccessor, ValueExpressionEvaluator evaluator, S instance) { - if (null == obj) { - return; + if (!entity.requiresPropertyPopulation()) { + return instance; } - Class entityType = obj.getClass(); - boolean handledByCustomConverter = conversions.getCustomWriteTarget(entityType, DBObject.class) != null; - TypeInformation type = ClassTypeInformation.from(entityType); + PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor<>(entity.getPropertyAccessor(instance), + conversionService); - if (!handledByCustomConverter && !(dbo instanceof BasicDBList)) { - typeMapper.writeType(type, dbo); - } + // Make sure id property is set before all other properties - Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).getTarget() : obj; + Object rawId = readAndPopulateIdentifier(context, accessor, documentAccessor, entity, evaluator); + ObjectPath currentPath = context.getPath().push(accessor.getBean(), entity, rawId); + ConversionContext contextToUse = context.withPath(currentPath); - writeInternal(target, dbo, type); + MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(contextToUse, documentAccessor, + evaluator, spELContext); + + readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator, PROPERTY_FILTER); + + return accessor.getBean(); } /** - * Internal write conversion method which should be used for nested invocations. - * - * @param obj - * @param dbo + * Reads the identifier from either the bean backing the {@link PersistentPropertyAccessor} or the source document in + * case the identifier has not be populated yet. In this case the identifier is set on the bean for further reference. 
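/*
 * A small usage sketch of the read path discussed above; Person is a made-up entity. The identifier is
 * populated before the remaining properties so that ObjectPath tracking and self-references can rely on it.
 */
import org.bson.Document;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

class Person {
	@Id String id;
	String firstname;
}

class ReadSketch {

	static Person read(MappingMongoConverter converter) {
		Document source = new Document("_id", "42").append("firstname", "Walter");
		return converter.read(Person.class, source);
	}
}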
*/ - @SuppressWarnings("unchecked") - protected void writeInternal(final Object obj, final DBObject dbo, final TypeInformation typeHint) { + @Nullable + private Object readAndPopulateIdentifier(ConversionContext context, PersistentPropertyAccessor accessor, + DocumentAccessor document, MongoPersistentEntity entity, ValueExpressionEvaluator evaluator) { - if (null == obj) { - return; - } - - Class entityType = obj.getClass(); - Class customTarget = conversions.getCustomWriteTarget(entityType, DBObject.class); + Object rawId = document.getRawId(entity); - if (customTarget != null) { - DBObject result = conversionService.convert(obj, DBObject.class); - dbo.putAll(result); - return; + if (!entity.hasIdProperty() || rawId == null) { + return rawId; } - if (Map.class.isAssignableFrom(entityType)) { - writeMapInternal((Map) obj, dbo, ClassTypeInformation.MAP); - return; - } + MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); - if (Collection.class.isAssignableFrom(entityType)) { - writeCollectionInternal((Collection) obj, ClassTypeInformation.LIST, (BasicDBList) dbo); - return; + if (idProperty.isImmutable() && entity.isCreatorArgument(idProperty)) { + return rawId; } - MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); - writeInternal(obj, dbo, entity); - addCustomTypeKeyIfNecessary(typeHint, obj, dbo); + accessor.setProperty(idProperty, readIdValue(context, evaluator, idProperty, rawId)); + + return rawId; } - protected void writeInternal(Object obj, final DBObject dbo, MongoPersistentEntity entity) { + @Nullable + private Object readIdValue(ConversionContext context, ValueExpressionEvaluator evaluator, + MongoPersistentProperty idProperty, Object rawId) { - if (obj == null) { - return; - } + String expression = idProperty.getSpelExpression(); + Object resolvedValue = expression != null ? evaluator.evaluate(expression) : rawId; - if (null == entity) { - throw new MappingException("No mapping metadata found for entity of type " + obj.getClass().getName()); - } + return resolvedValue != null + ? 
readValue(context.forProperty(idProperty), resolvedValue, idProperty.getTypeInformation()) + : null; + } - final PersistentPropertyAccessor accessor = entity.getPropertyAccessor(obj); - final MongoPersistentProperty idProperty = entity.getIdProperty(); + private void readProperties(ConversionContext context, MongoPersistentEntity entity, + PersistentPropertyAccessor accessor, DocumentAccessor documentAccessor, + MongoDbPropertyValueProvider valueProvider, ValueExpressionEvaluator evaluator, + BiPredicate, MongoPersistentProperty> propertyFilter) { - if (!dbo.containsField("_id") && null != idProperty) { + DbRefResolverCallback callback = null; - try { - Object id = accessor.getProperty(idProperty); - dbo.put("_id", idMapper.convertId(id)); - } catch (ConversionException ignored) {} - } + for (MongoPersistentProperty prop : entity) { - // Write the properties - entity.doWithProperties(new PropertyHandler() { - public void doWithPersistentProperty(MongoPersistentProperty prop) { + if (!propertyFilter.test(entity, prop)) { + continue; + } - if (prop.equals(idProperty) || !prop.isWritable()) { - return; - } + ConversionContext propertyContext = context.forProperty(prop); - Object propertyObj = accessor.getProperty(prop); + if (prop.isAssociation()) { - if (null != propertyObj) { + if (callback == null) { + callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator); + } - if (!conversions.isSimpleType(propertyObj.getClass())) { - writePropertyInternal(propertyObj, dbo, prop); - } else { - writeSimpleInternal(propertyObj, dbo, prop); - } + Object value = readAssociation(prop.getRequiredAssociation(), documentAccessor, dbRefProxyHandler, callback, + propertyContext); + + if (value != null) { + accessor.setProperty(prop, value); } + continue; } - }); - - entity.doWithAssociations(new AssociationHandler() { - public void doWithAssociation(Association association) { + if (prop.isUnwrapped()) { - MongoPersistentProperty inverseProp = association.getInverse(); - Object propertyObj = accessor.getProperty(inverseProp); + accessor.setProperty(prop, + readUnwrapped(propertyContext, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop))); + continue; + } - if (null != propertyObj) { - writePropertyInternal(propertyObj, dbo, inverseProp); - } + if (!documentAccessor.hasValue(prop)) { + continue; } - }); + + accessor.setProperty(prop, valueProvider.getPropertyValue(prop)); + } } - @SuppressWarnings({ "unchecked" }) - protected void writePropertyInternal(Object obj, DBObject dbo, MongoPersistentProperty prop) { + private DbRefResolverCallback getDbRefResolverCallback(ConversionContext context, DocumentAccessor documentAccessor, + ValueExpressionEvaluator evaluator) { - if (obj == null) { - return; - } + return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), context.getPath(), evaluator, + (prop, bson, e, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, e)); + } - DBObjectAccessor accessor = new DBObjectAccessor(dbo); + @Nullable + private Object readAssociation(Association association, DocumentAccessor documentAccessor, + DbRefProxyHandler handler, DbRefResolverCallback callback, ConversionContext context) { - TypeInformation valueType = ClassTypeInformation.from(obj.getClass()); - TypeInformation type = prop.getTypeInformation(); + MongoPersistentProperty property = association.getInverse(); + Object value = documentAccessor.get(property); - if (valueType.isCollectionLike()) { - DBObject collectionInternal = 
createCollection(asCollection(obj), prop); - accessor.put(prop, collectionInternal); - return; - } + if (property.isDocumentReference() + || !property.isDbReference() && property.findAnnotation(Reference.class) != null) { - if (valueType.isMap()) { - DBObject mapDbObj = createMap((Map) obj, prop); - accessor.put(prop, mapDbObj); - return; - } + // quite unusual but sounds like worth having? - if (prop.isDbReference()) { + if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { - DBRef dbRefObj = null; + if (value == null) { + return null; + } - /* - * If we already have a LazyLoadingProxy, we use it's cached DBRef value instead of - * unnecessarily initializing it only to convert it to a DBRef a few instructions later. - */ - if (obj instanceof LazyLoadingProxy) { - dbRefObj = ((LazyLoadingProxy) obj).toDBRef(); - } + DocumentPointer pointer = () -> value; - dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop); + // collection like special treatment + return conversionService.convert(pointer, property.getActualType()); + } else { - if (null != dbRefObj) { - accessor.put(prop, dbRefObj); - return; + return dbRefResolver.resolveReference(property, + new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), + referenceLookupDelegate, context.forProperty(property)::convert); } } - /* - * If we have a LazyLoadingProxy we make sure it is initialized first. - */ - if (obj instanceof LazyLoadingProxy) { - obj = ((LazyLoadingProxy) obj).getTarget(); + if (value == null) { + return null; } - // Lookup potential custom target type - Class basicTargetType = conversions.getCustomWriteTarget(obj.getClass(), null); + if (value instanceof DBRef dbref) { + return dbRefResolver.resolveDbRef(property, dbref, callback, handler); + } - if (basicTargetType != null) { - accessor.put(prop, conversionService.convert(obj, basicTargetType)); - return; + /* + * The value might be a pre resolved full document (eg. resulting from an aggregation $lookup). + * In this case we try to map that object to the target type without an additional step ($dbref resolution server roundtrip) + * in between. + */ + if (value instanceof Document document) { + if (property.isMap()) { + if (document.isEmpty() || peek(document.values()) instanceof DBRef) { + return dbRefResolver.resolveDbRef(property, null, callback, handler); + } else { + return readMap(context, document, property.getTypeInformation()); + } + } else { + return read(property.getActualType(), document); + } + } else if (value instanceof Collection collection && !collection.isEmpty() + && peek(collection) instanceof Document) { + return readCollectionOrArray(context, collection, property.getTypeInformation()); + } else { + return dbRefResolver.resolveDbRef(property, null, callback, handler); } + } - Object existingValue = accessor.get(prop); - BasicDBObject propDbObj = existingValue instanceof BasicDBObject ? (BasicDBObject) existingValue - : new BasicDBObject(); - addCustomTypeKeyIfNecessary(ClassTypeInformation.from(prop.getRawType()), obj, propDbObj); + @Nullable + private Object readUnwrapped(ConversionContext context, DocumentAccessor documentAccessor, + MongoPersistentProperty prop, MongoPersistentEntity unwrappedEntity) { - MongoPersistentEntity entity = isSubtype(prop.getType(), obj.getClass()) - ? 
mappingContext.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type); + if (prop.findAnnotation(Unwrapped.class).onEmpty().equals(OnEmpty.USE_EMPTY)) { + return read(context, unwrappedEntity, (Document) documentAccessor.getDocument()); + } - writeInternal(obj, propDbObj, entity); - accessor.put(prop, propDbObj); + for (MongoPersistentProperty persistentProperty : unwrappedEntity) { + if (documentAccessor.hasValue(persistentProperty)) { + return read(context, unwrappedEntity, (Document) documentAccessor.getDocument()); + } + } + return null; } - private boolean isSubtype(Class left, Class right) { - return left.isAssignableFrom(right) && !left.equals(right); - } + @Override + public DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referringProperty) { - /** - * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a - * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element - * collection for everything else. - * - * @param source - * @return - */ - private static Collection asCollection(Object source) { + org.springframework.data.mongodb.core.mapping.DBRef annotation; + + if (referringProperty != null) { + annotation = referringProperty.getDBRef(); + Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef"); + } - if (source instanceof Collection) { - return (Collection) source; + // DATAMONGO-913 + if (object instanceof LazyLoadingProxy proxy) { + return proxy.toDBRef(); } - return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); + return createDBRef(object, referringProperty); } - /** - * Writes the given {@link Collection} using the given {@link MongoPersistentProperty} information. - * - * @param collection must not be {@literal null}. - * @param property must not be {@literal null}. - * @return - */ - protected DBObject createCollection(Collection collection, MongoPersistentProperty property) { + @Override + public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { - if (!property.isDbReference()) { - return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList()); + if (source instanceof LazyLoadingProxy proxy) { + return proxy::getSource; } - BasicDBList dbList = new BasicDBList(); + Assert.notNull(referringProperty, "Cannot create DocumentReference; The referringProperty must not be null"); - for (Object element : collection) { - - if (element == null) { - continue; - } + if (referringProperty.isDbReference()) { + return () -> toDBRef(source, referringProperty); + } - DBRef dbRef = createDBRef(element, property); - dbList.add(dbRef); + if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) { + return createDocumentPointer(source, referringProperty); } - return dbList; + throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference"); } - /** - * Writes the given {@link Map} using the given {@link MongoPersistentProperty} information. - * - * @param map must not {@literal null}. - * @param property must not be {@literal null}. 
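/*
 * A mapping sketch contrasting the two reference styles handled above; Publisher and Book are made-up
 * types. @DBRef(lazy = true) produces the LazyLoadingProxy special-cased in toDBRef(..), while
 * @DocumentReference is resolved through toDocumentPointer(..) and the ReferenceLookupDelegate.
 */
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.DocumentReference;

class Publisher {
	@Id String id;
	String name;
}

class Book {

	@Id String id;

	@DBRef(lazy = true) Publisher publisher; // stored as a driver-level DBRef

	@DocumentReference List<Publisher> backCatalog; // stored as plain ids, linked via DocumentPointer
}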
- * @return - */ - protected DBObject createMap(Map map, MongoPersistentProperty property) { + DocumentPointer createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { - Assert.notNull(map, "Given map must not be null!"); - Assert.notNull(property, "PersistentProperty must not be null!"); + if (referringProperty == null) { + return () -> source; + } - if (!property.isDbReference()) { - return writeMapInternal(map, new BasicDBObject(), property.getTypeInformation()); + if (source instanceof DocumentPointer pointer) { + return pointer; } - BasicDBObject dbObject = new BasicDBObject(); + if (ClassUtils.isAssignableValue(referringProperty.getType(), source) + && conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) { + return conversionService.convert(source, DocumentPointer.class); + } - for (Map.Entry entry : map.entrySet()) { + if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) { + return documentPointerFactory.computePointer(mappingContext, referringProperty, source, + referringProperty.getActualType()); + } - Object key = entry.getKey(); - Object value = entry.getValue(); + return () -> source; + } - if (conversions.isSimpleType(key.getClass())) { + /** + * Root entry method into write conversion. Adds a type discriminator to the {@link Document}. Shouldn't be called for + * nested conversions. + * + * @see org.springframework.data.mongodb.core.convert.MongoWriter#write(java.lang.Object, java.lang.Object) + */ + @Override + public void write(Object obj, Bson bson) { + + if (null == obj) { + return; + } + + Class entityType = ClassUtils.getUserClass(obj.getClass()); + TypeInformation type = TypeInformation.of(entityType); + + Object target = obj instanceof LazyLoadingProxy proxy ? proxy.getTarget() : obj; + + writeInternal(target, bson, type); + BsonUtils.removeNullId(bson); + + if (requiresTypeHint(entityType)) { + getTypeMapper().writeType(type, bson); + } + } + + /** + * Check if a given type requires a type hint (aka {@literal _class} attribute) when writing to the document. + * + * @param type must not be {@literal null}. + * @return {@literal true} if not a simple type, {@link Collection} or type with custom write target. + */ + private boolean requiresTypeHint(Class type) { + + return !conversions.isSimpleType(type) && !ClassUtils.isAssignable(Collection.class, type) + && !conversions.hasCustomWriteTarget(type, Document.class); + } + + /** + * Internal write conversion method which should be used for nested invocations. 
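/*
 * A sketch of the type-hint rule introduced above, using a hypothetical entity instance: after write(..),
 * the sink carries the "_class" discriminator only if requiresTypeHint(..) holds, i.e. the value is not a
 * simple type, not a Collection and has no custom write target.
 */
import org.bson.Document;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

class TypeHintSketch {

	static Document write(MappingMongoConverter converter, Object entity) {

		Document sink = new Document();
		converter.write(entity, sink);

		// e.g. {"_class": "com.example.Person", ...} for a plain entity
		return sink;
	}
}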
+ */ + @SuppressWarnings("unchecked") + protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInformation typeHint) { + + if (null == obj) { + return; + } + + Class entityType = obj.getClass(); + Optional> customTarget = conversions.getCustomWriteTarget(entityType, Document.class); + + if (customTarget.isPresent()) { + Document result = doConvert(obj, Document.class); + BsonUtils.addAllToMap(bson, result); + return; + } + + if (Map.class.isAssignableFrom(entityType)) { + writeMapInternal((Map) obj, bson, TypeInformation.MAP); + return; + } + + if (Collection.class.isAssignableFrom(entityType)) { + writeCollectionInternal((Collection) obj, TypeInformation.LIST, (Collection) bson); + return; + } + + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(entityType); + writeInternal(obj, bson, entity); + addCustomTypeKeyIfNecessary(typeHint, obj, bson); + } + + protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable MongoPersistentEntity entity) { + + if (obj == null) { + return; + } + + if (null == entity) { + throw new MappingException("No mapping metadata found for entity of type " + obj.getClass().getName()); + } + + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(obj); + DocumentAccessor dbObjectAccessor = new DocumentAccessor(bson); + MongoPersistentProperty idProperty = entity.getIdProperty(); + + if (idProperty != null && !dbObjectAccessor.hasValue(idProperty)) { + + Object value = idMapper.convertId(accessor.getProperty(idProperty), idProperty.getFieldType()); + + if (value != null) { + dbObjectAccessor.put(idProperty, value); + } + } + + writeProperties(bson, entity, accessor, dbObjectAccessor, idProperty); + } + + private void writeProperties(Bson bson, MongoPersistentEntity entity, PersistentPropertyAccessor accessor, + DocumentAccessor dbObjectAccessor, @Nullable MongoPersistentProperty idProperty) { + + // Write the properties + for (MongoPersistentProperty prop : entity) { + + if (prop.equals(idProperty) || !prop.isWritable()) { + continue; + } + if (prop.isAssociation()) { + + writeAssociation(prop.getRequiredAssociation(), accessor, dbObjectAccessor); + continue; + } + + Object value = accessor.getProperty(prop); + + if (value == null) { + + if (conversions.hasValueConverter(prop)) { + dbObjectAccessor.put(prop, applyPropertyConversion(null, prop, accessor)); + } else { + dbObjectAccessor.put(prop, null); + } + } else if (!conversions.isSimpleType(value.getClass())) { + writePropertyInternal(value, dbObjectAccessor, prop, accessor); + } else { + writeSimpleInternal(value, bson, prop, accessor); + } + } + } + + private void writeAssociation(Association association, + PersistentPropertyAccessor accessor, DocumentAccessor dbObjectAccessor) { + + MongoPersistentProperty inverseProp = association.getInverse(); + + Object value = accessor.getProperty(inverseProp); + + if (value == null && !inverseProp.isUnwrapped() && inverseProp.writeNullValues()) { + dbObjectAccessor.put(inverseProp, null); + return; + } + + writePropertyInternal(value, dbObjectAccessor, inverseProp, accessor); + } + + @SuppressWarnings({ "unchecked" }) + void writePropertyInternal(@Nullable Object obj, DocumentAccessor accessor, MongoPersistentProperty prop, + PersistentPropertyAccessor persistentPropertyAccessor) { + + if (obj == null) { + return; + } + + TypeInformation valueType = TypeInformation.of(obj.getClass()); + TypeInformation type = prop.getTypeInformation(); + + if (conversions.hasValueConverter(prop)) { + accessor.put(prop, 
applyPropertyConversion(obj, prop, persistentPropertyAccessor)); + return; + } + + if (prop.isUnwrapped()) { + + Document target = new Document(); + writeInternal(obj, target, mappingContext.getPersistentEntity(prop)); + + accessor.putAll(target); + return; + } + + if (valueType.isCollectionLike()) { + + List collectionInternal = createCollection(BsonUtils.asCollection(obj), prop); + accessor.put(prop, collectionInternal); + return; + } + + if (valueType.isMap()) { + + Bson mapDbObj = createMap((Map) obj, prop); + accessor.put(prop, mapDbObj); + return; + } + + if (prop.isDbReference()) { + + DBRef dbRefObj = null; + + /* + * If we already have a LazyLoadingProxy, we use it's cached DBRef value instead of + * unnecessarily initializing it only to convert it to a DBRef a few instructions later. + */ + if (obj instanceof LazyLoadingProxy proxy) { + dbRefObj = proxy.toDBRef(); + } + + if (obj != null && conversions.hasCustomWriteTarget(obj.getClass())) { + accessor.put(prop, doConvert(obj, conversions.getCustomWriteTarget(obj.getClass()).get())); + return; + } + + dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop); + + accessor.put(prop, dbRefObj); + return; + } + + if (prop.isAssociation() && prop.isAnnotationPresent(Reference.class)) { + + accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext) + .computePointer(mappingContext, prop, obj, valueType.getType()).getPointer()); + return; + } + + /* + * If we have a LazyLoadingProxy we make sure it is initialized first. + */ + if (obj instanceof LazyLoadingProxy proxy) { + obj = proxy.getTarget(); + } + + // Lookup potential custom target type + Optional> basicTargetType = conversions.getCustomWriteTarget(obj.getClass()); + + if (basicTargetType.isPresent()) { + + accessor.put(prop, doConvert(obj, basicTargetType.get())); + return; + } + + MongoPersistentEntity entity = valueType.isSubTypeOf(prop.getType()) + ? mappingContext.getRequiredPersistentEntity(obj.getClass()) + : mappingContext.getRequiredPersistentEntity(type); + + Object existingValue = accessor.get(prop); + Document document = existingValue instanceof Document existingDocument ? existingDocument : new Document(); + + writeInternal(obj, document, entity); + addCustomTypeKeyIfNecessary(TypeInformation.of(prop.getRawType()), obj, document); + accessor.put(prop, document); + } + + /** + * Writes the given {@link Collection} using the given {@link MongoPersistentProperty} information. + * + * @param collection must not be {@literal null}. + * @param property must not be {@literal null}. 
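/*
 * A sketch of the property-level conversion branch above (conversions.hasValueConverter(prop)). The
 * EncryptingConverter name and its reverse-string "encryption" are stand-ins; whether annotated converters
 * are picked up automatically depends on the configured PropertyValueConversions, which the default
 * MongoCustomConversions provides.
 */
import org.springframework.data.convert.PropertyValueConverter;
import org.springframework.data.convert.ValueConverter;
import org.springframework.data.mongodb.core.convert.MongoConversionContext;

class EncryptingConverter implements PropertyValueConverter<String, String, MongoConversionContext> {

	@Override
	public String read(String value, MongoConversionContext context) {
		return new StringBuilder(value).reverse().toString(); // stand-in for real decryption
	}

	@Override
	public String write(String value, MongoConversionContext context) {
		return new StringBuilder(value).reverse().toString(); // stand-in for real encryption
	}
}

class Customer {
	@ValueConverter(EncryptingConverter.class) String ssn; // routed through applyPropertyConversion(..)
}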
+ */ + protected List createCollection(Collection collection, MongoPersistentProperty property) { + + if (!property.isDbReference()) { + + if (property.isAssociation()) { + + List targetCollection = collection.stream().map(it -> { + return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()) + .getPointer(); + }).collect(Collectors.toList()); + + return writeCollectionInternal(targetCollection, TypeInformation.of(DocumentPointer.class), + new ArrayList<>(targetCollection.size())); + } + + if (property.hasExplicitWriteTarget()) { + return writeCollectionInternal(collection, new FieldTypeInformation<>(property), + new ArrayList<>(collection.size())); + } + + return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>(collection.size())); + } + + List dbList = new ArrayList<>(collection.size()); + + for (Object element : collection) { + + if (element == null) { + continue; + } + + DBRef dbRef = createDBRef(element, property); + dbList.add(dbRef); + } + + return dbList; + } + + /** + * Writes the given {@link Map} using the given {@link MongoPersistentProperty} information. + * + * @param map must not {@literal null}. + * @param property must not be {@literal null}. + */ + protected Bson createMap(Map map, MongoPersistentProperty property) { + + Assert.notNull(map, "Given map must not be null"); + Assert.notNull(property, "PersistentProperty must not be null"); + + if (!property.isAssociation()) { + return writeMapInternal(map, new Document(), property.getTypeInformation()); + } + + Document document = new Document(); + + for (Map.Entry entry : map.entrySet()) { + + Object key = entry.getKey(); + Object value = entry.getValue(); + + if (conversions.isSimpleType(key.getClass())) { String simpleKey = prepareMapKey(key.toString()); - dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null); + if (property.isDbReference()) { + document.put(simpleKey, value != null ? createDBRef(value, property) : null); + } else { + document.put(simpleKey, documentPointerFactory + .computePointer(mappingContext, property, value, property.getActualType()).getPointer()); + } } else { - throw new MappingException("Cannot use a complex object as a key value."); + throw new MappingException("Cannot use a complex object as a key value"); } } - return dbObject; + return document; } /** - * Populates the given {@link BasicDBList} with values from the given {@link Collection}. - * - * @param source the collection to create a {@link BasicDBList} for, must not be {@literal null}. + * Populates the given {@link Collection sink} with converted values from the given {@link Collection source}. + * + * @param source the collection to create a {@link Collection} for, must not be {@literal null}. * @param type the {@link TypeInformation} to consider or {@literal null} if unknown. - * @param sink the {@link BasicDBList} to write to. - * @return + * @param sink the {@link Collection} to write to. */ - private BasicDBList writeCollectionInternal(Collection source, TypeInformation type, BasicDBList sink) { + @SuppressWarnings("unchecked") + private List writeCollectionInternal(Collection source, @Nullable TypeInformation type, + Collection sink) { - TypeInformation componentType = type == null ? null : type.getComponentType(); + TypeInformation componentType = null; + + List collection = sink instanceof List ? 
(List) sink : new ArrayList<>(sink); + + if (type != null) { + componentType = type.getComponentType(); + } for (Object element : source) { Class elementType = element == null ? null : element.getClass(); if (elementType == null || conversions.isSimpleType(elementType)) { - sink.add(getPotentiallyConvertedSimpleWrite(element)); + collection.add(getPotentiallyConvertedSimpleWrite(element, + componentType != null ? componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { - sink.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList())); + + Collection objects = BsonUtils.asCollection(element); + collection.add(writeCollectionInternal(objects, componentType, new ArrayList<>(objects.size()))); } else { - BasicDBObject propDbObj = new BasicDBObject(); - writeInternal(element, propDbObj, componentType); - sink.add(propDbObj); + Document document = new Document(); + writeInternal(element, document, componentType); + collection.add(document); } } - return sink; + return collection; } /** - * Writes the given {@link Map} to the given {@link DBObject} considering the given {@link TypeInformation}. - * + * Writes the given {@link Map} to the given {@link Document} considering the given {@link TypeInformation}. + * * @param obj must not be {@literal null}. - * @param dbo must not be {@literal null}. + * @param bson must not be {@literal null}. * @param propertyType must not be {@literal null}. - * @return */ - protected DBObject writeMapInternal(Map obj, DBObject dbo, TypeInformation propertyType) { + protected Bson writeMapInternal(Map obj, Bson bson, TypeInformation propertyType) { for (Map.Entry entry : obj.entrySet()) { @@ -653,35 +1207,34 @@ protected DBObject writeMapInternal(Map obj, DBObject dbo, TypeI String simpleKey = prepareMapKey(key); if (val == null || conversions.isSimpleType(val.getClass())) { - writeSimpleInternal(val, dbo, simpleKey); + writeSimpleInternal(val, bson, simpleKey); } else if (val instanceof Collection || val.getClass().isArray()) { - dbo.put(simpleKey, - writeCollectionInternal(asCollection(val), propertyType.getMapValueType(), new BasicDBList())); + BsonUtils.addToMap(bson, simpleKey, + writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new ArrayList<>())); } else { - DBObject newDbo = new BasicDBObject(); + Document document = new Document(); TypeInformation valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType() - : ClassTypeInformation.OBJECT; - writeInternal(val, newDbo, valueTypeInfo); - dbo.put(simpleKey, newDbo); + : TypeInformation.OBJECT; + writeInternal(val, document, valueTypeInfo); + BsonUtils.addToMap(bson, simpleKey, document); } } else { - throw new MappingException("Cannot use a complex object as a key value."); + throw new MappingException("Cannot use a complex object as a key value"); } } - return dbo; + return bson; } /** * Prepares the given {@link Map} key to be converted into a {@link String}. Will invoke potentially registered custom * conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB. - * + * * @param key must not be {@literal null}. 
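/*
 * A behavioral sketch of the map-key escaping above; the "~" replacement is an arbitrary choice. Without
 * a configured replacement, prepareMapKey(..) rejects dotted keys and the whole write fails.
 */
import java.util.Map;

import org.bson.Document;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

class MapKeySketch {

	static Document write(MappingMongoConverter converter) {

		converter.setMapKeyDotReplacement("~");

		Document sink = new Document();
		converter.write(Map.of("a.b", "value"), sink);

		// The entry is stored under "a~b" and unescaped back to "a.b" on read;
		// converter.preserveMapKeys(true) keeps the dot as-is instead (since 4.2).
		return sink;
	}
}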
- * @return */ private String prepareMapKey(Object key) { - Assert.notNull(key, "Map key must not be null!"); + Assert.notNull(key, "Map key must not be null"); String convertedKey = potentiallyConvertMapKey(key); return potentiallyEscapeMapKey(convertedKey); @@ -690,10 +1243,9 @@ private String prepareMapKey(Object key) { /** * Potentially replaces dots in the given map key with the configured map key replacement if configured or aborts * conversion if none is configured. - * + * * @see #setMapKeyDotReplacement(String) - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyEscapeMapKey(String source) { @@ -703,336 +1255,407 @@ protected String potentiallyEscapeMapKey(String source) { if (mapKeyDotReplacement == null) { throw new MappingException(String.format( - "Map key %s contains dots but no replacement was configured! Make " - + "sure map keys don't contain dots in the first place or configure an appropriate replacement!", + "Map key %s contains dots but no replacement was configured; Make" + + " sure map keys don't contain dots in the first place or configure an appropriate replacement", source)); } - return source.replaceAll("\\.", mapKeyDotReplacement); + return StringUtils.replace(source, ".", mapKeyDotReplacement); } /** * Returns a {@link String} representation of the given {@link Map} key - * + * * @param key - * @return */ private String potentiallyConvertMapKey(Object key) { - if (key instanceof String) { - return (String) key; + if (key instanceof String stringValue) { + return stringValue; } return conversions.hasCustomWriteTarget(key.getClass(), String.class) - ? (String) getPotentiallyConvertedSimpleWrite(key) : key.toString(); + ? (String) getPotentiallyConvertedSimpleWrite(key, Object.class) + : key.toString(); } /** * Translates the map key replacements in the given key just read with a dot in case a map key replacement has been * configured. - * - * @param source - * @return + * + * @param source must not be {@literal null}. */ protected String potentiallyUnescapeMapKey(String source) { - return mapKeyDotReplacement == null ? source : source.replaceAll(mapKeyDotReplacement, "\\."); + return mapKeyDotReplacement == null ? source : StringUtils.replace(source, mapKeyDotReplacement, "."); } /** - * Adds custom type information to the given {@link DBObject} if necessary. That is if the value is not the same as + * Adds custom type information to the given {@link Document} if necessary. That is if the value is not the same as * the one given. This is usually the case if you store a subtype of the actual declared type of the property. * - * @param type + * @param type can be {@literal null}. * @param value must not be {@literal null}. - * @param dbObject must not be {@literal null}. + * @param bson must not be {@literal null}. */ - protected void addCustomTypeKeyIfNecessary(TypeInformation type, Object value, DBObject dbObject) { + protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Object value, Bson bson) { - TypeInformation actualType = type != null ? type.getActualType() : null; - Class reference = actualType == null ? Object.class : actualType.getType(); + Class reference = type != null ? 
type.getRequiredActualType().getType() : Object.class; Class valueType = ClassUtils.getUserClass(value.getClass()); boolean notTheSameClass = !valueType.equals(reference); if (notTheSameClass) { - typeMapper.writeType(valueType, dbObject); + getTypeMapper().writeType(valueType, bson); } } /** - * Writes the given simple value to the given {@link DBObject}. Will store enum names for enum values. - * - * @param value - * @param dbObject must not be {@literal null}. + * Writes the given simple value to the given {@link Document}. Will store enum names for enum values. + * + * @param value can be {@literal null}. + * @param bson must not be {@literal null}. * @param key must not be {@literal null}. */ - private void writeSimpleInternal(Object value, DBObject dbObject, String key) { - dbObject.put(key, getPotentiallyConvertedSimpleWrite(value)); + private void writeSimpleInternal(@Nullable Object value, Bson bson, String key) { + BsonUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); + } + + private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property, + PersistentPropertyAccessor persistentPropertyAccessor) { + + DocumentAccessor accessor = new DocumentAccessor(bson); + + if (conversions.hasValueConverter(property)) { + accessor.put(property, applyPropertyConversion(value, property, persistentPropertyAccessor)); + return; + } + + accessor.put(property, getPotentiallyConvertedSimpleWrite(value, + property.hasExplicitWriteTarget() ? property.getFieldType() : Object.class)); } - private void writeSimpleInternal(Object value, DBObject dbObject, MongoPersistentProperty property) { - DBObjectAccessor accessor = new DBObjectAccessor(dbObject); - accessor.put(property, getPotentiallyConvertedSimpleWrite(value)); + @Nullable + @SuppressWarnings("unchecked") + private Object applyPropertyConversion(@Nullable Object value, MongoPersistentProperty property, + PersistentPropertyAccessor persistentPropertyAccessor) { + MongoConversionContext context = new MongoConversionContext(new PropertyValueProvider<>() { + + @Nullable + @Override + public T getPropertyValue(MongoPersistentProperty property) { + return (T) persistentPropertyAccessor.getProperty(property); + } + }, property, this, spELContext); + PropertyValueConverter> valueConverter = conversions + .getPropertyValueConversions().getValueConverter(property); + return value != null ? valueConverter.write(value, context) : valueConverter.writeNull(context); } /** * Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type. * Returns the converted value if so. If not, we perform special enum handling or simply return the value as is. - * - * @param value - * @return */ - private Object getPotentiallyConvertedSimpleWrite(Object value) { + @Nullable + private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class typeHint) { if (value == null) { return null; } - Class customTarget = conversions.getCustomWriteTarget(value.getClass(), null); + if (typeHint != null && Object.class != typeHint) { - if (customTarget != null) { - return conversionService.convert(value, customTarget); - } else { - return Enum.class.isAssignableFrom(value.getClass()) ? 
((Enum) value).name() : value; + if (conversionService.canConvert(value.getClass(), typeHint)) { + value = doConvert(value, typeHint); + } + } + + Optional> customTarget = conversions.getCustomWriteTarget(value.getClass()); + + if (customTarget.isPresent()) { + return doConvert(value, customTarget.get()); + } + + if (ObjectUtils.isArray(value)) { + + if (value instanceof byte[]) { + return value; + } + return BsonUtils.asCollection(value); } + + return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; + } + + /** + * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies + * {@link Enum} handling or returns the value as is. Can be overridden by subclasses. + * + * @since 3.2 + */ + protected Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation target) { + return getPotentiallyConvertedSimpleRead(value, target.getType()); } /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies * {@link Enum} handling or returns the value as is. - * - * @param value - * @param target must not be {@literal null}. - * @return */ @SuppressWarnings({ "rawtypes", "unchecked" }) - private Object getPotentiallyConvertedSimpleRead(Object value, Class target) { + private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class target) { - if (value == null || target == null || target.isAssignableFrom(value.getClass())) { + if (target == null) { return value; } if (conversions.hasCustomReadTarget(value.getClass(), target)) { - return conversionService.convert(value, target); + return doConvert(value, target); + } + + if (ClassUtils.isAssignableValue(target, value)) { + return value; } if (Enum.class.isAssignableFrom(target)) { return Enum.valueOf((Class) target, value.toString()); } - return conversionService.convert(value, target); + return doConvert(value, target); } - protected DBRef createDBRef(Object target, MongoPersistentProperty property) { + protected DBRef createDBRef(Object target, @Nullable MongoPersistentProperty property) { - Assert.notNull(target); + Assert.notNull(target, "Target object must not be null"); - if (target instanceof DBRef) { - return (DBRef) target; + if (target instanceof DBRef dbRef) { + return dbRef; } MongoPersistentEntity targetEntity = mappingContext.getPersistentEntity(target.getClass()); - targetEntity = targetEntity == null ? targetEntity = mappingContext.getPersistentEntity(property) : targetEntity; + targetEntity = targetEntity != null ? targetEntity : mappingContext.getPersistentEntity(property); if (null == targetEntity) { throw new MappingException("No mapping metadata found for " + target.getClass()); } - MongoPersistentProperty idProperty = targetEntity.getIdProperty(); + MongoPersistentEntity entity = targetEntity; - if (idProperty == null) { - throw new MappingException("No id property found on class " + targetEntity.getType()); - } + MongoPersistentProperty idProperty = entity.getIdProperty(); - Object id = null; + if (idProperty != null) { - if (target.getClass().equals(idProperty.getType())) { - id = target; - } else { - PersistentPropertyAccessor accessor = targetEntity.getPropertyAccessor(target); - id = accessor.getProperty(idProperty); - } + Object id = target.getClass().equals(idProperty.getType()) ? 
target
+ : entity.getPropertyAccessor(target).getProperty(idProperty);
+
+ if (null == id) {
+ throw new MappingException("Cannot create a reference to an object with a NULL id");
+ }
- if (null == id) {
- throw new MappingException("Cannot create a reference to an object with a NULL id.");
+ return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity,
+ idMapper.convertId(id, idProperty != null ? idProperty.getFieldType() : ObjectId.class));
}
- return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), targetEntity,
- idMapper.convertId(id));
+ throw new MappingException("No id property found on class " + entity.getType());
}
- /*
- * (non-Javadoc)
- * @see org.springframework.data.mongodb.core.convert.ValueResolver#getValueInternal(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, com.mongodb.DBObject, org.springframework.data.mapping.model.SpELExpressionEvaluator, java.lang.Object)
- */
- @Override
- public Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator,
- ObjectPath path) {
- return new MongoDbPropertyValueProvider(dbo, evaluator, path).getPropertyValue(prop);
+ @Nullable
+ private Object getValueInternal(ConversionContext context, MongoPersistentProperty prop, Bson bson,
+ ValueExpressionEvaluator evaluator) {
+ return new MongoDbPropertyValueProvider(context, bson, evaluator).getPropertyValue(prop);
}

/**
- * Reads the given {@link BasicDBList} into a collection of the given {@link TypeInformation}.
- *
- * @param targetType must not be {@literal null}.
- * @param sourceValue must not be {@literal null}.
- * @param path must not be {@literal null}.
+ * Reads the given {@link Collection} into a collection of the given {@link TypeInformation}. Can be overridden by
+ * subclasses.
+ *
+ * @param context must not be {@literal null}
+ * @param source must not be {@literal null}
+ * @param targetType the {@link Collection} {@link TypeInformation} to be used to unmarshall the source {@link Collection}.
+ * @since 3.2
* @return the converted {@link Collection} or array, will never be {@literal null}.
*/
- private Object readCollectionOrArray(TypeInformation targetType, BasicDBList sourceValue, ObjectPath path) {
+ @SuppressWarnings("unchecked")
+ protected Object readCollectionOrArray(ConversionContext context, Collection source,
+ TypeInformation targetType) {
- Assert.notNull(targetType, "Target type must not be null!");
- Assert.notNull(path, "Object path must not be null!");
+ Assert.notNull(targetType, "Target type must not be null");
- Class collectionType = targetType.getType();
+ Class collectionType = targetType.isSubTypeOf(Collection.class) //
+ ? targetType.getType() //
+ : List.class;
- if (sourceValue.isEmpty()) {
- return getPotentiallyConvertedSimpleRead(new HashSet(), collectionType);
- }
+ TypeInformation componentType = targetType.getComponentType() != null //
+ ? targetType.getComponentType() //
+ : TypeInformation.OBJECT;
+ Class rawComponentType = componentType.getType();
- TypeInformation componentType = targetType.getComponentType();
- Class rawComponentType = componentType == null ? null : componentType.getType();
+ Collection items = targetType.getType().isArray() //
+ ? new ArrayList<>(source.size()) //
+ : CollectionFactory.createCollection(collectionType, rawComponentType, source.size());
- collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
- Collection items = targetType.getType().isArray() ?
new ArrayList()
- : CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());
+ if (source.isEmpty()) {
+ return getPotentiallyConvertedSimpleRead(items, targetType.getType());
+ }
- for (int i = 0; i < sourceValue.size(); i++) {
+ if (!DBRef.class.equals(rawComponentType) && isCollectionOfDbRefWhereBulkFetchIsPossible(source)) {
- Object dbObjItem = sourceValue.get(i);
+ List objects = bulkReadAndConvertDBRefs(context, (List) source, componentType);
+ return getPotentiallyConvertedSimpleRead(objects, targetType.getType());
+ }
- if (dbObjItem instanceof DBRef) {
- items.add(
- DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem), path));
- } else if (dbObjItem instanceof DBObject) {
- items.add(read(componentType, (DBObject) dbObjItem, path));
- } else {
- items.add(getPotentiallyConvertedSimpleRead(dbObjItem, rawComponentType));
- }
+ for (Object element : source) {
+ items.add(element != null ? context.convert(element, componentType) : element);
}

return getPotentiallyConvertedSimpleRead(items, targetType.getType());
}

/**
- * Reads the given {@link DBObject} into a {@link Map}. will recursively resolve nested {@link Map}s as well.
- *
- * @param type the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link DBObject}.
- * @param dbObject must not be {@literal null}
- * @param path must not be {@literal null}
- * @return
+ * Reads the given {@link Document} into a {@link Map}. Will recursively resolve nested {@link Map}s as well. Can be
+ * overridden by subclasses.
+ *
+ * @param context must not be {@literal null}
+ * @param bson must not be {@literal null}
+ * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}.
+ * @return the converted {@link Map}, will never be {@literal null}.
+ * @since 3.2
*/
- @SuppressWarnings("unchecked")
- protected Map readMap(TypeInformation type, DBObject dbObject, ObjectPath path) {
+ protected Map readMap(ConversionContext context, Bson bson, TypeInformation targetType) {
- Assert.notNull(dbObject, "DBObject must not be null!");
- Assert.notNull(path, "Object path must not be null!");
+ Assert.notNull(bson, "Document must not be null");
+ Assert.notNull(targetType, "TypeInformation must not be null");
- Class mapType = typeMapper.readType(dbObject, type).getType();
+ Class mapType = getTypeMapper().readType(bson, targetType).getType();
- TypeInformation keyType = type.getComponentType();
- Class rawKeyType = keyType == null ? null : keyType.getType();
+ TypeInformation keyType = targetType.getComponentType();
+ TypeInformation valueType = targetType.getMapValueType() == null ? TypeInformation.OBJECT
+ : targetType.getRequiredMapValueType();
- TypeInformation valueType = type.getMapValueType();
- Class rawValueType = valueType == null ? null : valueType.getType();
+ Class rawKeyType = keyType != null ?
keyType.getType() : Object.class; + Class rawValueType = valueType.getType(); - Map map = CollectionFactory.createMap(mapType, rawKeyType, dbObject.keySet().size()); - Map sourceMap = dbObject.toMap(); + Map sourceMap = BsonUtils.asMap(bson); + Map map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size()); - for (Entry entry : sourceMap.entrySet()) { - if (typeMapper.isTypeKey(entry.getKey())) { - continue; - } + if (!DBRef.class.equals(rawValueType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceMap.values())) { + bulkReadAndConvertDBRefMapIntoTarget(context, valueType, sourceMap, map); + return map; + } - Object key = potentiallyUnescapeMapKey(entry.getKey()); + sourceMap.forEach((k, v) -> { - if (rawKeyType != null) { - key = conversionService.convert(key, rawKeyType); + if (getTypeMapper().isTypeKey(k)) { + return; } - Object value = entry.getValue(); + Object key = potentiallyUnescapeMapKey(k); - if (value instanceof DBObject) { - map.put(key, read(valueType, (DBObject) value, path)); - } else if (value instanceof DBRef) { - map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, readRef((DBRef) value))); - } else { - Class valueClass = valueType == null ? null : valueType.getType(); - map.put(key, getPotentiallyConvertedSimpleRead(value, valueClass)); + if (!rawKeyType.isAssignableFrom(key.getClass())) { + key = doConvert(key, rawKeyType); } - } + + map.put(key, v == null ? v : context.convert(v, valueType)); + }); return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object, org.springframework.data.util.TypeInformation) - */ + @Nullable @SuppressWarnings("unchecked") - public Object convertToMongoType(Object obj, TypeInformation typeInformation) { + @Override + public Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation typeInformation) { if (obj == null) { return null; } - Class target = conversions.getCustomWriteTarget(obj.getClass()); - if (target != null) { - return conversionService.convert(obj, target); + Optional> target = conversions.getCustomWriteTarget(obj.getClass()); + if (target.isPresent()) { + return doConvert(obj, target.get()); } if (conversions.isSimpleType(obj.getClass())) { - // Doesn't need conversion - return getPotentiallyConvertedSimpleWrite(obj); + + Class conversionTargetType; + + if (typeInformation != null && conversions.isSimpleType(typeInformation.getType())) { + conversionTargetType = typeInformation.getType(); + } else { + conversionTargetType = Object.class; + } + + return getPotentiallyConvertedSimpleWrite(obj, conversionTargetType); + } + + if (obj instanceof List list) { + return maybeConvertList(list, typeInformation); } - TypeInformation typeHint = typeInformation; + if (obj instanceof Document document) { - if (obj instanceof BasicDBList) { - return maybeConvertList((BasicDBList) obj, typeHint); + Document newValueDocument = new Document(); + for (String vk : document.keySet()) { + Object o = document.get(vk); + newValueDocument.put(vk, convertToMongoType(o, typeInformation)); + } + return newValueDocument; } - if (obj instanceof DBObject) { - DBObject newValueDbo = new BasicDBObject(); - for (String vk : ((DBObject) obj).keySet()) { - Object o = ((DBObject) obj).get(vk); - newValueDbo.put(vk, convertToMongoType(o, typeHint)); + if (obj instanceof DBObject dbObject) { + + Document newValueDbo = new Document(); + for (String vk : dbObject.keySet()) { + + Object o = dbObject.get(vk); + 
newValueDbo.put(vk, convertToMongoType(o, typeInformation)); } + return newValueDbo; } if (obj instanceof Map) { - DBObject result = new BasicDBObject(); + + Document result = new Document(); + for (Map.Entry entry : ((Map) obj).entrySet()) { - result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint)); + result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeInformation)); } + return result; } if (obj.getClass().isArray()) { - return maybeConvertList(Arrays.asList((Object[]) obj), typeHint); + return maybeConvertList(Arrays.asList((Object[]) obj), typeInformation); } - if (obj instanceof Collection) { - return maybeConvertList((Collection) obj, typeHint); + if (obj instanceof Collection collection) { + return maybeConvertList(collection, typeInformation); } - DBObject newDbo = new BasicDBObject(); - this.write(obj, newDbo); + Document newDocument = new Document(); + this.write(obj, newDocument); if (typeInformation == null) { - return removeTypeInfo(newDbo, true); + return removeTypeInfo(newDocument, true); } if (typeInformation.getType().equals(NestedDocument.class)) { - return removeTypeInfo(newDbo, false); + return removeTypeInfo(newDocument, false); } - return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfo(newDbo, true); + return !obj.getClass().equals(typeInformation.getType()) ? newDocument : removeTypeInfo(newDocument, true); } - public BasicDBList maybeConvertList(Iterable source, TypeInformation typeInformation) { + @Override + public Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { + Document newDocument = new Document(); + writeInternal(obj, newDocument, entity); + return newDocument; + } + + // TODO: hide in 5.0 + public List maybeConvertList(Iterable source, @Nullable TypeInformation typeInformation) { + + List newDbl = new ArrayList<>(); - BasicDBList newDbl = new BasicDBList(); for (Object element : source) { newDbl.add(convertToMongoType(element, typeInformation)); } @@ -1042,36 +1665,40 @@ public BasicDBList maybeConvertList(Iterable source, TypeInformation typeI /** * Removes the type information from the entire conversion result. 
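+ * <p>
+ * Editorial sketch (illustrative, not part of this change): the "type information" removed here is the type key the
+ * converter writes into documents, by default {@code _class}. With recursive removal, a hypothetical value such as
+ * <pre>{@code
+ * {"_id": 1, "name": "Jon", "_class": "com.example.Person"}
+ * }</pre>
+ * would be reduced to {@code {"_id": 1, "name": "Jon"}}.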
- * + * * @param object * @param recursively whether to apply the removal recursively * @return */ + @SuppressWarnings("unchecked") private Object removeTypeInfo(Object object, boolean recursively) { - if (!(object instanceof DBObject)) { + if (!(object instanceof Document document)) { return object; } - DBObject dbObject = (DBObject) object; String keyToRemove = null; - for (String key : dbObject.keySet()) { + for (String key : document.keySet()) { if (recursively) { - Object value = dbObject.get(key); + Object value = document.get(key); if (value instanceof BasicDBList) { for (Object element : (BasicDBList) value) { removeTypeInfo(element, recursively); } + } else if (value instanceof List) { + for (Object element : (List) value) { + removeTypeInfo(element, recursively); + } } else { removeTypeInfo(value, recursively); } } - if (typeMapper.isTypeKey(key)) { + if (getTypeMapper().isTypeKey(key)) { keyToRemove = key; @@ -1082,143 +1709,842 @@ private Object removeTypeInfo(Object object, boolean recursively) { } if (keyToRemove != null) { - dbObject.removeField(keyToRemove); + document.remove(keyToRemove); } - return dbObject; + return document; + } + + @Nullable + @SuppressWarnings("unchecked") + T readValue(ConversionContext context, @Nullable Object value, TypeInformation type) { + + if (value == null) { + return null; + } + + Assert.notNull(type, "TypeInformation must not be null"); + + Class rawType = type.getType(); + + if (conversions.hasCustomReadTarget(value.getClass(), rawType)) { + return (T) doConvert(value, rawType); + } else if (value instanceof DBRef dbRef) { + return (T) readDBRef(context, dbRef, type); + } + + return (T) context.convert(value, type); + } + + @Nullable + private Object readDBRef(ConversionContext context, @Nullable DBRef dbref, TypeInformation type) { + + if (type.getType().equals(DBRef.class)) { + return dbref; + } + + ObjectPath path = context.getPath(); + + Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), type.getType()); + if (object != null) { + return object; + } + + List result = bulkReadAndConvertDBRefs(context, Collections.singletonList(dbref), type); + return CollectionUtils.isEmpty(result) ? null : peek(result); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void bulkReadAndConvertDBRefMapIntoTarget(ConversionContext context, TypeInformation valueType, + Map sourceMap, Map targetMap) { + + LinkedHashMap referenceMap = new LinkedHashMap<>(sourceMap); + List convertedObjects = bulkReadAndConvertDBRefs(context.withPath(ObjectPath.ROOT), + (List) new ArrayList(referenceMap.values()), valueType); + int index = 0; + + for (String key : referenceMap.keySet()) { + targetMap.put(key, convertedObjects.get(index)); + index++; + } + } + + @SuppressWarnings("unchecked") + private List bulkReadAndConvertDBRefs(ConversionContext context, List dbrefs, TypeInformation type) { + + if (CollectionUtils.isEmpty(dbrefs)) { + return Collections.emptyList(); + } + + List referencedRawDocuments = dbrefs.size() == 1 ? 
Collections.singletonList(readRef(peek(dbrefs))) + : bulkReadRefs(dbrefs); + String collectionName = peek(dbrefs).getCollectionName(); + + List targetList = new ArrayList<>(dbrefs.size()); + + for (Document document : referencedRawDocuments) { + + T target = null; + if (document != null) { + + maybeEmitEvent(new AfterLoadEvent<>(document, (Class) type.getType(), collectionName)); + target = (T) readDocument(context, document, type); + } + + if (target != null) { + maybeEmitEvent(new AfterConvertEvent<>(document, target, collectionName)); + target = maybeCallAfterConvert(target, document, collectionName); + } + + targetList.add(target); + } + + return targetList; + } + + private void maybeEmitEvent(MongoMappingEvent event) { + + if (canPublishEvent()) { + this.applicationContext.publishEvent(event); + } + } + + private boolean canPublishEvent() { + return this.applicationContext != null; + } + + protected T maybeCallAfterConvert(T object, Document document, String collection) { + + if (null != entityCallbacks) { + return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection); + } + + return object; + } + + /** + * Performs the fetch operation for the given {@link DBRef}. + * + * @param ref + * @return + */ + @Nullable + Document readRef(DBRef ref) { + return dbRefResolver.fetch(ref); + } + + /** + * Performs a bulk fetch operation for the given {@link DBRef}s. + * + * @param references must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + List bulkReadRefs(List references) { + return dbRefResolver.bulkFetch(references); + } + + /** + * Get the conversion target type if defined or return the {@literal source}. + * + * @param source must not be {@literal null}. + * @return + * @since 2.2 + */ + public Class getWriteTarget(Class source) { + return conversions.getCustomWriteTarget(source).orElse(source); + } + + @Override + public CodecRegistry getCodecRegistry() { + return codecRegistryProvider != null ? codecRegistryProvider.getCodecRegistry() : super.getCodecRegistry(); + } + + /** + * Create a new {@link MappingMongoConverter} using the given {@link MongoDatabaseFactory} when loading {@link DBRef}. + * + * @return new instance of {@link MappingMongoConverter}. Never {@literal null}. + * @since 2.1.6 + */ + public MappingMongoConverter with(MongoDatabaseFactory dbFactory) { + + MappingMongoConverter target = new MappingMongoConverter(new DefaultDbRefResolver(dbFactory), mappingContext); + target.applicationContext = applicationContext; + target.conversions = conversions; + target.spELContext = spELContext; + target.setInstantiators(instantiators); + target.defaultTypeMapper = defaultTypeMapper; + target.typeMapper = typeMapper; + target.setCodecRegistryProvider(dbFactory); + target.afterPropertiesSet(); + + return target; + } + + private T doConvert(Object value, Class target) { + return doConvert(value, target, null); + } + + @SuppressWarnings("ConstantConditions") + private T doConvert(Object value, Class target, + @Nullable Class fallback) { + + if (conversionService.canConvert(value.getClass(), target) || fallback == null) { + return conversionService.convert(value, target); + } + return conversionService.convert(value, fallback); + } + + /** + * Returns whether the given {@link Iterable} contains {@link DBRef} instances all pointing to the same collection. + * + * @param source must not be {@literal null}. 
+ * @return + */ + private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable source) { + + Assert.notNull(source, "Iterable of DBRefs must not be null"); + + Set collectionsFound = new HashSet<>(); + + for (Object dbObjItem : source) { + + if (!(dbObjItem instanceof DBRef dbRef)) { + return false; + } + + collectionsFound.add(dbRef.getCollectionName()); + + if (collectionsFound.size() > 1) { + return false; + } + } + + return true; + } + + private static T peek(Iterable result) { + return result.iterator().next(); } /** * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field - * of the configured source {@link DBObject}. + * of the configured source {@link Document}. * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ - private class MongoDbPropertyValueProvider implements PropertyValueProvider { + static class MongoDbPropertyValueProvider implements PropertyValueProvider { - private final DBObjectAccessor source; - private final SpELExpressionEvaluator evaluator; - private final ObjectPath path; + final ConversionContext context; + final DocumentAccessor accessor; + final ValueExpressionEvaluator evaluator; + final SpELContext spELContext; /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link ValueExpressionEvaluator} and * {@link ObjectPath}. - * + * + * @param context must not be {@literal null}. * @param source must not be {@literal null}. * @param evaluator must not be {@literal null}. - * @param path can be {@literal null}. */ - public MongoDbPropertyValueProvider(DBObject source, SpELExpressionEvaluator evaluator, ObjectPath path) { + MongoDbPropertyValueProvider(ConversionContext context, Bson source, ValueExpressionEvaluator evaluator) { + this(context, new DocumentAccessor(source), evaluator, null); + } - Assert.notNull(source); - Assert.notNull(evaluator); + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link ValueExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + MongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor accessor, + ValueExpressionEvaluator evaluator, SpELContext spELContext) { - this.source = new DBObjectAccessor(source); + this.context = context; + this.accessor = accessor; this.evaluator = evaluator; - this.path = path; + this.spELContext = spELContext; } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) - */ + @Override + @Nullable + @SuppressWarnings("unchecked") public T getPropertyValue(MongoPersistentProperty property) { String expression = property.getSpelExpression(); - Object value = expression != null ? evaluator.evaluate(expression) : source.get(property); + Object value = expression != null ? 
evaluator.evaluate(expression) : accessor.get(property); + + CustomConversions conversions = context.getCustomConversions(); + if (conversions.hasValueConverter(property)) { + MongoConversionContext conversionContext = new MongoConversionContext(this, property, + context.getSourceConverter(), spELContext); + PropertyValueConverter> valueConverter = conversions + .getPropertyValueConversions().getValueConverter(property); + return (T) (value != null ? valueConverter.read(value, conversionContext) + : valueConverter.readNull(conversionContext)); + } if (value == null) { return null; } - return readValue(value, property.getTypeInformation(), path); + ConversionContext contextToUse = context.forProperty(property); + + return (T) contextToUse.convert(value, property.getTypeInformation()); + } + + public MongoDbPropertyValueProvider withContext(ConversionContext context) { + + return context == this.context ? this + : new MongoDbPropertyValueProvider(context, accessor, evaluator, spELContext); + } + } + + /** + * {@link PropertyValueProvider} that is aware of {@link MongoPersistentProperty#isAssociation()} and that delegates + * resolution to {@link DbRefResolver}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueProvider { + + /** + * Creates a new {@link AssociationAwareMongoDbPropertyValueProvider} for the given source, + * {@link ValueExpressionEvaluator} and {@link ObjectPath}. + * + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + AssociationAwareMongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor source, + ValueExpressionEvaluator evaluator) { + super(context, source, evaluator, MappingMongoConverter.this.spELContext); + } + + @Override + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + ConversionContext propertyContext = context.forProperty(property); + + if (property.isAssociation()) { + + DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(), + evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, + evaluator)); + + return (T) readAssociation(property.getRequiredAssociation(), accessor, dbRefProxyHandler, callback, + propertyContext); + } + + if (property.isUnwrapped()) { + + return (T) readUnwrapped(propertyContext, accessor, property, + mappingContext.getRequiredPersistentEntity(property)); + } + + if (!accessor.hasValue(property)) { + return null; + } + + return super.getPropertyValue(property); } } /** * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw * resolved SpEL value. - * + * * @author Oliver Gierke */ - private class ConverterAwareSpELExpressionParameterValueProvider - extends SpELExpressionParameterValueProvider { + private static class ConverterAwareValueExpressionParameterValueProvider + extends ValueExpressionParameterValueProvider { - private final ObjectPath path; + private final ConversionContext context; /** - * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. - * + * Creates a new {@link ConverterAwareValueExpressionParameterValueProvider}. + * + * @param context must not be {@literal null}. * @param evaluator must not be {@literal null}. * @param conversionService must not be {@literal null}. 
* @param delegate must not be {@literal null}.
*/
- public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator,
- ConversionService conversionService, ParameterValueProvider delegate,
- ObjectPath path) {
+ public ConverterAwareValueExpressionParameterValueProvider(ConversionContext context,
+ ValueExpressionEvaluator evaluator, ConversionService conversionService,
+ ParameterValueProvider delegate) {

super(evaluator, conversionService, delegate);
- this.path = path;
+
+ Assert.notNull(context, "ConversionContext must not be null");
+
+ this.context = context;
}

- /*
- * (non-Javadoc)
- * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter)
- */
@Override
- protected T potentiallyConvertSpelValue(Object object, Parameter parameter) {
- return readValue(object, parameter.getType(), path);
+ protected T potentiallyConvertExpressionValue(Object object, Parameter parameter) {
+ return context.convert(object, parameter.getType());
}
}

- @SuppressWarnings("unchecked")
- private T readValue(Object value, TypeInformation type, ObjectPath path) {
+ /**
+ * Marker class used to indicate we have a non root document object here that might be used within an update - so we
+ * need to preserve type hints for potential nested elements but need to remove it on top level.
+ *
+ * @author Christoph Strobl
+ * @since 1.8
+ */
+ static class NestedDocument {
- Class rawType = type.getType();
+ }
- if (conversions.hasCustomReadTarget(value.getClass(), rawType)) {
- return (T) conversionService.convert(value, rawType);
- } else if (value instanceof DBRef) {
- return potentiallyReadOrResolveDbRef((DBRef) value, type, path, rawType);
- } else if (value instanceof BasicDBList) {
- return (T) readCollectionOrArray(type, (BasicDBList) value, path);
- } else if (value instanceof DBObject) {
- return (T) read(type, (DBObject) value, path);
- } else {
- return (T) getPotentiallyConvertedSimpleRead(value, rawType);
+ enum NoOpParameterValueProvider implements ParameterValueProvider {
+
+ INSTANCE;
+
+ @Override
+ public T getParameterValue(Parameter parameter) {
+ return null;
}
}

- @SuppressWarnings("unchecked")
- private T potentiallyReadOrResolveDbRef(DBRef dbref, TypeInformation type, ObjectPath path, Class rawType) {
+ /**
+ * {@link TypeInformation} considering {@link MongoPersistentProperty#getFieldType()} as type source.
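+ * <p>
+ * A hedged example of where such an explicit field type originates; the {@code Payment} entity below is hypothetical:
+ * <pre>{@code
+ * class Payment {
+ *   // declared Java type is BigDecimal, but the stored field type is Decimal128
+ *   @Field(targetType = FieldType.DECIMAL128)
+ *   BigDecimal value;
+ * }
+ * }</pre>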
+ * + * @param + */ + private static class FieldTypeInformation implements TypeInformation { + + private final MongoPersistentProperty persistentProperty; + private final TypeInformation delegate; + + @SuppressWarnings("unchecked") + public FieldTypeInformation(MongoPersistentProperty property) { + + this.persistentProperty = property; + this.delegate = (TypeInformation) property.getTypeInformation(); + } + + @Override + public List> getParameterTypes(Constructor constructor) { + return persistentProperty.getTypeInformation().getParameterTypes(constructor); + } + + @Override + public org.springframework.data.util.TypeInformation getProperty(String property) { + return delegate.getProperty(property); + } + + @Override + public boolean isCollectionLike() { + return delegate.isCollectionLike(); + } + + @Override + public org.springframework.data.util.TypeInformation getComponentType() { + return TypeInformation.of(persistentProperty.getFieldType()); + } + + @Override + public boolean isMap() { + return delegate.isMap(); + } + + @Override + public org.springframework.data.util.TypeInformation getMapValueType() { + return TypeInformation.of(persistentProperty.getFieldType()); + } + + @Override + public Class getType() { + return delegate.getType(); + } - if (rawType.equals(DBRef.class)) { - return (T) dbref; + @Override + public TypeInformation getRawTypeInformation() { + return delegate.getRawTypeInformation(); + } + + @Override + public org.springframework.data.util.TypeInformation getActualType() { + return delegate.getActualType(); + } + + @Override + public org.springframework.data.util.TypeInformation getReturnType(Method method) { + return delegate.getReturnType(method); } - Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName()); + @Override + public List> getParameterTypes(Method method) { + return delegate.getParameterTypes(method); + } + + @Override + public org.springframework.data.util.TypeInformation getSuperTypeInformation(Class superType) { + return delegate.getSuperTypeInformation(superType); + } + + @Override + public boolean isAssignableFrom(org.springframework.data.util.TypeInformation target) { + return delegate.isAssignableFrom(target); + } + + @Override + public List> getTypeArguments() { + return delegate.getTypeArguments(); + } + + @Override + public org.springframework.data.util.TypeInformation specialize(TypeInformation type) { + return delegate.specialize(type); + } - return (T) (object != null ? object : read(type, readRef(dbref), path)); + @Override + public TypeDescriptor toTypeDescriptor() { + return delegate.toTypeDescriptor(); + } } /** - * Performs the fetch operation for the given {@link DBRef}. - * - * @param ref - * @return + * Conversion context defining an interface for graph-traversal-based conversion of documents. Entrypoint for + * recursive conversion of {@link Document} and other types. + * + * @since 3.4.3 */ - DBObject readRef(DBRef ref) { - return dbRefResolver.fetch(ref); + protected interface ConversionContext { + + /** + * Converts a source object into {@link TypeInformation target}. + * + * @param source must not be {@literal null}. + * @param typeHint must not be {@literal null}. + * @return the converted object. + */ + default S convert(Object source, TypeInformation typeHint) { + return convert(source, typeHint, this); + } + + /** + * Converts a source object into {@link TypeInformation target}. + * + * @param source must not be {@literal null}. + * @param typeHint must not be {@literal null}. 
+ * @param context must not be {@literal null}. + * @return the converted object. + */ + S convert(Object source, TypeInformation typeHint, ConversionContext context); + + /** + * Create a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + * + * @param currentPath must not be {@literal null}. + * @return a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + */ + ConversionContext withPath(ObjectPath currentPath); + + /** + * Obtain a {@link ConversionContext} for the given property {@code name}. + * + * @param name must not be {@literal null}. + * @return the {@link ConversionContext} to be used for conversion of the given property. + */ + default ConversionContext forProperty(String name) { + return this; + } + + /** + * Obtain a {@link ConversionContext} for the given {@link MongoPersistentProperty}. + * + * @param property must not be {@literal null}. + * @return the {@link ConversionContext} to be used for conversion of the given property. + */ + default ConversionContext forProperty(MongoPersistentProperty property) { + + return property.isAssociation() ? new AssociationConversionContext(forProperty(property.getName())) + : forProperty(property.getName()); + } + + /** + * Lookup a potentially existing entity instance of the given {@link MongoPersistentEntity} and {@link Document} + * + * @param entity + * @param document + * @return + * @param + */ + @Nullable + default S findContextualEntity(MongoPersistentEntity entity, Document document) { + return null; + } + + ObjectPath getPath(); + + CustomConversions getCustomConversions(); + + MongoConverter getSourceConverter(); + } /** - * Marker class used to indicate we have a non root document object here that might be used within an update - so we - * need to preserve type hints for potential nested elements but need to remove it on top level. - * - * @author Christoph Strobl - * @since 1.8 + * @since 3.4.3 */ - static class NestedDocument { + static class AssociationConversionContext implements ConversionContext { + + private final ConversionContext delegate; + + public AssociationConversionContext(ConversionContext delegate) { + this.delegate = delegate; + } + + @Override + public S convert(Object source, TypeInformation typeHint, ConversionContext context) { + return delegate.convert(source, typeHint, context); + } + + @Override + public ConversionContext withPath(ObjectPath currentPath) { + return new AssociationConversionContext(delegate.withPath(currentPath)); + } + + @Override + public S findContextualEntity(MongoPersistentEntity entity, Document document) { + Object identifier = document.get(BasicMongoPersistentProperty.ID_FIELD_NAME); + + return identifier != null ? getPath().getPathItem(identifier, entity.getCollection(), entity.getType()) : null; + } + + @Override + public ObjectPath getPath() { + return delegate.getPath(); + } + + @Override + public CustomConversions getCustomConversions() { + return delegate.getCustomConversions(); + } + + @Override + public MongoConverter getSourceConverter() { + return delegate.getSourceConverter(); + } + + } + + /** + * Conversion context holding references to simple {@link ValueConverter} and {@link ContainerValueConverter}. + * Entrypoint for recursive conversion of {@link Document} and other types. 
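+ * <p>
+ * As a rough sketch of the dispatch performed by {@code convert(Object, TypeInformation, ConversionContext)},
+ * mirroring the implementation below in simplified form:
+ * <pre>{@code
+ * // custom read target registered -> elementConverter
+ * // source is a Collection        -> collectionConverter
+ * // type hint is a Map            -> documentConverter or mapConverter
+ * // source is a DBRef             -> dbRefConverter
+ * // source is Bson/Document       -> documentConverter
+ * // otherwise                     -> elementConverter
+ * }</pre>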
+ * + * @since 3.2 + */ + protected static class DefaultConversionContext implements ConversionContext { + + final MongoConverter sourceConverter; + final org.springframework.data.convert.CustomConversions conversions; + final ObjectPath path; + final ContainerValueConverter documentConverter; + final ContainerValueConverter> collectionConverter; + final ContainerValueConverter mapConverter; + final ContainerValueConverter dbRefConverter; + final ValueConverter elementConverter; + + DefaultConversionContext(MongoConverter sourceConverter, + org.springframework.data.convert.CustomConversions customConversions, ObjectPath path, + ContainerValueConverter documentConverter, ContainerValueConverter> collectionConverter, + ContainerValueConverter mapConverter, ContainerValueConverter dbRefConverter, + ValueConverter elementConverter) { + + this.sourceConverter = sourceConverter; + this.conversions = customConversions; + this.path = path; + this.documentConverter = documentConverter; + this.collectionConverter = collectionConverter; + this.mapConverter = mapConverter; + this.dbRefConverter = dbRefConverter; + this.elementConverter = elementConverter; + } + + @SuppressWarnings("unchecked") + @Override + public S convert(Object source, TypeInformation typeHint, + ConversionContext context) { + + if (conversions.hasCustomReadTarget(source.getClass(), typeHint.getType())) { + return (S) elementConverter.convert(source, typeHint); + } + + if (source instanceof Collection collection) { + + Class rawType = typeHint.getType(); + if (!Object.class.equals(rawType) && !String.class.equals(rawType)) { + + if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { + + throw new MappingException( + String.format(INCOMPATIBLE_TYPES, source, source.getClass(), rawType, getPath())); + } + } + + if (typeHint.isCollectionLike() || typeHint.getType().isAssignableFrom(Collection.class)) { + return (S) collectionConverter.convert(context, collection, typeHint); + } + } + + if (typeHint.isMap()) { + + if (ClassUtils.isAssignable(Document.class, typeHint.getType())) { + return (S) documentConverter.convert(context, BsonUtils.asBson(source), typeHint); + } + + if (BsonUtils.supportsBson(source)) { + return (S) mapConverter.convert(context, BsonUtils.asBson(source), typeHint); + } + + throw new IllegalArgumentException( + String.format("Expected map like structure but found %s", source.getClass())); + } + + if (source instanceof DBRef dbRef) { + return (S) dbRefConverter.convert(context, dbRef, typeHint); + } + + if (BsonUtils.supportsBson(source)) { + return (S) documentConverter.convert(context, BsonUtils.asBson(source), typeHint); + } + + return (S) elementConverter.convert(source, typeHint); + } + + @Override + public CustomConversions getCustomConversions() { + return conversions; + } + + @Override + public MongoConverter getSourceConverter() { + return sourceConverter; + } + + @Override + public ConversionContext withPath(ObjectPath currentPath) { + + Assert.notNull(currentPath, "ObjectPath must not be null"); + + return new DefaultConversionContext(sourceConverter, conversions, currentPath, documentConverter, + collectionConverter, mapConverter, dbRefConverter, elementConverter); + } + + @Override + public ObjectPath getPath() { + return path; + } + + /** + * Converts a simple {@code source} value into {@link TypeInformation the target type}. 
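+ * <p>
+ * A minimal illustrative lambda (not part of this change) showing the functional shape:
+ * <pre>{@code
+ * ValueConverter<Object> asString = (source, typeHint) -> source.toString();
+ * }</pre>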
+ * + * @param + */ + interface ValueConverter { + + Object convert(T source, TypeInformation typeHint); + + } + + /** + * Converts a container {@code source} value into {@link TypeInformation the target type}. Containers may + * recursively apply conversions for entities, collections, maps, etc. + * + * @param + */ + interface ContainerValueConverter { + + Object convert(ConversionContext context, T source, TypeInformation typeHint); + + } + + } + + /** + * @since 3.4.3 + */ + class ProjectingConversionContext extends DefaultConversionContext { + + private final EntityProjection returnedTypeDescriptor; + + ProjectingConversionContext(MongoConverter sourceConverter, CustomConversions customConversions, ObjectPath path, + ContainerValueConverter> collectionConverter, ContainerValueConverter mapConverter, + ContainerValueConverter dbRefConverter, ValueConverter elementConverter, + EntityProjection projection) { + super(sourceConverter, customConversions, path, + (context, source, typeHint) -> doReadOrProject(context, source, typeHint, projection), + + collectionConverter, mapConverter, dbRefConverter, elementConverter); + this.returnedTypeDescriptor = projection; + } + + @Override + public ConversionContext forProperty(String name) { + + EntityProjection property = returnedTypeDescriptor.findProperty(name); + if (property == null) { + return new DefaultConversionContext(sourceConverter, conversions, path, + MappingMongoConverter.this::readDocument, collectionConverter, mapConverter, dbRefConverter, + elementConverter); + } + + return new ProjectingConversionContext(sourceConverter, conversions, path, collectionConverter, mapConverter, + dbRefConverter, elementConverter, property); + } + + @Override + public ConversionContext withPath(ObjectPath currentPath) { + return new ProjectingConversionContext(sourceConverter, conversions, currentPath, collectionConverter, + mapConverter, dbRefConverter, elementConverter, returnedTypeDescriptor); + } } + + private static class PropertyTranslatingPropertyAccessor implements PersistentPropertyAccessor { + + private final PersistentPropertyAccessor delegate; + private final PersistentPropertyTranslator propertyTranslator; + + private PropertyTranslatingPropertyAccessor(PersistentPropertyAccessor delegate, + PersistentPropertyTranslator propertyTranslator) { + this.delegate = delegate; + this.propertyTranslator = propertyTranslator; + } + + static PersistentPropertyAccessor create(PersistentPropertyAccessor delegate, + PersistentPropertyTranslator propertyTranslator) { + return new PropertyTranslatingPropertyAccessor<>(delegate, propertyTranslator); + } + + @Override + public void setProperty(PersistentProperty property, @Nullable Object value) { + delegate.setProperty(translate(property), value); + } + + @Override + public Object getProperty(PersistentProperty property) { + return delegate.getProperty(translate(property)); + } + + @Override + public T getBean() { + return delegate.getBean(); + } + + private MongoPersistentProperty translate(PersistentProperty property) { + return propertyTranslator.translate((MongoPersistentProperty) property); + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java new file mode 100644 index 0000000000..da106715d4 --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.convert;
+
+import org.bson.conversions.Bson;
+import org.springframework.data.convert.ValueConversionContext;
+import org.springframework.data.mapping.model.PropertyValueProvider;
+import org.springframework.data.mapping.model.SpELContext;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+import org.springframework.data.util.TypeInformation;
+import org.springframework.lang.CheckReturnValue;
+import org.springframework.lang.Nullable;
+
+/**
+ * {@link ValueConversionContext} that allows delegating read/write operations to an underlying {@link MongoConverter}.
+ *
+ * @author Christoph Strobl
+ * @author Ross Lawley
+ * @since 3.4
+ */
+public class MongoConversionContext implements ValueConversionContext {
+
+ private final PropertyValueProvider accessor; // TODO: generics
+ private final MongoConverter mongoConverter;
+
+ @Nullable private final MongoPersistentProperty persistentProperty;
+ @Nullable private final SpELContext spELContext;
+ @Nullable private final OperatorContext operatorContext;
+
+ public MongoConversionContext(PropertyValueProvider accessor,
+ @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter) {
+ this(accessor, persistentProperty, mongoConverter, null, null);
+ }
+
+ public MongoConversionContext(PropertyValueProvider accessor,
+ @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter,
+ @Nullable SpELContext spELContext) {
+ this(accessor, persistentProperty, mongoConverter, spELContext, null);
+ }
+
+ public MongoConversionContext(PropertyValueProvider accessor,
+ @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter,
+ @Nullable OperatorContext operatorContext) {
+ this(accessor, persistentProperty, mongoConverter, null, operatorContext);
+ }
+
+ public MongoConversionContext(PropertyValueProvider accessor,
+ @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter,
+ @Nullable SpELContext spELContext, @Nullable OperatorContext operatorContext) {
+
+ this.accessor = accessor;
+ this.persistentProperty = persistentProperty;
+ this.mongoConverter = mongoConverter;
+ this.spELContext = spELContext;
+ this.operatorContext = operatorContext;
+ }
+
+ @Override
+ public MongoPersistentProperty getProperty() {
+
+ if (persistentProperty == null) {
+ throw new IllegalStateException("No underlying MongoPersistentProperty available");
+ }
+
+ return persistentProperty;
+ }
+
+ /**
+ * @param operatorContext
+ * @return new instance of {@link MongoConversionContext}.
+ * @since 4.5 + */ + @CheckReturnValue + public MongoConversionContext forOperator(@Nullable OperatorContext operatorContext) { + return new MongoConversionContext(accessor, persistentProperty, mongoConverter, spELContext, operatorContext); + } + + @Nullable + public Object getValue(String propertyPath) { + return accessor.getPropertyValue(getProperty().getOwner().getRequiredPersistentProperty(propertyPath)); + } + + @Override + @SuppressWarnings("unchecked") + public T write(@Nullable Object value, TypeInformation target) { + return (T) mongoConverter.convertToMongoType(value, target); + } + + @Override + public T read(@Nullable Object value, TypeInformation target) { + return value instanceof Bson bson ? mongoConverter.read(target.getType(), bson) + : ValueConversionContext.super.read(value, target); + } + + @Nullable + public SpELContext getSpELContext() { + return spELContext; + } + + @Nullable + public OperatorContext getOperatorContext() { + return operatorContext; + } + + /** + * The {@link OperatorContext} provides access to the actual conversion intent like a write operation or a query + * operator such as {@literal $gte}. + * + * @since 4.5 + */ + public interface OperatorContext { + + /** + * The operator the conversion is used in. + * + * @return {@literal write} for simple write operations during save, or a query operator. + */ + String operator(); + + /** + * The context path the operator is used in. + * + * @return never {@literal null}. + */ + String path(); + + boolean isWriteOperation(); + + } + + record WriteOperatorContext(String path) implements OperatorContext { + + @Override + public String operator() { + return "write"; + } + + @Override + public boolean isWriteOperation() { + return true; + } + } + + record QueryOperatorContext(String operator, String path) implements OperatorContext { + + public QueryOperatorContext(@Nullable String operator, String path) { + this.operator = operator != null ? operator : "$eq"; + this.path = path; + } + + @Override + public boolean isWriteOperation() { + return false; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java index ef9bfd69bd..3676e74c8b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java @@ -1,43 +1,194 @@ -/* - * Copyright 2010-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.convert; - -import org.springframework.data.convert.EntityConverter; -import org.springframework.data.convert.EntityReader; -import org.springframework.data.convert.TypeMapper; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; - -import com.mongodb.DBObject; - -/** - * Central Mongo specific converter interface which combines {@link MongoWriter} and {@link MongoReader}. - * - * @author Oliver Gierke - * @author Thomas Darimont - */ -public interface MongoConverter extends - EntityConverter, MongoPersistentProperty, Object, DBObject>, MongoWriter, - EntityReader { - - /** - * Returns thw {@link TypeMapper} being used to write type information into {@link DBObject}s created with that - * converter. - * - * @return will never be {@literal null}. - */ - MongoTypeMapper getTypeMapper(); -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import com.mongodb.MongoClientSettings; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.springframework.core.convert.ConversionException; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.EntityConverter; +import org.springframework.data.convert.EntityReader; +import org.springframework.data.convert.TypeMapper; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.DBRef; + +/** + * Central Mongo specific converter interface which combines {@link MongoWriter} and {@link EntityReader}. + * + * @author Oliver Gierke + * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch + * @author Ryan Gibb + */ +public interface MongoConverter + extends EntityConverter, MongoPersistentProperty, Object, Bson>, MongoWriter, + EntityReader, CodecRegistryProvider { + + /** + * Returns the {@link TypeMapper} being used to write type information into {@link Document}s created with that + * converter. + * + * @return will never be {@literal null}. + */ + MongoTypeMapper getTypeMapper(); + + /** + * Returns the {@link ProjectionFactory} for this converter. + * + * @return will never be {@literal null}. 
+ * @since 3.4
+ */
+ ProjectionFactory getProjectionFactory();
+
+ /**
+ * Returns the {@link CustomConversions} for this converter.
+ *
+ * @return will never be {@literal null}.
+ * @since 3.4
+ */
+ CustomConversions getCustomConversions();
+
+ /**
+ * Apply a projection to {@link Bson} and return the projection return type {@code R}.
+ * {@link EntityProjection#isProjection() Non-projecting} descriptors fall back to {@link #read(Class, Object) regular
+ * object materialization}.
+ *
+ * @param descriptor the projection descriptor, must not be {@literal null}.
+ * @param bson must not be {@literal null}.
+ * @param
+ * @return a new instance of the projection return type {@code R}.
+ * @since 3.4
+ */
+ R project(EntityProjection descriptor, Bson bson);
+
+ /**
+ * Mapping function capable of converting values into a desired target type by, for example, extracting the actual
+ * Java type from a given {@link BsonValue}.
+ *
+ * @param targetType must not be {@literal null}.
+ * @param dbRefResolver must not be {@literal null}.
+ * @param
+ * @param
+ * @return new typed {@link java.util.function.Function}.
+ * @throws IllegalArgumentException if {@literal targetType} is {@literal null}.
+ * @since 2.1
+ */
+ @SuppressWarnings("unchecked")
+ @Nullable
+ default T mapValueToTargetType(S source, Class targetType, DbRefResolver dbRefResolver) {
+
+ Assert.notNull(targetType, "TargetType must not be null");
+ Assert.notNull(dbRefResolver, "DbRefResolver must not be null");
+
+ if (targetType != Object.class && ClassUtils.isAssignable(targetType, source.getClass())) {
+ return (T) source;
+ }
+
+ if (source instanceof BsonValue bson) {
+
+ Object value = BsonUtils.toJavaType(bson);
+
+ if (value instanceof Document document) {
+
+ if (document.containsKey("$ref") && document.containsKey("$id")) {
+
+ Object id = document.get("$id");
+ String collection = document.getString("$ref");
+
+ MongoPersistentEntity entity = getMappingContext().getPersistentEntity(targetType);
+ if (entity != null && entity.hasIdProperty()) {
+ id = convertId(id, entity.getIdProperty().getFieldType());
+ }
+
+ DBRef ref = document.containsKey("$db") ? new DBRef(document.getString("$db"), collection, id)
+ : new DBRef(collection, id);
+
+ document = dbRefResolver.fetch(ref);
+ if (document == null) {
+ return null;
+ }
+ }
+
+ return read(targetType, document);
+ } else {
+ if (!ClassUtils.isAssignable(targetType, value.getClass()) && getConversionService().canConvert(value.getClass(), targetType)) {
+ return getConversionService().convert(value, targetType);
+ }
+ }
+
+ return (T) value;
+ }
+ return getConversionService().convert(source, targetType);
+ }
+
+ /**
+ * Converts the given raw id value into either {@link ObjectId} or {@link String}.
+ *
+ * @param id can be {@literal null}.
+ * @param targetType must not be {@literal null}.
+ * @return {@literal null} if source {@literal id} is already {@literal null}.
+ * @since 2.2
+ */
+ @Nullable
+ default Object convertId(@Nullable Object id, Class targetType) {
+
+ if (id == null || ClassUtils.isAssignableValue(targetType, id)) {
+ return id;
+ }
+
+ if (ClassUtils.isAssignable(ObjectId.class, targetType)) {
+
+ if (id instanceof String) {
+
+ if (ObjectId.isValid(id.toString())) {
+ return new ObjectId(id.toString());
+ }
+
+ // avoid ConversionException as convertToMongoType will return String anyway.
+ return id;
+ }
+ }
+
+ try {
+ return getConversionService().canConvert(id.getClass(), targetType)
+ ?
getConversionService().convert(id, targetType) + : convertToMongoType(id, (TypeInformation) null); + } catch (ConversionException o_O) { + return convertToMongoType(id,(TypeInformation) null); + } + } + + @Override + default CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java index f11403edb1..f9a67d73a0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,39 @@ */ package org.springframework.data.mongodb.core.convert; +import static org.springframework.data.convert.ConverterBuilder.*; + import java.math.BigDecimal; import java.math.BigInteger; import java.net.MalformedURLException; +import java.net.URI; import java.net.URL; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Currency; import java.util.List; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import org.bson.BinaryVector; +import org.bson.BsonArray; +import org.bson.BsonDouble; +import org.bson.BsonReader; +import org.bson.BsonTimestamp; +import org.bson.BsonUndefined; +import org.bson.BsonWriter; +import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecRegistries; +import org.bson.types.Binary; import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; + import org.springframework.core.convert.ConversionFailedException; import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.ConditionalConverter; @@ -35,22 +55,24 @@ import org.springframework.core.convert.converter.ConverterFactory; import org.springframework.data.convert.ReadingConverter; import org.springframework.data.convert.WritingConverter; +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoVector; import org.springframework.data.mongodb.core.query.Term; import org.springframework.data.mongodb.core.script.NamedMongoScript; import org.springframework.util.Assert; import org.springframework.util.NumberUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; +import com.mongodb.MongoClientSettings; /** * Wrapper class to contain useful converters for the usage with Mongo. 
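+ * <p>
+ * For orientation (an editorial sketch, not part of this change): the converters collected here are registered via
+ * custom conversions, and user code adds its own in a similar fashion:
+ * <pre>{@code
+ * // "myConverter" is a placeholder for any Converter implementation
+ * MongoCustomConversions conversions = new MongoCustomConversions(List.of(myConverter));
+ * }</pre>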
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ abstract class MongoConverters { @@ -61,50 +83,63 @@ private MongoConverters() {} /** * Returns the converters to be registered. - * + * * @return * @since 1.9 */ - public static Collection getConvertersToRegister() { + static Collection getConvertersToRegister() { - List converters = new ArrayList(); + List converters = new ArrayList<>(); + + converters.add(BigDecimalToDecimal128Converter.INSTANCE); + converters.add(Decimal128ToBigDecimalConverter.INSTANCE); + converters.add(BigIntegerToDecimal128Converter.INSTANCE); - converters.add(BigDecimalToStringConverter.INSTANCE); - converters.add(StringToBigDecimalConverter.INSTANCE); - converters.add(BigIntegerToStringConverter.INSTANCE); - converters.add(StringToBigIntegerConverter.INSTANCE); converters.add(URLToStringConverter.INSTANCE); converters.add(StringToURLConverter.INSTANCE); - converters.add(DBObjectToStringConverter.INSTANCE); + converters.add(DocumentToStringConverter.INSTANCE); converters.add(TermToStringConverter.INSTANCE); - converters.add(NamedMongoScriptToDBObjectConverter.INSTANCE); - converters.add(DBObjectToNamedMongoScriptCoverter.INSTANCE); + converters.add(NamedMongoScriptToDocumentConverter.INSTANCE); + converters.add(DocumentToNamedMongoScriptConverter.INSTANCE); converters.add(CurrencyToStringConverter.INSTANCE); converters.add(StringToCurrencyConverter.INSTANCE); + converters.add(AtomicIntegerToIntegerConverter.INSTANCE); + converters.add(AtomicLongToLongConverter.INSTANCE); + converters.add(LongToAtomicLongConverter.INSTANCE); + converters.add(IntegerToAtomicIntegerConverter.INSTANCE); + converters.add(BinaryToByteArrayConverter.INSTANCE); + converters.add(BsonTimestampToInstantConverter.INSTANCE); converters.add(NumberToNumberConverterFactory.INSTANCE); + converters.add(VectorToBsonArrayConverter.INSTANCE); + converters.add(ListToVectorConverter.INSTANCE); + converters.add(BinaryVectorToMongoVectorConverter.INSTANCE); + + converters.add(reading(BsonUndefined.class, Object.class, it -> null)); + converters.add(reading(String.class, URI.class, URI::create).andWriting(URI::toString)); + return converters; } /** * Simple singleton to convert {@link ObjectId}s to their {@link String} representation. - * + * * @author Oliver Gierke */ - public static enum ObjectIdToStringConverter implements Converter { + enum ObjectIdToStringConverter implements Converter { INSTANCE; public String convert(ObjectId id) { - return id == null ? null : id.toString(); + return id.toString(); } } /** * Simple singleton to convert {@link String}s to their {@link ObjectId} representation. - * + * * @author Oliver Gierke */ - public static enum StringToObjectIdConverter implements Converter { + enum StringToObjectIdConverter implements Converter { INSTANCE; public ObjectId convert(String source) { @@ -114,39 +149,61 @@ public ObjectId convert(String source) { /** * Simple singleton to convert {@link ObjectId}s to their {@link java.math.BigInteger} representation. - * + * * @author Oliver Gierke */ - public static enum ObjectIdToBigIntegerConverter implements Converter { + enum ObjectIdToBigIntegerConverter implements Converter { INSTANCE; public BigInteger convert(ObjectId source) { - return source == null ? null : new BigInteger(source.toString(), 16); + return new BigInteger(source.toString(), 16); } } /** * Simple singleton to convert {@link BigInteger}s to their {@link ObjectId} representation. 
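// A minimal sketch of the round-trips the ObjectId singletons above implement; the enums
// themselves are package-private, so the conversions are shown inline.
ObjectId id = new ObjectId();
String hex = id.toString();                        // ObjectIdToStringConverter
ObjectId fromHex = new ObjectId(hex);              // StringToObjectIdConverter
BigInteger big = new BigInteger(hex, 16);          // ObjectIdToBigIntegerConverter
ObjectId fromBig = new ObjectId(big.toString(16)); // BigIntegerToObjectIdConverter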
- * + * * @author Oliver Gierke */ - public static enum BigIntegerToObjectIdConverter implements Converter { + enum BigIntegerToObjectIdConverter implements Converter { INSTANCE; public ObjectId convert(BigInteger source) { - return source == null ? null : new ObjectId(source.toString(16)); + return new ObjectId(source.toString(16)); } } - public static enum BigDecimalToStringConverter implements Converter { + enum BigDecimalToStringConverter implements Converter { INSTANCE; public String convert(BigDecimal source) { - return source == null ? null : source.toString(); + return source.toString(); } } - public static enum StringToBigDecimalConverter implements Converter { + /** + * @since 2.2 + */ + enum BigDecimalToDecimal128Converter implements Converter { + INSTANCE; + + public Decimal128 convert(BigDecimal source) { + return new Decimal128(source); + } + } + + /** + * @since 5.0 + */ + enum BigIntegerToDecimal128Converter implements Converter { + INSTANCE; + + public Decimal128 convert(BigInteger source) { + return new Decimal128(new BigDecimal(source)); + } + } + + enum StringToBigDecimalConverter implements Converter { INSTANCE; public BigDecimal convert(String source) { @@ -154,15 +211,28 @@ public BigDecimal convert(String source) { } } - public static enum BigIntegerToStringConverter implements Converter { + /** + * @since 2.2 + */ + enum Decimal128ToBigDecimalConverter implements Converter { + INSTANCE; + + public BigDecimal convert(Decimal128 source) { + return source.bigDecimalValue(); + } + } + + @WritingConverter + enum BigIntegerToStringConverter implements Converter { INSTANCE; public String convert(BigInteger source) { - return source == null ? null : source.toString(); + return source.toString(); } } - public static enum StringToBigIntegerConverter implements Converter { + @ReadingConverter + enum StringToBigIntegerConverter implements Converter { INSTANCE; public BigInteger convert(String source) { @@ -170,15 +240,15 @@ public BigInteger convert(String source) { } } - public static enum URLToStringConverter implements Converter { + enum URLToStringConverter implements Converter { INSTANCE; public String convert(URL source) { - return source == null ? null : source.toString(); + return source.toString(); } } - public static enum StringToURLConverter implements Converter { + enum StringToURLConverter implements Converter { INSTANCE; private static final TypeDescriptor SOURCE = TypeDescriptor.valueOf(String.class); @@ -187,7 +257,7 @@ public static enum StringToURLConverter implements Converter { public URL convert(String source) { try { - return source == null ? 
null : new URL(source); + return new URL(source); } catch (MalformedURLException e) { throw new ConversionFailedException(SOURCE, TARGET, source, e); } @@ -195,13 +265,31 @@ public URL convert(String source) { } @ReadingConverter - public static enum DBObjectToStringConverter implements Converter { + enum DocumentToStringConverter implements Converter { INSTANCE; + private final Codec codec = CodecRegistries.fromRegistries(CodecRegistries.fromCodecs(new Codec() { + + @Override + public void encode(BsonWriter writer, UUID value, EncoderContext encoderContext) { + writer.writeString(value.toString()); + } + + @Override + public Class getEncoderClass() { + return UUID.class; + } + + @Override + public UUID decode(BsonReader reader, DecoderContext decoderContext) { + throw new IllegalStateException("decode not supported"); + } + }), MongoClientSettings.getDefaultCodecRegistry()).get(Document.class); + @Override - public String convert(DBObject source) { - return source == null ? null : source.toString(); + public String convert(Document source) { + return source.toJson(codec); } } @@ -210,13 +298,13 @@ public String convert(DBObject source) { * @since 1.6 */ @WritingConverter - public static enum TermToStringConverter implements Converter { + enum TermToStringConverter implements Converter { INSTANCE; @Override public String convert(Term source) { - return source == null ? null : source.getFormatted(); + return source.getFormatted(); } } @@ -224,18 +312,18 @@ public String convert(Term source) { * @author Christoph Strobl * @since 1.7 */ - public static enum DBObjectToNamedMongoScriptCoverter implements Converter { + enum DocumentToNamedMongoScriptConverter implements Converter { INSTANCE; @Override - public NamedMongoScript convert(DBObject source) { + public NamedMongoScript convert(Document source) { - if (source == null) { + if (source.isEmpty()) { return null; } - String id = source.get("_id").toString(); + String id = source.get(FieldName.ID.name()).toString(); Object rawValue = source.get("value"); return new NamedMongoScript(id, ((Code) rawValue).getCode()); @@ -246,62 +334,50 @@ public NamedMongoScript convert(DBObject source) { * @author Christoph Strobl * @since 1.7 */ - public static enum NamedMongoScriptToDBObjectConverter implements Converter { + enum NamedMongoScriptToDocumentConverter implements Converter { INSTANCE; @Override - public DBObject convert(NamedMongoScript source) { + public Document convert(NamedMongoScript source) { - if (source == null) { - return new BasicDBObject(); - } + Document document = new Document(); - BasicDBObjectBuilder builder = new BasicDBObjectBuilder(); + document.put(FieldName.ID.name(), source.getName()); + document.put("value", new Code(source.getCode())); - builder.append("_id", source.getName()); - builder.append("value", new Code(source.getCode())); - - return builder.get(); + return document; } } /** - * {@link Converter} implementation converting {@link Currency} into its ISO 4217 {@link String} representation. - * + * {@link Converter} implementation converting {@link Currency} into its ISO 4217-2018 {@link String} representation. + * * @author Christoph Strobl * @since 1.9 */ @WritingConverter - public static enum CurrencyToStringConverter implements Converter { + enum CurrencyToStringConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(Currency source) { - return source == null ? 
null : source.getCurrencyCode(); + return source.getCurrencyCode(); } } /** - * {@link Converter} implementation converting ISO 4217 {@link String} into {@link Currency}. - * + * {@link Converter} implementation converting ISO 4217-2018 {@link String} into {@link Currency}. + * * @author Christoph Strobl * @since 1.9 */ @ReadingConverter - public static enum StringToCurrencyConverter implements Converter { + enum StringToCurrencyConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Currency convert(String source) { return StringUtils.hasText(source) ? Currency.getInstance(source) : null; @@ -312,28 +388,20 @@ public Currency convert(String source) { * {@link ConverterFactory} implementation using {@link NumberUtils} for number conversion and parsing. Additionally * deals with {@link AtomicInteger} and {@link AtomicLong} by calling {@code get()} before performing the actual * conversion. - * + * * @author Christoph Strobl * @since 1.9 */ @WritingConverter - public static enum NumberToNumberConverterFactory implements ConverterFactory,ConditionalConverter { + enum NumberToNumberConverterFactory implements ConverterFactory, ConditionalConverter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.ConverterFactory#getConverter(java.lang.Class) - */ @Override public Converter getConverter(Class targetType) { return new NumberToNumberConverter(targetType); } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.ConditionalConverter#matches(org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor) - */ @Override public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) { return !sourceType.equals(targetType); @@ -345,33 +413,215 @@ private final static class NumberToNumberConverter implements /** * Creates a new {@link NumberToNumberConverter} for the given target type. - * + * * @param targetType must not be {@literal null}. 
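// A minimal sketch of NumberToNumberConverterFactory's behavior, assuming direct access to
// the package-private enum: atomics are unwrapped via get() before NumberUtils converts.
Converter<Number, Long> toLong = NumberToNumberConverterFactory.INSTANCE.getConverter(Long.class);
Long fromAtomic = toLong.convert(new AtomicInteger(42)); // -> 42L
Long fromDouble = toLong.convert(42.9d);                 // -> 42L, NumberUtils truncates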
*/ public NumberToNumberConverter(Class targetType) { - - Assert.notNull(targetType, "Target type must not be null!"); - + + Assert.notNull(targetType, "Target type must not be null"); + this.targetType = targetType; } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public T convert(Number source) { - if (source instanceof AtomicInteger) { - return NumberUtils.convertNumberToTargetClass(((AtomicInteger) source).get(), this.targetType); + if (source instanceof AtomicInteger atomicInteger) { + return NumberUtils.convertNumberToTargetClass(atomicInteger.get(), this.targetType); } - if (source instanceof AtomicLong) { - return NumberUtils.convertNumberToTargetClass(((AtomicLong) source).get(), this.targetType); + if (source instanceof AtomicLong atomicLong) { + return NumberUtils.convertNumberToTargetClass(atomicLong.get(), this.targetType); } return NumberUtils.convertNumberToTargetClass(source, this.targetType); } } } + + @WritingConverter + enum VectorToBsonArrayConverter implements Converter { + + INSTANCE; + + @Override + public Object convert(Vector source) { + + if (source instanceof MongoVector mv) { + return mv.getSource(); + } + + double[] doubleArray = source.toDoubleArray(); + + BsonArray array = new BsonArray(doubleArray.length); + + for (double v : doubleArray) { + array.add(new BsonDouble(v)); + } + + return array; + } + } + + @ReadingConverter + enum ListToVectorConverter implements Converter, Vector> { + + INSTANCE; + + @Override + public Vector convert(List source) { + return Vector.of(source); + } + } + + @ReadingConverter + enum BinaryVectorToMongoVectorConverter implements Converter { + + INSTANCE; + + @Override + public Vector convert(BinaryVector source) { + return MongoVector.of(source); + } + } + + @WritingConverter + enum ByteArrayConverterFactory implements ConverterFactory, ConditionalConverter { + + INSTANCE; + + @Override + public Converter getConverter(Class targetType) { + return new ByteArrayConverter<>(targetType); + } + + @Override + public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) { + return targetType.getType() != Object.class && !sourceType.equals(targetType); + } + + private final static class ByteArrayConverter implements Converter { + + private final Class targetType; + + /** + * Creates a new {@link ByteArrayConverter} for the given target type. + * + * @param targetType must not be {@literal null}. + */ + public ByteArrayConverter(Class targetType) { + + Assert.notNull(targetType, "Target type must not be null"); + + this.targetType = targetType; + } + + @Override + public T convert(byte[] source) { + + if (this.targetType == BinaryVector.class) { + return (T) BinaryVector.int8Vector(source); + } + return (T) source; + } + } + } + + /** + * {@link ConverterFactory} implementation converting {@link AtomicLong} into {@link Long}. + * + * @author Christoph Strobl + * @since 1.10 + */ + @WritingConverter + enum AtomicLongToLongConverter implements Converter { + INSTANCE; + + @Override + public Long convert(AtomicLong source) { + return NumberUtils.convertNumberToTargetClass(source, Long.class); + } + } + + /** + * {@link ConverterFactory} implementation converting {@link AtomicInteger} into {@link Integer}. 
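// A minimal sketch of the Vector write path mirrored from VectorToBsonArrayConverter above:
// a plain (non-Mongo) Vector is emitted component-wise as BsonDouble values.
Vector vector = Vector.of(1.0d, 2.0d, 3.0d);
BsonArray array = new BsonArray(3);
for (double v : vector.toDoubleArray()) {
	array.add(new BsonDouble(v));
}
// reading back goes through ListToVectorConverter, i.e. Vector.of(List.of(1.0d, 2.0d, 3.0d))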
+ * + * @author Christoph Strobl + * @since 1.10 + */ + @WritingConverter + enum AtomicIntegerToIntegerConverter implements Converter { + INSTANCE; + + @Override + public Integer convert(AtomicInteger source) { + return NumberUtils.convertNumberToTargetClass(source, Integer.class); + } + } + + /** + * {@link ConverterFactory} implementation converting {@link Long} into {@link AtomicLong}. + * + * @author Christoph Strobl + * @since 1.10 + */ + @ReadingConverter + enum LongToAtomicLongConverter implements Converter { + INSTANCE; + + @Override + public AtomicLong convert(Long source) { + return new AtomicLong(source); + } + } + + /** + * {@link ConverterFactory} implementation converting {@link Integer} into {@link AtomicInteger}. + * + * @author Christoph Strobl + * @since 1.10 + */ + @ReadingConverter + enum IntegerToAtomicIntegerConverter implements Converter { + INSTANCE; + + @Override + public AtomicInteger convert(Integer source) { + return new AtomicInteger(source); + } + } + + /** + * {@link Converter} implementation converting {@link Binary} into {@code byte[]}. + * + * @author Christoph Strobl + * @since 2.0.1 + */ + @ReadingConverter + enum BinaryToByteArrayConverter implements Converter { + + INSTANCE; + + @Override + public byte[] convert(Binary source) { + return source.getData(); + } + } + + /** + * {@link Converter} implementation converting {@link BsonTimestamp} into {@link Instant}. + * + * @author Christoph Strobl + * @since 2.1.2 + */ + @ReadingConverter + enum BsonTimestampToInstantConverter implements Converter { + + INSTANCE; + + @Override + public Instant convert(BsonTimestamp source) { + return Instant.ofEpochSecond(source.getTime(), 0); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java new file mode 100644 index 0000000000..050c3bd27d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java @@ -0,0 +1,461 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.function.Consumer; + +import org.springframework.core.convert.TypeDescriptor; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.convert.converter.ConverterFactory; +import org.springframework.core.convert.converter.GenericConverter; +import org.springframework.data.convert.ConverterBuilder; +import org.springframework.data.convert.PropertyValueConversions; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.convert.PropertyValueConverterRegistrar; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.SimplePropertyValueConversions; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Value object to capture custom conversion. {@link MongoCustomConversions} also act as factory for + * {@link org.springframework.data.mapping.model.SimpleTypeHolder} + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + * @see org.springframework.data.convert.CustomConversions + * @see org.springframework.data.mapping.model.SimpleTypeHolder + * @see MongoSimpleTypes + */ +public class MongoCustomConversions extends org.springframework.data.convert.CustomConversions { + + private static final StoreConversions STORE_CONVERSIONS; + private static final List STORE_CONVERTERS; + + static { + + List converters = new ArrayList<>(); + + converters.add(CustomToStringConverter.INSTANCE); + converters.addAll(MongoConverters.getConvertersToRegister()); + converters.addAll(GeoConverters.getConvertersToRegister()); + + STORE_CONVERTERS = Collections.unmodifiableList(converters); + STORE_CONVERSIONS = StoreConversions.of(MongoSimpleTypes.HOLDER, STORE_CONVERTERS); + } + + /** + * Creates an empty {@link MongoCustomConversions} object. + */ + MongoCustomConversions() { + this(Collections.emptyList()); + } + + /** + * Create a new {@link MongoCustomConversions} instance registering the given converters. + * + * @param converters must not be {@literal null}. + */ + public MongoCustomConversions(List converters) { + this(MongoConverterConfigurationAdapter.from(converters)); + } + + /** + * Create a new {@link MongoCustomConversions} given {@link MongoConverterConfigurationAdapter}. 
+	 *
+	 * @param conversionConfiguration must not be {@literal null}.
+	 * @since 2.3
+	 */
+	protected MongoCustomConversions(MongoConverterConfigurationAdapter conversionConfiguration) {
+		super(conversionConfiguration.createConverterConfiguration());
+	}
+
+	/**
+	 * Functional-style {@link org.springframework.data.convert.CustomConversions} creation giving users a convenient way
+	 * of configuring store-specific capabilities by providing deferred hooks to what will be configured when creating the
+	 * {@link org.springframework.data.convert.CustomConversions#CustomConversions(ConverterConfiguration) instance}.
+	 *
+	 * @param configurer must not be {@literal null}.
+	 * @since 2.3
+	 */
+	public static MongoCustomConversions create(Consumer<MongoConverterConfigurationAdapter> configurer) {
+
+		MongoConverterConfigurationAdapter adapter = new MongoConverterConfigurationAdapter();
+		configurer.accept(adapter);
+
+		return new MongoCustomConversions(adapter);
+	}
+
+	@WritingConverter
+	private enum CustomToStringConverter implements GenericConverter {
+
+		INSTANCE;
+
+		public Set<ConvertiblePair> getConvertibleTypes() {
+
+			ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
+			ConvertiblePair characterToString = new ConvertiblePair(Character.class, String.class);
+
+			return new HashSet<>(Arrays.asList(localeToString, characterToString));
+		}
+
+		public Object convert(@Nullable Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
+			return source != null ? source.toString() : null;
+		}
+	}
+
+	/**
+	 * {@link MongoConverterConfigurationAdapter} encapsulates creation of
+	 * {@link org.springframework.data.convert.CustomConversions.ConverterConfiguration} with MongoDB specifics.
+	 *
+	 * @author Christoph Strobl
+	 * @since 2.3
+	 */
+	public static class MongoConverterConfigurationAdapter {
+
+		/**
+		 * List of {@literal java.time} types having different representation when rendered via the native
+		 * {@link org.bson.codecs.Codec} than the Spring Data {@link Converter}.
+		 */
+		private static final Set<Class<?>> JAVA_DRIVER_TIME_SIMPLE_TYPES = Set.of(LocalDate.class, LocalTime.class,
+				LocalDateTime.class);
+
+		private boolean useNativeDriverJavaTimeCodecs = false;
+		private BigDecimalRepresentation bigDecimals = BigDecimalRepresentation.STRING;
+		private final List<Object> customConverters = new ArrayList<>();
+
+		private final PropertyValueConversions internalValueConversion = PropertyValueConversions.simple(it -> {});
+		private PropertyValueConversions propertyValueConversions = internalValueConversion;
+
+		/**
+		 * Create a {@link MongoConverterConfigurationAdapter} using the provided {@code converters} and our own codecs for
+		 * JSR-310 types.
+		 *
+		 * @param converters must not be {@literal null}.
+		 * @return
+		 */
+		public static MongoConverterConfigurationAdapter from(List<?> converters) {
+
+			Assert.notNull(converters, "Converters must not be null");
+
+			MongoConverterConfigurationAdapter converterConfigurationAdapter = new MongoConverterConfigurationAdapter();
+			converterConfigurationAdapter.useSpringDataJavaTimeCodecs();
+			converterConfigurationAdapter.registerConverters(converters);
+
+			return converterConfigurationAdapter;
+		}
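// A minimal sketch of the two public setup styles; MyColorConverter is a hypothetical
// user-defined Converter implementation.
MongoCustomConversions byList = new MongoCustomConversions(List.of(new MyColorConverter()));
MongoCustomConversions byAdapter = MongoCustomConversions.create(adapter -> adapter
		.useNativeDriverJavaTimeCodecs()
		.registerConverter(new MyColorConverter()));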
+
+		/**
+		 * Add a custom {@link Converter} implementation.
+		 *
+		 * @param converter must not be {@literal null}.
+		 * @return this.
+		 */
+		public MongoConverterConfigurationAdapter registerConverter(Converter<?, ?> converter) {
+
+			Assert.notNull(converter, "Converter must not be null");
+			customConverters.add(converter);
+			return this;
+		}
+
+		/**
+		 * Add {@link Converter converters}, {@link ConverterFactory factories}, {@link ConverterBuilder.ConverterAware
+		 * converter-aware objects}, and {@link GenericConverter generic converters}.
+		 *
+		 * @param converters must not be {@literal null} nor contain {@literal null} values.
+		 * @return this.
+		 */
+		public MongoConverterConfigurationAdapter registerConverters(Collection<?> converters) {
+
+			Assert.notNull(converters, "Converters must not be null");
+			Assert.noNullElements(converters, "Converters must not be null nor contain null values");
+
+			customConverters.addAll(converters);
+			return this;
+		}
+
+		/**
+		 * Add a custom {@link ConverterFactory} implementation.
+		 *
+		 * @param converterFactory must not be {@literal null}.
+		 * @return this.
+		 */
+		public MongoConverterConfigurationAdapter registerConverterFactory(ConverterFactory<?, ?> converterFactory) {
+
+			Assert.notNull(converterFactory, "ConverterFactory must not be null");
+			customConverters.add(converterFactory);
+			return this;
+		}
+
+		/**
+		 * Add a custom/default {@link PropertyValueConverterFactory} implementation used to serve
+		 * {@link PropertyValueConverter}.
+		 *
+		 * @param converterFactory must not be {@literal null}.
+		 * @return this.
+		 * @since 3.4
+		 */
+		public MongoConverterConfigurationAdapter registerPropertyValueConverterFactory(
+				PropertyValueConverterFactory converterFactory) {
+
+			Assert.state(valueConversions() instanceof SimplePropertyValueConversions,
+					"Configured PropertyValueConversions does not allow setting custom ConverterRegistry");
+
+			((SimplePropertyValueConversions) valueConversions()).setConverterFactory(converterFactory);
+			return this;
+		}
+
+		/**
+		 * Gateway to register property-specific converters.
+		 *
+		 * @param configurationAdapter must not be {@literal null}.
+		 * @return this.
+		 * @since 3.4
+		 */
+		public MongoConverterConfigurationAdapter configurePropertyConversions(
+				Consumer<PropertyValueConverterRegistrar<MongoPersistentProperty>> configurationAdapter) {
+
+			Assert.state(valueConversions() instanceof SimplePropertyValueConversions,
+					"Configured PropertyValueConversions does not allow setting custom ConverterRegistry");
+
+			PropertyValueConverterRegistrar<MongoPersistentProperty> propertyValueConverterRegistrar = new PropertyValueConverterRegistrar<>();
+			configurationAdapter.accept(propertyValueConverterRegistrar);
+
+			((SimplePropertyValueConversions) valueConversions())
+					.setValueConverterRegistry(propertyValueConverterRegistrar.buildRegistry());
+			return this;
+		}
+
+		/**
+		 * Set whether or not to use the native MongoDB Java Driver {@link org.bson.codecs.Codec codecs} for
+		 * {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
+		 * and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
+		 *
+		 * @param useNativeDriverJavaTimeCodecs
+		 * @return this.
+		 */
+		public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs(boolean useNativeDriverJavaTimeCodecs) {
+
+			this.useNativeDriverJavaTimeCodecs = useNativeDriverJavaTimeCodecs;
+			return this;
+		}
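// A minimal sketch of the codec trade-off configured above: native driver codecs store
// java.time values through the driver's UTC-based codecs, while the Spring Data default
// converts through java.util.Date using the system default zone.
MongoCustomConversions utcBased = MongoCustomConversions
		.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs);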
+
+		/**
+		 * Use the native MongoDB Java Driver {@link org.bson.codecs.Codec codecs} for
+		 * {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
+		 * and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
+		 *
+		 * @return this.
+		 * @see #useNativeDriverJavaTimeCodecs(boolean)
+		 */
+		public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs() {
+			return useNativeDriverJavaTimeCodecs(true);
+		}
+
+		/**
+		 * Use the Spring Data JSR-310 {@link Converter converters} for
+		 * {@link org.springframework.data.convert.Jsr310Converters.LocalDateToDateConverter LocalDate},
+		 * {@link org.springframework.data.convert.Jsr310Converters.LocalTimeToDateConverter LocalTime} and
+		 * {@link org.springframework.data.convert.Jsr310Converters.LocalDateTimeToDateConverter LocalDateTime} using the
+		 * {@link ZoneId#systemDefault()}.
+		 *
+		 * @return this.
+		 * @see #useNativeDriverJavaTimeCodecs(boolean)
+		 */
+		public MongoConverterConfigurationAdapter useSpringDataJavaTimeCodecs() {
+			return useNativeDriverJavaTimeCodecs(false);
+		}
+
+		/**
+		 * Configures the representation to use for {@link java.math.BigDecimal} and {@link java.math.BigInteger} values in
+		 * MongoDB. Defaults to {@link BigDecimalRepresentation#STRING}.
+		 *
+		 * @param representation the representation to use.
+		 * @return this.
+		 * @since 4.5
+		 */
+		public MongoConverterConfigurationAdapter bigDecimal(BigDecimalRepresentation representation) {
+
+			Assert.notNull(representation, "BigDecimalRepresentation must not be null");
+			this.bigDecimals = representation;
+			return this;
+		}
+
+		/**
+		 * Optionally set the {@link PropertyValueConversions} to be applied during mapping.
+		 * <p>
                    + * Use this method if {@link #configurePropertyConversions(Consumer)} and + * {@link #registerPropertyValueConverterFactory(PropertyValueConverterFactory)} are not sufficient. + * + * @param valueConversions must not be {@literal null}. + * @return this. + * @since 3.4 + * @deprecated since 4.2. Use {@link #withPropertyValueConversions(PropertyValueConversions)} instead. + */ + @Deprecated(since = "4.2.0") + public MongoConverterConfigurationAdapter setPropertyValueConversions(PropertyValueConversions valueConversions) { + return withPropertyValueConversions(valueConversions); + } + + /** + * Optionally set the {@link PropertyValueConversions} to be applied during mapping. + *
<p>
                    + * Use this method if {@link #configurePropertyConversions(Consumer)} and + * {@link #registerPropertyValueConverterFactory(PropertyValueConverterFactory)} are not sufficient. + * + * @param valueConversions must not be {@literal null}. + * @return this. + * @since 4.2 + */ + public MongoConverterConfigurationAdapter withPropertyValueConversions(PropertyValueConversions valueConversions) { + + Assert.notNull(valueConversions, "PropertyValueConversions must not be null"); + this.propertyValueConversions = valueConversions; + return this; + } + + PropertyValueConversions valueConversions() { + + if (this.propertyValueConversions == null) { + this.propertyValueConversions = internalValueConversion; + } + + return this.propertyValueConversions; + } + + ConverterConfiguration createConverterConfiguration() { + + if (hasDefaultPropertyValueConversions() + && propertyValueConversions instanceof SimplePropertyValueConversions svc) { + svc.init(); + } + + List converters = new ArrayList<>(STORE_CONVERTERS.size() + 7); + + if (bigDecimals == BigDecimalRepresentation.STRING) { + + converters.add(BigDecimalToStringConverter.INSTANCE); + converters.add(StringToBigDecimalConverter.INSTANCE); + converters.add(BigIntegerToStringConverter.INSTANCE); + converters.add(StringToBigIntegerConverter.INSTANCE); + } + + if (!useNativeDriverJavaTimeCodecs) { + + converters.addAll(customConverters); + return new ConverterConfiguration(STORE_CONVERSIONS, converters, convertiblePair -> true, + this.propertyValueConversions); + } + + /* + * We need to have those converters using UTC as the default ones would go on with the systemDefault. + */ + converters.add(DateToUtcLocalDateConverter.INSTANCE); + converters.add(DateToUtcLocalTimeConverter.INSTANCE); + converters.add(DateToUtcLocalDateTimeConverter.INSTANCE); + converters.addAll(STORE_CONVERTERS); + + StoreConversions storeConversions = StoreConversions + .of(new SimpleTypeHolder(JAVA_DRIVER_TIME_SIMPLE_TYPES, MongoSimpleTypes.HOLDER), converters); + + return new ConverterConfiguration(storeConversions, this.customConverters, convertiblePair -> { + + // Avoid default registrations + + return !JAVA_DRIVER_TIME_SIMPLE_TYPES.contains(convertiblePair.getSourceType()) + || !Date.class.isAssignableFrom(convertiblePair.getTargetType()); + }, this.propertyValueConversions); + } + + @ReadingConverter + private enum DateToUtcLocalDateTimeConverter implements Converter { + + INSTANCE; + + @Override + public LocalDateTime convert(Date source) { + return LocalDateTime.ofInstant(Instant.ofEpochMilli(source.getTime()), ZoneId.of("UTC")); + } + } + + @ReadingConverter + private enum DateToUtcLocalTimeConverter implements Converter { + INSTANCE; + + @Override + public LocalTime convert(Date source) { + return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalTime(); + } + } + + @ReadingConverter + private enum DateToUtcLocalDateConverter implements Converter { + INSTANCE; + + @Override + public LocalDate convert(Date source) { + return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalDate(); + } + } + + private boolean hasDefaultPropertyValueConversions() { + return propertyValueConversions == internalValueConversion; + } + + } + + /** + * Strategy to represent {@link java.math.BigDecimal} and {@link java.math.BigInteger} values in MongoDB. + * + * @since 4.5 + */ + public enum BigDecimalRepresentation { + + /** + * Store values as {@link Number#toString() String}. 
Using strings retains precision but does not support range + * queries. + */ + STRING, + + /** + * Store numbers using {@link org.bson.types.Decimal128}. Requires MongoDB Server 3.4 or later. + */ + DECIMAL128 + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java new file mode 100644 index 0000000000..05baa88c57 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java @@ -0,0 +1,80 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; + +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.MongoCollection; + +/** + * {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents} + * for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader { + + private static final Log LOGGER = LogFactory.getLog(MongoDatabaseFactoryReferenceLoader.class); + + private final MongoDatabaseFactory mongoDbFactory; + + /** + * @param mongoDbFactory must not be {@literal null}. + */ + public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) { + + Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null"); + + this.mongoDbFactory = mongoDbFactory; + } + + @Override + public Iterable fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) { + + MongoCollection collection = getCollection(context); + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Bulk fetching %s from %s.%s", referenceQuery, + StringUtils.hasText(context.getDatabase()) ? context.getDatabase() + : collection.getNamespace().getDatabaseName(), + context.getCollection())); + } + + return referenceQuery.apply(collection); + } + + /** + * Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying + * {@link MongoDatabaseFactory}. + * + * @param context must not be {@literal null}. + * @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}. 
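// A minimal sketch of bulk-fetching linked documents with the loader above; 'factory'
// (a MongoDatabaseFactory) and 'referenceQuery' are hypothetical.
ReferenceLoader loader = new MongoDatabaseFactoryReferenceLoader(factory);
Iterable<Document> linked = loader.fetchMany(referenceQuery,
		new ReferenceCollection("database", "persons"));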
+ */ + protected MongoCollection getCollection(ReferenceCollection context) { + + return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), + Document.class); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java index 1844390716..0316251dc1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,246 +17,322 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.Set; import java.util.Stack; import java.util.regex.Pattern; +import org.bson.Document; import org.springframework.data.domain.Example; import org.springframework.data.domain.ExampleMatcher.NullHandler; import org.springframework.data.domain.ExampleMatcher.PropertyValueTransformer; import org.springframework.data.domain.ExampleMatcher.StringMatcher; import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.MongoRegexCreator; +import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.data.repository.core.support.ExampleMatcherAccessor; -import org.springframework.data.repository.query.parser.Part.Type; +import org.springframework.data.mongodb.core.query.UntypedExampleMatcher; +import org.springframework.data.mongodb.util.DotPath; +import org.springframework.data.support.ExampleMatcherAccessor; import org.springframework.data.util.TypeInformation; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** + * Mapper from {@link Example} to a query {@link Document}. 
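// A minimal sketch of what this mapper produces; Person with a 'firstname' property and
// 'exampleMapper' are hypothetical.
Person probe = new Person();
probe.setFirstname("Dave");
Document query = exampleMapper.getMappedExample(Example.of(probe));
// null properties and an unset id are dropped, leaving e.g. { "firstname" : "Dave" }
// plus a _class type restriction unless an UntypedExampleMatcher is used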
+ * * @author Christoph Strobl * @author Mark Paluch + * @author Jens Schauder * @since 1.8 + * @see Example + * @see org.springframework.data.domain.ExampleMatcher + * @see UntypedExampleMatcher */ public class MongoExampleMapper { private final MappingContext, MongoPersistentProperty> mappingContext; private final MongoConverter converter; - private final Map stringMatcherPartMapping = new HashMap(); + /** + * Create a new {@link MongoTypeMapper} given {@link MongoConverter}. + * + * @param converter must not be {@literal null}. + */ public MongoExampleMapper(MongoConverter converter) { this.converter = converter; this.mappingContext = converter.getMappingContext(); - - stringMatcherPartMapping.put(StringMatcher.EXACT, Type.SIMPLE_PROPERTY); - stringMatcherPartMapping.put(StringMatcher.CONTAINING, Type.CONTAINING); - stringMatcherPartMapping.put(StringMatcher.STARTING, Type.STARTING_WITH); - stringMatcherPartMapping.put(StringMatcher.ENDING, Type.ENDING_WITH); - stringMatcherPartMapping.put(StringMatcher.REGEX, Type.REGEX); } /** - * Returns the given {@link Example} as {@link DBObject} holding matching values extracted from + * Returns the given {@link Example} as {@link Document} holding matching values extracted from * {@link Example#getProbe()}. * * @param example must not be {@literal null}. * @return */ - public DBObject getMappedExample(Example example) { + public Document getMappedExample(Example example) { - Assert.notNull(example, "Example must not be null!"); + Assert.notNull(example, "Example must not be null"); - return getMappedExample(example, mappingContext.getPersistentEntity(example.getProbeType())); + return getMappedExample(example, mappingContext.getRequiredPersistentEntity(example.getProbeType())); } /** - * Returns the given {@link Example} as {@link DBObject} holding matching values extracted from + * Returns the given {@link Example} as {@link Document} holding matching values extracted from * {@link Example#getProbe()}. * * @param example must not be {@literal null}. * @param entity must not be {@literal null}. * @return */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - public DBObject getMappedExample(Example example, MongoPersistentEntity entity) { + public Document getMappedExample(Example example, MongoPersistentEntity entity) { - Assert.notNull(example, "Example must not be null!"); - Assert.notNull(entity, "MongoPersistentEntity must not be null!"); + Assert.notNull(example, "Example must not be null"); + Assert.notNull(entity, "MongoPersistentEntity must not be null"); - DBObject reference = (DBObject) converter.convertToMongoType(example.getProbe()); + Document reference = (Document) converter.convertToMongoType(example.getProbe()); - if (entity.hasIdProperty() && entity.getIdentifierAccessor(example.getProbe()).getIdentifier() == null) { - reference.removeField(entity.getIdProperty().getFieldName()); + if (entity.getIdProperty() != null && ClassUtils.isAssignable(entity.getType(), example.getProbeType())) { + + Object identifier = entity.getIdentifierAccessor(example.getProbe()).getIdentifier(); + if (identifier == null) { + reference.remove(entity.getIdProperty().getFieldName()); + } } ExampleMatcherAccessor matcherAccessor = new ExampleMatcherAccessor(example.getMatcher()); applyPropertySpecs("", reference, example.getProbeType(), matcherAccessor); - this.converter.getTypeMapper().writeTypeRestrictions(reference, getTypesToMatch(example)); + Document flattened = ObjectUtils.nullSafeEquals(NullHandler.INCLUDE, matcherAccessor.getNullHandler()) ? 
reference + : new Document(SerializationUtils.flattenMap(reference)); + Document result = example.getMatcher().isAllMatching() ? flattened : orConcatenate(flattened); - return ObjectUtils.nullSafeEquals(NullHandler.INCLUDE, matcherAccessor.getNullHandler()) ? reference - : new BasicDBObject(SerializationUtils.flattenMap(reference)); + return updateTypeRestrictions(result, example); } - private Set> getTypesToMatch(Example example) { + private void applyPropertySpecs(String path, Document source, Class probeType, + ExampleMatcherAccessor exampleSpecAccessor) { - Set> types = new HashSet>(); + if (source == null) { + return; + } - for (TypeInformation reference : mappingContext.getManagedTypes()) { - if (example.getProbeType().isAssignableFrom(reference.getType())) { - types.add(reference.getType()); + Iterator> iter = source.entrySet().iterator(); + + while (iter.hasNext()) { + + Map.Entry entry = iter.next(); + String propertyPath = DotPath.from(path).append(entry.getKey()).toString(); + String mappedPropertyPath = getMappedPropertyPath(propertyPath, probeType); + + if (isEmptyIdProperty(entry)) { + iter.remove(); + continue; } - } - return types; + if (exampleSpecAccessor.isIgnoredPath(propertyPath) || exampleSpecAccessor.isIgnoredPath(mappedPropertyPath)) { + iter.remove(); + continue; + } + + StringMatcher stringMatcher = exampleSpecAccessor.getDefaultStringMatcher(); + Object value = entry.getValue(); + boolean ignoreCase = exampleSpecAccessor.isIgnoreCaseEnabled(); + + if (exampleSpecAccessor.hasPropertySpecifiers()) { + + mappedPropertyPath = exampleSpecAccessor.hasPropertySpecifier(propertyPath) ? propertyPath + : getMappedPropertyPath(propertyPath, probeType); + + stringMatcher = exampleSpecAccessor.getStringMatcherForPath(mappedPropertyPath); + ignoreCase = exampleSpecAccessor.isIgnoreCaseForPath(mappedPropertyPath); + } + + // TODO: should a PropertySpecifier outrule the later on string matching? 
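// For illustration, the documents applyStringMatcher derives for the value "Dave":
// STARTING + ignoreCase -> { "$regex" : "^Dave", "$options" : "i" }
// DEFAULT + ignoreCase  -> { "$regex" : "\QDave\E", "$options" : "i" } (quoted literal)
Document regex = new Document("$regex",
		MongoRegexCreator.INSTANCE.toRegularExpression("Dave", MatchMode.STARTING_WITH))
		.append("$options", "i");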
+ if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) { + + PropertyValueTransformer valueTransformer = exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath); + Optional converted = valueTransformer.apply(Optional.ofNullable(value)); + + if(!converted.isPresent()) { + iter.remove(); + continue; + } + + entry.setValue(converted.get()); + } + + if (entry.getValue() instanceof String) { + applyStringMatcher(entry, stringMatcher, ignoreCase); + } else if (entry.getValue() instanceof Document document) { + applyPropertySpecs(propertyPath, document, probeType, exampleSpecAccessor); + } + } } private String getMappedPropertyPath(String path, Class probeType) { - MongoPersistentEntity entity = mappingContext.getPersistentEntity(probeType); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(probeType); Iterator parts = Arrays.asList(path.split("\\.")).iterator(); - final Stack stack = new Stack(); + final Stack stack = new Stack<>(); - List resultParts = new ArrayList(); + List resultParts = new ArrayList<>(); while (parts.hasNext()) { - final String part = parts.next(); + String part = parts.next(); MongoPersistentProperty prop = entity.getPersistentProperty(part); if (prop == null) { - entity.doWithProperties(new PropertyHandler() { - - @Override - public void doWithPersistentProperty(MongoPersistentProperty property) { - - if (property.getFieldName().equals(part)) { - stack.push(property); - } + entity.doWithProperties((PropertyHandler) property -> { + if (property.getFieldName().equals(part)) { + stack.push(property); } }); if (stack.isEmpty()) { return ""; } + prop = stack.pop(); } resultParts.add(prop.getName()); if (prop.isEntity() && mappingContext.hasPersistentEntityFor(prop.getActualType())) { - entity = mappingContext.getPersistentEntity(prop.getActualType()); + entity = mappingContext.getRequiredPersistentEntity(prop.getActualType()); } else { break; } } return StringUtils.collectionToDelimitedString(resultParts, "."); - } - private void applyPropertySpecs(String path, DBObject source, Class probeType, - ExampleMatcherAccessor exampleSpecAccessor) { - - if (!(source instanceof BasicDBObject)) { - return; - } + private Document updateTypeRestrictions(Document query, Example example) { - Iterator> iter = ((BasicDBObject) source).entrySet().iterator(); + Document result = new Document(); - while (iter.hasNext()) { + if (isTypeRestricting(example)) { - Map.Entry entry = iter.next(); - String propertyPath = StringUtils.hasText(path) ? path + "." 
+ entry.getKey() : entry.getKey(); - String mappedPropertyPath = getMappedPropertyPath(propertyPath, probeType); + result.putAll(query); + this.converter.getTypeMapper().writeTypeRestrictions(result, getTypesToMatch(example)); + return result; + } - if (isEmptyIdProperty(entry)) { - iter.remove(); - continue; + for (Map.Entry entry : query.entrySet()) { + if (!this.converter.getTypeMapper().isTypeKey(entry.getKey())) { + result.put(entry.getKey(), entry.getValue()); } + } - if (exampleSpecAccessor.isIgnoredPath(propertyPath) || exampleSpecAccessor.isIgnoredPath(mappedPropertyPath)) { - iter.remove(); - continue; - } + return result; + } - StringMatcher stringMatcher = exampleSpecAccessor.getDefaultStringMatcher(); - Object value = entry.getValue(); - boolean ignoreCase = exampleSpecAccessor.isIgnoreCaseEnabled(); + private boolean isTypeRestricting(Example example) { - if (exampleSpecAccessor.hasPropertySpecifiers()) { + if (example.getMatcher() instanceof UntypedExampleMatcher) { + return false; + } - mappedPropertyPath = exampleSpecAccessor.hasPropertySpecifier(propertyPath) ? propertyPath - : getMappedPropertyPath(propertyPath, probeType); + if (example.getMatcher().getIgnoredPaths().isEmpty()) { + return true; + } - stringMatcher = exampleSpecAccessor.getStringMatcherForPath(mappedPropertyPath); - ignoreCase = exampleSpecAccessor.isIgnoreCaseForPath(mappedPropertyPath); + for (String path : example.getMatcher().getIgnoredPaths()) { + if (this.converter.getTypeMapper().isTypeKey(path)) { + return false; } + } - // TODO: should a PropertySpecifier outrule the later on string matching? - if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) { + return true; + } - PropertyValueTransformer valueTransformer = exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath); - value = valueTransformer.convert(value); - if (value == null) { - iter.remove(); - continue; - } + private Set> getTypesToMatch(Example example) { - entry.setValue(value); - } + Set> types = new HashSet<>(); - if (entry.getValue() instanceof String) { - applyStringMatcher(entry, stringMatcher, ignoreCase); - } else if (entry.getValue() instanceof BasicDBObject) { - applyPropertySpecs(propertyPath, (BasicDBObject) entry.getValue(), probeType, exampleSpecAccessor); + for (TypeInformation reference : mappingContext.getManagedTypes()) { + if (example.getProbeType().isAssignableFrom(reference.getType())) { + types.add(reference.getType()); } } + + return types; } - private boolean isEmptyIdProperty(Entry entry) { - return entry.getKey().equals("_id") && entry.getValue() == null; + private static boolean isEmptyIdProperty(Entry entry) { + return entry.getKey().equals(FieldName.ID.name()) + && (entry.getValue() == null || entry.getValue().equals(Optional.empty())); } - private void applyStringMatcher(Map.Entry entry, StringMatcher stringMatcher, boolean ignoreCase) { + private static void applyStringMatcher(Map.Entry entry, StringMatcher stringMatcher, + boolean ignoreCase) { - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); - if (ObjectUtils.nullSafeEquals(StringMatcher.DEFAULT, stringMatcher)) { + if (StringMatcher.DEFAULT == stringMatcher) { if (ignoreCase) { - dbo.put("$regex", Pattern.quote((String) entry.getValue())); - entry.setValue(dbo); + document.put("$regex", Pattern.quote((String) entry.getValue())); + entry.setValue(document); } } else { - Type type = stringMatcherPartMapping.get(stringMatcher); - String expression = 
MongoRegexCreator.INSTANCE.toRegularExpression((String) entry.getValue(), type); - dbo.put("$regex", expression); - entry.setValue(dbo); + String expression = MongoRegexCreator.INSTANCE.toRegularExpression((String) entry.getValue(), + toMatchMode(stringMatcher)); + document.put("$regex", expression); + entry.setValue(document); } if (ignoreCase) { - dbo.put("$options", "i"); + document.put("$options", "i"); + } + } + + private static Document orConcatenate(Document source) { + + List or = new ArrayList<>(source.keySet().size()); + + for (String key : source.keySet()) { + or.add(new Document(key, source.get(key))); } + + return new Document("$or", or); + } + + /** + * Return the {@link MatchMode} for the given {@link StringMatcher}. + * + * @param matcher must not be {@literal null}. + * @return + */ + private static MatchMode toMatchMode(StringMatcher matcher) { + + return switch (matcher) { + case CONTAINING -> MatchMode.CONTAINING; + case STARTING -> MatchMode.STARTING_WITH; + case ENDING -> MatchMode.ENDING_WITH; + case EXACT -> MatchMode.EXACT; + case REGEX -> MatchMode.REGEX; + default -> MatchMode.DEFAULT; + }; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java new file mode 100644 index 0000000000..8d199083e7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java @@ -0,0 +1,160 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * {@link JsonSchemaMapper} implementation using the conversion and mapping infrastructure for mapping fields to the + * provided domain type. 
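// A minimal sketch of the mapping this class performs; Person, its 'firstname' property
// stored as field 'fname', and 'converter' are hypothetical.
Document schema = new Document("$jsonSchema", new Document("required", List.of("firstname")));
Document mapped = new MongoJsonSchemaMapper(converter).mapSchema(schema, Person.class);
// -> { "$jsonSchema" : { "required" : [ "fname" ] } }, property names become mapped field names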
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class MongoJsonSchemaMapper implements JsonSchemaMapper { + + private static final String $JSON_SCHEMA = "$jsonSchema"; + private static final String REQUIRED_FIELD = "required"; + private static final String PROPERTIES_FIELD = "properties"; + private static final String ENUM_FIELD = "enum"; + + private final MappingContext, MongoPersistentProperty> mappingContext; + private final MongoConverter converter; + + /** + * Create a new {@link MongoJsonSchemaMapper} facilitating the given {@link MongoConverter}. + * + * @param converter must not be {@literal null}. + */ + public MongoJsonSchemaMapper(MongoConverter converter) { + + Assert.notNull(converter, "Converter must not be null"); + + this.converter = converter; + this.mappingContext = converter.getMappingContext(); + } + + public Document mapSchema(Document jsonSchema, Class type) { + + Assert.notNull(jsonSchema, "Schema must not be null"); + Assert.notNull(type, "Type must not be null Please consider Object.class"); + Assert.isTrue(jsonSchema.containsKey($JSON_SCHEMA), + () -> String.format("Document does not contain $jsonSchema field; Found: %s", jsonSchema)); + + if (Object.class.equals(type)) { + return new Document(jsonSchema); + } + + return new Document($JSON_SCHEMA, + mapSchemaObject(mappingContext.getPersistentEntity(type), jsonSchema.get($JSON_SCHEMA, Document.class))); + } + + @SuppressWarnings("unchecked") + private Document mapSchemaObject(@Nullable PersistentEntity entity, Document source) { + + Document sink = new Document(source); + + if (source.containsKey(REQUIRED_FIELD)) { + sink.replace(REQUIRED_FIELD, mapRequiredProperties(entity, source.get(REQUIRED_FIELD, Collection.class))); + } + + if (source.containsKey(PROPERTIES_FIELD)) { + sink.replace(PROPERTIES_FIELD, mapProperties(entity, source.get(PROPERTIES_FIELD, Document.class))); + } + + mapEnumValuesIfNecessary(sink); + + return sink; + } + + private Document mapProperties(@Nullable PersistentEntity entity, Document source) { + + Document sink = new Document(); + for (String fieldName : source.keySet()) { + + String mappedFieldName = getFieldName(entity, fieldName); + Document mappedProperty = mapProperty(entity, fieldName, source.get(fieldName, Document.class)); + + sink.append(mappedFieldName, mappedProperty); + } + return sink; + } + + private List mapRequiredProperties(@Nullable PersistentEntity entity, + Collection sourceFields) { + + return sourceFields.stream() /// + .map(fieldName -> getFieldName(entity, fieldName)) // + .collect(Collectors.toList()); + } + + private Document mapProperty(@Nullable PersistentEntity entity, String sourceFieldName, + Document source) { + + Document sink = new Document(source); + + if (entity != null && sink.containsKey(Type.objectType().representation())) { + + MongoPersistentProperty property = entity.getPersistentProperty(sourceFieldName); + if (property != null && property.isEntity()) { + sink = mapSchemaObject(mappingContext.getPersistentEntity(property.getActualType()), source); + } + } + + return mapEnumValuesIfNecessary(sink); + } + + private Document mapEnumValuesIfNecessary(Document source) { + + Document sink = new Document(source); + if (source.containsKey(ENUM_FIELD)) { + sink.replace(ENUM_FIELD, mapEnumValues(source.get(ENUM_FIELD, Iterable.class))); + } + return sink; + } + + private List mapEnumValues(Iterable values) { + + List converted = new ArrayList<>(); + for (Object val : values) { + 
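+			// each enum constant is mapped through convertToMongoType, typically yielding its name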
converted.add(converter.convertToMongoType(val)); + } + return converted; + } + + private String getFieldName(@Nullable PersistentEntity entity, String sourceField) { + + if (entity == null) { + return sourceField; + } + + MongoPersistentProperty property = entity.getPersistentProperty(sourceField); + return property != null ? property.getFieldName() : sourceField; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java index a2cebbfa85..d9d49fb19a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,31 +17,44 @@ import java.util.Set; +import org.bson.Document; +import org.bson.conversions.Bson; import org.springframework.data.convert.TypeMapper; -import com.mongodb.DBObject; - /** - * Mongo-specific {@link TypeMapper} exposing that {@link DBObject}s might contain a type key. - * + * Mongo-specific {@link TypeMapper} exposing that {@link Document}s might contain a type key. + * * @author Oliver Gierke + * @author Christoph Strobl + * @since 1.0 */ -public interface MongoTypeMapper extends TypeMapper { +public interface MongoTypeMapper extends TypeMapper { /** * Returns whether the given key is the type key. - * + * * @return */ boolean isTypeKey(String key); /** - * Writes type restrictions to the given {@link DBObject}. This usually results in an {@code $in}-clause to be + * Writes type restrictions to the given {@link Document}. This usually results in an {@code $in}-clause to be * generated that restricts the type-key (e.g. {@code _class}) to be in the set of type aliases for the given * {@code restrictedTypes}. - * + * * @param result must not be {@literal null} * @param restrictedTypes must not be {@literal null} */ - void writeTypeRestrictions(DBObject result, Set> restrictedTypes); + void writeTypeRestrictions(Document result, Set> restrictedTypes); + + /** + * Compute the target type for a given source considering {@link org.springframework.data.convert.CustomConversions}. + * + * @param source the source type. + * @return never {@literal null}. + * @since 2.2 + */ + default Class getWriteTargetTypeFor(Class source) { + return source; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoValueConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoValueConverter.java new file mode 100644 index 0000000000..73c05b949b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoValueConverter.java @@ -0,0 +1,26 @@ +/* + * Copyright 2022-2025 the original author or authors. 
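Looking back at the `MongoJsonSchemaMapper` completed above, a before/after sketch may help: `mapSchema` rewrites the `required` and `properties` entries inside `$jsonSchema` from property names to mapped field names. The `firstname`-stored-as-`fn` mapping is an invented assumption for this illustration, not taken from the patch.

```java
import java.util.List;

import org.bson.Document;

// Illustration only: the shape of a $jsonSchema document before and after
// property names have been mapped to field names (firstname -> fn).
public class SchemaMappingShape {

	public static void main(String[] args) {

		Document source = new Document("$jsonSchema",
				new Document("type", "object")
						.append("required", List.of("firstname"))
						.append("properties", new Document("firstname", new Document("type", "string"))));

		// what mapSchema(source, Person.class) would emit if the hypothetical
		// Person type maps its 'firstname' property to the field name 'fn'
		Document expected = new Document("$jsonSchema",
				new Document("type", "object")
						.append("required", List.of("fn"))
						.append("properties", new Document("fn", new Document("type", "string"))));

		System.out.println(source.toJson() + " -> " + expected.toJson());
	}
}
```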
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.data.convert.PropertyValueConverter; + +/** + * MongoDB-specific {@link PropertyValueConverter} extension. + * + * @author Christoph Strobl + * @since 3.4 + */ +public interface MongoValueConverter extends PropertyValueConverter {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java index 6d25f84a20..867a6213d2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java @@ -1,63 +1,88 @@ -/* - * Copyright 2010-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import org.springframework.data.convert.EntityWriter; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.util.TypeInformation; - -import com.mongodb.DBObject; -import com.mongodb.DBRef; - -/** - * A MongoWriter is responsible for converting an object of type T to the native MongoDB representation DBObject. - * - * @param the type of the object to convert to a DBObject - * @author Mark Pollack - * @author Thomas Risberg - * @author Oliver Gierke - */ -public interface MongoWriter extends EntityWriter { - - /** - * Converts the given object into one Mongo will be able to store natively. If the given object can already be stored - * as is, no conversion will happen. - * - * @param obj can be {@literal null}. - * @return - */ - Object convertToMongoType(Object obj); - - /** - * Converts the given object into one Mongo will be able to store natively but retains the type information in case - * the given {@link TypeInformation} differs from the given object type. - * - * @param obj can be {@literal null}. - * @param typeInformation can be {@literal null}. - * @return - */ - Object convertToMongoType(Object obj, TypeInformation typeInformation); - - /** - * Creates a {@link DBRef} to refer to the given object. - * - * @param object the object to create a {@link DBRef} to link to. The object's type has to carry an id attribute. 
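Since the `MongoValueConverter` interface introduced just above carries no methods of its own, a usage sketch may help: implementations inherit the symmetric `read`/`write` pair from `PropertyValueConverter` and can be attached to a property via `@ValueConverter`. The reversing converter below is a toy example, not part of the patch.

```java
import org.springframework.data.mongodb.core.convert.MongoConversionContext;
import org.springframework.data.mongodb.core.convert.MongoValueConverter;

// Toy example: store a String property reversed, restore it on read.
public class ReversingValueConverter implements MongoValueConverter<String, String> {

	@Override
	public String read(String value, MongoConversionContext context) {
		return reverse(value);
	}

	@Override
	public String write(String value, MongoConversionContext context) {
		return reverse(value);
	}

	private String reverse(String value) {
		return new StringBuilder(value).reverse().toString();
	}
}
```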
- * @param referingProperty the client-side property referring to the object which might carry additional metadata for - * the {@link DBRef} object to create. Can be {@literal null}. - * @return will never be {@literal null}. - */ - DBRef toDBRef(Object object, MongoPersistentProperty referingProperty); -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.bson.conversions.Bson; +import org.springframework.data.convert.EntityWriter; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +import com.mongodb.DBRef; + +/** + * A MongoWriter is responsible for converting an object of type T to the native MongoDB representation Document. + * + * @param the type of the object to convert to a Document + * @author Mark Pollack + * @author Thomas Risberg + * @author Oliver Gierke + * @author Christoph Strobl + */ +public interface MongoWriter extends EntityWriter { + + /** + * Converts the given object into one Mongo will be able to store natively. If the given object can already be stored + * as is, no conversion will happen. + * + * @param obj can be {@literal null}. + * @return + */ + @Nullable + default Object convertToMongoType(@Nullable Object obj) { + return convertToMongoType(obj, (TypeInformation) null); + } + + /** + * Converts the given object into one Mongo will be able to store natively but retains the type information in case + * the given {@link TypeInformation} differs from the given object type. + * + * @param obj can be {@literal null}. + * @param typeInformation can be {@literal null}. + * @return + */ + @Nullable + Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation typeInformation); + + default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { + return convertToMongoType(obj, entity.getTypeInformation()); + } + + /** + * Creates a {@link DBRef} to refer to the given object. + * + * @param object the object to create a {@link DBRef} to link to. The object's type has to carry an id attribute. + * @param referringProperty the client-side property referring to the object which might carry additional metadata for + * the {@link DBRef} object to create. Can be {@literal null}. + * @return will never be {@literal null}. + */ + DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referringProperty); + + /** + * Creates a the {@link DocumentPointer} representing the link to another entity. + * + * @param source the object to create a document link to. + * @param referringProperty the client-side property referring to the object which might carry additional metadata for + * the {@link DBRef} object to create. 
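As a brief aside on the `toDocumentPointer` default added to `MongoWriter` in this hunk: it merely wraps `toDBRef` in a lazy `DocumentPointer`, so the `DBRef` is only created when `getPointer()` is called. A small sketch (collection name and id are invented):

```java
import org.bson.types.ObjectId;
import org.springframework.data.mongodb.core.mapping.DocumentPointer;

import com.mongodb.DBRef;

// DocumentPointer is a functional interface; the default method in MongoWriter
// produces the DBRef lazily, only when getPointer() is invoked.
public class DocumentPointerSketch {

	public static void main(String[] args) {

		ObjectId id = new ObjectId();
		DocumentPointer<DBRef> pointer = () -> new DBRef("persons", id);

		// prints roughly: { "$ref" : "persons", "$id" : ... }
		System.out.println(pointer.getPointer());
	}
}
```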
Can be {@literal null}. + * @return will never be {@literal null}. + * @since 3.3 + */ + default DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + return () -> toDBRef(source, referringProperty); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java new file mode 100644 index 0000000000..265257af5c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java @@ -0,0 +1,68 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.List; + +import org.bson.Document; + +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; + +import com.mongodb.DBRef; + +/** + * No-Operation {@link org.springframework.data.mongodb.core.mapping.DBRef} resolver throwing + * {@link UnsupportedOperationException} when attempting to resolve database references. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ +public enum NoOpDbRefResolver implements DbRefResolver { + + INSTANCE; + + @Override + @Nullable + public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, + DbRefProxyHandler proxyHandler) { + + return handle(); + } + + @Override + @Nullable + public Document fetch(DBRef dbRef) { + return handle(); + } + + @Override + public List bulkFetch(List dbRefs) { + return handle(); + } + + private T handle() throws UnsupportedOperationException { + throw new UnsupportedOperationException("DBRef resolution is not supported"); + } + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java index 8c03da5b82..5fefd472c4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,50 +16,61 @@ package org.springframework.data.mongodb.core.convert; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.DBObject; - /** * A path of objects nested into each other. The type allows access to all parent objects currently in creation even * when resolving more nested objects. This allows to avoid re-resolving object instances that are logically equivalent * to already resolved ones. *
                    - * An immutable ordered set of target objects for {@link DBObject} to {@link Object} conversions. Object paths can be - * constructed by the {@link #toObjectPath(Object)} method and extended via {@link #push(Object)}. - * + * An immutable ordered set of target objects for {@link org.bson.Document} to {@link Object} conversions. Object paths + * can be extended via {@link #push(Object, MongoPersistentEntity, Object)}. + * * @author Thomas Darimont * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl * @since 1.6 */ -class ObjectPath { +public class ObjectPath { - public static final ObjectPath ROOT = new ObjectPath(); + static final ObjectPath ROOT = new ObjectPath(); - private final List items; + private final @Nullable ObjectPath parent; + private final @Nullable Object object; + private final @Nullable Object idValue; + private final Lazy collection; private ObjectPath() { - this.items = Collections.emptyList(); + + this.parent = null; + this.object = null; + this.idValue = null; + this.collection = Lazy.empty(); } /** - * Creates a new {@link ObjectPath} from the given parent {@link ObjectPath} by adding the provided - * {@link ObjectPathItem} to it. - * - * @param parent can be {@literal null}. - * @param item + * Creates a new {@link ObjectPath} from the given parent {@link ObjectPath} and adding the provided path values. + * + * @param parent must not be {@literal null}. + * @param collection + * @param idValue + * @param collection */ - private ObjectPath(ObjectPath parent, ObjectPath.ObjectPathItem item) { + private ObjectPath(ObjectPath parent, Object object, @Nullable Object idValue, Lazy collection) { - List items = new ArrayList(parent.items); - items.add(item); - - this.items = Collections.unmodifiableList(items); + this.parent = parent; + this.object = object; + this.idValue = idValue; + this.collection = collection; } /** @@ -68,44 +79,44 @@ private ObjectPath(ObjectPath parent, ObjectPath.ObjectPathItem item) { * @param object must not be {@literal null}. * @param entity must not be {@literal null}. * @param id must not be {@literal null}. - * @return + * @return new instance of {@link ObjectPath}. */ - public ObjectPath push(Object object, MongoPersistentEntity entity, Object id) { + ObjectPath push(Object object, MongoPersistentEntity entity, @Nullable Object id) { - Assert.notNull(object, "Object must not be null!"); - Assert.notNull(entity, "MongoPersistentEntity must not be null!"); + Assert.notNull(object, "Object must not be null"); + Assert.notNull(entity, "MongoPersistentEntity must not be null"); - ObjectPathItem item = new ObjectPathItem(object, id, entity.getCollection()); - return new ObjectPath(this, item); + return new ObjectPath(this, object, id, Lazy.of(entity::getCollection)); } /** - * Returns the object with the given id and stored in the given collection if it's contained in the {@link ObjectPath} - * . - * + * Get the object with given {@literal id}, stored in the {@literal collection} that is assignable to the given + * {@literal type} or {@literal null} if no match found. + * * @param id must not be {@literal null}. * @param collection must not be {@literal null} or empty. - * @return + * @param type must not be {@literal null}. + * @return {@literal null} when no match found. 
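To make the data-structure change in this hunk concrete: the reworked `ObjectPath` no longer copies a list of `ObjectPathItem`s on every `push`, but links each path element to its parent, and `getPathItem` walks that chain. Below is a simplified, self-contained stand-in for illustration only; the real class additionally resolves the collection name lazily and is package-private.

```java
// Simplified stand-in for the reworked ObjectPath: each push links a new node
// to its parent instead of copying a list, and lookups walk the parent chain.
final class PathNode {

	final PathNode parent; // null for the root
	final Object object;
	final Object idValue;
	final String collection;

	PathNode(PathNode parent, Object object, Object idValue, String collection) {
		this.parent = parent;
		this.object = object;
		this.idValue = idValue;
		this.collection = collection;
	}

	<T> T getPathItem(Object id, String collection, Class<T> type) {

		for (PathNode current = this; current != null; current = current.parent) {

			if (current.object != null && current.idValue != null //
					&& collection.equals(current.collection) && id.equals(current.idValue)
					&& type.isInstance(current.object)) {
				return type.cast(current.object);
			}
		}
		return null;
	}
}
```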
+ * @since 2.0 */ - public Object getPathItem(Object id, String collection) { + @Nullable + T getPathItem(Object id, String collection, Class type) { - Assert.notNull(id, "Id must not be null!"); - Assert.hasText(collection, "Collection name must not be null!"); + Assert.notNull(id, "Id must not be null"); + Assert.hasText(collection, "Collection name must not be null"); + Assert.notNull(type, "Type must not be null"); - for (ObjectPathItem item : items) { + for (ObjectPath current = this; current != null; current = current.parent) { - Object object = item.getObject(); + Object object = current.getObject(); - if (object == null) { + if (object == null || current.getIdValue() == null) { continue; } - if (item.getIdValue() == null) { - continue; - } - - if (collection.equals(item.getCollection()) && id.equals(item.getIdValue())) { - return object; + if (collection.equals(current.getCollection()) && id.equals(current.getIdValue()) + && ClassUtils.isAssignable(type, object.getClass())) { + return type.cast(object); } } @@ -114,69 +125,41 @@ public Object getPathItem(Object id, String collection) { /** * Returns the current object of the {@link ObjectPath} or {@literal null} if the path is empty. - * + * * @return */ - public Object getCurrentObject() { - return items.isEmpty() ? null : items.get(items.size() - 1).getObject(); + @Nullable + Object getCurrentObject() { + return getObject(); + } + + @Nullable + private Object getObject() { + return object; + } + + @Nullable + private Object getIdValue() { + return idValue; + } + + private String getCollection() { + return collection.get(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { - if (items.isEmpty()) { + if (parent == null) { return "[empty]"; } - List strings = new ArrayList(items.size()); + List strings = new ArrayList<>(); - for (ObjectPathItem item : items) { - strings.add(item.object.toString()); + for (ObjectPath current = this; current != null; current = current.parent) { + strings.add(ObjectUtils.nullSafeToString(current.getObject())); } return StringUtils.collectionToDelimitedString(strings, " -> "); } - - /** - * An item in an {@link ObjectPath}. - * - * @author Thomas Darimont - * @author Oliver Gierke - */ - private static class ObjectPathItem { - - private final Object object; - private final Object idValue; - private final String collection; - - /** - * Creates a new {@link ObjectPathItem}. - * - * @param object - * @param idValue - * @param collection - */ - ObjectPathItem(Object object, Object idValue, String collection) { - - this.object = object; - this.idValue = idValue; - this.collection = collection; - } - - public Object getObject() { - return object; - } - - public Object getIdValue() { - return idValue; - } - - public String getCollection() { - return collection; - } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index f439254475..debaf2f127 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,62 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; +import java.util.Collection; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.Set; - +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonRegularExpression; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.conversions.Bson; import org.bson.types.ObjectId; -import org.springframework.core.convert.ConversionException; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Reference; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.domain.Example; import org.springframework.data.mapping.Association; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyPath; import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.PropertyReferenceException; import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.PersistentPropertyPath; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.model.PropertyValueProvider; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationExpression; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.QueryOperatorContext; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DotPath; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; import 
org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; @@ -52,69 +79,84 @@ /** * A helper class to encapsulate any modifications of a Query object before it gets submitted to the database. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Patryk Wasik * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author David Julia + * @author Divya Srivastava + * @author Gyungrai Wang + * @author Ross Lawley */ public class QueryMapper { - private static final List DEFAULT_ID_NAMES = Arrays.asList("id", "_id"); - private static final DBObject META_TEXT_SCORE = new BasicDBObject("$meta", "textScore"); - static final ClassTypeInformation NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class); + protected static final Log LOGGER = LogFactory.getLog(QueryMapper.class); + + private static final List DEFAULT_ID_NAMES = Arrays.asList("id", FieldName.ID.name()); + private static final Document META_TEXT_SCORE = new Document("$meta", "textScore"); + static final TypeInformation NESTED_DOCUMENT = TypeInformation.of(NestedDocument.class); private enum MetaMapping { - FORCE, WHEN_PRESENT, IGNORE; + FORCE, WHEN_PRESENT, IGNORE } private final ConversionService conversionService; private final MongoConverter converter; private final MappingContext, MongoPersistentProperty> mappingContext; private final MongoExampleMapper exampleMapper; + private final MongoJsonSchemaMapper schemaMapper; /** * Creates a new {@link QueryMapper} with the given {@link MongoConverter}. - * + * * @param converter must not be {@literal null}. */ public QueryMapper(MongoConverter converter) { - Assert.notNull(converter); + Assert.notNull(converter, "MongoConverter must not be null"); this.conversionService = converter.getConversionService(); this.converter = converter; this.mappingContext = converter.getMappingContext(); this.exampleMapper = new MongoExampleMapper(converter); + this.schemaMapper = new MongoJsonSchemaMapper(converter); + } + + public Document getMappedObject(Bson query, Optional> entity) { + return getMappedObject(query, entity.orElse(null)); } /** - * Replaces the property keys used in the given {@link DBObject} with the appropriate keys by using the + * Replaces the property keys used in the given {@link Document} with the appropriate keys by using the * {@link PersistentEntity} metadata. - * + * * @param query must not be {@literal null}. * @param entity can be {@literal null}. 
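For orientation, an aside on how the `QueryMapper` under change here is typically wired and what `getMappedObject` does: property names (and the conventional `id` alias) are rewritten to their stored field representation. The `Person` type and its `@Field("fn")` alias below are invented assumptions for this sketch.

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class QueryMapperSketch {

	static class Person {
		String id;
		@Field("fn") String firstname;
	}

	public static void main(String[] args) {

		MongoMappingContext context = new MongoMappingContext();
		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context);

		QueryMapper mapper = new QueryMapper(converter);

		// property names are rewritten to field names, 'id' maps to '_id'
		Document mapped = mapper.getMappedObject(new Document("firstname", "Dave").append("id", "4711"),
				context.getRequiredPersistentEntity(Person.class));

		System.out.println(mapped.toJson()); // { "fn" : "Dave", "_id" : "4711" }
	}
}
```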
* @return */ - @SuppressWarnings("deprecation") - public DBObject getMappedObject(DBObject query, MongoPersistentEntity entity) { + public Document getMappedObject(Bson query, @Nullable MongoPersistentEntity entity) { if (isNestedKeyword(query)) { return getMappedKeyword(new Keyword(query), entity); } - DBObject result = new BasicDBObject(); + Document result = new Document(); - for (String key : query.keySet()) { + for (String key : BsonUtils.asMap(query).keySet()) { // TODO: remove one once QueryMapper can work with Query instances directly if (Query.isRestrictedTypeKey(key)) { - @SuppressWarnings("unchecked") - Set> restrictedTypes = (Set>) query.get(key); + Set> restrictedTypes = BsonUtils.get(query, key); this.converter.getTypeMapper().writeTypeRestrictions(result, restrictedTypes); + continue; + } + if (isTypeKey(key)) { + result.put(key, BsonUtils.get(query, key)); continue; } @@ -126,16 +168,38 @@ public DBObject getMappedObject(DBObject query, MongoPersistentEntity entity) try { Field field = createPropertyField(entity, key, mappingContext); - Entry entry = getMappedObjectForField(field, query.get(key)); - result.put(entry.getKey(), entry.getValue()); + + // TODO: move to dedicated method + if (field.getProperty() != null && field.getProperty().isUnwrapped()) { + + Object theNestedObject = BsonUtils.get(query, key); + Document mappedValue = (Document) getMappedValue(field, theNestedObject); + if (!StringUtils.hasText(field.getMappedKey())) { + result.putAll(mappedValue); + } else { + result.put(field.getMappedKey(), mappedValue); + } + } else { + + Entry entry = getMappedObjectForField(field, BsonUtils.get(query, key)); + + /* + * Note to future self: + * ---- + * This could be the place to plug in a query rewrite mechanism that allows to transform comparison + * against field that has a dot in its name (like 'a.b') into an $expr so that { "a.b" : "some value" } + * eventually becomes { $expr : { $eq : [ { $getField : "a.b" }, "some value" ] } } + * ---- + */ + result.put(entry.getKey(), entry.getValue()); + } } catch (InvalidPersistentPropertyPath invalidPathException) { // in case the object has not already been mapped - if (!(query.get(key) instanceof DBObject)) { + if (!(BsonUtils.get(query, key) instanceof Document)) { throw invalidPathException; } - - result.put(key, query.get(key)); + result.put(key, BsonUtils.get(query, key)); } } @@ -145,61 +209,142 @@ public DBObject getMappedObject(DBObject query, MongoPersistentEntity entity) /** * Maps fields used for sorting to the {@link MongoPersistentEntity}s properties.
                    * Also converts properties to their {@code $meta} representation if present. - * + * * @param sortObject * @param entity * @return * @since 1.6 */ - public DBObject getMappedSort(DBObject sortObject, MongoPersistentEntity entity) { + public Document getMappedSort(Document sortObject, @Nullable MongoPersistentEntity entity) { - if (sortObject == null) { - return null; + Assert.notNull(sortObject, "SortObject must not be null"); + + if (sortObject.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; } - DBObject mappedSort = getMappedObject(sortObject, entity); - mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); - return mappedSort; + Document mappedSort = mapFieldsToPropertyNames(sortObject, entity); + return mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); } /** * Maps fields to retrieve to the {@link MongoPersistentEntity}s properties.
                    - * Also onverts and potentially adds missing property {@code $meta} representation. - * - * @param fieldsObject - * @param entity + * Also converts and potentially adds missing property {@code $meta} representation. + * + * @param fieldsObject must not be {@literal null}. + * @param entity can be {@literal null}. * @return * @since 1.6 */ - public DBObject getMappedFields(DBObject fieldsObject, MongoPersistentEntity entity) { + public Document getMappedFields(Document fieldsObject, @Nullable MongoPersistentEntity entity) { + + Assert.notNull(fieldsObject, "FieldsObject must not be null"); - DBObject mappedFields = fieldsObject != null ? getMappedObject(fieldsObject, entity) : new BasicDBObject(); - mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); - return mappedFields.keySet().isEmpty() ? null : mappedFields; + Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity); + return mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); } - private void mapMetaAttributes(DBObject source, MongoPersistentEntity entity, MetaMapping metaMapping) { + private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity entity) { - if (entity == null || source == null) { - return; + if (fields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + Document target = new Document(); + + BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> { + + Field field = createPropertyField(entity, k, mappingContext); + if (field.getProperty() != null && field.getProperty().isUnwrapped()) { + return; + } + + target.put(field.getMappedKey(), v); + }); + + return target; + } + + /** + * Adds missing {@code $meta} representation if required. + * + * @param source must not be {@literal null}. + * @param entity can be {@literal null}. + * @return never {@literal null}. 
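A short sketch of the `$meta` handling being reworked here: a text-score property (the `score` name below is invented) is forced into the fields projection as its `$meta` representation when `MetaMapping.FORCE` applies.

```java
import org.bson.Document;

// Shape sketch: what mapMetaAttributes(...) contributes for a text-score
// property named 'score' when MetaMapping.FORCE is in effect.
public class MetaAttributeShape {

	public static void main(String[] args) {

		Document fields = new Document("firstname", 1);

		// the mapper appends the $meta projection for the text score property
		fields.put("score", new Document("$meta", "textScore"));

		System.out.println(fields.toJson()); // { "firstname" : 1, "score" : { "$meta" : "textScore" } }
	}
}
```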
+ * @since 3.4 + */ + public Document addMetaAttributes(Document source, @Nullable MongoPersistentEntity entity) { + return mapMetaAttributes(source, entity, MetaMapping.FORCE); + } + + private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, + MetaMapping metaMapping) { + + if (entity == null) { + return source; } if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) { + + if (source == BsonUtils.EMPTY_DOCUMENT) { + source = new Document(); + } + MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty(); if (MetaMapping.FORCE.equals(metaMapping) - || (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsField(textScoreProperty.getFieldName()))) { + || (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsKey(textScoreProperty.getFieldName()))) { source.putAll(getMappedTextScoreField(textScoreProperty)); } } + + return source; } - private DBObject getMappedTextScoreField(MongoPersistentProperty property) { - return new BasicDBObject(property.getFieldName(), META_TEXT_SCORE); + private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity entity) { + + if (fieldsObject.isEmpty() || entity == null) { + return fieldsObject; + } + + Document target = new Document(); + + for (Entry field : fieldsObject.entrySet()) { + + try { + + PropertyPath path = PropertyPath.from(field.getKey(), entity.getTypeInformation()); + PersistentPropertyPath persistentPropertyPath = mappingContext + .getPersistentPropertyPath(path); + MongoPersistentProperty property = mappingContext.getPersistentPropertyPath(path).getLeafProperty(); + + if (property.isUnwrapped() && property.isEntity()) { + + MongoPersistentEntity unwrappedEntity = mappingContext.getRequiredPersistentEntity(property); + + for (MongoPersistentProperty unwrappedProperty : unwrappedEntity) { + + DotPath dotPath = DotPath.from(persistentPropertyPath.toDotPath()).append(unwrappedProperty.getName()); + target.put(dotPath.toString(), field.getValue()); + } + + } else { + target.put(field.getKey(), field.getValue()); + } + } catch (RuntimeException e) { + target.put(field.getKey(), field.getValue()); + } + + } + return target; + } + + private Document getMappedTextScoreField(MongoPersistentProperty property) { + return new Document(property.getFieldName(), META_TEXT_SCORE); } /** * Extracts the mapped object value for given field out of rawValue taking nested {@link Keyword}s into account - * + * * @param field * @param rawValue * @return @@ -209,8 +354,12 @@ protected Entry getMappedObjectForField(Field field, Object rawV String key = field.getMappedKey(); Object value; + if (rawValue instanceof MongoExpression mongoExpression) { + return createMapEntry(key, getMappedObject(mongoExpression.toDocument(), field.getEntity())); + } + if (isNestedKeyword(rawValue) && !field.isIdField()) { - Keyword keyword = new Keyword((DBObject) rawValue); + Keyword keyword = new Keyword((Document) rawValue); value = getMappedKeyword(field, keyword); } else { value = getMappedValue(field, rawValue); @@ -225,105 +374,127 @@ protected Entry getMappedObjectForField(Field field, Object rawV * @param mappingContext * @return */ - protected Field createPropertyField(MongoPersistentEntity entity, String key, + protected Field createPropertyField(@Nullable MongoPersistentEntity entity, String key, MappingContext, MongoPersistentProperty> mappingContext) { - return entity == null ? 
new Field(key) : new MetadataBackedField(key, entity, mappingContext); + + if (entity == null) { + return new Field(key); + } + + if (FieldName.ID.name().equals(key)) { + return new MetadataBackedField(key, entity, mappingContext, entity.getIdProperty()); + } + + return new MetadataBackedField(key, entity, mappingContext); } /** - * Returns the given {@link DBObject} representing a keyword by mapping the keyword's value. - * - * @param keyword the {@link DBObject} representing a keyword (e.g. {@code $ne : … } ) + * Returns the given {@link Document} representing a keyword by mapping the keyword's value. + * + * @param keyword the {@link Document} representing a keyword (e.g. {@code $ne : … } ) * @param entity * @return */ - protected DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity entity) { + protected Document getMappedKeyword(Keyword keyword, @Nullable MongoPersistentEntity entity) { // $or/$nor if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) { Iterable conditions = keyword.getValue(); - BasicDBList newConditions = new BasicDBList(); + List newConditions = conditions instanceof Collection collection ? new ArrayList<>(collection.size()) + : new ArrayList<>(); for (Object condition : conditions) { - newConditions.add(isDBObject(condition) ? getMappedObject((DBObject) condition, entity) - : convertSimpleOrDBObject(condition, entity)); + newConditions.add(isDocument(condition) ? getMappedObject((Document) condition, entity) + : convertSimpleOrDocument(condition, entity)); } - return new BasicDBObject(keyword.getKey(), newConditions); + return new Document(keyword.getKey(), newConditions); } if (keyword.isSample()) { - return exampleMapper.getMappedExample(keyword.> getValue(), entity); + return exampleMapper.getMappedExample(keyword.getValue(), entity); } - return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity)); + if (keyword.isJsonSchema()) { + return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()), + entity != null ? entity.getType() : Object.class); + } + + return new Document(keyword.getKey(), convertSimpleOrDocument(keyword.getValue(), entity)); } /** * Returns the mapped keyword considered defining a criteria for the given property. - * + * * @param property * @param keyword * @return */ - protected DBObject getMappedKeyword(Field property, Keyword keyword) { + protected Document getMappedKeyword(Field property, Keyword keyword) { - boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists(); + boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists() && keyword.mayHoldDbRef(); Object value = keyword.getValue(); Object convertedValue = needsAssociationConversion ? convertAssociation(value, property) : getMappedValue(property.with(keyword.getKey()), value); - return new BasicDBObject(keyword.key, convertedValue); + if (keyword.isSample() && convertedValue instanceof Document document) { + return document; + } + + return new Document(keyword.key, convertedValue); } /** * Returns the mapped value for the given source object assuming it's a value for the given * {@link MongoPersistentProperty}. 
- * - * @param value the source object to be mapped - * @param property the property the value is a value for - * @param newKey the key the value will be bound to eventually + * + * @param documentField the key the value will be bound to eventually + * @param sourceValue the source object to be mapped * @return */ - protected Object getMappedValue(Field documentField, Object value) { + @Nullable + @SuppressWarnings("unchecked") + protected Object getMappedValue(Field documentField, Object sourceValue) { - if (documentField.isIdField()) { + Object value = applyFieldTargetTypeHintToValue(documentField, sourceValue); - if (isDBObject(value)) { - DBObject valueDbo = (DBObject) value; - DBObject resultDbo = new BasicDBObject(valueDbo.toMap()); + if (documentField.getProperty() != null + && converter.getCustomConversions().hasValueConverter(documentField.getProperty())) { - if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) { - String inKey = valueDbo.containsField("$in") ? "$in" : "$nin"; - List ids = new ArrayList(); - for (Object id : (Iterable) valueDbo.get(inKey)) { - ids.add(convertId(id)); - } - resultDbo.put(inKey, ids.toArray(new Object[ids.size()])); - } else if (valueDbo.containsField("$ne")) { - resultDbo.put("$ne", convertId(valueDbo.get("$ne"))); - } else { - return getMappedObject(resultDbo, null); - } + PropertyValueConverter> valueConverter = converter + .getCustomConversions().getPropertyValueConversions().getValueConverter(documentField.getProperty()); - return resultDbo; + return convertValue(documentField, sourceValue, value, valueConverter); + } - } else { - return convertId(value); - } + if (documentField.isIdField() && !documentField.isAssociation()) { + return convertIdField(documentField, value); + } + + if (value == null) { + return null; } if (isNestedKeyword(value)) { - return getMappedKeyword(new Keyword((DBObject) value), null); + return getMappedKeyword(new Keyword((Bson) value), documentField.getPropertyEntity()); } if (isAssociationConversionNecessary(documentField, value)) { return convertAssociation(value, documentField); } - return convertSimpleOrDBObject(value, documentField.getPropertyEntity()); + return convertSimpleOrDocument(value, documentField.getPropertyEntity()); + } + + private boolean isIdField(Field documentField) { + return documentField.getProperty() != null + && documentField.getProperty().getOwner().isIdProperty(documentField.getProperty()); + } + + private Class getIdTypeForField(Field documentField) { + return isIdField(documentField) ? documentField.getProperty().getFieldType() : ObjectId.class; } /** @@ -331,16 +502,16 @@ protected Object getMappedValue(Field documentField, Object value) { * requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the * type of the given value is compatible with the type of the given document field in order to deal with potential * query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field. - * + * * @param documentField must not be {@literal null}. 
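Relatedly, the id-conversion path introduced above (`convertIdField`, `getIdTypeForField`) converts strings that form a valid `ObjectId` and leaves other values untouched, applying the rule element-wise inside `$in`/`$nin`/`$all` lists. A standalone sketch of that rule, not the patched method itself:

```java
import org.bson.types.ObjectId;

// Sketch of the id conversion rule: strings that form a valid ObjectId are
// converted; anything else is kept as-is.
public class IdConversionSketch {

	static Object convertId(Object id) {
		return id instanceof String s && ObjectId.isValid(s) ? new ObjectId(s) : id;
	}

	public static void main(String[] args) {
		System.out.println(convertId("5f1d3b3e8f1b2c3d4e5f6a7b")); // becomes an ObjectId
		System.out.println(convertId("4711")); // stays a String
	}
}
```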
* @param value * @return */ - protected boolean isAssociationConversionNecessary(Field documentField, Object value) { + protected boolean isAssociationConversionNecessary(Field documentField, @Nullable Object value) { - Assert.notNull(documentField, "Document field must not be null!"); + Assert.notNull(documentField, "Document field must not be null"); - if (value == null) { + if (value == null || documentField.getProperty() == null) { return false; } @@ -348,33 +519,77 @@ protected boolean isAssociationConversionNecessary(Field documentField, Object v return false; } - Class type = value.getClass(); + Class type = value.getClass(); MongoPersistentProperty property = documentField.getProperty(); if (property.getActualType().isAssignableFrom(type)) { return true; } + if (property.isDocumentReference()) { + return true; + } + MongoPersistentEntity entity = documentField.getPropertyEntity(); return entity.hasIdProperty() - && (type.equals(DBRef.class) || entity.getIdProperty().getActualType().isAssignableFrom(type)); + && (type.equals(DBRef.class) || entity.getRequiredIdProperty().getActualType().isAssignableFrom(type)); } /** - * Retriggers mapping if the given source is a {@link DBObject} or simply invokes the - * + * Retriggers mapping if the given source is a {@link Document} or simply invokes the + * * @param source * @param entity * @return */ - protected Object convertSimpleOrDBObject(Object source, MongoPersistentEntity entity) { + @Nullable + protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity entity) { + + if (source instanceof Example example) { + return exampleMapper.getMappedExample(example, entity); + } + + if (source instanceof AggregationExpression age) { + return entity == null ? age.toDocument() : // + age.toDocument(new RelaxedTypeBasedAggregationOperationContext(entity.getType(), this.mappingContext, this)); + } - if (source instanceof BasicDBList) { + if (source instanceof MongoExpression exr) { + return exr.toDocument(); + } + + if (source instanceof List) { return delegateConvertToMongoType(source, entity); } + if (isDocument(source)) { + return getMappedObject((Document) source, entity); + } + if (isDBObject(source)) { - return getMappedObject((DBObject) source, entity); + return getMappedObject((BasicDBObject) source, entity); + } + + if (source instanceof BsonValue) { + return source; + } + + if (source instanceof Map sourceMap) { + + Map map = new LinkedHashMap<>(sourceMap.size(), 1F); + + for (Entry entry : sourceMap.entrySet()) { + + String key = ObjectUtils.nullSafeToString(converter.convertToMongoType(entry.getKey())); + + if (entry.getValue() instanceof Document document) { + map.put(key, getMappedObject(document, entity)); + } else { + map.put(key, delegateConvertToMongoType(entry.getValue(), entity)); + } + } + + return map; } return delegateConvertToMongoType(source, entity); @@ -383,96 +598,223 @@ protected Object convertSimpleOrDBObject(Object source, MongoPersistentEntity /** * Converts the given source Object to a mongo type with the type information of the original source type omitted. * Subclasses may overwrite this method to retain the type information of the source type on the resulting mongo type. 
- * + * * @param source * @param entity * @return the converted mongo type or null if source is null */ - protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity entity) { + @Nullable + protected Object delegateConvertToMongoType(Object source, @Nullable MongoPersistentEntity entity) { + + if (entity != null && entity.isUnwrapped()) { + return converter.convertToMongoType(source, entity); + } + return converter.convertToMongoType(source, entity == null ? null : entity.getTypeInformation()); } + @Nullable protected Object convertAssociation(Object source, Field field) { - return convertAssociation(source, field.getProperty()); + Object value = convertAssociation(source, field.getProperty()); + if (value != null && field.isIdField() && field.getFieldType() != value.getClass()) { + return convertId(value, field.getFieldType()); + } + return value; } /** * Converts the given source assuming it's actually an association to another object. - * + * * @param source * @param property * @return */ - protected Object convertAssociation(Object source, MongoPersistentProperty property) { + @Nullable + protected Object convertAssociation(@Nullable Object source, @Nullable MongoPersistentProperty property) { - if (property == null || source == null || source instanceof DBObject) { + if (property == null || source == null || source instanceof Document || source instanceof DBObject) { return source; } - if (source instanceof DBRef) { + if (source instanceof DBRef ref) { + + Object id = convertId(ref.getId(), + property.getOwner().isIdProperty(property) ? property.getFieldType() : ObjectId.class); - DBRef ref = (DBRef) source; - return new DBRef(ref.getCollectionName(), convertId(ref.getId())); + if (StringUtils.hasText(ref.getDatabaseName())) { + return new DBRef(ref.getDatabaseName(), ref.getCollectionName(), id); + } else { + return new DBRef(ref.getCollectionName(), id); + } } - if (source instanceof Iterable) { + if (source instanceof Iterable iterable) { BasicDBList result = new BasicDBList(); - for (Object element : (Iterable) source) { - result.add(createDbRefFor(element, property)); + for (Object element : iterable) { + result.add(createReferenceFor(element, property)); } return result; } - if (property.isMap()) { - BasicDBObject result = new BasicDBObject(); - DBObject dbObject = (DBObject) source; + if (property.isMap() && source instanceof Document dbObject) { + Document result = new Document(); for (String key : dbObject.keySet()) { - result.put(key, createDbRefFor(dbObject.get(key), property)); + result.put(key, createReferenceFor(dbObject.get(key), property)); } return result; } - return createDbRefFor(source, property); + return createReferenceFor(source, property); + } + + @Nullable + private Object convertValue(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter) { + + MongoPersistentProperty property = documentField.getProperty(); + + OperatorContext criteriaContext = new QueryOperatorContext( + isKeyword(documentField.name) ? 
documentField.name : "$eq", property.getFieldName()); + MongoConversionContext conversionContext; + if (valueConverter instanceof MongoConversionContext mcc) { + conversionContext = mcc.forOperator(criteriaContext); + } else { + conversionContext = new MongoConversionContext(NoPropertyPropertyValueProvider.INSTANCE, property, converter, + criteriaContext); + } + + return convertValueWithConversionContext(documentField, sourceValue, value, valueConverter, conversionContext); + } + + @Nullable + protected Object convertValueWithConversionContext(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter, + MongoConversionContext conversionContext) { + + MongoPersistentProperty property = documentField.getProperty(); + /* might be an $in clause with multiple entries */ + if (property != null && !property.isCollectionLike() && sourceValue instanceof Collection collection) { + + if (collection.isEmpty()) { + return collection; + } + + List converted = new ArrayList<>(collection.size()); + for (Object o : collection) { + converted.add(valueConverter.write(o, conversionContext)); + } + + return converted; + } + + if (property != null && !documentField.getProperty().isMap() && sourceValue instanceof Document document) { + + return BsonUtils.mapValues(document, (key, val) -> { + if (isKeyword(key)) { + return convertValueWithConversionContext(documentField, val, val, valueConverter, conversionContext + .forOperator(new QueryOperatorContext(key, conversionContext.getOperatorContext().path()))); + } + return val; + }); + } + + return valueConverter.write(value, conversionContext); + } + + @Nullable + @SuppressWarnings("unchecked") + private Object convertIdField(Field documentField, Object source) { + + Object value = source; + if (isDBObject(source)) { + DBObject valueDbo = (DBObject) source; + value = new Document(valueDbo.toMap()); + } + + if (!isDocument(value)) { + return convertId(value, getIdTypeForField(documentField)); + } + + Document valueDbo = (Document) value; + Document resultDbo = new Document(valueDbo); + + for (Entry entry : valueDbo.entrySet()) { + + String key = entry.getKey(); + if ("$nin".equals(key) || "$in".equals(key) || "$all".equals(key)) { + List ids = new ArrayList<>(); + for (Object id : (Iterable) valueDbo.get(key)) { + ids.add(convertId(id, getIdTypeForField(documentField))); + } + resultDbo.put(key, ids); + } else if (isKeyword(key)) { + resultDbo.put(key, convertIdField(documentField, entry.getValue())); + } else { + if (documentField.getProperty() != null && documentField.getProperty().isEntity()) { + Field propertyField = createPropertyField(documentField.getPropertyEntity(), key, mappingContext); + resultDbo.put(key, getMappedValue(propertyField, entry.getValue())); + } else { + resultDbo.put(key, getMappedValue(documentField, entry.getValue())); + } + } + } + + return resultDbo; + } + + /** + * Checks whether the given value is a {@link Document}. + * + * @param value can be {@literal null}. + * @return + */ + protected final boolean isDocument(@Nullable Object value) { + return value instanceof Document; } /** * Checks whether the given value is a {@link DBObject}. - * + * * @param value can be {@literal null}. * @return */ - protected final boolean isDBObject(Object value) { + protected final boolean isDBObject(@Nullable Object value) { return value instanceof DBObject; } /** * Creates a new {@link Entry} for the given {@link Field} with the given value. - * + * * @param field must not be {@literal null}. 
* @param value can be {@literal null}. * @return */ - protected final Entry createMapEntry(Field field, Object value) { + protected final Entry createMapEntry(Field field, @Nullable Object value) { return createMapEntry(field.getMappedKey(), value); } /** * Creates a new {@link Entry} with the given key and value. - * + * * @param key must not be {@literal null} or empty. - * @param value can be {@literal null} + * @param value can be {@literal null}. * @return */ - private Entry createMapEntry(String key, Object value) { + private Entry createMapEntry(String key, @Nullable Object value) { - Assert.hasText(key, "Key must not be null or empty!"); - return Collections.singletonMap(key, value).entrySet().iterator().next(); + Assert.hasText(key, "Key must not be null or empty"); + return new AbstractMap.SimpleEntry<>(key, value); } - private DBRef createDbRefFor(Object source, MongoPersistentProperty property) { + private Object createReferenceFor(Object source, MongoPersistentProperty property) { + + if (source instanceof DBRef dbRef) { + return dbRef; + } - if (source instanceof DBRef) { - return (DBRef) source; + if (property != null && (property.isDocumentReference() + || (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) { + return converter.toDocumentPointer(source, property).getPointer(); } return converter.toDBRef(source, property); @@ -480,54 +822,71 @@ private DBRef createDbRefFor(Object source, MongoPersistentProperty property) { /** * Converts the given raw id value into either {@link ObjectId} or {@link String}. - * + * * @param id * @return + * @since 2.2 */ - public Object convertId(Object id) { + @Nullable + public Object convertId(@Nullable Object id) { + return convertId(id, ObjectId.class); + } - if (id == null) { - return null; - } + /** + * Converts the given raw id value into either {@link ObjectId} or {@link Class targetType}. + * + * @param id can be {@literal null}. + * @param targetType + * @return the converted {@literal id} or {@literal null} if the source was already {@literal null}. + * @since 2.2 + */ + @Nullable + public Object convertId(@Nullable Object id, Class targetType) { - if (id instanceof String) { - return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id; + if (Quirks.skipConversion(id)) { + return id; } - try { - return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class) - : delegateConvertToMongoType(id, null); - } catch (ConversionException o_O) { - return delegateConvertToMongoType(id, null); - } + return converter.convertId(id, targetType); } /** - * Returns whether the given {@link Object} is a keyword, i.e. if it's a {@link DBObject} with a keyword key. - * + * Returns whether the given {@link Object} is a keyword, i.e. if it's a {@link Document} with a keyword key. + * * @param candidate * @return */ - protected boolean isNestedKeyword(Object candidate) { + protected boolean isNestedKeyword(@Nullable Object candidate) { - if (!(candidate instanceof BasicDBObject)) { + if (!(candidate instanceof Document)) { return false; } - BasicDBObject dbObject = (BasicDBObject) candidate; - Set keys = dbObject.keySet(); + Map map = BsonUtils.asMap((Bson) candidate); - if (keys.size() != 1) { + if (map.size() != 1) { return false; } - return isKeyword(keys.iterator().next().toString()); + return isKeyword(map.entrySet().iterator().next().getKey()); + } + + /** + * Returns whether the given {@link String} is the type key. 
+ * + * @param key + * @return + * @see MongoTypeMapper#isTypeKey(String) + * @since 2.2 + */ + protected boolean isTypeKey(String key) { + return converter.getTypeMapper().isTypeKey(key); } /** * Returns whether the given {@link String} is a MongoDB keyword. The default implementation will check against the - * set of registered keywords returned by {@link #getKeywords()}. - * + * set of registered keywords. + * * @param candidate * @return */ @@ -535,35 +894,75 @@ protected boolean isKeyword(String candidate) { return candidate.startsWith("$"); } + /** + * Convert the given field value into its desired + * {@link org.springframework.data.mongodb.core.mapping.Field#targetType() target type} before applying further + * conversions. In case of a {@link Collection} (used eg. for {@code $in} queries) the individual values will be + * converted one by one. + * + * @param documentField the field and its metadata + * @param value the actual value. Can be {@literal null}. + * @return the potentially converted target value. + */ + @Nullable + private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) { + + if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget() + || value instanceof Document || value instanceof DBObject || Quirks.skipConversion(value)) { + return value; + } + + if (!conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) { + return value; + } + + if (value instanceof Collection source) { + + Collection converted = new ArrayList<>(source.size()); + + for (Object o : source) { + converted.add(conversionService.convert(o, documentField.getProperty().getFieldType())); + } + + return converted; + } + + return conversionService.convert(value, documentField.getProperty().getFieldType()); + } + /** * Value object to capture a query keyword representation. - * + * * @author Oliver Gierke + * @author Christoph Strobl */ static class Keyword { - private static final String N_OR_PATTERN = "\\$.*or"; + private static final Set NON_DBREF_CONVERTING_KEYWORDS = Set.of("$", "$size", "$slice", "$gt", "$lt"); private final String key; private final Object value; - public Keyword(DBObject source, String key) { + public Keyword(Bson source, String key) { this.key = key; - this.value = source.get(key); + this.value = BsonUtils.get(source, key); } - public Keyword(DBObject dbObject) { + public Keyword(Bson bson) { + + Map map = BsonUtils.asMap(bson); + Assert.isTrue(map.size() == 1, "Can only use a single value Document"); - Set keys = dbObject.keySet(); - Assert.isTrue(keys.size() == 1, "Can only use a single value DBObject!"); + Set> entries = map.entrySet(); + Entry entry = entries.iterator().next(); - this.key = keys.iterator().next(); - this.value = dbObject.get(key); + this.key = entry.getKey(); + this.value = entry.getValue(); } /** * Returns whether the current keyword is the {@code $exists} keyword. - * + * * @return */ public boolean isExists() { @@ -571,12 +970,12 @@ public boolean isExists() { } public boolean isOrOrNor() { - return key.matches(N_OR_PATTERN); + return key.equalsIgnoreCase("$or") || key.equalsIgnoreCase("$nor"); } /** * Returns whether the current keyword is the {@code $geometry} keyword. - * + * * @return * @since 1.8 */ @@ -585,13 +984,13 @@ public boolean isGeometry() { } /** - * Returns wheter the current keyword indicates a sample object. - * + * Returns whether the current keyword indicates a {@link Example} object. 
+ * * @return * @since 1.8 */ public boolean isSample() { - return "$sample".equalsIgnoreCase(key); + return "$example".equalsIgnoreCase(key); } public boolean hasIterableValue() { @@ -606,33 +1005,51 @@ public String getKey() { public T getValue() { return (T) value; } + + /** + * @return {@literal true} if key may hold a DbRef. + * @since 2.1.4 + */ + public boolean mayHoldDbRef() { + return !NON_DBREF_CONVERTING_KEYWORDS.contains(key); + } + + /** + * Returns whether the current keyword indicates a {@literal $jsonSchema} object. + * + * @return {@literal true} if {@code key} equals {@literal $jsonSchema}. + * @since 2.1 + */ + public boolean isJsonSchema() { + return "$jsonSchema".equalsIgnoreCase(key); + } } /** * Value object to represent a field and its meta-information. - * + * * @author Oliver Gierke */ protected static class Field { - private static final String ID_KEY = "_id"; + protected static final Pattern POSITIONAL_OPERATOR = Pattern.compile("\\$\\[.*\\]"); protected final String name; /** - * Creates a new {@link DocumentField} without meta-information but the given name. - * + * Creates a new {@link Field} without meta-information but the given name. + * * @param name must not be {@literal null} or empty. */ public Field(String name) { - Assert.hasText(name, "Name must not be null!"); + Assert.hasText(name, "Name must not be null"); this.name = name; } /** - * Returns a new {@link DocumentField} with the given name. - * + * Returns a new {@link Field} with the given name. + * * @param name must not be {@literal null} or empty. * @return */ @@ -642,36 +1059,43 @@ public Field with(String name) { /** * Returns whether the current field is the id field. - * + * * @return */ public boolean isIdField() { - return ID_KEY.equals(name); + return FieldName.ID.name().equals(name); } /** * Returns the underlying {@link MongoPersistentProperty} backing the field. For path traversals this will be the * property that represents the value to handle. This means it'll be the leaf property for plain paths or the * association property in case we refer to an association somewhere in the path. - * - * @return + * + * @return can be {@literal null}. */ + @Nullable public MongoPersistentProperty getProperty() { return null; } /** * Returns the {@link MongoPersistentEntity} that field is conatined in. - * - * @return + * + * @return can be {@literal null}. */ + @Nullable public MongoPersistentEntity getPropertyEntity() { return null; } + @Nullable + MongoPersistentEntity getEntity() { + return null; + } + /** * Returns whether the field represents an association. - * + * * @return */ public boolean isAssociation() { @@ -680,51 +1104,68 @@ public boolean isAssociation() { /** * Returns the key to be used in the mapped document eventually. - * + * * @return */ public String getMappedKey() { - return isIdField() ? ID_KEY : name; + return isIdField() ? FieldName.ID.name() : name; } /** * Returns whether the field references an association in case it refers to a nested field. - * + * * @return */ public boolean containsAssociation() { return false; } + @Nullable public Association getAssociation() { return null; } + /** + * Returns whether the field references a {@link java.util.Map}. + * + * @return {@literal true} if property information is available and references a {@link java.util.Map}. 
+ * @see PersistentProperty#isMap() + */ + public boolean isMap() { + return getProperty() != null && getProperty().isMap(); + } + public TypeInformation getTypeHint() { - return ClassTypeInformation.OBJECT; + return TypeInformation.OBJECT; + } + + public Class getFieldType() { + return Object.class; } } /** - * Extension of {@link DocumentField} to be backed with mapping metadata. - * + * Extension of {@link Field} to be backed with mapping metadata. + * * @author Oliver Gierke * @author Thomas Darimont */ - protected static class MetadataBackedField extends Field { + public static class MetadataBackedField extends Field { - private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!"; + private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?"); + private static final Pattern NUMERIC_SEGMENT = Pattern.compile("\\d+"); + private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s; Associations can only be pointed to directly or via their id property"; private final MongoPersistentEntity entity; private final MappingContext, MongoPersistentProperty> mappingContext; private final MongoPersistentProperty property; - private final PersistentPropertyPath path; - private final Association association; + private final @Nullable PersistentPropertyPath path; + private final @Nullable Association association; /** * Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and * {@link MappingContext}. - * + * * @param name must not be {@literal null} or empty. * @param entity must not be {@literal null}. * @param context must not be {@literal null}. @@ -737,7 +1178,7 @@ public MetadataBackedField(String name, MongoPersistentEntity entity, /** * Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and * {@link MappingContext} with the given {@link MongoPersistentProperty}. - * + * * @param name must not be {@literal null} or empty. * @param entity must not be {@literal null}. * @param context must not be {@literal null}. @@ -745,77 +1186,63 @@ public MetadataBackedField(String name, MongoPersistentEntity entity, */ public MetadataBackedField(String name, MongoPersistentEntity entity, MappingContext, MongoPersistentProperty> context, - MongoPersistentProperty property) { + @Nullable MongoPersistentProperty property) { super(name); - Assert.notNull(entity, "MongoPersistentEntity must not be null!"); + Assert.notNull(entity, "MongoPersistentEntity must not be null"); this.entity = entity; this.mappingContext = context; - this.path = getPath(name); + this.path = getPath(removePlaceholders(POSITIONAL_PARAMETER_PATTERN, name), property); this.property = path == null ? 
property : path.getLeafProperty(); this.association = findAssociation(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#with(java.lang.String) - */ @Override public MetadataBackedField with(String name) { return new MetadataBackedField(name, entity, mappingContext, property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#isIdKey() - */ @Override public boolean isIdField() { + if (property != null) { + return property.getOwner().isIdProperty(property); + } + MongoPersistentProperty idProperty = entity.getIdProperty(); if (idProperty != null) { - return idProperty.getName().equals(name) || idProperty.getFieldName().equals(name); + return name.equals(idProperty.getName()) || name.equals(idProperty.getFieldName()); } return DEFAULT_ID_NAMES.contains(name); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getProperty() - */ @Override public MongoPersistentProperty getProperty() { return association == null ? property : association.getInverse(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getEntity() - */ @Override public MongoPersistentEntity getPropertyEntity() { MongoPersistentProperty property = getProperty(); return property == null ? null : mappingContext.getPersistentEntity(property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#isAssociation() - */ + @Nullable + @Override + public MongoPersistentEntity getEntity() { + return entity; + } + @Override public boolean isAssociation() { return association != null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getAssociation() - */ @Override public Association getAssociation() { return association; @@ -823,15 +1250,19 @@ public Association getAssociation() { /** * Finds the association property in the {@link PersistentPropertyPath}. - * + * * @return */ - private final Association findAssociation() { + @Nullable + private Association findAssociation() { if (this.path != null) { for (MongoPersistentProperty p : this.path) { - if (p.isAssociation()) { - return p.getAssociation(); + + Association association = p.getAssociation(); + + if (association != null) { + return association; } } } @@ -839,74 +1270,193 @@ private final Association findAssociation() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTargetKey() - */ + @Override + public Class getFieldType() { + return property.getFieldType(); + } + @Override public String getMappedKey() { + + if (getProperty() != null && getProperty().getMongoField().getName().isKey()) { + return getProperty().getFieldName(); + } + return path == null ? name : path.toDotPath(isAssociation() ? getAssociationConverter() : getPropertyConverter()); } + @Nullable protected PersistentPropertyPath getPath() { return path; } /** - * Returns the {@link PersistentPropertyPath} for the given pathExpression. - * + * Returns the {@link PersistentPropertyPath} for the given {@code pathExpression}. 
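Before the resolution details, the association rule enforced here is worth pinning down with a hedged domain sketch (entity and property names invented): a path may point at an association or at the id of its target, but not at any other property behind the link.

    import org.springframework.data.annotation.Id;
    import org.springframework.data.mongodb.core.mapping.DBRef;

    class Book {
        @Id String id;
        @DBRef Author author; // linked, not embedded
    }

    class Author {
        @Id String id;
        String name;
    }

    // "author"      -> fine, targets the association itself
    // "author.id"   -> fine, targets the linked document's id
    // "author.name" -> rejected with a MappingException (INVALID_ASSOCIATION_REFERENCE)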
+ * + * @param pathExpression + * @return + */ - private PersistentPropertyPath getPath(String pathExpression) { + @Nullable + private PersistentPropertyPath getPath(String pathExpression, + @Nullable MongoPersistentProperty sourceProperty) { + + if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) { + return mappingContext.getPersistentPropertyPath( + PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation())); + } + + String rawPath = resolvePath(pathExpression); + + PropertyPath path = forName(rawPath); + if (path == null || isPathToJavaLangClassProperty(path)) { + return null; + } + + PersistentPropertyPath propertyPath = tryToResolvePersistentPropertyPath(path); + + if (propertyPath == null) { + + if (QueryMapper.LOGGER.isInfoEnabled()) { + + String types = StringUtils.collectionToDelimitedString( + path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> "); + QueryMapper.LOGGER.info(String.format( + "Could not map '%s'; Maybe a fragment in '%s' is considered a simple type; Mapper continues with %s", + path, types, pathExpression)); + } + return null; + } + + Iterator iterator = propertyPath.iterator(); + boolean associationDetected = false; + + while (iterator.hasNext()) { + + MongoPersistentProperty property = iterator.next(); + + if (property.isAssociation()) { + associationDetected = true; + continue; + } + + if (associationDetected && !property.getOwner().isIdProperty(property)) { + throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression)); + } + } + + return propertyPath; + } + + @Nullable + private PersistentPropertyPath tryToResolvePersistentPropertyPath(PropertyPath path) { try { + return mappingContext.getPersistentPropertyPath(path); + } catch (MappingException e) { + return null; + } + } - PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d", ""), entity.getTypeInformation()); - PersistentPropertyPath propertyPath = mappingContext.getPersistentPropertyPath(path); + /** + * Querydsl happens to map id fields directly to {@literal _id} which breaks {@link PropertyPath} resolution. So if + * the first attempt fails we try to replace {@literal _id} with just {@literal id} and see if we can resolve it + * then.
+ * + * @param path + * @return the path or {@literal null} + */ + @Nullable + private PropertyPath forName(String path) { - Iterator iterator = propertyPath.iterator(); - boolean associationDetected = false; + try { - while (iterator.hasNext()) { + if (entity.getPersistentProperty(path) != null) { + return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation()); + } - MongoPersistentProperty property = iterator.next(); + return PropertyPath.from(path, entity.getTypeInformation()); + } catch (PropertyReferenceException | InvalidPersistentPropertyPath e) { - if (property.isAssociation()) { - associationDetected = true; - continue; - } + if (path.endsWith("_id")) { + return forName(path.substring(0, path.length() - 3) + "id"); + } + + // Ok give it another try quoting + try { + return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation()); + } catch (PropertyReferenceException | InvalidPersistentPropertyPath ex) { - if (associationDetected && !property.isIdProperty()) { - throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression)); - } } - return propertyPath; - } catch (PropertyReferenceException e) { return null; } } + private boolean isPathToJavaLangClassProperty(PropertyPath path) { + + return (path.getType() == Class.class || path.getType().equals(Object.class)) + && path.getLeafProperty().getType() == Class.class; + } + + private static String resolvePath(String source) { + + String[] segments = source.split("\\."); + if (segments.length == 1) { + return source; + } + + List path = new ArrayList<>(segments.length); + + /* always start from a property, so we can skip the first segment. + from there remove any position placeholder */ + for (int i = 1; i < segments.length; i++) { + String segment = segments[i]; + if (segment.startsWith("[") && segment.endsWith("]")) { + continue; + } + if (NUMERIC_SEGMENT.matcher(segment).matches()) { + continue; + } + path.add(segment); + } + + // when property is followed only by placeholders e.g. 'values.0.3.90' + // or when there is no difference in the number of segments + if (path.isEmpty() || segments.length == path.size() + 1) { + return source; + } + + path.add(0, segments[0]); + return StringUtils.collectionToDelimitedString(path, "."); + } + /** * Return the {@link Converter} to be used to create the mapped key. Default implementation will use * {@link PropertyToFieldNameConverter}. - * + * * @return */ protected Converter getPropertyConverter() { - return new PositionParameterRetainingPropertyKeyConverter(name); + return new PositionParameterRetainingPropertyKeyConverter(name, mappingContext); } /** * Return the {@link Converter} to use for creating the mapped key of an association. Default implementation is * {@link AssociationConverter}.
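The segment stripping in resolvePath is worth pinning down with examples. A standalone re-implementation of the rules above, for experimentation only (class name invented):

    import java.util.ArrayList;
    import java.util.List;

    class ResolvePathDemo {

        // Same rules as resolvePath above: drop numeric and bracketed placeholders
        // that follow the first (property) segment.
        static String resolvePath(String source) {
            String[] segments = source.split("\\.");
            if (segments.length == 1) {
                return source;
            }
            List<String> path = new ArrayList<>(segments.length);
            for (int i = 1; i < segments.length; i++) {
                String s = segments[i];
                if ((s.startsWith("[") && s.endsWith("]")) || s.matches("\\d+")) {
                    continue;
                }
                path.add(s);
            }
            if (path.isEmpty() || segments.length == path.size() + 1) {
                return source;
            }
            path.add(0, segments[0]);
            return String.join(".", path);
        }

        public static void main(String[] args) {
            System.out.println(resolvePath("addresses.0.street"));   // addresses.street
            System.out.println(resolvePath("addresses.[0].street")); // addresses.street
            System.out.println(resolvePath("values.0.3.90"));        // unchanged: only placeholders follow
        }
    }

Note the last case: when nothing but placeholders follows the property, or when stripping changed nothing, the original expression is returned as-is.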
- * + * * @return * @since 1.7 */ protected Converter getAssociationConverter() { - return new AssociationConverter(getAssociation()); + return new AssociationConverter(name, getAssociation()); + } + + protected MappingContext, MongoPersistentProperty> getMappingContext() { + return mappingContext; + } + + private static String removePlaceholders(Pattern pattern, String raw) { + return pattern.matcher(raw).replaceAll(""); } /** @@ -917,24 +1467,17 @@ static class PositionParameterRetainingPropertyKeyConverter implements Converter private final KeyMapper keyMapper; - public PositionParameterRetainingPropertyKeyConverter(String rawKey) { - this.keyMapper = new KeyMapper(rawKey); + public PositionParameterRetainingPropertyKeyConverter(String rawKey, + MappingContext, MongoPersistentProperty> ctx) { + this.keyMapper = new KeyMapper(rawKey, ctx); } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(MongoPersistentProperty source) { return keyMapper.mapPropertyName(source); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTypeHint() - */ @Override public TypeInformation getTypeHint() { @@ -946,7 +1489,7 @@ public TypeInformation getTypeHint() { if (property.getActualType().isInterface() || java.lang.reflect.Modifier.isAbstract(property.getActualType().getModifiers())) { - return ClassTypeInformation.OBJECT; + return TypeInformation.OBJECT; } return NESTED_DOCUMENT; @@ -959,45 +1502,68 @@ public TypeInformation getTypeHint() { static class KeyMapper { private final Iterator iterator; + private int currentIndex; + private final List pathParts; - public KeyMapper(String key) { + public KeyMapper(String key, + MappingContext, MongoPersistentProperty> mappingContext) { - this.iterator = Arrays.asList(key.split("\\.")).iterator(); - this.iterator.next(); + this.pathParts = Arrays.asList(key.split("\\.")); + this.iterator = pathParts.iterator(); + this.currentIndex = 0; + } + + String nextToken() { + return pathParts.get(currentIndex + 1); + } + + boolean hasNexToken() { + return pathParts.size() > currentIndex + 1; } /** * Maps the property name while retaining potential positional operator {@literal $}. 
- * + * @param property + * @return */ protected String mapPropertyName(MongoPersistentProperty property) { StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property)); - boolean inspect = iterator.hasNext(); + if (!hasNextToken()) { + return mappedName.toString(); + } - while (inspect) { + String nextToken = nextToken(); + if (isPositionalParameter(nextToken)) { - String partial = iterator.next(); - boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike())); + mappedName.append(".").append(nextToken); + currentIndex += 2; + return mappedName.toString(); + } - if (isPositional) { - mappedName.append(".").append(partial); - } + if (property.isMap()) { - inspect = isPositional && iterator.hasNext(); + mappedName.append(".").append(nextToken); + currentIndex += 2; + return mappedName.toString(); } + currentIndex++; return mappedName.toString(); } - private static boolean isPositionalParameter(String partial) { + static boolean isPositionalParameter(String partial) { if ("$".equals(partial)) { return true; } + Matcher matcher = POSITIONAL_OPERATOR.matcher(partial); + if (matcher.find()) { + return true; + } + try { Long.valueOf(partial); return true; @@ -1010,29 +1576,27 @@ private static boolean isPositionalParameter(String partial) { /** * Converter to skip all properties after an association property was rendered. - * + * * @author Oliver Gierke */ protected static class AssociationConverter implements Converter { + private final String name; private final MongoPersistentProperty property; private boolean associationFound; /** * Creates a new {@link AssociationConverter} for the given {@link Association}. - * + * * @param association must not be {@literal null}. */ - public AssociationConverter(Association association) { + public AssociationConverter(String name, Association association) { - Assert.notNull(association, "Association must not be null!"); + Assert.notNull(association, "Association must not be null"); this.property = association.getInverse(); + this.name = name; } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(MongoPersistentProperty source) { @@ -1044,7 +1608,48 @@ public String convert(MongoPersistentProperty source) { associationFound = true; } + if (associationFound) { + if (name.endsWith("$") && property.isCollectionLike()) { + return source.getFieldName() + ".$"; + } + } + return source.getFieldName(); } } + + public MappingContext, MongoPersistentProperty> getMappingContext() { + return mappingContext; + } + + public MongoConverter getConverter() { + return converter; + } + + enum NoPropertyPropertyValueProvider implements PropertyValueProvider { + + INSTANCE; + + @Override + public T getPropertyValue(MongoPersistentProperty property) { + throw new IllegalStateException("No enclosing property source available"); + } + } + + /* + * Types that must not be converted.
+ */ + static class Quirks { + + private static final Set> types = Set.of(Pattern.class, BsonRegularExpression.class); + + static boolean skipConversion(@Nullable Object value) { + + if (value == null) { + return false; + } + + return types.contains(value.getClass()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java new file mode 100644 index 0000000000..5a1adf9114 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -0,0 +1,156 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.Collections; +import java.util.Iterator; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoCollection; + +/** + * The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a + * {@link ReferenceLoader.DocumentReferenceQuery}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public interface ReferenceLoader { + + /** + * Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}. + * + * @param referenceQuery must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the matching {@link Document} or {@literal null} if none found. + */ + @Nullable + default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) { + + Iterator it = fetchMany(referenceQuery, context).iterator(); + return it.hasNext() ? it.next() : null; + } + + /** + * Obtain multiple {@link Document documents} matching the given {@literal referenceQuery} in the {@literal context}. + * + * @param referenceQuery must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the matching {@link Document documents}; never {@literal null}. + */ + Iterable fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context); + + /** + * The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched + * applying potentially given order criteria. + */ + interface DocumentReferenceQuery { + + /** + * Get the query to obtain matching {@link Document documents}. + * + * @return never {@literal null}. + */ + Bson getQuery(); + + /** + * Get the sort criteria for ordering results. + * + * @return an empty {@link Document} by default. Never {@literal null}.
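A hedged sketch of a hand-rolled DocumentReferenceQuery built only from the interface above (the acronym/name keys and the helper class are invented):

    import java.util.List;
    import org.bson.Document;
    import org.bson.conversions.Bson;
    import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;

    class PublisherQueries {

        // Matches linked publishers by acronym and returns them sorted by name.
        static DocumentReferenceQuery byAcronyms(List<String> acronyms) {
            return new DocumentReferenceQuery() {

                @Override
                public Bson getQuery() {
                    return new Document("acronym", new Document("$in", acronyms));
                }

                @Override
                public Bson getSort() {
                    return new Document("name", 1);
                }
            };
        }
    }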
+ */ + default Bson getSort() { + return new Document(); + } + + default Iterable apply(MongoCollection collection) { + return restoreOrder(collection.find(getQuery()).sort(getSort())); + } + + /** + * Restore the order of fetched documents. + * + * @param documents must not be {@literal null}. + * @return never {@literal null}. + */ + default Iterable restoreOrder(Iterable documents) { + return documents; + } + + static DocumentReferenceQuery forSingleDocument(Bson bson) { + + return new DocumentReferenceQuery() { + + @Override + public Bson getQuery() { + return bson; + } + + @Override + public Iterable apply(MongoCollection collection) { + + Document result = collection.find(getQuery()).sort(getSort()).limit(1).first(); + return result != null ? Collections.singleton(result) : Collections.emptyList(); + } + }; + } + + static DocumentReferenceQuery forManyDocuments(Bson bson) { + + return new DocumentReferenceQuery() { + + @Override + public Bson getQuery() { + return bson; + } + + @Override + public Iterable apply(MongoCollection collection) { + return collection.find(getQuery()).sort(getSort()); + } + }; + } + + /** + * @return a {@link DocumentReferenceQuery} that will not match any documents. + * @since 4.2.5 + */ + static DocumentReferenceQuery forNoResult() { + return NoResultsFilter.INSTANCE; + } + } + + /** + * A dedicated {@link DocumentReferenceQuery} that will not match any documents. + * + * @since 4.2.5 + */ + enum NoResultsFilter implements DocumentReferenceQuery { + INSTANCE; + + private static final Document NO_RESULTS_PREDICATE = new Document(FieldName.ID.name(), + new Document("$exists", false)); + + @Override + public Bson getQuery() { + return NO_RESULTS_PREDICATE; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java new file mode 100644 index 0000000000..b912cfb540 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -0,0 +1,506 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.lang.annotation.Annotation; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.SpELContext; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.NoResultsFilter; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.mongodb.util.json.ValueProvider; +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.data.util.Streamable; +import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.DBRef; +import com.mongodb.client.MongoCollection; + +/** + * A common delegate for {@link ReferenceResolver} implementations to resolve a reference to one/many target documents + * that are converted to entities. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Stefan Bildl + * @since 3.3 + */ +public final class ReferenceLookupDelegate { + + private final MappingContext, MongoPersistentProperty> mappingContext; + private final SpELContext spELContext; + private final ParameterBindingDocumentCodec codec; + + /** + * Create a new {@link ReferenceLookupDelegate}. + * + * @param mappingContext must not be {@literal null}. + * @param spELContext must not be {@literal null}. + */ + public ReferenceLookupDelegate( + MappingContext, MongoPersistentProperty> mappingContext, + SpELContext spELContext) { + + Assert.notNull(mappingContext, "MappingContext must not be null"); + Assert.notNull(spELContext, "SpELContext must not be null"); + + this.mappingContext = mappingContext; + this.spELContext = spELContext; + this.codec = new ParameterBindingDocumentCodec(); + } + + /** + * Read the reference expressed by the given property. + * + * @param property the reference defining property. Must not be {@literal null}. + * @param source the source value identifying the referenced entity. Must not be {@literal null}. + * @param lookupFunction to execute a lookup query. Must not be {@literal null}. + * @param entityReader the callback to convert raw source values into actual domain types. Must not be + * {@literal null}. + * @return can be {@literal null}.
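For orientation, the kind of mapping this delegate serves, as a hedged domain sketch (types and property names invented; the annotation is the @DocumentReference API referenced throughout this class):

    import java.util.List;
    import org.springframework.data.annotation.Id;
    import org.springframework.data.mongodb.core.mapping.DocumentReference;

    class Book {

        @Id String id;

        // Stored as a list of publisher ids. The delegate resolves the whole list with
        // a single $or query and restores the stored order afterwards.
        @DocumentReference List<Publisher> publishers;
    }

    class Publisher {
        @Id String id;
        String name;
    }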
+ */ + @Nullable + public Object readReference(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, + MongoEntityReader entityReader) { + + Object value = source instanceof DocumentReferenceSource documentReferenceSource + ? documentReferenceSource.getTargetSource() + : source; + + Iterable result = retrieveRawDocuments(property, source, lookupFunction, value); + + if (result == null) { + return null; + } + + if (property.isCollectionLike()) { + return entityReader.read(result, property.getTypeInformation()); + } + + if (!result.iterator().hasNext()) { + return null; + } + + Object resultValue = result.iterator().next(); + return resultValue != null ? entityReader.read(resultValue, property.getTypeInformation()) : null; + } + + @Nullable + private Iterable retrieveRawDocuments(MongoPersistentProperty property, Object source, + LookupFunction lookupFunction, Object value) { + + DocumentReferenceQuery filter = computeFilter(property, source, spELContext); + if (filter instanceof NoResultsFilter) { + return Collections.emptyList(); + } + + ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); + return lookupFunction.apply(filter, referenceCollection); + } + + private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value, + SpELContext spELContext) { + + // Use the first value as a reference for others in case of collection like + if (value instanceof Iterable iterable) { + + Iterator iterator = iterable.iterator(); + value = iterator.hasNext() ? iterator.next() : new Document(); + } + + // handle DBRef value + if (value instanceof DBRef dbRef) { + return ReferenceCollection.fromDBRef(dbRef); + } + + String collection = mappingContext.getRequiredPersistentEntity(property.getAssociationTargetType()).getCollection(); + + if (value instanceof Document documentPointer) { + + if (property.isDocumentReference()) { + + ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); + DocumentReference documentReference = property.getDocumentReference(); + + String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, + () -> documentPointer.get("db", String.class)); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, + () -> documentPointer.get("collection", collection)); + return new ReferenceCollection(targetDatabase, targetCollection); + } + + return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection)); + } + + if (property.isDocumentReference()) { + + ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); + DocumentReference documentReference = property.getDocumentReference(); + + String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection); + + return new ReferenceCollection(targetDatabase, targetCollection); + } + + return new ReferenceCollection(null, collection); + } + + /** + * Use the given {@link ParameterBindingContext} to compute potential expressions against the value. + * + * @param value must not be {@literal null}. + * @param bindingContext must not be {@literal null}. + * @param defaultValue + * @param + * @return can be {@literal null}. 
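The wrapping used for bare expressions deserves a concrete example: an expression value such as ?#{#target} is not a JSON document on its own, so it is embedded under a synthetic key before decoding (the 'target-value' key comes from the code below; the rest of this sketch is invented):

    class ExpressionWrappingDemo {

        public static void main(String[] args) {

            String value = "?#{#target}"; // bare binding expression, not a JSON document
            String wrapped = "{ 'target-value' : " + value + "}";
            System.out.println(wrapped);  // { 'target-value' : ?#{#target}}

            // parseValueOrGet(...) decodes this synthetic document, binds #target, and
            // reads the result back from the 'target-value' key, falling back to the
            // supplied default when the evaluation yields null.
        }
    }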
+ */ + @SuppressWarnings("unchecked") + private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { + + if (!StringUtils.hasText(value)) { + return defaultValue.get(); + } + + // parameter binding requires a document, since we do not have one, construct it. + if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) { + String s = "{ 'target-value' : " + value + "}"; + T evaluated = (T) codec.decode(s, bindingContext).get("target-value"); + return evaluated != null ? evaluated : defaultValue.get(); + } + + if (BsonUtils.isJsonDocument(value)) { + return (T) codec.decode(value, bindingContext); + } + + if (!value.startsWith("#") && ExpressionUtils.detectExpression(value) == null) { + return (T) value; + } + + T evaluated = (T) bindingContext.evaluateExpression(value); + return evaluated != null ? evaluated : defaultValue.get(); + } + + ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + ValueProvider valueProvider = valueProviderFor(DocumentReferenceSource.getTargetSource(source)); + + return new ParameterBindingContext(valueProvider, spELContext.getParser(), + () -> evaluationContextFor(property, source, spELContext)); + } + + ValueProvider valueProviderFor(Object source) { + + return index -> { + if (source instanceof Document document) { + return Streamable.of(document.values()).toList().get(index); + } + return source; + }; + } + + EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + Object target = source instanceof DocumentReferenceSource documentReferenceSource + ? documentReferenceSource.getTargetSource() + : source; + + if (target == null) { + target = new Document(); + } + + EvaluationContext ctx = spELContext.getEvaluationContext(target); + ctx.setVariable("target", target); + ctx.setVariable("self", DocumentReferenceSource.getSelf(source)); + ctx.setVariable(property.getName(), target); + + return ctx; + } + + /** + * Compute the query to retrieve linked documents. + * + * @param property must not be {@literal null}. + * @param source must not be {@literal null}. + * @param spELContext must not be {@literal null}. + * @return never {@literal null}. + */ + @SuppressWarnings("unchecked") + DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + DocumentReference documentReference = property.isDocumentReference() ? 
property.getDocumentReference() + : ReferenceEmulatingDocumentReference.INSTANCE; + + String lookup = documentReference.lookup(); + + Object value = DocumentReferenceSource.getTargetSource(source); + + Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext), + Document::new); + + if (property.isCollectionLike() && (value instanceof Collection || value == null)) { + + if (value == null) { + return new ListDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), + sort); + } + + Collection objects = (Collection) value; + + // optimization: bypass query if the collection pointing to the references is empty + if (objects.isEmpty()) { + return DocumentReferenceQuery.forNoResult(); + } + + List ors = new ArrayList<>(objects.size()); + for (Object entry : objects) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); + ors.add(decoded); + } + + return new ListDocumentReferenceQuery(new Document("$or", ors), sort); + } + + if (property.isMap() && value instanceof Map) { + + if (ObjectUtils.isEmpty(value)) { + return DocumentReferenceQuery.forNoResult(); + } + + Set> entries = ((Map) value).entrySet(); + Map filterMap = new LinkedHashMap<>(entries.size()); + + for (Entry entry : entries) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext)); + filterMap.put(entry.getKey(), decoded); + } + + return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap); + } + + return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), sort); + } + + enum ReferenceEmulatingDocumentReference implements DocumentReference { + + INSTANCE; + + @Override + public Class annotationType() { + return DocumentReference.class; + } + + @Override + public String db() { + return ""; + } + + @Override + public String collection() { + return ""; + } + + @Override + public String lookup() { + return "{ '_id' : ?#{#target} }"; + } + + @Override + public String sort() { + return ""; + } + + @Override + public boolean lazy() { + return false; + } + } + + /** + * {@link DocumentReferenceQuery} implementation fetching a single {@link Document}. + */ + static class SingleDocumentReferenceQuery implements DocumentReferenceQuery { + + private final Document query; + private final Document sort; + + public SingleDocumentReferenceQuery(Document query, Document sort) { + + this.query = query; + this.sort = sort; + } + + @Override + public Bson getQuery() { + return query; + } + + @Override + public Document getSort() { + return sort; + } + + @Override + public Iterable apply(MongoCollection collection) { + + Document result = collection.find(getQuery()).sort(getSort()).limit(1).first(); + return result != null ? Collections.singleton(result) : Collections.emptyList(); + } + } + + /** + * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a + * {@link Map} structure. Restores the original map order by matching individual query documents against the actual + * values. 
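The order-restoration idea shared by the query types below can be shown on plain documents (a standalone sketch mirroring the comparison in compareAgainstReferenceIndex; values invented):

    import java.util.List;
    import org.bson.Document;

    class RestoreOrderDemo {

        public static void main(String[] args) {

            // Documents come back in storage order...
            List<Document> fetched = List.of(new Document("_id", 2), new Document("_id", 1));
            // ...while the reference list dictates 1 before 2.
            List<Document> refs = List.of(new Document("_id", 1), new Document("_id", 2));

            List<Document> restored = fetched.stream().sorted((left, right) -> {
                for (Document ref : refs) {
                    if (left.entrySet().containsAll(ref.entrySet())) {
                        return -1; // left matches the earlier reference
                    }
                    if (right.entrySet().containsAll(ref.entrySet())) {
                        return 1;
                    }
                }
                return 0;
            }).toList();

            System.out.println(restored); // ids now in reference order: 1, 2
        }
    }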
+ */ + static class MapDocumentReferenceQuery implements DocumentReferenceQuery { + + private final Document query; + private final Document sort; + private final Map filterOrderMap; + + public MapDocumentReferenceQuery(Document query, Document sort, Map filterOrderMap) { + + this.query = query; + this.sort = sort; + this.filterOrderMap = filterOrderMap; + } + + @Override + public Bson getQuery() { + return query; + } + + @Override + public Bson getSort() { + return sort; + } + + @Override + public Iterable restoreOrder(Iterable documents) { + + Map targetMap = new LinkedHashMap<>(); + List collected = documents instanceof List list ? list : Streamable.of(documents).toList(); + + for (Entry filterMapping : filterOrderMap.entrySet()) { + + Optional first = collected.stream() + .filter(it -> it.entrySet().containsAll(filterMapping.getValue().entrySet())).findFirst(); + + targetMap.put(filterMapping.getKey().toString(), first.orElse(null)); + } + return Collections.singleton(new Document(targetMap)); + } + } + + /** + * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a + * {@link Collection} like structure. Restores the original order by matching individual query documents against the + * actual values. + */ + static class ListDocumentReferenceQuery implements DocumentReferenceQuery { + + private final Document query; + private final Document sort; + + public ListDocumentReferenceQuery(Document query, Document sort) { + + this.query = query; + this.sort = sort; + } + + @Override + public Iterable restoreOrder(Iterable documents) { + + List target = documents instanceof List list ? list : Streamable.of(documents).toList(); + + if (!sort.isEmpty() || !query.containsKey("$or")) { + return target; + } + + List ors = query.get("$or", List.class); + return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList()); + } + + @Override + public Document getQuery() { + return query; + } + + @Override + public Document getSort() { + return sort; + } + + int compareAgainstReferenceIndex(List referenceList, Document document1, Document document2) { + + for (Document document : referenceList) { + + Set> entries = document.entrySet(); + if (document1.entrySet().containsAll(entries)) { + return -1; + } + if (document2.entrySet().containsAll(entries)) { + return 1; + } + } + return referenceList.size(); + } + } + + /** + * The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to + * obtain raw results. + */ + @FunctionalInterface + interface LookupFunction { + + /** + * @param referenceQuery never {@literal null}. + * @param referenceCollection never {@literal null}. + * @return never {@literal null}. + */ + Iterable apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java new file mode 100644 index 0000000000..715327d18e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -0,0 +1,119 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.DBRef; + +/** + * The {@link ReferenceResolver} allows to load and convert linked entities. + * + * @author Christoph Strobl + * @since 3.3 + */ +@FunctionalInterface +public interface ReferenceResolver { + + /** + * Resolve the association defined via the given property from a given source value. May return a + * {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. The resolved value is assignable to + * {@link PersistentProperty#getType()}. + * + * @param property the association defining property. + * @param source the association source value. + * @param referenceLookupDelegate the lookup executing component. + * @param entityReader conversion function capable of constructing entities from raw source. + * @return can be {@literal null}. + */ + @Nullable + Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader); + + /** + * {@link ReferenceCollection} is a value object that contains information about the target database and collection + * name of an association. + */ + class ReferenceCollection { + + @Nullable // + private final String database; + private final String collection; + + /** + * @param database can be {@literal null} to indicate the configured default + * {@link MongoDatabaseFactory#getMongoDatabase() database} should be used. + * @param collection the target collection name. Must not be {@literal null}. + */ + public ReferenceCollection(@Nullable String database, String collection) { + + Assert.hasText(collection, "Collection must not be empty or null"); + + this.database = database; + this.collection = collection; + } + + /** + * Create a new instance of {@link ReferenceCollection} from the given {@link DBRef}. + * + * @param dbRef must not be {@literal null}. + * @return new instance of {@link ReferenceCollection}. + */ + public static ReferenceCollection fromDBRef(DBRef dbRef) { + return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName()); + } + + /** + * Get the target collection name. + * + * @return never {@literal null}. + */ + public String getCollection() { + return collection; + } + + /** + * Get the target database name. If {@literal null} the default database should be used. + * + * @return can be {@literal null}. + */ + @Nullable + public String getDatabase() { + return database; + } + } + + /** + * Domain type conversion callback interface that allows to read the {@code source} object into a mapped object. + */ + @FunctionalInterface + interface MongoEntityReader { + + /** + * Read values from the given source into an object defined via the given {@link TypeInformation}. 
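A short usage sketch of the ReferenceCollection value object defined above (database/collection names and the id value are invented):

    import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;

    import com.mongodb.DBRef;

    class ReferenceCollectionDemo {

        public static void main(String[] args) {

            // Explicit database and collection...
            ReferenceCollection explicit = new ReferenceCollection("archive", "publishers");

            // ...or derived from a DBRef. A null database means the configured
            // default MongoDatabaseFactory database is used.
            ReferenceCollection derived = ReferenceCollection
                    .fromDBRef(new DBRef("archive", "publishers", "p-1"));

            System.out.println(explicit.getCollection() + " / " + derived.getDatabase());
        }
    }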
+ * + * @param source never {@literal null}. + * @param typeInformation information about the desired target type. + * @return never {@literal null}. + */ + Object read(Object source, TypeInformation typeInformation); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReflectiveDBRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReflectiveDBRefResolver.java deleted file mode 100644 index 8537e2933a..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReflectiveDBRefResolver.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.util.ReflectionUtils.*; - -import java.lang.reflect.Method; - -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.util.Assert; - -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.DBRef; - -/** - * {@link ReflectiveDBRefResolver} provides reflective access to {@link DBRef} API that is not consistently available - * for various driver versions. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.7 - */ -class ReflectiveDBRefResolver { - - private static final Method FETCH_METHOD; - - static { - FETCH_METHOD = findMethod(DBRef.class, "fetch"); - } - - /** - * Fetches the object referenced from the database either be directly calling {@link DBRef#fetch()} or - * {@link DBCollection#findOne(Object)}. - * - * @param db can be {@literal null} when using MongoDB Java driver in version 2.x. - * @param ref must not be {@literal null}. - * @return the document that this references. - */ - public static DBObject fetch(MongoDbFactory factory, DBRef ref) { - - Assert.notNull(ref, "DBRef to fetch must not be null!"); - - if (isMongo3Driver()) { - - Assert.notNull(factory, "DbFactory to fetch DB from must not be null!"); - return factory.getDb().getCollection(ref.getCollectionName()).findOne(ref.getId()); - } - - return (DBObject) invokeMethod(FETCH_METHOD, ref); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java index ea60fbb7df..805bafe974 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,38 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; import java.util.Map.Entry; +import org.bson.Document; +import org.bson.conversions.Bson; import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConversionContext; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.WriteOperatorContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update.Modifier; import org.springframework.data.mongodb.core.query.Update.Modifiers; -import org.springframework.data.util.ClassTypeInformation; import org.springframework.data.util.TypeInformation; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; /** * A subclass of {@link QueryMapper} that retains type information on the mongo types. - * + * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ public class UpdateMapper extends QueryMapper { @@ -44,7 +54,7 @@ public class UpdateMapper extends QueryMapper { /** * Creates a new {@link UpdateMapper} using the given {@link MongoConverter}. - * + * * @param converter must not be {@literal null}. */ public UpdateMapper(MongoConverter converter) { @@ -53,28 +63,92 @@ public UpdateMapper(MongoConverter converter) { this.converter = converter; } + @Override + public Document getMappedObject(Bson query, @Nullable MongoPersistentEntity entity) { + + Document document = super.getMappedObject(query, entity); + + boolean hasOperators = false; + boolean hasFields = false; + + Document set = null; + for (String s : document.keySet()) { + if (s.startsWith("$")) { + + if (s.equals("$set")) { + set = document.get(s, Document.class); + } + hasOperators = true; + } else { + hasFields = true; + } + } + + if (hasOperators && hasFields) { + + Document updateObject = new Document(); + Document fieldsToSet = set == null ? new Document() : set; + + for (String s : document.keySet()) { + if (s.startsWith("$")) { + updateObject.put(s, document.get(s)); + } else { + fieldsToSet.put(s, document.get(s)); + } + } + updateObject.put("$set", fieldsToSet); + + return updateObject; + } + return document; + } + + /** + * Returns {@literal true} if the given {@link Document} is an update object that uses update operators. + * + * @param updateObj can be {@literal null}. + * @return {@literal true} if the given {@link Document} is an update object. 
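The operator/plain-field merge implemented in getMappedObject above is easiest to see on data (keys and values invented):

    import org.bson.Document;

    class SetMergeDemo {

        public static void main(String[] args) {

            // An update mixing an operator with a bare field assignment...
            Document update = new Document("$inc", new Document("visits", 1)).append("status", "ACTIVE");
            System.out.println(update.toJson());

            // ...is normalized by the mapper into an operator-only document:
            // {"$inc": {"visits": 1}, "$set": {"status": "ACTIVE"}}
        }
    }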
+ */ + public static boolean isUpdateObject(@Nullable Document updateObj) { + + if (updateObj == null) { + return false; + } + + for (String s : updateObj.keySet()) { + if (s.startsWith("$")) { + return true; + } + } + + return false; + } + /** * Converts the given source object to a mongo type retaining the original type information of the source type on the * mongo type. - * + * * @see org.springframework.data.mongodb.core.convert.QueryMapper#delegateConvertToMongoType(java.lang.Object, * org.springframework.data.mongodb.core.mapping.MongoPersistentEntity) */ @Override - protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity entity) { + protected Object delegateConvertToMongoType(Object source, @Nullable MongoPersistentEntity entity) { + + if (entity != null && entity.isUnwrapped()) { + return converter.convertToMongoType(source, entity); + } + return converter.convertToMongoType(source, - entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity)); + entity == null ? TypeInformation.OBJECT : getTypeHintForEntity(source, entity)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper#getMappedObjectForField(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object) - */ @Override protected Entry getMappedObjectForField(Field field, Object rawValue) { - if (isDBObject(rawValue)) { - return createMapEntry(field, convertSimpleOrDBObject(rawValue, field.getPropertyEntity())); + if (isDocument(rawValue)) { + + Object val = field.isMap() ? new LinkedHashMap<>((Document) rawValue) : rawValue; // unwrap to preserve field type + return createMapEntry(field, convertSimpleOrDocument(val, field.getPropertyEntity())); } if (isQuery(rawValue)) { @@ -89,55 +163,80 @@ protected Entry getMappedObjectForField(Field field, Object rawV return super.getMappedObjectForField(field, rawValue); } + protected Object convertValueWithConversionContext(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter, + MongoConversionContext conversionContext) { + + return super.convertValueWithConversionContext(documentField, sourceValue, value, valueConverter, conversionContext.forOperator(new WriteOperatorContext(documentField.name))); + } + private Entry getMappedUpdateModifier(Field field, Object rawValue) { - Object value = null; + Object value; - if (rawValue instanceof Modifier) { + if (rawValue instanceof Modifier modifier) { - value = getMappedValue(field, (Modifier) rawValue); + value = getMappedValue(field, modifier); - } else if (rawValue instanceof Modifiers) { + } else if (rawValue instanceof Modifiers modifiers) { - DBObject modificationOperations = new BasicDBObject(); + Document modificationOperations = new Document(); - for (Modifier modifier : ((Modifiers) rawValue).getModifiers()) { - modificationOperations.putAll(getMappedValue(field, modifier).toMap()); + for (Modifier modifier : modifiers.getModifiers()) { + modificationOperations.putAll(getMappedValue(field, modifier)); } value = modificationOperations; } else { - throw new IllegalArgumentException(String.format("Unable to map value of type '%s'!", rawValue.getClass())); + throw new IllegalArgumentException(String.format("Unable to map value of type '%s'", rawValue.getClass())); } return createMapEntry(field, value); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.convert.QueryMapper#isAssociationConversionNecessary(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object) - */ @Override - protected boolean isAssociationConversionNecessary(Field documentField, Object value) { + protected boolean isAssociationConversionNecessary(Field documentField, @Nullable Object value) { return super.isAssociationConversionNecessary(documentField, value) || documentField.containsAssociation(); } - private boolean isUpdateModifier(Object value) { + private boolean isUpdateModifier(@Nullable Object value) { return value instanceof Modifier || value instanceof Modifiers; } - private boolean isQuery(Object value) { + private boolean isQuery(@Nullable Object value) { return value instanceof Query; } - private DBObject getMappedValue(Field field, Modifier modifier) { + private Document getMappedValue(@Nullable Field field, Modifier modifier) { + return new Document(modifier.getKey(), getMappedModifier(field, modifier)); + } + + private Object getMappedModifier(@Nullable Field field, Modifier modifier) { + + Object value = modifier.getValue(); + + if (value instanceof Sort) { - TypeInformation typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint(); + Document sortObject = getSortObject((Sort) value); + return field == null || field.getPropertyEntity() == null ? sortObject + : getMappedSort(sortObject, field.getPropertyEntity()); + } + + if (isAssociationConversionNecessary(field, value)) { + if (ObjectUtils.isArray(value) || value instanceof Collection) { + List targetPointers = new ArrayList<>(); + for (Object val : converter.getConversionService().convert(value, List.class)) { + targetPointers.add(getMappedValue(field, val)); + } + return targetPointers; + } + return super.getMappedValue(field, value); + } - Object value = converter.convertToMongoType(modifier.getValue(), typeHint); - return new BasicDBObject(modifier.getKey(), value); + TypeInformation typeHint = field == null ? TypeInformation.OBJECT : field.getTypeHint(); + return converter.convertToMongoType(value, typeHint); } - private TypeInformation getTypeHintForEntity(Object source, MongoPersistentEntity entity) { + private TypeInformation getTypeHintForEntity(@Nullable Object source, MongoPersistentEntity entity) { TypeInformation info = entity.getTypeInformation(); Class type = info.getActualType().getType(); @@ -146,6 +245,10 @@ private TypeInformation getTypeHintForEntity(Object source, MongoPersistentEn return info; } + if (source instanceof Collection) { + return NESTED_DOCUMENT; + } + if (!type.equals(source.getClass())) { return info; } @@ -153,10 +256,6 @@ private TypeInformation getTypeHintForEntity(Object source, MongoPersistentEn return NESTED_DOCUMENT; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext) - */ @Override protected Field createPropertyField(MongoPersistentEntity entity, String key, MappingContext, MongoPersistentProperty> mappingContext) { @@ -165,14 +264,26 @@ protected Field createPropertyField(MongoPersistentEntity entity, String key, : new MetadataBackedUpdateField(entity, key, mappingContext); } + private static Document getSortObject(Sort sort) { + + Document document = new Document(); + + for (Order order : sort) { + document.put(order.getProperty(), order.isAscending() ? 
1 : -1); + } + + return document; + } + /** * {@link MetadataBackedField} that handles {@literal $} paths inside a field key. We clean up an update key * containing a {@literal $} before handing it to the super class to make sure property lookups and transformations * continue to work as expected. We provide a custom property converter to re-applied the cleaned up {@literal $}s * when constructing the mapped key. - * + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl */ private static class MetadataBackedUpdateField extends MetadataBackedField { @@ -182,7 +293,7 @@ private static class MetadataBackedUpdateField extends MetadataBackedField { * Creates a new {@link MetadataBackedField} with the given {@link MongoPersistentEntity}, key and * {@link MappingContext}. We clean up the key before handing it up to the super class to make sure it continues to * work as expected. - * + * * @param entity must not be {@literal null}. * @param key must not be {@literal null} or empty. * @param mappingContext must not be {@literal null}. @@ -190,40 +301,28 @@ private static class MetadataBackedUpdateField extends MetadataBackedField { public MetadataBackedUpdateField(MongoPersistentEntity entity, String key, MappingContext, MongoPersistentProperty> mappingContext) { - super(key.replaceAll("\\.\\$", ""), entity, mappingContext); + super(key, entity, mappingContext); this.key = key; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getMappedKey() - */ @Override public String getMappedKey() { return this.getPath() == null ? key : super.getMappedKey(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter() - */ @Override protected Converter getPropertyConverter() { - return new PositionParameterRetainingPropertyKeyConverter(key); + return new PositionParameterRetainingPropertyKeyConverter(key, getMappingContext()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getAssociationConverter() - */ @Override protected Converter getAssociationConverter() { - return new UpdateAssociationConverter(getAssociation(), key); + return new UpdateAssociationConverter(getMappingContext(), getAssociation(), key); } /** * {@link Converter} retaining positional parameter {@literal $} for {@link Association}s. - * + * * @author Christoph Strobl */ protected static class UpdateAssociationConverter extends AssociationConverter { @@ -232,19 +331,17 @@ protected static class UpdateAssociationConverter extends AssociationConverter { /** * Creates a new {@link AssociationConverter} for the given {@link Association}. - * + * * @param association must not be {@literal null}. */ - public UpdateAssociationConverter(Association association, String key) { + public UpdateAssociationConverter( + MappingContext, MongoPersistentProperty> mappingContext, + Association association, String key) { - super(association); - this.mapper = new KeyMapper(key); + super(key, association); + this.mapper = new KeyMapper(key, mappingContext); } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(MongoPersistentProperty source) { return super.convert(source) == null ? 
null : mapper.mapPropertyName(source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java index 04b7f667ed..0a96cc867a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,32 @@ */ package org.springframework.data.mongodb.core.convert; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.bson.Document; +import org.bson.conversions.Bson; -import com.mongodb.DBObject; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; /** * Internal API to trigger the resolution of properties. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ interface ValueResolver { /** - * Resolves the value for the given {@link MongoPersistentProperty} within the given {@link DBObject} using the given - * {@link SpELExpressionEvaluator} and {@link ObjectPath}. - * + * Resolves the value for the given {@link MongoPersistentProperty} within the given {@link Document} using the given + * {@link ValueExpressionEvaluator} and {@link ObjectPath}. + * * @param prop - * @param dbo + * @param bson * @param evaluator - * @param parent + * @param path * @return */ - Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator, - ObjectPath parent); + @Nullable + Object getValueInternal(MongoPersistentProperty prop, Bson bson, ValueExpressionEvaluator evaluator, ObjectPath path); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/EncryptingConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/EncryptingConverter.java new file mode 100644 index 0000000000..4097be7704 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/EncryptingConverter.java @@ -0,0 +1,66 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
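Reviewer note: the isUpdateObject check introduced above treats a document as an update statement as soon as one top-level key is a $-prefixed operator. A minimal sketch of what that distinction means for callers (not part of this patch; class name and values are made up):

import org.bson.Document;

import org.springframework.data.mongodb.core.query.Update;

class UpdateObjectCheckSample {

	public static void main(String[] args) {

		// Update.getUpdateObject() produces operator-keyed documents such as {"$set": {...}, "$push": {...}}
		Document modifier = new Update().set("status", "active").push("tags", "spring").getUpdateObject();

		// mirrors UpdateMapper.isUpdateObject(..): true as soon as one top-level key starts with '$'
		boolean isUpdate = modifier.keySet().stream().anyMatch(key -> key.startsWith("$"));

		System.out.println(isUpdate); // true -> mapped via update modifiers, not as a replacement document
	}
}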
+ */ +package org.springframework.data.mongodb.core.convert.encryption; + +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoValueConverter; +import org.springframework.data.mongodb.core.encryption.EncryptionContext; + +/** + * A specialized {@link MongoValueConverter} for {@literal encrypting} and {@literal decrypting} properties. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface EncryptingConverter extends MongoValueConverter { + + @Override + default S read(Object value, MongoConversionContext context) { + return decrypt(value, buildEncryptionContext(context)); + } + + /** + * Decrypt the given encrypted source value within the given {@link EncryptionContext context}. + * + * @param encryptedValue the encrypted source. + * @param context the context to operate in. + * @return never {@literal null}. + */ + S decrypt(Object encryptedValue, EncryptionContext context); + + @Override + default T write(Object value, MongoConversionContext context) { + return encrypt(value, buildEncryptionContext(context)); + } + + /** + * Encrypt the given raw source value within the given {@link EncryptionContext context}. + * + * @param value the encrypted source. + * @param context the context to operate in. + * @return never {@literal null}. + */ + T encrypt(Object value, EncryptionContext context); + + /** + * Obtain the {@link EncryptionContext} for a given {@link MongoConversionContext value conversion context}. + * + * @param context the current MongoDB specific {@link org.springframework.data.convert.ValueConversionContext}. + * @return the {@link EncryptionContext} to operate in. + * @see org.springframework.data.convert.ValueConversionContext + */ + EncryptionContext buildEncryptionContext(MongoConversionContext context); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java new file mode 100644 index 0000000000..67c30fcf94 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java @@ -0,0 +1,77 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
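The interface above routes the MongoValueConverter defaults through decrypt/encrypt: read(value, context) delegates to decrypt(value, buildEncryptionContext(context)), and write(value, context) to the matching encrypt call. A deliberately trivial sketch of the shape of an implementation (Base64 is of course not encryption, and the context adaptation is stubbed out; the real implementation is MongoEncryptionConverter further below):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

import org.springframework.data.mongodb.core.convert.MongoConversionContext;
import org.springframework.data.mongodb.core.convert.encryption.EncryptingConverter;
import org.springframework.data.mongodb.core.encryption.EncryptionContext;

class Base64EncodingConverter implements EncryptingConverter<String, String> {

	@Override // invoked by the default write(Object, MongoConversionContext)
	public String encrypt(Object value, EncryptionContext context) {
		return Base64.getEncoder().encodeToString(value.toString().getBytes(StandardCharsets.UTF_8));
	}

	@Override // invoked by the default read(Object, MongoConversionContext)
	public String decrypt(Object encryptedValue, EncryptionContext context) {
		return new String(Base64.getDecoder().decode(encryptedValue.toString()), StandardCharsets.UTF_8);
	}

	@Override
	public EncryptionContext buildEncryptionContext(MongoConversionContext context) {
		// a production converter adapts the conversion context here (see ExplicitEncryptionContext below)
		throw new UnsupportedOperationException("context adaptation omitted in this sketch");
	}
}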
+ */ +package org.springframework.data.mongodb.core.convert.encryption; + +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.encryption.EncryptionContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; + +/** + * Default {@link EncryptionContext} implementation. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +class ExplicitEncryptionContext implements EncryptionContext { + + private final MongoConversionContext conversionContext; + + public ExplicitEncryptionContext(MongoConversionContext conversionContext) { + this.conversionContext = conversionContext; + } + + @Override + public MongoPersistentProperty getProperty() { + return conversionContext.getProperty(); + } + + @Nullable + @Override + public Object lookupValue(String path) { + return conversionContext.getValue(path); + } + + @Override + public Object convertToMongoType(Object value) { + return conversionContext.write(value); + } + + @Override + public EvaluationContext getEvaluationContext(Object source) { + return conversionContext.getSpELContext().getEvaluationContext(source); + } + + @Override + public T read(@Nullable Object value, TypeInformation target) { + return conversionContext.read(value, target); + } + + @Override + public T write(@Nullable Object value, TypeInformation target) { + return conversionContext.write(value, target); + } + + @Override + @Nullable + public OperatorContext getOperatorContext() { + return conversionContext.getOperatorContext(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java new file mode 100644 index 0000000000..8d29847aae --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java @@ -0,0 +1,317 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert.encryption; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.springframework.data.mongodb.core.encryption.EncryptionOptions.*; + +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.types.Binary; + +import org.springframework.core.CollectionFactory; +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.encryption.Encryption; +import org.springframework.data.mongodb.core.encryption.EncryptionContext; +import org.springframework.data.mongodb.core.encryption.EncryptionKey; +import org.springframework.data.mongodb.core.encryption.EncryptionKeyResolver; +import org.springframework.data.mongodb.core.encryption.EncryptionOptions; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Default implementation of {@link EncryptingConverter}. Properties used with this converter must be annotated with + * {@link Encrypted @Encrypted} to provide key and algorithm metadata. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public class MongoEncryptionConverter implements EncryptingConverter { + + private static final Log LOGGER = LogFactory.getLog(MongoEncryptionConverter.class); + private static final List RANGE_OPERATORS = asList("$gt", "$gte", "$lt", "$lte"); + public static final String AND_OPERATOR = "$and"; + + private final Encryption encryption; + private final EncryptionKeyResolver keyResolver; + + public MongoEncryptionConverter(Encryption encryption, EncryptionKeyResolver keyResolver) { + + this.encryption = encryption; + this.keyResolver = keyResolver; + } + + @Nullable + @Override + public Object read(Object value, MongoConversionContext context) { + + Object decrypted = EncryptingConverter.super.read(value, context); + return decrypted instanceof BsonValue bsonValue ? 
BsonUtils.toJavaType(bsonValue) : decrypted; + } + + @Override + public Object decrypt(Object encryptedValue, EncryptionContext context) { + + Object decryptedValue = encryptedValue; + if (encryptedValue instanceof Binary || encryptedValue instanceof BsonBinary) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Decrypting %s.%s.", getProperty(context).getOwner().getName(), + getProperty(context).getName())); + } + + decryptedValue = encryption.decrypt((BsonBinary) BsonUtils.simpleToBsonValue(encryptedValue)); + + // in case the driver has auto decryption (aka .bypassAutoEncryption(true)) active + // https://github.com/mongodb/mongo-java-driver/blob/master/driver-sync/src/examples/tour/ClientSideEncryptionExplicitEncryptionOnlyTour.java + if (encryptedValue == decryptedValue) { + return decryptedValue; + } + } + + MongoPersistentProperty persistentProperty = getProperty(context); + if (getProperty(context).isCollectionLike() && decryptedValue instanceof Iterable iterable) { + + int size = iterable instanceof Collection c ? c.size() : 10; + + if (!persistentProperty.isEntity()) { + Collection collection = CollectionFactory.createCollection(persistentProperty.getType(), size); + iterable.forEach(it -> { + if (it instanceof BsonValue bsonValue) { + collection.add(BsonUtils.toJavaType(bsonValue)); + } else { + collection.add(context.read(it, persistentProperty.getActualType())); + } + }); + + return collection; + } else { + Collection collection = CollectionFactory.createCollection(persistentProperty.getType(), size); + iterable.forEach(it -> { + if (it instanceof BsonValue bsonValue) { + collection.add(context.read(BsonUtils.toJavaType(bsonValue), persistentProperty.getActualType())); + } else { + collection.add(context.read(it, persistentProperty.getActualType())); + } + }); + return collection; + } + } + + if (!persistentProperty.isEntity() && persistentProperty.isMap()) { + if (persistentProperty.getType() != Document.class) { + if (decryptedValue instanceof BsonValue bsonValue) { + return new LinkedHashMap<>((Document) BsonUtils.toJavaType(bsonValue)); + } + if (decryptedValue instanceof Document document) { + return new LinkedHashMap<>(document); + } + if (decryptedValue instanceof Map map) { + return map; + } + } + } + + if (persistentProperty.isEntity() && decryptedValue instanceof BsonDocument bsonDocument) { + return context.read(BsonUtils.toJavaType(bsonDocument), persistentProperty.getTypeInformation().getType()); + } + + if (persistentProperty.isEntity() && decryptedValue instanceof Document document) { + return context.read(document, persistentProperty.getTypeInformation().getType()); + } + + return decryptedValue; + } + + @Override + public Object encrypt(Object value, EncryptionContext context) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Encrypting %s.%s.", getProperty(context).getOwner().getName(), + getProperty(context).getName())); + } + + MongoPersistentProperty persistentProperty = getProperty(context); + + Encrypted annotation = persistentProperty.findAnnotation(Encrypted.class); + if (annotation == null) { + annotation = persistentProperty.getOwner().findAnnotation(Encrypted.class); + } + + if (annotation == null) { + throw new IllegalStateException(String.format("Property %s.%s is not annotated with @Encrypted", + persistentProperty.getOwner().getName(), persistentProperty.getName())); + } + + String algorithm = annotation.algorithm(); + EncryptionKey key = keyResolver.getKey(context); + OperatorContext operatorContext = 
context.getOperatorContext(); + + EncryptionOptions encryptionOptions = new EncryptionOptions(algorithm, key, + getEQOptions(persistentProperty, operatorContext)); + + if (operatorContext != null && !operatorContext.isWriteOperation() && encryptionOptions.queryableEncryptionOptions() != null + && !encryptionOptions.queryableEncryptionOptions().getQueryType().equals("equality")) { + return encryptExpression(operatorContext, value, encryptionOptions); + } else { + return encryptValue(value, context, persistentProperty, encryptionOptions); + } + } + + private static @Nullable QueryableEncryptionOptions getEQOptions(MongoPersistentProperty persistentProperty, + @Nullable OperatorContext operatorContext) { + + Queryable queryableAnnotation = persistentProperty.findAnnotation(Queryable.class); + if (queryableAnnotation == null || !StringUtils.hasText(queryableAnnotation.queryType())) { + return null; + } + + QueryableEncryptionOptions queryableEncryptionOptions = QueryableEncryptionOptions.none(); + + String queryAttributes = queryableAnnotation.queryAttributes(); + if (!queryAttributes.isEmpty()) { + queryableEncryptionOptions = queryableEncryptionOptions.attributes(Document.parse(queryAttributes)); + } + + if (queryableAnnotation.contentionFactor() >= 0) { + queryableEncryptionOptions = queryableEncryptionOptions.contentionFactor(queryableAnnotation.contentionFactor()); + } + + boolean isPartOfARangeQuery = operatorContext != null && !operatorContext.isWriteOperation(); + if (isPartOfARangeQuery) { + queryableEncryptionOptions = queryableEncryptionOptions.queryType(queryableAnnotation.queryType()); + } + return queryableEncryptionOptions; + } + + private BsonBinary encryptValue(Object value, EncryptionContext context, MongoPersistentProperty persistentProperty, + EncryptionOptions encryptionOptions) { + + if (!persistentProperty.isEntity()) { + + if (persistentProperty.isCollectionLike()) { + return encryption.encrypt(collectionLikeToBsonValue(value, persistentProperty, context), encryptionOptions); + } + if (persistentProperty.isMap()) { + Object convertedMap = context.write(value); + if (convertedMap instanceof Document document) { + return encryption.encrypt(document.toBsonDocument(), encryptionOptions); + } + } + return encryption.encrypt(BsonUtils.simpleToBsonValue(value), encryptionOptions); + } + + if (persistentProperty.isCollectionLike()) { + return encryption.encrypt(collectionLikeToBsonValue(value, persistentProperty, context), encryptionOptions); + } + + Object write = context.write(value); + if (write instanceof Document doc) { + return encryption.encrypt(doc.toBsonDocument(), encryptionOptions); + } + return encryption.encrypt(BsonUtils.simpleToBsonValue(write), encryptionOptions); + } + + /** + * Encrypts a range query expression. + *

<p>
                    + * The mongodb-crypt {@code encryptExpression} has strict formatting requirements so this method ensures these + * requirements are met and then picks out and returns just the value for use with a range query. + * + * @param operatorContext field name and query operator. + * @param value the value of the expression to be encrypted. + * @param encryptionOptions the options. + * @return the encrypted range value for use in a range query. + */ + private BsonValue encryptExpression(OperatorContext operatorContext, Object value, + EncryptionOptions encryptionOptions) { + + BsonValue doc = BsonUtils.simpleToBsonValue(value); + + String fieldName = operatorContext.path(); + String queryOperator = operatorContext.operator(); + + if (!RANGE_OPERATORS.contains(queryOperator)) { + throw new AssertionError(String.format("Not a valid range query. Querying a range encrypted field but the " + + "query operator '%s' for field path '%s' is not a range query.", queryOperator, fieldName)); + } + + BsonDocument encryptExpression = new BsonDocument(AND_OPERATOR, + new BsonArray(singletonList(new BsonDocument(fieldName, new BsonDocument(queryOperator, doc))))); + + BsonDocument result = encryption.encryptExpression(encryptExpression, encryptionOptions); + return result.getArray(AND_OPERATOR).get(0).asDocument().getDocument(fieldName).getBinary(queryOperator); + } + + private BsonValue collectionLikeToBsonValue(Object value, MongoPersistentProperty property, + EncryptionContext context) { + + BsonArray bsonArray = new BsonArray(); + boolean isEntity = property.isEntity(); + + if (value instanceof Collection values) { + values.forEach(it -> { + + if (isEntity) { + Document document = (Document) context.write(it, property.getTypeInformation()); + bsonArray.add(document == null ? null : document.toBsonDocument()); + } else { + bsonArray.add(BsonUtils.simpleToBsonValue(it)); + } + }); + } else if (ObjectUtils.isArray(value)) { + + for (Object o : ObjectUtils.toObjectArray(value)) { + + if (isEntity) { + Document document = (Document) context.write(o, property.getTypeInformation()); + bsonArray.add(document == null ? null : document.toBsonDocument()); + } else { + bsonArray.add(BsonUtils.simpleToBsonValue(o)); + } + } + } + + return bsonArray; + } + + @Override + public EncryptionContext buildEncryptionContext(MongoConversionContext context) { + return new ExplicitEncryptionContext(context); + } + + protected MongoPersistentProperty getProperty(EncryptionContext context) { + return context.getProperty(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/package-info.java new file mode 100644 index 0000000000..4a6f78357a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/package-info.java @@ -0,0 +1,7 @@ +/** + * Converters integrating with + * explicit encryption + * mechanism of Client-Side Field Level Encryption. 
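Wiring all of this together typically looks like the following sketch: the converter is constructed from a driver ClientEncryption and a key resolver, and is then registered as the property value converter for @ExplicitEncrypted properties. Method and key name below are illustrative assumptions, not part of this patch:

import com.mongodb.client.vault.ClientEncryption;

import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter;
import org.springframework.data.mongodb.core.encryption.EncryptionKey;
import org.springframework.data.mongodb.core.encryption.EncryptionKeyResolver;
import org.springframework.data.mongodb.core.encryption.MongoClientEncryption;

class EncryptionConfigSketch {

	MongoEncryptionConverter mongoEncryptionConverter(ClientEncryption clientEncryption) {

		// honor @ExplicitEncrypted(keyAltName = ...) first, otherwise fall back to a fixed key name
		EncryptionKeyResolver keyResolver = EncryptionKeyResolver
				.annotated(ctx -> EncryptionKey.keyAltName("demo-data-key"));

		return new MongoEncryptionConverter(MongoClientEncryption.just(clientEncryption), keyResolver);
	}
}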
+ */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.convert.encryption; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/package-info.java index 89a163a21a..cfa07fa8f9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/package-info.java @@ -1,5 +1,6 @@ /** * Spring Data MongoDB specific converter infrastructure. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.convert; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java new file mode 100644 index 0000000000..a80a72ed1f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java @@ -0,0 +1,60 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonDocument; + +/** + * Component responsible for encrypting and decrypting values. + * + * @param

<P>
                    plaintext type. + * @param ciphertext type. + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public interface Encryption { + + /** + * Encrypt the given value. + * + * @param value must not be {@literal null}. + * @param options must not be {@literal null}. + * @return the encrypted value. + */ + C encrypt(P value, EncryptionOptions options); + + /** + * Decrypt the given value. + * + * @param value must not be {@literal null}. + * @return the decrypted value. + */ + P decrypt(C value); + + /** + * Encrypt the given expression. + * + * @param value must not be {@literal null}. + * @param options must not be {@literal null}. + * @return the encrypted expression. + * @since 4.5.0 + */ + default BsonDocument encryptExpression(BsonDocument value, EncryptionOptions options) { + throw new UnsupportedOperationException("Unsupported encryption method"); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java new file mode 100644 index 0000000000..5f5e29578d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java @@ -0,0 +1,142 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; + +/** + * Context to encapsulate encryption for a specific {@link MongoPersistentProperty}. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public interface EncryptionContext { + + /** + * Returns the {@link MongoPersistentProperty} to be handled. + * + * @return will never be {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Shortcut for converting a given {@literal value} into its store representation using the root + * {@code ValueConversionContext}. + * + * @param value + * @return + */ + Object convertToMongoType(Object value); + + /** + * Reads the value as an instance of the {@link PersistentProperty#getTypeInformation() property type}. + * + * @param value {@link Object value} to be read; can be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be read as an instance of {@link Class type}. + */ + default T read(@Nullable Object value) { + return (T) read(value, getProperty().getTypeInformation()); + } + + /** + * Reads the value as an instance of {@link Class type}. 
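To make the plaintext/ciphertext type parameters of Encryption concrete, here is a deliberately toy implementation where both sides are String (illustration only; the MongoClientEncryption adapter later in this patch implements the contract against BsonValue/BsonBinary):

import org.springframework.data.mongodb.core.encryption.Encryption;
import org.springframework.data.mongodb.core.encryption.EncryptionOptions;

class ReversingEncryption implements Encryption<String, String> {

	@Override // stand-in for a real cipher
	public String encrypt(String value, EncryptionOptions options) {
		return new StringBuilder(value).reverse().toString();
	}

	@Override
	public String decrypt(String value) {
		return new StringBuilder(value).reverse().toString();
	}

	// encryptExpression(..) keeps its default, throwing UnsupportedOperationException
}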
+ * + * @param value {@link Object value} to be read; can be {@literal null}. + * @param target {@link Class type} of value to be read; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be read as an instance of {@link Class type}. + */ + default T read(@Nullable Object value, Class target) { + return read(value, TypeInformation.of(target)); + } + + /** + * Reads the value as an instance of {@link TypeInformation type}. + * + * @param value {@link Object value} to be read; can be {@literal null}. + * @param target {@link TypeInformation type} of value to be read; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be read as an instance of {@link Class type}. + */ + T read(@Nullable Object value, TypeInformation target); + + /** + * Write the value as an instance of the {@link PersistentProperty#getTypeInformation() property type}. + * + * @param value {@link Object value} to write; can be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be written as an instance of the + * {@link PersistentProperty#getTypeInformation() property type}. + * @see PersistentProperty#getTypeInformation() + * @see #write(Object, TypeInformation) + */ + @Nullable + default T write(@Nullable Object value) { + return (T) write(value, getProperty().getTypeInformation()); + } + + /** + * Write the value as an instance of {@link Class type}. + * + * @param value {@link Object value} to write; can be {@literal null}. + * @param target {@link Class type} of value to be written; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be written as an instance of {@link Class type}. + */ + @Nullable + default T write(@Nullable Object value, Class target) { + return write(value, TypeInformation.of(target)); + } + + /** + * Write the value as an instance of given {@link TypeInformation type}. + * + * @param value {@link Object value} to write; can be {@literal null}. + * @param target {@link TypeInformation type} of value to be written; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be written as an instance of {@link Class type}. + */ + @Nullable + T write(@Nullable Object value, TypeInformation target); + + /** + * Lookup the value for a given path within the current context. + * + * @param path the path/property name to resolve the current value for. + * @return can be {@literal null}. + */ + @Nullable + Object lookupValue(String path); + + EvaluationContext getEvaluationContext(Object source); + + /** + * The field name and field query operator + * + * @return can be {@literal null}. + */ + @Nullable + default OperatorContext getOperatorContext() { + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKey.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKey.java new file mode 100644 index 0000000000..d908a5ae26 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKey.java @@ -0,0 +1,81 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonBinary; +import org.springframework.util.Assert; + +/** + * The {@link EncryptionKey} represents a {@literal Data Encryption Key} reference that can be either direct via the + * {@link KeyId key id} or its {@link KeyAltName Key Alternative Name}. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface EncryptionKey { + + /** + * Create a new {@link EncryptionKey} that uses the keys id for reference. + * + * @param key must not be {@literal null}. + * @return new instance of {@link EncryptionKey KeyId}. + */ + static EncryptionKey keyId(BsonBinary key) { + + Assert.notNull(key, "KeyId must not be null"); + + return new KeyId(key); + } + + /** + * Create a new {@link EncryptionKey} that uses an {@literal Key Alternative Name} for reference. + * + * @param keyAltName must not be {@literal null} or empty. + * @return new instance of {@link EncryptionKey KeyAltName}. + */ + static EncryptionKey keyAltName(String keyAltName) { + + Assert.hasText(keyAltName, "Key Alternative Name must not be empty"); + + return new KeyAltName(keyAltName); + } + + /** + * @return the value that allows to reference a specific key. + */ + Object value(); + + /** + * @return the {@link Type} of reference. + */ + Type type(); + + /** + * The key reference type. + */ + enum Type { + + /** + * Key referenced via its {@literal id}. + */ + ID, + + /** + * Key referenced via an {@literal Key Alternative Name}. + */ + ALT + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolver.java new file mode 100644 index 0000000000..a7ae7e3f3e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolver.java @@ -0,0 +1,100 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
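Creating key references through the two factory methods above is straightforward (values made up for illustration):

import java.util.UUID;

import org.bson.BsonBinary;

import org.springframework.data.mongodb.core.encryption.EncryptionKey;

class KeyReferenceSample {

	public static void main(String[] args) {

		EncryptionKey byId = EncryptionKey.keyId(new BsonBinary(UUID.randomUUID())); // Type.ID
		EncryptionKey byName = EncryptionKey.keyAltName("my-data-key"); // Type.ALT

		System.out.println(byId.type() + " / " + byName.type()); // ID / ALT
	}
}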
+ */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonBinary; +import org.bson.types.Binary; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Interface to obtain a {@link EncryptionKey Data Encryption Key} that is valid in a given {@link EncryptionContext + * context}. + *

<p>
                    + * Use the {@link #annotated(EncryptionKeyResolver) based} variant which will first try to resolve a potential + * {@link ExplicitEncrypted#keyAltName() Key Alternate Name} from annotations before calling the fallback resolver. + * + * @author Christoph Strobl + * @since 4.1 + * @see EncryptionKey + */ +@FunctionalInterface +public interface EncryptionKeyResolver { + + /** + * Get the {@link EncryptionKey Data Encryption Key}. + * + * @param encryptionContext the current {@link EncryptionContext context}. + * @return never {@literal null}. + */ + EncryptionKey getKey(EncryptionContext encryptionContext); + + /** + * Obtain an {@link EncryptionKeyResolver} that evaluates {@link ExplicitEncrypted#keyAltName()} and only calls the + * fallback {@link EncryptionKeyResolver resolver} if no {@literal Key Alternate Name} is present. + * + * @param fallback must not be {@literal null}. + * @return new instance of {@link EncryptionKeyResolver}. + */ + static EncryptionKeyResolver annotated(EncryptionKeyResolver fallback) { + + Assert.notNull(fallback, "Fallback EncryptionKeyResolver must not be nul"); + + return ((encryptionContext) -> { + + MongoPersistentProperty property = encryptionContext.getProperty(); + ExplicitEncrypted annotation = property.findAnnotation(ExplicitEncrypted.class); + if (annotation == null || !StringUtils.hasText(annotation.keyAltName())) { + + Encrypted encrypted = property.getOwner().findAnnotation(Encrypted.class); + if (encrypted == null) { + return fallback.getKey(encryptionContext); + } + + Object o = EncryptionUtils.resolveKeyId(encrypted.keyId()[0], + () -> encryptionContext.getEvaluationContext(new Object())); + if (o instanceof BsonBinary binary) { + return EncryptionKey.keyId(binary); + } + if (o instanceof Binary binary) { + return EncryptionKey.keyId((BsonBinary) BsonUtils.simpleToBsonValue(binary)); + } + if (o instanceof String string) { + return EncryptionKey.keyAltName(string); + } + + throw new IllegalStateException(String.format("Cannot determine encryption key for %s.%s using key type %s", + property.getOwner().getName(), property.getName(), o == null ? "null" : o.getClass().getName())); + } + + String keyAltName = annotation.keyAltName(); + if (keyAltName.startsWith("/")) { + Object fieldValue = encryptionContext.lookupValue(keyAltName.replace("/", "")); + if (fieldValue == null) { + throw new IllegalStateException(String.format("Key Alternative Name for %s was null", keyAltName)); + } + return new KeyAltName(fieldValue.toString()); + } else { + return new KeyAltName(keyAltName); + } + }); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java new file mode 100644 index 0000000000..73a66e4a8a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java @@ -0,0 +1,235 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
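On the annotation side, the annotated(..) resolver above honors keyAltName metadata, including the "/field" syntax that reads the key name from another field of the same document at runtime. A usage sketch (domain type, field names, and the chosen algorithm are illustrative assumptions):

import org.springframework.data.mongodb.core.encryption.EncryptionKey;
import org.springframework.data.mongodb.core.encryption.EncryptionKeyResolver;
import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted;

class Patient {

	String altKeyName; // looked up at runtime for the "/altKeyName" reference below

	@ExplicitEncrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic", keyAltName = "/altKeyName")
	String ssn;
}

class KeyResolverSample {

	// resolves "/altKeyName" from the document under conversion, falls back to the lambda otherwise
	static final EncryptionKeyResolver RESOLVER = EncryptionKeyResolver
			.annotated(ctx -> EncryptionKey.keyAltName("fallback-data-key"));
}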
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import java.util.Map; +import java.util.Objects; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Options used to provide additional information when {@link Encryption encrypting} values. like the + * {@link #algorithm()} to be used. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public class EncryptionOptions { + + private final String algorithm; + private final EncryptionKey key; + private final @Nullable QueryableEncryptionOptions queryableEncryptionOptions; + + public EncryptionOptions(String algorithm, EncryptionKey key) { + this(algorithm, key, null); + } + + public EncryptionOptions(String algorithm, EncryptionKey key, + @Nullable QueryableEncryptionOptions queryableEncryptionOptions) { + + Assert.hasText(algorithm, "Algorithm must not be empty"); + Assert.notNull(key, "EncryptionKey must not be empty"); + Assert.notNull(key, "QueryableEncryptionOptions must not be empty"); + + this.key = key; + this.algorithm = algorithm; + this.queryableEncryptionOptions = queryableEncryptionOptions; + } + + public EncryptionKey key() { + return key; + } + + public String algorithm() { + return algorithm; + } + + /** + * @return {@literal null} if not set. + * @since 4.5 + */ + public @Nullable QueryableEncryptionOptions queryableEncryptionOptions() { + return queryableEncryptionOptions; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + EncryptionOptions that = (EncryptionOptions) o; + + if (!ObjectUtils.nullSafeEquals(algorithm, that.algorithm)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(key, that.key)) { + return false; + } + + return ObjectUtils.nullSafeEquals(queryableEncryptionOptions, that.queryableEncryptionOptions); + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(algorithm); + result = 31 * result + ObjectUtils.nullSafeHashCode(key); + result = 31 * result + ObjectUtils.nullSafeHashCode(queryableEncryptionOptions); + return result; + } + + @Override + public String toString() { + return "EncryptionOptions{" + "algorithm='" + algorithm + '\'' + ", key=" + key + ", queryableEncryptionOptions='" + + queryableEncryptionOptions + "'}"; + } + + /** + * Options, like the {@link #getQueryType()}, to apply when encrypting queryable values. + * + * @author Ross Lawley + * @author Christoph Strobl + * @since 4.5 + */ + public static class QueryableEncryptionOptions { + + private static final QueryableEncryptionOptions NONE = new QueryableEncryptionOptions(null, null, Map.of()); + + private final @Nullable String queryType; + private final @Nullable Long contentionFactor; + private final Map attributes; + + private QueryableEncryptionOptions(@Nullable String queryType, @Nullable Long contentionFactor, + Map attributes) { + + this.queryType = queryType; + this.contentionFactor = contentionFactor; + this.attributes = attributes; + } + + /** + * Create an empty {@link QueryableEncryptionOptions}. + * + * @return unmodifiable {@link QueryableEncryptionOptions} instance. + */ + public static QueryableEncryptionOptions none() { + return NONE; + } + + /** + * Define the {@code queryType} to be used for queryable document encryption. 
+ * + * @param queryType can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions queryType(@Nullable String queryType) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Define the {@code contentionFactor} to be used for queryable document encryption. + * + * @param contentionFactor can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions contentionFactor(@Nullable Long contentionFactor) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Define the {@code rangeOptions} to be used for queryable document encryption. + * + * @param attributes can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions attributes(Map attributes) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Get the {@code queryType} to apply. + * + * @return {@literal null} if not set. + */ + public @Nullable String getQueryType() { + return queryType; + } + + /** + * Get the {@code contentionFactor} to apply. + * + * @return {@literal null} if not set. + */ + public @Nullable Long getContentionFactor() { + return contentionFactor; + } + + /** + * Get the {@code rangeOptions} to apply. + * + * @return never {@literal null}. + */ + public Map getAttributes() { + return Map.copyOf(attributes); + } + + /** + * @return {@literal true} if no arguments set. + */ + boolean isEmpty() { + return getQueryType() == null && getContentionFactor() == null && getAttributes().isEmpty(); + } + + @Override + public String toString() { + return "QueryableEncryptionOptions{" + "queryType='" + queryType + '\'' + ", contentionFactor=" + contentionFactor + + ", attributes=" + attributes + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + QueryableEncryptionOptions that = (QueryableEncryptionOptions) o; + + if (!ObjectUtils.nullSafeEquals(queryType, that.queryType)) { + return false; + } + + if (!ObjectUtils.nullSafeEquals(contentionFactor, that.contentionFactor)) { + return false; + } + return ObjectUtils.nullSafeEquals(attributes, that.attributes); + } + + @Override + public int hashCode() { + return Objects.hash(queryType, contentionFactor, attributes); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyAltName.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyAltName.java new file mode 100644 index 0000000000..3f6afc4a8c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyAltName.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
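Putting the options API together for a range-queryable property (key name and attribute values are made up; the "Range" algorithm name and the min/max/sparsity attribute keys are assumptions mirroring the MongoDB driver's range options, see MongoClientEncryption#rangeOptions below):

import java.util.Map;

import org.springframework.data.mongodb.core.encryption.EncryptionKey;
import org.springframework.data.mongodb.core.encryption.EncryptionOptions;
import org.springframework.data.mongodb.core.encryption.EncryptionOptions.QueryableEncryptionOptions;

class OptionsSample {

	static final EncryptionOptions RANGE_OPTIONS = new EncryptionOptions("Range", // driver algorithm name
			EncryptionKey.keyAltName("my-data-key"),
			QueryableEncryptionOptions.none() //
					.queryType("range") //
					.contentionFactor(8L) //
					.attributes(Map.of("min", 0, "max", 200, "sparsity", 1L)));
}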
+ */ +package org.springframework.data.mongodb.core.encryption; + +import org.springframework.util.ObjectUtils; + +record KeyAltName(String value) implements EncryptionKey { + + @Override + public Type type() { + return Type.ALT; + } + + @Override + public String toString() { + + if (value().length() <= 3) { + return "KeyAltName('***')"; + } + return String.format("KeyAltName('%s***')", value.substring(0, 3)); + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + KeyAltName that = (KeyAltName) o; + return ObjectUtils.nullSafeEquals(value, that.value); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyId.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyId.java new file mode 100644 index 0000000000..b09a67bb0a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyId.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.springframework.util.ObjectUtils; + +record KeyId(BsonBinary value) implements EncryptionKey { + + @Override + public Type type() { + return Type.ID; + } + + @Override + public String toString() { + + if (BsonBinarySubType.isUuid(value.getType())) { + String representation = value.asUuid().toString(); + if (representation.length() > 6) { + return String.format("KeyId('%s***')", representation.substring(0, 6)); + } + } + return "KeyId('***')"; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + org.springframework.data.mongodb.core.encryption.KeyId that = (org.springframework.data.mongodb.core.encryption.KeyId) o; + return ObjectUtils.nullSafeEquals(value, that.value); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java new file mode 100644 index 0000000000..f83f98d4ac --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java @@ -0,0 +1,146 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
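A round-trip sketch against the MongoClientEncryption adapter that follows, assuming a fully configured driver ClientEncryption and an existing data key registered under the (made-up) alternative name "my-data-key":

import com.mongodb.client.vault.ClientEncryption;

import org.bson.BsonBinary;
import org.bson.BsonString;
import org.bson.BsonValue;

import org.springframework.data.mongodb.core.encryption.EncryptionKey;
import org.springframework.data.mongodb.core.encryption.EncryptionOptions;
import org.springframework.data.mongodb.core.encryption.MongoClientEncryption;

class RoundTripSample {

	static BsonValue roundTrip(ClientEncryption clientEncryption) {

		MongoClientEncryption encryption = MongoClientEncryption.just(clientEncryption);

		BsonBinary ciphertext = encryption.encrypt(new BsonString("4111-1111-1111-1111"),
				new EncryptionOptions("AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic",
						EncryptionKey.keyAltName("my-data-key")));

		return encryption.decrypt(ciphertext); // BsonString("4111-1111-1111-1111")
	}
}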
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.springframework.data.mongodb.util.MongoCompatibilityAdapter.rangeOptionsAdapter; + +import java.util.Map; +import java.util.function.Supplier; + +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonValue; +import org.springframework.data.mongodb.core.encryption.EncryptionKey.Type; +import org.springframework.data.mongodb.core.encryption.EncryptionOptions.QueryableEncryptionOptions; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.Assert; + +import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.model.vault.RangeOptions; +import com.mongodb.client.vault.ClientEncryption; + +/** + * {@link ClientEncryption} based {@link Encryption} implementation. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public class MongoClientEncryption implements Encryption { + + private final Supplier source; + + MongoClientEncryption(Supplier source) { + this.source = source; + } + + /** + * Create a new {@link MongoClientEncryption} instance for the given {@link ClientEncryption}. + * + * @param clientEncryption must not be {@literal null}. + * @return new instance of {@link MongoClientEncryption}. + */ + public static MongoClientEncryption just(ClientEncryption clientEncryption) { + + Assert.notNull(clientEncryption, "ClientEncryption must not be null"); + + return new MongoClientEncryption(() -> clientEncryption); + } + + @Override + public BsonValue decrypt(BsonBinary value) { + return getClientEncryption().decrypt(value); + } + + @Override + public BsonBinary encrypt(BsonValue value, EncryptionOptions options) { + return getClientEncryption().encrypt(value, createEncryptOptions(options)); + } + + @Override + public BsonDocument encryptExpression(BsonDocument value, EncryptionOptions options) { + return getClientEncryption().encryptExpression(value, createEncryptOptions(options)); + } + + public ClientEncryption getClientEncryption() { + return source.get(); + } + + private EncryptOptions createEncryptOptions(EncryptionOptions options) { + + EncryptOptions encryptOptions = new EncryptOptions(options.algorithm()); + + if (Type.ALT.equals(options.key().type())) { + encryptOptions = encryptOptions.keyAltName(options.key().value().toString()); + } else { + encryptOptions = encryptOptions.keyId((BsonBinary) options.key().value()); + } + + if (options.queryableEncryptionOptions() == null) { + return encryptOptions; + } + + QueryableEncryptionOptions qeOptions = options.queryableEncryptionOptions(); + if (qeOptions.getQueryType() != null) { + encryptOptions.queryType(qeOptions.getQueryType()); + } + if (qeOptions.getContentionFactor() != null) { + encryptOptions.contentionFactor(qeOptions.getContentionFactor()); + } + if (!qeOptions.getAttributes().isEmpty()) { + encryptOptions.rangeOptions(rangeOptions(qeOptions.getAttributes())); + } + return encryptOptions; + } + + protected RangeOptions rangeOptions(Map attributes) { + + RangeOptions encryptionRangeOptions = new RangeOptions(); + if 
(attributes.isEmpty()) { + return encryptionRangeOptions; + } + + if (attributes.containsKey("min")) { + encryptionRangeOptions.min(BsonUtils.simpleToBsonValue(attributes.get("min"))); + } + if (attributes.containsKey("max")) { + encryptionRangeOptions.max(BsonUtils.simpleToBsonValue(attributes.get("max"))); + } + if (attributes.containsKey("trimFactor")) { + Object trimFactor = attributes.get("trimFactor"); + Assert.isInstanceOf(Integer.class, trimFactor, () -> String + .format("Expected to find a %s but it turned out to be %s.", Integer.class, trimFactor.getClass())); + + rangeOptionsAdapter(encryptionRangeOptions).trimFactor((Integer) trimFactor); + } + + if (attributes.containsKey("sparsity")) { + Object sparsity = attributes.get("sparsity"); + Assert.isInstanceOf(Number.class, sparsity, + () -> String.format("Expected to find a %s but it turned out to be %s.", Long.class, sparsity.getClass())); + encryptionRangeOptions.sparsity(((Number) sparsity).longValue()); + } + + if (attributes.containsKey("precision")) { + Object precision = attributes.get("precision"); + Assert.isInstanceOf(Number.class, precision, () -> String + .format("Expected to find a %s but it turned out to be %s.", Integer.class, precision.getClass())); + encryptionRangeOptions.precision(((Number) precision).intValue()); + } + return encryptionRangeOptions; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/package-info.java new file mode 100644 index 0000000000..f3906d89dd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/package-info.java @@ -0,0 +1,6 @@ +/** + * Infrastructure for explicit + * encryption mechanism of Client-Side Field Level Encryption. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.encryption; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java index 91f48672a5..88d3d46c48 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core.geo; /** - * Interface definition for structures defined in GeoJSON ({@link http://geojson.org/}) format. - * + * Interface definition for structures defined in GeoJSON format. + * * @author Christoph Strobl * @since 1.7 */ @@ -25,18 +25,18 @@ public interface GeoJson> { /** * String value representing the type of the {@link GeoJson} object. - * + * * @return will never be {@literal null}. 
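For the GeoJSON cleanup in this and the following hunks, the types compose exactly as before; a quick usage sketch (coordinates made up):

import java.util.List;

import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.geo.GeoJson;
import org.springframework.data.mongodb.core.geo.GeoJsonGeometryCollection;
import org.springframework.data.mongodb.core.geo.GeoJsonLineString;

class GeoJsonSample {

	public static void main(String[] args) {

		GeoJsonLineString line = new GeoJsonLineString(new Point(0, 0), new Point(10, 10));
		GeoJsonGeometryCollection collection = new GeoJsonGeometryCollection(List.<GeoJson<?>>of(line));

		System.out.println(collection.getType()); // GeometryCollection
	}
}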
- * @see http://geojson.org/geojson-spec.html#geojson-objects + * @see https://geojson.org/geojson-spec.html#geojson-objects */ String getType(); /** * The value of the coordinates member is always an {@link Iterable}. The structure for the elements within is * determined by {@link #getType()} of geometry. - * + * * @return will never be {@literal null}. - * @see http://geojson.org/geojson-spec.html#geometry-objects + * @see https://geojson.org/geojson-spec.html#geometry-objects */ T getCoordinates(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java index 96cc28cae3..2372700aec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,15 +19,16 @@ import java.util.Collections; import java.util.List; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** * Defines a {@link GeoJsonGeometryCollection} that consists of a {@link List} of {@link GeoJson} objects. - * + * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#geometry-collection + * @see https://geojson.org/geojson-spec.html#geometry-collection */ public class GeoJsonGeometryCollection implements GeoJson>> { @@ -37,60 +38,42 @@ public class GeoJsonGeometryCollection implements GeoJson>> /** * Creates a new {@link GeoJsonGeometryCollection} for the given {@link GeoJson} instances. - * - * @param geometries + * + * @param geometries must not be {@literal null}. 
*/ public GeoJsonGeometryCollection(List> geometries) { - Assert.notNull(geometries, "Geometries must not be null!"); + Assert.notNull(geometries, "Geometries must not be null"); this.geometries.addAll(geometries); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public Iterable> getCoordinates() { return Collections.unmodifiableList(this.geometries); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.geometries); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonGeometryCollection)) { + if (!(obj instanceof GeoJsonGeometryCollection other)) { return false; } - GeoJsonGeometryCollection other = (GeoJsonGeometryCollection) obj; - return ObjectUtils.nullSafeEquals(this.geometries, other.geometries); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java index 921a8dbf87..942138fc76 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +21,10 @@ /** * {@link GeoJsonLineString} is defined as list of at least 2 {@link Point}s. - * + * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#linestring + * @see https://geojson.org/geojson-spec.html#linestring */ public class GeoJsonLineString extends GeoJsonMultiPoint { @@ -32,7 +32,7 @@ public class GeoJsonLineString extends GeoJsonMultiPoint { /** * Creates a new {@link GeoJsonLineString} for the given {@link Point}s. - * + * * @param points must not be {@literal null} and have at least 2 entries. */ public GeoJsonLineString(List points) { @@ -41,7 +41,7 @@ public GeoJsonLineString(List points) { /** * Creates a new {@link GeoJsonLineString} for the given {@link Point}s. - * + * * @param first must not be {@literal null} * @param second must not be {@literal null} * @param others can be {@literal null} @@ -50,10 +50,6 @@ public GeoJsonLineString(Point first, Point second, Point... 
others) { super(first, second, others); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint#getType() - */ @Override public String getType() { return TYPE; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java index 6b8be6f0c6..bc74a56df3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,9 +21,10 @@ import java.util.List; import org.springframework.data.geo.Point; +import org.springframework.lang.Nullable; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; @@ -33,10 +34,14 @@ import com.fasterxml.jackson.databind.node.ArrayNode; /** - * A Jackson {@link Module} to register custom {@link JsonSerializer} and {@link JsonDeserializer}s for GeoJSON types. - * + * A Jackson {@link Module} to register custom {@link JsonDeserializer}s for GeoJSON types. + *
                    + * Use {@link #geoJsonModule()} to obtain a {@link Module} containing both {@link JsonSerializer serializers} and + * {@link JsonDeserializer deserializers}. + * * @author Christoph Strobl * @author Oliver Gierke + * @author Mark Paluch * @since 1.7 */ public class GeoJsonModule extends SimpleModule { @@ -45,12 +50,87 @@ public class GeoJsonModule extends SimpleModule { public GeoJsonModule() { - addDeserializer(GeoJsonPoint.class, new GeoJsonPointDeserializer()); - addDeserializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointDeserializer()); - addDeserializer(GeoJsonLineString.class, new GeoJsonLineStringDeserializer()); - addDeserializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringDeserializer()); - addDeserializer(GeoJsonPolygon.class, new GeoJsonPolygonDeserializer()); - addDeserializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonDeserializer()); + registerDeserializersIn(this); + // TODO: add serializers as of next major version (4.0). + } + + /** + * Obtain a {@link Module} containing {@link JsonDeserializer deserializers} for the following {@link GeoJson} types: + *

+ * <ul>
+ * <li>{@link GeoJsonPoint}</li>
+ * <li>{@link GeoJsonMultiPoint}</li>
+ * <li>{@link GeoJsonLineString}</li>
+ * <li>{@link GeoJsonMultiLineString}</li>
+ * <li>{@link GeoJsonPolygon}</li>
+ * <li>{@link GeoJsonMultiPolygon}</li>
+ * </ul>
                    + * + * @return a {@link Module} containing {@link JsonDeserializer deserializers} for {@link GeoJson} types. + * @since 3.2 + */ + public static Module deserializers() { + + SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson - Deserializers", + new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson")); + registerDeserializersIn(module); + return module; + } + + /** + * Obtain a {@link Module} containing {@link JsonSerializer serializers} for the following {@link GeoJson} types: + *
+ * <ul>
+ * <li>{@link GeoJsonPoint}</li>
+ * <li>{@link GeoJsonMultiPoint}</li>
+ * <li>{@link GeoJsonLineString}</li>
+ * <li>{@link GeoJsonMultiLineString}</li>
+ * <li>{@link GeoJsonPolygon}</li>
+ * <li>{@link GeoJsonMultiPolygon}</li>
+ * </ul>
                    + * + * @return a {@link Module} containing {@link JsonSerializer serializers} for {@link GeoJson} types. + * @since 3.2 + */ + public static Module serializers() { + + SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson - Serializers", + new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson")); + GeoJsonSerializersModule.registerSerializersIn(module); + return module; + } + + /** + * Obtain a {@link Module} containing {@link JsonSerializer serializers} and {@link JsonDeserializer deserializers} + * for the following {@link GeoJson} types: + *
+ * <ul>
+ * <li>{@link GeoJsonPoint}</li>
+ * <li>{@link GeoJsonMultiPoint}</li>
+ * <li>{@link GeoJsonLineString}</li>
+ * <li>{@link GeoJsonMultiLineString}</li>
+ * <li>{@link GeoJsonPolygon}</li>
+ * <li>{@link GeoJsonMultiPolygon}</li>
+ * </ul>
                    + * + * @return a {@link Module} containing {@link JsonSerializer serializers} and {@link JsonDeserializer deserializers} + * for {@link GeoJson} types. + * @since 3.2 + */ + public static Module geoJsonModule() { + + SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson", + new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson")); + GeoJsonSerializersModule.registerSerializersIn(module); + registerDeserializersIn(module); + return module; + } + + private static void registerDeserializersIn(SimpleModule module) { + + module.addDeserializer(GeoJsonPoint.class, new GeoJsonPointDeserializer()); + module.addDeserializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointDeserializer()); + module.addDeserializer(GeoJsonLineString.class, new GeoJsonLineStringDeserializer()); + module.addDeserializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringDeserializer()); + module.addDeserializer(GeoJsonPolygon.class, new GeoJsonPolygonDeserializer()); + module.addDeserializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonDeserializer()); } /** @@ -59,12 +139,9 @@ public GeoJsonModule() { */ private static abstract class GeoJsonDeserializer> extends JsonDeserializer { - /* - * (non-Javadoc) - * @see com.fasterxml.jackson.databind.JsonDeserializer#deserialize(com.fasterxml.jackson.core.JsonParser, com.fasterxml.jackson.databind.DeserializationContext) - */ + @Nullable @Override - public T deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + public T deserialize(@Nullable JsonParser jp, @Nullable DeserializationContext ctxt) throws IOException { JsonNode node = jp.readValueAsTree(); JsonNode coordinates = node.get("coordinates"); @@ -77,20 +154,22 @@ public T deserialize(JsonParser jp, DeserializationContext ctxt) throws IOExcept /** * Perform the actual deserialization given the {@literal coordinates} as {@link ArrayNode}. - * + * * @param coordinates * @return */ + @Nullable protected abstract T doDeserialize(ArrayNode coordinates); /** * Get the {@link GeoJsonPoint} representation of given {@link ArrayNode} assuming {@code node.[0]} represents * {@literal x - coordinate} and {@code node.[1]} is {@literal y}. - * + * * @param node can be {@literal null}. * @return {@literal null} when given a {@code null} value. */ - protected GeoJsonPoint toGeoJsonPoint(ArrayNode node) { + @Nullable + protected GeoJsonPoint toGeoJsonPoint(@Nullable ArrayNode node) { if (node == null) { return null; @@ -102,11 +181,12 @@ protected GeoJsonPoint toGeoJsonPoint(ArrayNode node) { /** * Get the {@link Point} representation of given {@link ArrayNode} assuming {@code node.[0]} represents * {@literal x - coordinate} and {@code node.[1]} is {@literal y}. - * + * * @param node can be {@literal null}. * @return {@literal null} when given a {@code null} value. */ - protected Point toPoint(ArrayNode node) { + @Nullable + protected Point toPoint(@Nullable ArrayNode node) { if (node == null) { return null; @@ -117,17 +197,17 @@ protected Point toPoint(ArrayNode node) { /** * Get the points nested within given {@link ArrayNode}. - * + * * @param node can be {@literal null}. * @return {@literal empty list} when given a {@code null} value. 
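A short usage sketch for the factory methods above (nothing assumed beyond Jackson itself): registering the combined module lets the GeoJson types round-trip through a plain ObjectMapper.

// Sketch only: wiring GeoJsonModule.geoJsonModule() into Jackson.
import com.fasterxml.jackson.databind.ObjectMapper;

import org.springframework.data.mongodb.core.geo.GeoJsonModule;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;

class GeoJsonJacksonSketch {

	String toJson() throws Exception {

		ObjectMapper mapper = new ObjectMapper();
		mapper.registerModule(GeoJsonModule.geoJsonModule()); // serializers and deserializers

		// Expected output: {"type":"Point","coordinates":[10.0,20.0]}
		return mapper.writeValueAsString(new GeoJsonPoint(10.0, 20.0));
	}
}

Use deserializers() or serializers() instead when only one direction is wanted, for example on a read-only client.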
*/ - protected List toPoints(ArrayNode node) { + protected List toPoints(@Nullable ArrayNode node) { if (node == null) { return Collections.emptyList(); } - List points = new ArrayList(node.size()); + List points = new ArrayList<>(node.size()); for (JsonNode coordinatePair : node) { if (coordinatePair.isArray()) { @@ -138,28 +218,25 @@ protected List toPoints(ArrayNode node) { } protected GeoJsonLineString toLineString(ArrayNode node) { - return new GeoJsonLineString(toPoints((ArrayNode) node)); + return new GeoJsonLineString(toPoints(node)); } } /** * {@link JsonDeserializer} converting GeoJSON representation of {@literal Point}. - * + * *
	 * <pre>
	 * <code>
	 * { "type": "Point", "coordinates": [10.0, 20.0] }
	 * </code>
	 * </pre>
                    - * + * * @author Christoph Strobl * @since 1.7 */ private static class GeoJsonPointDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ + @Nullable @Override protected GeoJsonPoint doDeserialize(ArrayNode coordinates) { return toGeoJsonPoint(coordinates); @@ -168,27 +245,23 @@ protected GeoJsonPoint doDeserialize(ArrayNode coordinates) { /** * {@link JsonDeserializer} converting GeoJSON representation of {@literal LineString}. - * + * *
	 * <pre>
	 * <code>
-	 * { 
-	 *   "type": "LineString", 
-	 *   "coordinates": [ 
+	 * {
+	 *   "type": "LineString",
+	 *   "coordinates": [
	 *     [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
	 *   ]
	 * }
	 * </code>
	 * </pre>
                    - * + * * @author Christoph Strobl * @since 1.7 */ private static class GeoJsonLineStringDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonLineString doDeserialize(ArrayNode coordinates) { return new GeoJsonLineString(toPoints(coordinates)); @@ -197,27 +270,23 @@ protected GeoJsonLineString doDeserialize(ArrayNode coordinates) { /** * {@link JsonDeserializer} converting GeoJSON representation of {@literal MultiPoint}. - * + * *
	 * <pre>
	 * <code>
-	 * { 
-	 *   "type": "MultiPoint", 
-	 *   "coordinates": [ 
+	 * {
+	 *   "type": "MultiPoint",
+	 *   "coordinates": [
	 *     [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
	 *   ]
	 * }
	 * </code>
	 * </pre>
                    - * + * * @author Christoph Strobl * @since 1.7 */ private static class GeoJsonMultiPointDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonMultiPoint doDeserialize(ArrayNode coordinates) { return new GeoJsonMultiPoint(toPoints(coordinates)); @@ -226,32 +295,28 @@ protected GeoJsonMultiPoint doDeserialize(ArrayNode coordinates) { /** * {@link JsonDeserializer} converting GeoJSON representation of {@literal MultiLineString}. - * + * *
	 * <pre>
	 * <code>
-	 * { 
-	 *   "type": "MultiLineString", 
+	 * {
+	 *   "type": "MultiLineString",
	 *   "coordinates": [
-	 *     [ [10.0, 20.0], [30.0, 40.0] ], 
+	 *     [ [10.0, 20.0], [30.0, 40.0] ],
	 *     [ [50.0, 60.0] , [70.0, 80.0] ]
	 *   ]
	 * }
	 * </code>
	 * </pre>
                    - * + * * @author Christoph Strobl * @since 1.7 */ private static class GeoJsonMultiLineStringDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonMultiLineString doDeserialize(ArrayNode coordinates) { - List lines = new ArrayList(coordinates.size()); + List lines = new ArrayList<>(coordinates.size()); for (JsonNode lineString : coordinates) { if (lineString.isArray()) { @@ -265,27 +330,24 @@ protected GeoJsonMultiLineString doDeserialize(ArrayNode coordinates) { /** * {@link JsonDeserializer} converting GeoJSON representation of {@literal Polygon}. - * + * *
	 * <pre>
	 * <code>
-	 * { 
-	 *   "type": "Polygon", 
-	 *   "coordinates": [ 
-	 *     [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] 
+	 * {
+	 *   "type": "Polygon",
+	 *   "coordinates": [
+	 *     [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
	 *   ]
	 * }
	 * </code>
	 * </pre>
                    - * + * * @author Christoph Strobl * @since 1.7 */ private static class GeoJsonPolygonDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ + @Nullable @Override protected GeoJsonPolygon doDeserialize(ArrayNode coordinates) { @@ -301,11 +363,11 @@ protected GeoJsonPolygon doDeserialize(ArrayNode coordinates) { /** * {@link JsonDeserializer} converting GeoJSON representation of {@literal MultiPolygon}. - * + * *
	 * <pre>
	 * <code>
-	 * { 
-	 *   "type": "MultiPolygon", 
+	 * {
+	 *   "type": "MultiPolygon",
	 *   "coordinates": [
	 *     [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
	 *     [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
@@ -314,23 +376,19 @@ protected GeoJsonPolygon doDeserialize(ArrayNode coordinates) {
	 * }
	 * </code>
	 * </pre>
                    - * + * * @author Christoph Strobl * @since 1.7 */ private static class GeoJsonMultiPolygonDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonMultiPolygon doDeserialize(ArrayNode coordinates) { - List polygones = new ArrayList(coordinates.size()); + List polygones = new ArrayList<>(coordinates.size()); for (JsonNode polygon : coordinates) { - for (JsonNode ring : (ArrayNode) polygon) { + for (JsonNode ring : polygon) { polygones.add(new GeoJsonPolygon(toPoints((ArrayNode) ring))); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java index 90b046ccc2..8dafe9ea00 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,15 +20,16 @@ import java.util.List; import org.springframework.data.geo.Point; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** * {@link GeoJsonMultiLineString} is defined as list of {@link GeoJsonLineString}s. - * + * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#multilinestring + * @see https://geojson.org/geojson-spec.html#multilinestring */ public class GeoJsonMultiLineString implements GeoJson> { @@ -38,12 +39,12 @@ public class GeoJsonMultiLineString implements GeoJson... lines) { - Assert.notEmpty(lines, "Points for MultiLineString must not be null!"); + Assert.notEmpty(lines, "Points for MultiLineString must not be null"); for (List line : lines) { this.coordinates.add(new GeoJsonLineString(line)); @@ -52,58 +53,42 @@ public GeoJsonMultiLineString(List... lines) { /** * Creates new {@link GeoJsonMultiLineString} for the given {@link GeoJsonLineString}s. - * + * * @param lines must not be {@literal null}. 
*/ public GeoJsonMultiLineString(List lines) { - Assert.notNull(lines, "Lines for MultiLineString must not be null!"); + Assert.notNull(lines, "Lines for MultiLineString must not be null"); this.coordinates.addAll(lines); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public Iterable getCoordinates() { return Collections.unmodifiableList(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonMultiLineString)) { + if (!(obj instanceof GeoJsonMultiLineString other)) { return false; } - return ObjectUtils.nullSafeEquals(this.coordinates, ((GeoJsonMultiLineString) obj).coordinates); + return ObjectUtils.nullSafeEquals(this.coordinates, other.coordinates); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java index 0812533163..bcb4c3e79e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,15 +21,17 @@ import java.util.List; import org.springframework.data.geo.Point; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** * {@link GeoJsonMultiPoint} is defined as list of {@link Point}s. - * + * * @author Christoph Strobl + * @author Ivan Volzhev * @since 1.7 - * @see http://geojson.org/geojson-spec.html#multipoint + * @see https://geojson.org/geojson-spec.html#multipoint */ public class GeoJsonMultiPoint implements GeoJson> { @@ -37,80 +39,78 @@ public class GeoJsonMultiPoint implements GeoJson> { private final List points; + /** + * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. + * + * @param point must not be {@literal null}. + * @since 3.2.5 + */ + public GeoJsonMultiPoint(Point point) { + + Assert.notNull(point, "Point must not be null"); + + this.points = new ArrayList<>(); + this.points.add(point); + } + /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. - * - * @param points points must not be {@literal null} and have at least 2 entries. 
+ * + * @param points points must not be {@literal null} and not empty */ public GeoJsonMultiPoint(List points) { - Assert.notNull(points, "Points must not be null."); - Assert.isTrue(points.size() >= 2, "Minimum of 2 Points required."); + Assert.notNull(points, "Points must not be null"); + Assert.notEmpty(points, "Points must contain at least one point"); - this.points = new ArrayList(points); + this.points = new ArrayList<>(points); } /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. - * + * * @param first must not be {@literal null}. * @param second must not be {@literal null}. * @param others must not be {@literal null}. */ public GeoJsonMultiPoint(Point first, Point second, Point... others) { - Assert.notNull(first, "First point must not be null!"); - Assert.notNull(second, "Second point must not be null!"); - Assert.notNull(others, "Additional points must not be null!"); + Assert.notNull(first, "First point must not be null"); + Assert.notNull(second, "Second point must not be null"); + Assert.notNull(others, "Additional points must not be null"); - this.points = new ArrayList(); + this.points = new ArrayList<>(); this.points.add(first); this.points.add(second); this.points.addAll(Arrays.asList(others)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public List getCoordinates() { return Collections.unmodifiableList(this.points); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.points); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonMultiPoint)) { + if (!(obj instanceof GeoJsonMultiPoint other)) { return false; } - return ObjectUtils.nullSafeEquals(this.points, ((GeoJsonMultiPoint) obj).points); + return ObjectUtils.nullSafeEquals(this.points, other.points); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java index da4a1be178..12b9de9da4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,12 +19,13 @@ import java.util.Collections; import java.util.List; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** * {@link GeoJsonMultiPolygon} is defined as a list of {@link GeoJsonPolygon}s. 
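A small sketch of the relaxed GeoJsonMultiPoint construction shown above: since 3.2.5 a single position is sufficient for the List-based constructor, while the varargs constructor still enforces at least two points. Class name is illustrative.

// Sketch only: both constructions are valid after this change.
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint;

class MultiPointSketch {

	// Since 3.2.5 a MultiPoint may carry just one position ...
	GeoJsonMultiPoint single = new GeoJsonMultiPoint(new Point(10.0, 20.0));

	// ... while the (first, second, others...) constructor keeps requiring two or more.
	GeoJsonMultiPoint pair = new GeoJsonMultiPoint(new Point(10.0, 20.0), new Point(30.0, 40.0));
}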
- * + * * @author Christoph Strobl * @since 1.7 */ @@ -36,58 +37,42 @@ public class GeoJsonMultiPolygon implements GeoJson> { /** * Creates a new {@link GeoJsonMultiPolygon} for the given {@link GeoJsonPolygon}s. - * + * * @param polygons must not be {@literal null}. */ public GeoJsonMultiPolygon(List polygons) { - Assert.notNull(polygons, "Polygons for MultiPolygon must not be null!"); + Assert.notNull(polygons, "Polygons for MultiPolygon must not be null"); this.coordinates.addAll(polygons); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public List getCoordinates() { return Collections.unmodifiableList(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonMultiPolygon)) { + if (!(obj instanceof GeoJsonMultiPolygon other)) { return false; } - return ObjectUtils.nullSafeEquals(this.coordinates, ((GeoJsonMultiPolygon) obj).coordinates); + return ObjectUtils.nullSafeEquals(this.coordinates, other.coordinates); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java index a44aa856ce..7bd4e1203c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,11 +21,12 @@ import org.springframework.data.geo.Point; /** - * {@link GeoJson} representation of {@link Point}. - * + * {@link GeoJson} representation of {@link Point}. Uses {@link Point#getX()} as {@literal longitude} and + * {@link Point#getY()} as {@literal latitude}. + * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#point + * @see https://geojson.org/geojson-spec.html#point */ public class GeoJsonPoint extends Point implements GeoJson> { @@ -35,9 +36,9 @@ public class GeoJsonPoint extends Point implements GeoJson> { /** * Creates {@link GeoJsonPoint} for given coordinates. - * - * @param x - * @param y + * + * @param x longitude between {@literal -180} and {@literal 180} (inclusive). + * @param y latitude between {@literal -90} and {@literal 90} (inclusive). */ public GeoJsonPoint(double x, double y) { super(x, y); @@ -45,28 +46,27 @@ public GeoJsonPoint(double x, double y) { /** * Creates {@link GeoJsonPoint} for given {@link Point}. - * + *

                    + * {@link Point#getX()} translates to {@literal longitude}, {@link Point#getY()} to {@literal latitude}. + * * @param point must not be {@literal null}. */ public GeoJsonPoint(Point point) { super(point); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() + /** + * Obtain the coordinates (x/longitude, y/latitude) array. + * + * @return the coordinates putting {@link #getX() x/longitude} first, and {@link #getY() y/latitude} second. */ @Override public List getCoordinates() { - return Arrays.asList(Double.valueOf(getX()), Double.valueOf(getY())); + return Arrays.asList(getX(), getY()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java index a5e8b1066e..166a10df08 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,15 +23,18 @@ import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * {@link GeoJson} representation of {@link Polygon}. Unlike {@link Polygon} the {@link GeoJsonPolygon} requires a * closed border. Which means that the first and last {@link Point} have to have same coordinate pairs. * * @author Christoph Strobl + * @author Mark Paluch * @since 1.7 - * @see http://geojson.org/geojson-spec.html#polygon + * @see https://geojson.org/geojson-spec.html#polygon */ public class GeoJsonPolygon extends Polygon implements GeoJson> { @@ -47,9 +50,9 @@ public class GeoJsonPolygon extends Polygon implements GeoJson points) { * @param second must not be {@literal null}. * @param third must not be {@literal null}. * @param fourth must not be {@literal null}. - * @param others can be {@literal null}. + * @param others can be empty. * @return new {@link GeoJsonPolygon}. * @since 1.10 */ - public GeoJsonPolygon withInnerRing(Point first, Point second, Point third, Point fourth, final Point... others) { + public GeoJsonPolygon withInnerRing(Point first, Point second, Point third, Point fourth, Point... 
others) { return withInnerRing(asList(first, second, third, fourth, others)); } @@ -98,7 +101,7 @@ public GeoJsonPolygon withInnerRing(List points) { */ public GeoJsonPolygon withInnerRing(GeoJsonLineString lineString) { - Assert.notNull(lineString, "LineString must not be null!"); + Assert.notNull(lineString, "LineString must not be null"); Iterator it = this.coordinates.iterator(); GeoJsonPolygon polygon = new GeoJsonPolygon(it.next().getCoordinates()); @@ -111,25 +114,17 @@ public GeoJsonPolygon withInnerRing(GeoJsonLineString lineString) { return polygon; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public List getCoordinates() { return Collections.unmodifiableList(this.coordinates); } - private static List asList(Point first, Point second, Point third, Point fourth, final Point... others) { + private static List asList(Point first, Point second, Point third, Point fourth, Point... others) { ArrayList result = new ArrayList(3 + others.length); @@ -141,4 +136,28 @@ private static List asList(Point first, Point second, Point third, Point return result; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + GeoJsonPolygon that = (GeoJsonPolygon) o; + + return ObjectUtils.nullSafeEquals(this.coordinates, that.coordinates); + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + ObjectUtils.nullSafeHashCode(coordinates); + return result; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersModule.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersModule.java new file mode 100644 index 0000000000..5b80720da9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersModule.java @@ -0,0 +1,309 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.geo; + +import java.io.IOException; + +import org.springframework.data.geo.Point; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.Module; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.module.SimpleModule; + +/** + * A Jackson {@link Module} to register custom {@link JsonSerializer}s for GeoJSON types. 
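One detail worth restating before the serializer internals, taken from the GeoJsonPoint javadoc earlier in this patch: GeoJSON positions are longitude first, so Point#getX() maps to longitude and Point#getY() to latitude. A small sketch; the coordinates are illustrative.

// Sketch only: axis order of GeoJsonPoint as serialized into "coordinates".
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;

class AxisOrderSketch {

	// x = longitude, y = latitude; roughly Zurich.
	GeoJsonPoint zurich = new GeoJsonPoint(8.54, 47.37);

	double longitude = zurich.getX(); // written first into "coordinates"
	double latitude = zurich.getY(); // written second
}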
+ * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ +class GeoJsonSerializersModule extends SimpleModule { + + private static final long serialVersionUID = 1340494654898895610L; + + GeoJsonSerializersModule() { + registerSerializersIn(this); + } + + + static void registerSerializersIn(SimpleModule module) { + + module.addSerializer(GeoJsonPoint.class, new GeoJsonPointSerializer()); + module.addSerializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointSerializer()); + module.addSerializer(GeoJsonLineString.class, new GeoJsonLineStringSerializer()); + module.addSerializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringSerializer()); + module.addSerializer(GeoJsonPolygon.class, new GeoJsonPolygonSerializer()); + module.addSerializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonSerializer()); + } + + /** + * @param + * @author Christoph Strobl + */ + private static abstract class GeoJsonSerializer> extends JsonSerializer { + + @Override + public void serialize(T shape, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException { + + jsonGenerator.writeStartObject(); + jsonGenerator.writeStringField("type", shape.getType()); + jsonGenerator.writeArrayFieldStart("coordinates"); + + doSerialize(shape, jsonGenerator); + + jsonGenerator.writeEndArray(); + jsonGenerator.writeEndObject(); + } + + /** + * Perform the actual serialization given the {@literal shape} as {@link GeoJson}. + * + * @param shape + * @param jsonGenerator + * @return + */ + protected abstract void doSerialize(T shape, JsonGenerator jsonGenerator) throws IOException; + + /** + * Write a {@link Point} as array.
                    + * {@code [10.0, 20.0]} + * + * @param point + * @param jsonGenerator + * @throws IOException + */ + protected void writePoint(Point point, JsonGenerator jsonGenerator) throws IOException { + + jsonGenerator.writeStartArray(); + writeRawCoordinates(point, jsonGenerator); + jsonGenerator.writeEndArray(); + } + + /** + * Write the {@link Point} coordinates.
                    + * {@code 10.0, 20.0} + * + * @param point + * @param jsonGenerator + * @throws IOException + */ + protected void writeRawCoordinates(Point point, JsonGenerator jsonGenerator) throws IOException { + + jsonGenerator.writeNumber(point.getX()); + jsonGenerator.writeNumber(point.getY()); + } + + /** + * Write an {@link Iterable} of {@link Point} as array.
                    + * {@code [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]} + * + * @param points + * @param jsonGenerator + * @throws IOException + */ + protected void writeLine(Iterable points, JsonGenerator jsonGenerator) throws IOException { + + jsonGenerator.writeStartArray(); + writeRawLine(points, jsonGenerator); + jsonGenerator.writeEndArray(); + } + + /** + * Write an {@link Iterable} of {@link Point}.
                    + * {@code [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]} + * + * @param points + * @param jsonGenerator + * @throws IOException + */ + protected void writeRawLine(Iterable points, JsonGenerator jsonGenerator) throws IOException { + + for (Point point : points) { + writePoint(point, jsonGenerator); + } + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonPoint} to: + * + *

+	 * <pre>
+	 * <code>
+	 * { "type": "Point", "coordinates": [10.0, 20.0] }
+	 * </code>
+	 * </pre>
                    + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonPointSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonPoint value, JsonGenerator jsonGenerator) throws IOException { + writeRawCoordinates(value, jsonGenerator); + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonLineString} to: + * + *
+	 * <pre>
+	 * <code>
+	 * {
+	 *   "type": "LineString",
+	 *   "coordinates": [
+	 *     [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
+	 *   ]
+	 * }
+	 * </code>
+	 * </pre>
                    + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonLineStringSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonLineString value, JsonGenerator jsonGenerator) throws IOException { + writeRawLine(value.getCoordinates(), jsonGenerator); + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonMultiPoint} to: + * + *
+	 * <pre>
+	 * <code>
+	 * {
+	 *   "type": "MultiPoint",
+	 *   "coordinates": [
+	 *     [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
+	 *   ]
+	 * }
+	 * </code>
+	 * </pre>
                    + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonMultiPointSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonMultiPoint value, JsonGenerator jsonGenerator) throws IOException { + writeRawLine(value.getCoordinates(), jsonGenerator); + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonMultiLineString} to: + * + *
+	 * <pre>
+	 * <code>
+	 * {
+	 *   "type": "MultiLineString",
+	 *   "coordinates": [
+	 *     [ [10.0, 20.0], [30.0, 40.0] ],
+	 *     [ [50.0, 60.0] , [70.0, 80.0] ]
+	 *   ]
+	 * }
+	 * </code>
+	 * </pre>
                    + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonMultiLineStringSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonMultiLineString value, JsonGenerator jsonGenerator) throws IOException { + + for (GeoJsonLineString lineString : value.getCoordinates()) { + writeLine(lineString.getCoordinates(), jsonGenerator); + } + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonPolygon} to: + * + *
+	 * <pre>
+	 * <code>
+	 * {
+	 *   "type": "Polygon",
+	 *   "coordinates": [
+	 *     [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
+	 *   ]
+	 * }
+	 * </code>
+	 * </pre>
                    + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonPolygonSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonPolygon value, JsonGenerator jsonGenerator) throws IOException { + + for (GeoJsonLineString lineString : value.getCoordinates()) { + writeLine(lineString.getCoordinates(), jsonGenerator); + } + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonMultiPolygon} to: + * + *
+	 * <pre>
+	 * <code>
+	 * {
+	 *   "type": "MultiPolygon",
+	 *   "coordinates": [
+	 *     [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
+	 *     [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
+	 *     [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]
+	 *   ]
+	 * }
+	 * </code>
+	 * </pre>
                    + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonMultiPolygonSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonMultiPolygon value, JsonGenerator jsonGenerator) throws IOException { + + for (GeoJsonPolygon polygon : value.getCoordinates()) { + + jsonGenerator.writeStartArray(); + for (GeoJsonLineString lineString : polygon.getCoordinates()) { + writeLine(lineString.getCoordinates(), jsonGenerator); + } + jsonGenerator.writeEndArray(); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java index 0cb623ef29..a482c136e7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,17 +18,19 @@ import java.util.Arrays; import java.util.List; -import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.annotation.PersistenceCreator; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.geo.Shape; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** * Represents a geospatial sphere value. - * + * * @author Thomas Darimont + * @author Mark Paluch * @since 1.5 */ public class Sphere implements Shape { @@ -39,16 +41,16 @@ public class Sphere implements Shape { /** * Creates a Sphere around the given center {@link Point} with the given radius. - * + * * @param center must not be {@literal null}. * @param radius must not be {@literal null}. */ - @PersistenceConstructor + @PersistenceCreator public Sphere(Point center, Distance radius) { - Assert.notNull(center); - Assert.notNull(radius); - Assert.isTrue(radius.getValue() >= 0, "Radius must not be negative!"); + Assert.notNull(center, "Center point must not be null"); + Assert.notNull(radius, "Radius must not be null"); + Assert.isTrue(radius.getValue() >= 0, "Radius must not be negative"); this.center = center; this.radius = radius; @@ -56,8 +58,8 @@ public Sphere(Point center, Distance radius) { /** * Creates a Sphere around the given center {@link Point} with the given radius. - * - * @param center + * + * @param center must not be {@literal null}. * @param radius */ public Sphere(Point center, double radius) { @@ -66,8 +68,8 @@ public Sphere(Point center, double radius) { /** * Creates a Sphere from the given {@link Circle}. - * - * @param circle + * + * @param circle must not be {@literal null}. */ public Sphere(Circle circle) { this(circle.getCenter(), circle.getRadius()); @@ -75,7 +77,7 @@ public Sphere(Circle circle) { /** * Returns the center of the {@link Circle}. - * + * * @return will never be {@literal null}. 
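The Sphere shape that follows renders itself through asList() and getCommand(); a minimal sketch of what that looks like from calling code (the commented value is inferred from the javadoc, not a captured result; class name is illustrative).

// Sketch only: Sphere exposes the nested shape value plus the command keyword
// used when rendering a $within/$geoWithin criterion.
import java.util.List;

import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.geo.Sphere;

class SphereSketch {

	Sphere unitSphere = new Sphere(new Point(0, 0), 1.0);

	// asList() yields [[x, y], radius], here [[0.0, 0.0], 1.0].
	List<?> shape = unitSphere.asList();
	String command = unitSphere.getCommand();
}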
*/ public Point getCenter() { @@ -84,45 +86,32 @@ public Point getCenter() { /** * Returns the radius of the {@link Circle}. - * - * @return + * + * @return never {@literal null}. */ public Distance getRadius() { return radius; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("Sphere [center=%s, radius=%s]", center, radius); } - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (obj == null || !(obj instanceof Sphere)) { + if (!(obj instanceof Sphere other)) { return false; } - Sphere that = (Sphere) obj; - - return this.center.equals(that.center) && this.radius.equals(that.radius); + return this.center.equals(other.center) && this.radius.equals(other.radius); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { int result = 17; @@ -134,8 +123,8 @@ public int hashCode() { /** * Returns the {@link Shape} as a list of usually {@link Double} or {@link List}s of {@link Double}s. Wildcard bound * to allow implementations to return a more concrete element type. - * - * @return + * + * @return never {@literal null}. */ public List asList() { return Arrays.asList(Arrays.asList(center.getX(), center.getY()), this.radius.getValue()); @@ -143,8 +132,8 @@ public List asList() { /** * Returns the command to be used to create the {@literal $within} criterion. - * - * @return + * + * @return never {@literal null}. */ public String getCommand() { return COMMAND; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/package-info.java index 52505fe2ed..6cc77f832b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/package-info.java @@ -1,20 +1,6 @@ -/* - * Copyright 2011-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ /** * Support for MongoDB geo-spatial queries. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.geo; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java index 9e5c4a088a..05cf13f66b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,131 +17,172 @@ import java.lang.annotation.Documented; import java.lang.annotation.ElementType; +import java.lang.annotation.Repeatable; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.annotation.Collation; +import org.springframework.data.mongodb.core.mapping.Document; + /** - * Mark a class to use compound indexes. - * + * Mark a class to use compound indexes.
                    + *

                    + * NOTE: This annotation is repeatable according to Java 8 conventions using {@link CompoundIndexes#value()} as + * container. + * + *

+ * <pre class="code">
+ * @Document
+ * @CompoundIndex(def = "{'firstname': 1, 'lastname': 1}")
+ * @CompoundIndex(def = "{'address.city': 1, 'address.street': 1}")
+ * class Person {
+ * 	String firstname;
+ * 	String lastname;
+ *
+ * 	Address address;
+ * }
+ * </pre>
                    + * * @author Jon Brisbin * @author Oliver Gierke * @author Philipp Schneider * @author Johno Crawford * @author Christoph Strobl + * @author Dave Perryman + * @author Stefan Tirea */ +@Collation @Target({ ElementType.TYPE }) @Documented +@Repeatable(CompoundIndexes.class) @Retention(RetentionPolicy.RUNTIME) public @interface CompoundIndex { /** - * The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values - * define the index direction (1 for ascending, -1 for descending).
                    + * The actual index definition in JSON format or a {@link org.springframework.expression.spel.standard.SpelExpression + * template expression} resolving to either a JSON String or a {@link org.bson.Document}. The keys of the JSON + * document are the fields to be indexed, the values define the index direction (1 for ascending, -1 for descending). + *
* If left empty on a nested document, the whole document will be indexed. - * - * @return + * + *
                    +	 * @Document
                    +	 * @CompoundIndex(def = "{'h1': 1, 'h2': 1}")
                    +	 * class JsonStringIndexDefinition {
                    +	 *   String h1, h2;
                    +	 * }
                    +	 *
                    +	 * @Document
                    +	 * @CompoundIndex(def = "#{T(org.bson.Document).parse("{ 'h1': 1, 'h2': 1 }")}")
                    +	 * class ExpressionIndexDefinition {
                    +	 *   String h1, h2;
                    +	 * }
                    +	 * 
                    + * + * @return empty String by default. */ String def() default ""; /** - * It does not actually make sense to use that attribute as the direction has to be defined in the {@link #def()} - * attribute actually. - * - * @return - */ - @Deprecated - IndexDirection direction() default IndexDirection.ASCENDING; - - /** - * @see http://docs.mongodb.org/manual/core/index-unique/ - * @return + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-unique/ */ boolean unique() default false; /** - * If set to true index will skip over any document that is missing the indexed field. - * - * @see http://docs.mongodb.org/manual/core/index-sparse/ - * @return + * If set to true index will skip over any document that is missing the indexed field.
                    + * Must not be used with {@link #partialFilter()}. + * + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-sparse/ */ boolean sparse() default false; /** - * @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping - * @return - */ - boolean dropDups() default false; - - /** - * The name of the index to be created.
+ * The name of the index to be created, either as a plain value or as a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression}.
                    *
* The name will only be applied as-is when defined on the root level. For usage on nested or embedded structures, the * provided name will be prefixed with the path leading to the entity.
                    *
                    * The structure below - * - *
                    -	 * 
                    +	 *
                    +	 * 
                     	 * @Document
                     	 * class Root {
                    -	 *   Hybrid hybrid;
                    -	 *   Nested nested;
                    +	 * 	Hybrid hybrid;
                    +	 * 	Nested nested;
                     	 * }
                    -	 * 
                    +	 *
                     	 * @Document
                     	 * @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
                     	 * class Hybrid {
                    -	 *   String h1, h2;
                    +	 * 	String h1, h2;
                     	 * }
                    -	 * 
                    +	 *
                     	 * @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
                     	 * class Nested {
                    -	 *   String n1, n2;
                    +	 * 	String n1, n2;
                     	 * }
                    -	 * 
                     	 * 
                    - * + * * resolves in the following index structures - * - *
                    -	 * 
                    +	 *
                    +	 * 
                     	 * db.root.createIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
                     	 * db.root.createIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
                     	 * db.hybrid.createIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
                    -	 * 
                     	 * 
                    - * - * @return + * + * @return empty String by default. */ String name() default ""; /** * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults * to {@literal false}. - * - * @return + * + * @return {@literal false} by default * @since 1.5 */ boolean useGeneratedName() default false; /** - * The collection the index will be created in. Will default to the collection the annotated domain class will be - * stored in. - * - * @return - * @deprecated The collection name is derived from the domain type. Fixing the collection via this attribute might - * result in broken definitions. Will be removed in 1.7. + * If {@literal true} the index will be created in the background. + * + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/indexes/#background-construction */ - @Deprecated - String collection() default ""; + boolean background() default false; /** - * If {@literal true} the index will be created in the background. - * - * @see http://docs.mongodb.org/manual/core/indexes/#background-construction - * @return + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
                    + * Must not be used with {@link #sparse() sparse = true}. + * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 3.1 */ - boolean background() default false; + String partialFilter() default ""; + /** + * The actual collation definition in JSON format or a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression} resolving to either a JSON + * String or a {@link org.bson.Document}. The keys of the JSON document are configuration options for the collation + * (language-specific rules for string comparison) to be applied on string properties being part of the index. + *

                    + * NOTE: Overrides {@link Document#collation()}. + * + * @return empty String by default. + * @see https://www.mongodb.com/docs/manual/reference/collation/ + * @since 4.0 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java index c5d76be3fe..62f4495408 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,42 +15,36 @@ */ package org.springframework.data.mongodb.core.index; +import org.bson.Document; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Index definition to span multiple keys. - * + * * @author Christoph Strobl * @since 1.5 */ public class CompoundIndexDefinition extends Index { - private DBObject keys; + private Document keys; /** * Creates a new {@link CompoundIndexDefinition} for the given keys. - * + * * @param keys must not be {@literal null}. */ - public CompoundIndexDefinition(DBObject keys) { + public CompoundIndexDefinition(Document keys) { - Assert.notNull(keys, "Keys must not be null!"); + Assert.notNull(keys, "Keys must not be null"); this.keys = keys; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.Index#getIndexKeys() - */ @Override - public DBObject getIndexKeys() { + public Document getIndexKeys() { - BasicDBObject dbo = new BasicDBObject(); - dbo.putAll(this.keys); - dbo.putAll(super.getIndexKeys()); - return dbo; + Document document = new Document(); + document.putAll(this.keys); + document.putAll(super.getIndexKeys()); + return document; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java index e7704aa66f..d9195969d9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
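Where the annotation model is not an option (for example when collection names are computed at runtime), the CompoundIndexDefinition from the hunk above is the programmatic equivalent. A hedged sketch; the "person" collection, field names, and index name are illustrative:

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.CompoundIndexDefinition;

class CompoundIndexSketch {

	// Programmatic counterpart of @CompoundIndex(def = "{'firstname': 1, 'lastname': 1}").
	void createNameIndex(MongoOperations operations) {

		// 1 = ascending, -1 = descending, mirroring the def() JSON semantics described above.
		Document keys = new Document("firstname", 1).append("lastname", 1);

		operations.indexOps("person").ensureIndex(new CompoundIndexDefinition(keys).named("name_idx").unique());
	}
}
```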
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,18 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.index; +import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * @author Jon Brisbin + * Container annotation that allows collecting multiple {@link CompoundIndex} annotations. + *

                    + * Can be used natively, declaring several nested {@link CompoundIndex} annotations. Can also be used in conjunction + * with Java 8's support for repeatable annotations, where {@link CompoundIndex} can simply be declared several + * times on the same {@linkplain ElementType#TYPE type}, implicitly generating this container annotation. + * + * @author Jon Brisbin + * @author Christoph Strobl */ @Target({ ElementType.TYPE }) +@Documented @Retention(RetentionPolicy.RUNTIME) public @interface CompoundIndexes { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/DefaultSearchIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/DefaultSearchIndexOperations.java new file mode 100644 index 0000000000..225bb41ac8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/DefaultSearchIndexOperations.java @@ -0,0 +1,125 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.BsonString; +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.client.model.SearchIndexModel; +import com.mongodb.client.model.SearchIndexType; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public class DefaultSearchIndexOperations implements SearchIndexOperations { + + private final MongoOperations mongoOperations; + private final String collectionName; + private final TypeInformation entityTypeInformation; + + public DefaultSearchIndexOperations(MongoOperations mongoOperations, Class type) { + this(mongoOperations, mongoOperations.getCollectionName(type), type); + } + + public DefaultSearchIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class type) { + + this.collectionName = collectionName; + + if (type != null) { + + MappingContext, MongoPersistentProperty> mappingContext = mongoOperations + .getConverter().getMappingContext(); + entityTypeInformation = mappingContext.getRequiredPersistentEntity(type).getTypeInformation(); + } else { + entityTypeInformation = null; + } + + this.mongoOperations = mongoOperations; + } + + @Override + public String createIndex(SearchIndexDefinition indexDefinition) { + + Document index = indexDefinition.getIndexDocument(entityTypeInformation, + mongoOperations.getConverter().getMappingContext()); + + mongoOperations.getCollection(collectionName) + 
.createSearchIndexes(List.of(new SearchIndexModel(indexDefinition.getName(), + index.get("definition", Document.class), SearchIndexType.of(new BsonString(indexDefinition.getType()))))); + + return indexDefinition.getName(); + } + + @Override + public void updateIndex(SearchIndexDefinition indexDefinition) { + + Document indexDocument = indexDefinition.getIndexDocument(entityTypeInformation, + mongoOperations.getConverter().getMappingContext()); + + mongoOperations.getCollection(collectionName).updateSearchIndex(indexDefinition.getName(), indexDocument); + } + + @Override + public boolean exists(String indexName) { + return getSearchIndex(indexName) != null; + } + + @Override + public SearchIndexStatus status(String indexName) { + + Document searchIndex = getSearchIndex(indexName); + return searchIndex != null ? SearchIndexStatus.valueOf(searchIndex.getString("status")) + : SearchIndexStatus.DOES_NOT_EXIST; + } + + @Override + public void dropAllIndexes() { + getSearchIndexes(null).forEach(indexInfo -> dropIndex(indexInfo.getString("name"))); + } + + @Override + public void dropIndex(String indexName) { + mongoOperations.getCollection(collectionName).dropSearchIndex(indexName); + } + + @Nullable + private Document getSearchIndex(String indexName) { + + List indexes = getSearchIndexes(indexName); + return indexes.isEmpty() ? null : indexes.iterator().next(); + } + + private List getSearchIndexes(@Nullable String indexName) { + + Document filter = StringUtils.hasText(indexName) ? new Document("name", indexName) : new Document(); + + return mongoOperations.getCollection(collectionName).aggregate(List.of(new Document("$listSearchIndexes", filter))) + .into(new ArrayList<>()); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java index 647d83d75b..073f18c40b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,7 @@ /** * Geospatial index type. - * + * * @author Laurent Canet * @author Oliver Gierke * @since 1.4 diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java index ee20619ac0..3fb797559b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2016 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
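Search indexes are built asynchronously on the server, so the status(...) method above lends itself to a readiness poll. A sketch under two assumptions: that SearchIndexStatus defines a READY constant (the hunk itself only shows DOES_NOT_EXIST) and a hypothetical "movies" collection:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.DefaultSearchIndexOperations;
import org.springframework.data.mongodb.core.index.SearchIndexOperations;
import org.springframework.data.mongodb.core.index.SearchIndexStatus;

class SearchIndexReadinessSketch {

	// Polls the named search index until the server reports READY or the attempts run out.
	boolean awaitReady(MongoOperations operations, String indexName, int attempts) throws InterruptedException {

		SearchIndexOperations searchIndexOps = new DefaultSearchIndexOperations(operations, "movies", null);

		for (int i = 0; i < attempts; i++) {
			if (searchIndexOps.status(indexName) == SearchIndexStatus.READY) {
				return true;
			}
			Thread.sleep(500); // crude fixed delay; tune for real use
		}
		return false;
	}
}
```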
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,7 +22,7 @@ /** * Mark a field to be indexed using MongoDB's geospatial indexing feature. - * + * * @author Jon Brisbin * @author Laurent Canet * @author Thomas Darimont @@ -34,13 +34,13 @@ public @interface GeoSpatialIndexed { /** - * Index name.
                    - *
+ * Index name, either as a plain value or as a {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
* The name will only be applied as-is when defined on the root level. For usage on nested or embedded structures, the * provided name will be prefixed with the path leading to the entity.
                    *
                    * The structure below - * + * *

                     	 * 
                     	 * @Document
                    @@ -48,93 +48,87 @@
                     	 *   Hybrid hybrid;
                     	 *   Nested nested;
                     	 * }
                    -	 * 
                    +	 *
                     	 * @Document
                     	 * class Hybrid {
                     	 *   @GeoSpatialIndexed(name="index") Point h1;
                    +	 *   @GeoSpatialIndexed(name="#{@myBean.indexName}") Point h2;
                     	 * }
                    -	 * 
                    +	 *
                     	 * class Nested {
                     	 *   @GeoSpatialIndexed(name="index") Point n1;
                     	 * }
                     	 * 
                     	 * 
                    - * + * * resolves in the following index structures - * + * *
                     	 * 
                     	 * db.root.createIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } )
                     	 * db.root.createIndex( { nested.n1: "2d" } , { name: "nested.index" } )
                     	 * db.hybrid.createIndex( { h1: "2d" } , { name: "index" } )
                    +	 * db.hybrid.createIndex( { h2: "2d"} , { name: the value myBean.getIndexName() returned } )
                     	 * 
                     	 * 
                    - * - * @return + * + * @return empty {@link String} by default. */ String name() default ""; /** * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults * to {@literal false}. - * - * @return + * + * @return {@literal false} by default. * @since 1.5 */ boolean useGeneratedName() default false; - /** - * Name of the collection in which to create the index. - * - * @return - * @deprecated The collection name is derived from the domain type. Fixing the collection via this attribute might - * result in broken definitions. Will be removed in 1.7. - */ - @Deprecated - String collection() default ""; - /** * Minimum value for indexed values. - * - * @return + * + * @return {@literal -180} by default. */ int min() default -180; /** * Maximum value for indexed values. - * - * @return + * + * @return {@literal +180} by default. */ int max() default 180; /** * Bits of precision for boundary calculations. - * - * @return + * + * @return {@literal 26} by default. */ int bits() default 26; /** * The type of the geospatial index. Default is {@link GeoSpatialIndexType#GEO_2D} - * + * * @since 1.4 - * @return + * @return {@link GeoSpatialIndexType#GEO_2D} by default. */ GeoSpatialIndexType type() default GeoSpatialIndexType.GEO_2D; /** * The bucket size for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes, in coordinate units. - * + * * @since 1.4 - * @return + * @return {@literal 1.0} by default. + * @deprecated since MongoDB server version 4.4 */ + @Deprecated double bucketSize() default 1.0; /** * The name of the additional field to use for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes - * + * * @since 1.4 - * @return + * @return empty {@link String} by default. */ String additionalField() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java index 64d8841a66..0949506195 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2014 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,52 @@ */ package org.springframework.data.mongodb.core.index; +import java.util.Optional; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Value object to capture data to create a geo index. 
- * + * * @author Jon Brisbin * @author Oliver Gierke * @author Laurent Canet * @author Christoph Strobl + * @author Mark Paluch */ public class GeospatialIndex implements IndexDefinition { private final String field; - private String name; - private Integer min; - private Integer max; - private Integer bits; + private @Nullable String name; + private @Nullable Integer min; + private @Nullable Integer max; + private @Nullable Integer bits; private GeoSpatialIndexType type = GeoSpatialIndexType.GEO_2D; - private Double bucketSize = 1.0; - private String additionalField; + private Double bucketSize = MongoClientVersion.isVersion5orNewer() ? null : 1.0; + private @Nullable String additionalField; + private Optional filter = Optional.empty(); + private Optional collation = Optional.empty(); /** * Creates a new {@link GeospatialIndex} for the given field. - * + * * @param field must not be empty or {@literal null}. */ public GeospatialIndex(String field) { - Assert.hasText(field, "Field must have text!"); + Assert.hasText(field, "Field must have text"); this.field = field; } /** * @param name must not be {@literal null} or empty. - * @return + * @return this. */ public GeospatialIndex named(String name) { @@ -64,38 +70,38 @@ public GeospatialIndex named(String name) { /** * @param min - * @return + * @return this. */ public GeospatialIndex withMin(int min) { - this.min = Integer.valueOf(min); + this.min = min; return this; } /** * @param max - * @return + * @return this. */ public GeospatialIndex withMax(int max) { - this.max = Integer.valueOf(max); + this.max = max; return this; } /** * @param bits - * @return + * @return this. */ public GeospatialIndex withBits(int bits) { - this.bits = Integer.valueOf(bits); + this.bits = bits; return this; } /** * @param type must not be {@literal null}. - * @return + * @return this. */ public GeospatialIndex typed(GeoSpatialIndexType type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); this.type = type; return this; @@ -103,60 +109,82 @@ public GeospatialIndex typed(GeoSpatialIndexType type) { /** * @param bucketSize - * @return + * @return this. + * @deprecated since MongoDB server version 4.4 */ + @Deprecated public GeospatialIndex withBucketSize(double bucketSize) { this.bucketSize = bucketSize; return this; } /** - * @param fieldName. - * @return + * @param fieldName + * @return this. */ public GeospatialIndex withAdditionalField(String fieldName) { this.additionalField = fieldName; return this; } - public DBObject getIndexKeys() { + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}. + * + * @param filter can be {@literal null}. + * @return this. + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 1.10 + */ + public GeospatialIndex partial(@Nullable IndexFilter filter) { - DBObject dbo = new BasicDBObject(); + this.filter = Optional.ofNullable(filter); + return this; + } - switch (type) { + /** + * Set the {@link Collation} to specify language-specific rules for string comparison, such as rules for lettercase + * and accent marks.
                    + * NOTE: Only queries using the same {@link Collation} as the {@link Index} actually make use of the + * index. + * + * @param collation can be {@literal null}. + * @return this. + * @since 2.0 + */ + public GeospatialIndex collation(@Nullable Collation collation) { - case GEO_2D: - dbo.put(field, "2d"); - break; + this.collation = Optional.ofNullable(collation); + return this; + } - case GEO_2DSPHERE: - dbo.put(field, "2dsphere"); - break; + @Override + public Document getIndexKeys() { - case GEO_HAYSTACK: - dbo.put(field, "geoHaystack"); + Document document = new Document(); + + switch (type) { + case GEO_2D -> document.put(field, "2d"); + case GEO_2DSPHERE -> document.put(field, "2dsphere"); + case GEO_HAYSTACK -> { + document.put(field, "geoHaystack"); if (!StringUtils.hasText(additionalField)) { - throw new IllegalArgumentException("When defining geoHaystack index, an additionnal field must be defined"); + throw new IllegalArgumentException("When defining geoHaystack index, an additional field must be defined"); } - dbo.put(additionalField, 1); - break; - - default: - throw new IllegalArgumentException("Unsupported geospatial index " + type); + document.put(additionalField, 1); + } + default -> throw new IllegalArgumentException("Unsupported geospatial index " + type); } - return dbo; + return document; } - public DBObject getIndexOptions() { - - if (!StringUtils.hasText(name) && min == null && max == null && bucketSize == null) { - return null; - } + @Override + public Document getIndexOptions() { - DBObject dbo = new BasicDBObject(); + Document document = new Document(); if (StringUtils.hasText(name)) { - dbo.put("name", name); + document.put("name", name); } switch (type) { @@ -164,13 +192,13 @@ public DBObject getIndexOptions() { case GEO_2D: if (min != null) { - dbo.put("min", min); + document.put("min", min); } if (max != null) { - dbo.put("max", max); + document.put("max", max); } if (bits != null) { - dbo.put("bits", bits); + document.put("bits", bits); } break; @@ -181,18 +209,17 @@ public DBObject getIndexOptions() { case GEO_HAYSTACK: if (bucketSize != null) { - dbo.put("bucketSize", bucketSize); + document.put("bucketSize", bucketSize); } break; } - return dbo; + filter.ifPresent(val -> document.put("partialFilterExpression", val.getFilterObject())); + collation.ifPresent(val -> document.append("collation", val.toDocument())); + + return document; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("Geo index: %s - Options: %s", getIndexKeys(), getIndexOptions()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashIndexed.java new file mode 100644 index 0000000000..ce7a29cfc8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashIndexed.java @@ -0,0 +1,64 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
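Putting the fluent GeospatialIndex API from this hunk together, a sketch that creates a 2dsphere index with a collation; the "places" collection and "location" property are illustrative:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;
import org.springframework.data.mongodb.core.query.Collation;

class GeoIndexSketch {

	// Programmatic counterpart of @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE).
	void createLocationIndex(MongoOperations operations) {

		GeospatialIndex index = new GeospatialIndex("location") //
				.typed(GeoSpatialIndexType.GEO_2DSPHERE) //
				.named("location_2dsphere") //
				.collation(Collation.of("en")); // only queries using the same collation hit the index

		operations.indexOps("places").ensureIndex(index);
	}
}
```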
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation for a property that should be used as key for a + * Hashed Index. If used on a simple property, the + * index uses a hashing function to compute the hash of the value of the index field. When added to a property of complex + * type, the embedded document is collapsed and the hash is computed for the entire object. + *
                    + * + *
                    + * @Document
                    + * public class DomainType {
                    + *
                    + * 	@HashIndexed @Id String id;
                    + * }
                    + * 
                    + * + * {@link HashIndexed} can also be used as meta {@link java.lang.annotation.Annotation} to create composed annotations: + * + *
                    + * @Indexed
                    + * @HashIndexed
                    + * @Retention(RetentionPolicy.RUNTIME)
                    + * public @interface IndexAndHash {
                    + *
                    + * 	@AliasFor(annotation = Indexed.class, attribute = "name")
                    + * 	String name() default "";
                    + * }
                    + *
                    + * @Document
                    + * public class DomainType {
                    + *
                    + * 	@ComposedHashIndexed(name = "idx-name") String value;
                    + * }
                    + * 
                    + * + * @author Christoph Strobl + * @since 2.2 + * @see HashedIndex + */ +@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD }) +@Retention(RetentionPolicy.RUNTIME) +public @interface HashIndexed { +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashedIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashedIndex.java new file mode 100644 index 0000000000..4542834110 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashedIndex.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * {@link IndexDefinition} implementation for MongoDB + * Hashed Indexes maintaining entries with hashes of + * the values of the indexed field. + * + * @author Christoph Strobl + * @since 2.2 + */ +public class HashedIndex implements IndexDefinition { + + private final String field; + + private HashedIndex(String field) { + + Assert.hasText(field, "Field must not be null nor empty"); + this.field = field; + } + + /** + * Creates a new {@link HashedIndex} for the given field. + * + * @param field must not be {@literal null} nor empty. + * @return new instance of {@link HashedIndex}. + */ + public static HashedIndex hashed(String field) { + return new HashedIndex(field); + } + + @Override + public Document getIndexKeys() { + return new Document(field, "hashed"); + } + + @Override + public Document getIndexOptions() { + return new Document(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java index dbf59f6e2b..95f4226e28 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
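The annotation-free route to the same hashed index is HashedIndex.hashed(...) from the new file above. The "accounts" collection and "userId" field are illustrative:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.HashedIndex;

class HashedIndexSketch {

	// Programmatic counterpart of @HashIndexed, e.g. when preparing a hashed shard key.
	void createHashedIndex(MongoOperations operations) {
		operations.indexOps("accounts").ensureIndex(HashedIndex.hashed("userId"));
	}
}
```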
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,36 @@ */ package org.springframework.data.mongodb.core.index; +import java.time.Duration; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.concurrent.TimeUnit; +import org.bson.Document; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.query.Order; +import org.springframework.data.mongodb.core.index.IndexOptions.Unique; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ @SuppressWarnings("deprecation") public class Index implements IndexDefinition { - public enum Duplicates { - RETAIN, // - - /** - * Dropping Duplicates was removed in MongoDB Server 2.8.0-rc0. - *

                    - * See https://jira.mongodb.org/browse/SERVER-14710 - * - * @deprecated since 1.7. - */ - @Deprecated // - DROP - } - private final Map fieldSpec = new LinkedHashMap(); - - private String name; - - private boolean unique = false; - - private boolean dropDuplicates = false; - + private @Nullable String name; private boolean sparse = false; - private boolean background = false; - - private long expire = -1; + private final IndexOptions options = IndexOptions.none(); + private Optional filter = Optional.empty(); + private Optional collation = Optional.empty(); public Index() {} @@ -69,31 +52,6 @@ public Index(String key, Direction direction) { fieldSpec.put(key, direction); } - /** - * Creates a new {@link Indexed} on the given key and {@link Order}. - * - * @deprecated use {@link #Index(String, Direction)} instead. - * @param key must not be {@literal null} or empty. - * @param order must not be {@literal null}. - */ - @Deprecated - public Index(String key, Order order) { - this(key, order.toDirection()); - } - - /** - * Adds the given field to the index. - * - * @deprecated use {@link #on(String, Direction)} instead. - * @param key must not be {@literal null} or empty. - * @param order must not be {@literal null}. - * @return - */ - @Deprecated - public Index on(String key, Order order) { - return on(key, order.toDirection()); - } - public Index on(String key, Direction direction) { fieldSpec.put(key, direction); return this; @@ -106,20 +64,23 @@ public Index named(String name) { /** * Reject all documents that contain a duplicate value for the indexed field. - * - * @see http://docs.mongodb.org/manual/core/index-unique/ - * @return + * + * @return this. + * @see https://docs.mongodb.org/manual/core/index-unique/ */ public Index unique() { - this.unique = true; + + this.options.setUnique(Unique.YES); return this; } /** * Skip over any document that is missing the indexed field. - * - * @see http://docs.mongodb.org/manual/core/index-sparse/ - * @return + * + * @return this. + * @see https://docs.mongodb.org/manual/core/index-sparse/ */ public Index sparse() { this.sparse = true; @@ -128,8 +89,8 @@ public Index sparse() { /** * Build the index in background (non blocking). - * - * @return + * + * @return this. * @since 1.5 */ public Index background() { @@ -138,82 +99,120 @@ public Index background() { return this; } + /** + * Hidden indexes are not visible to the query planner and cannot be used to support a query. + * + * @return this. + * @see https://www.mongodb.com/docs/manual/core/index-hidden/ + * @since 4.1 + */ + public Index hidden() { + + options.setHidden(true); + return this; + } + /** * Specifies TTL in seconds. - * + * * @param value - * @return + * @return this. * @since 1.5 */ public Index expire(long value) { return expire(value, TimeUnit.SECONDS); } + /** + * Specifies the TTL. + * + * @param timeout must not be {@literal null}. + * @return this. + * @throws IllegalArgumentException if given {@literal timeout} is {@literal null}. + * @since 2.2 + */ + public Index expire(Duration timeout) { + + Assert.notNull(timeout, "Timeout must not be null"); + return expire(timeout.getSeconds()); + } + /** * Specifies TTL with given {@link TimeUnit}. - * + * * @param value - * @param unit - * @return + * @param unit must not be {@literal null}. + * @return this. 
* @since 1.5 */ public Index expire(long value, TimeUnit unit) { - Assert.notNull(unit, "TimeUnit for expiration must not be null."); - this.expire = unit.toSeconds(value); + Assert.notNull(unit, "TimeUnit for expiration must not be null"); + options.setExpire(Duration.ofSeconds(unit.toSeconds(value))); return this; } /** - * @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping - * @param duplicates - * @return + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}. + * + * @param filter can be {@literal null}. + * @return this. + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 1.10 */ - public Index unique(Duplicates duplicates) { - if (duplicates == Duplicates.DROP) { - this.dropDuplicates = true; - } - return unique(); + public Index partial(@Nullable IndexFilter filter) { + + this.filter = Optional.ofNullable(filter); + return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys() + /** + * Set the {@link Collation} to specify language-specific rules for string comparison, such as rules for lettercase + * and accent marks.
                    + * NOTE: Only queries using the same {@link Collation} as the {@link Index} actually make use of the + * index. + * + * @param collation can be {@literal null}. + * @return this. + * @since 2.0 */ - public DBObject getIndexKeys() { + public Index collation(@Nullable Collation collation) { + + this.collation = Optional.ofNullable(collation); + return this; + } - DBObject dbo = new BasicDBObject(); + public Document getIndexKeys() { + + Document document = new Document(); for (Entry entry : fieldSpec.entrySet()) { - dbo.put(entry.getKey(), Direction.ASC.equals(entry.getValue()) ? 1 : -1); + document.put(entry.getKey(), Direction.ASC.equals(entry.getValue()) ? 1 : -1); } - return dbo; + return document; } - public DBObject getIndexOptions() { + public Document getIndexOptions() { - DBObject dbo = new BasicDBObject(); + Document document = new Document(); if (StringUtils.hasText(name)) { - dbo.put("name", name); - } - if (unique) { - dbo.put("unique", true); - } - if (dropDuplicates) { - dbo.put("dropDups", true); + document.put("name", name); } if (sparse) { - dbo.put("sparse", true); + document.put("sparse", true); } if (background) { - dbo.put("background", true); - } - if (expire >= 0) { - dbo.put("expireAfterSeconds", expire); + document.put("background", true); } + document.putAll(options.toDocument()); + + filter.ifPresent(val -> document.put("partialFilterExpression", val.getFilterObject())); + collation.ifPresent(val -> document.append("collation", val.toDocument())); - return dbo; + return document; } @Override diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java index 42c0152b99..663577f420 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2014 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,18 +13,28 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.index; -import com.mongodb.DBObject; +import org.bson.Document; /** * @author Jon Brisbin * @author Christoph Strobl + * @author Mark Paluch */ public interface IndexDefinition { - DBObject getIndexKeys(); + /** + * Get the {@link Document} containing properties covered by the index. + * + * @return never {@literal null}. + */ + Document getIndexKeys(); - DBObject getIndexOptions(); + /** + * Get the index properties such as {@literal unique},... + * + * @return never {@literal null}. 
+ */ + Document getIndexOptions(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java index b8f03ba860..65fcf85a36 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.index; /** * @author Jon Brisbin */ public enum IndexDirection { - ASCENDING, DESCENDING; + ASCENDING, DESCENDING } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java index 83bf354369..a5cbf6c896 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,36 +16,57 @@ package org.springframework.data.mongodb.core.index; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.query.Order; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** * Value object for an index field. - * + * * @author Oliver Gierke * @author Christoph Strobl */ -@SuppressWarnings("deprecation") public final class IndexField { enum Type { - GEO, TEXT, DEFAULT; + GEO, TEXT, DEFAULT, + + /** + * @since 2.2 + */ + HASH, + + /** + * @since 3.3 + */ + WILDCARD, + + /** + * @since ?.? 
+ */ + VECTOR } private final String key; - private final Direction direction; + private final @Nullable Direction direction; private final Type type; private final Float weight; - private IndexField(String key, Direction direction, Type type) { + private IndexField(String key, @Nullable Direction direction, @Nullable Type type) { this(key, direction, type, Float.NaN); } - private IndexField(String key, Direction direction, Type type, Float weight) { + private IndexField(String key, @Nullable Direction direction, @Nullable Type type, @Nullable Float weight) { + + Assert.hasText(key, "Key must not be null or empty"); - Assert.hasText(key); - Assert.isTrue(direction != null ^ (Type.GEO.equals(type) || Type.TEXT.equals(type))); + if (Type.GEO.equals(type) || Type.TEXT.equals(type)) { + Assert.isNull(direction, "Geo/Text indexes must not have a direction"); + } else { + if (!(Type.HASH.equals(type) || Type.WILDCARD.equals(type) || Type.VECTOR.equals(type))) { + Assert.notNull(direction, "Default indexes require a direction"); + } + } this.key = key; this.direction = direction; @@ -53,30 +74,45 @@ private IndexField(String key, Direction direction, Type type, Float weight) { this.weight = weight == null ? Float.NaN : weight; } + public static IndexField create(String key, Direction order) { + + Assert.notNull(order, "Direction must not be null"); + + return new IndexField(key, order, Type.DEFAULT); + } + + public static IndexField vector(String key) { + return new IndexField(key, null, Type.VECTOR); + } + /** - * Creates a default {@link IndexField} with the given key and {@link Order}. - * - * @deprecated use {@link #create(String, Direction)}. - * @param key must not be {@literal null} or emtpy. - * @param direction must not be {@literal null}. - * @return + * Creates a {@literal hashed} {@link IndexField} for the given key. + * + * @param key must not be {@literal null} or empty. + * @return new instance of {@link IndexField}. + * @since 2.2 */ - @Deprecated - public static IndexField create(String key, Order order) { - Assert.notNull(order); - return new IndexField(key, order.toDirection(), Type.DEFAULT); + static IndexField hashed(String key) { + return new IndexField(key, null, Type.HASH); } - public static IndexField create(String key, Direction order) { - Assert.notNull(order); - return new IndexField(key, order, Type.DEFAULT); + /** + * Creates a {@literal wildcard} {@link IndexField} for the given key. The {@code key} must follow the + * {@code fieldName.$**} notation. + * + * @param key must not be {@literal null} or empty. + * @return new instance of {@link IndexField}. + * @since 3.3 + */ + static IndexField wildcard(String key) { + return new IndexField(key, null, Type.WILDCARD); } /** * Creates a geo {@link IndexField} for the given key. - * + * * @param key must not be {@literal null} or empty. - * @return + * @return new instance of {@link IndexField}. */ public static IndexField geo(String key) { return new IndexField(key, null, Type.GEO); @@ -84,7 +120,7 @@ public static IndexField geo(String key) { /** * Creates a text {@link IndexField} for the given key. - * + * * @since 1.6 */ public static IndexField text(String key, Float weight) { @@ -100,27 +136,17 @@ public String getKey() { /** * Returns the direction of the {@link IndexField} or {@literal null} in case we have a geo index field. - * - * @deprecated use {@link #getDirection()} instead. - * @return the direction - */ - @Deprecated - public Order getOrder() { - return Direction.ASC.equals(direction) ? 
Order.ASCENDING : Order.DESCENDING; - } - - /** - * Returns the direction of the {@link IndexField} or {@literal null} in case we have a geo index field. - * + * * @return the direction */ + @Nullable public Direction getDirection() { return direction; } /** * Returns whether the {@link IndexField} is a geo index field. - * + * * @return true if type is {@link Type#GEO}. */ public boolean isGeo() { @@ -128,8 +154,8 @@ } /** - * Returns wheter the {@link IndexField} is a text index field. - * + * Returns whether the {@link IndexField} is a text index field. + * * @return true if type is {@link Type#TEXT} * @since 1.6 */ @@ -137,31 +163,41 @@ return Type.TEXT.equals(type); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) + /** + * Returns whether the {@link IndexField} is {@literal hashed}. + * + * @return {@literal true} if {@link IndexField} is hashed. + * @since 2.2 + */ + public boolean isHashed() { + return Type.HASH.equals(type); + } + + /** + * Returns whether the {@link IndexField} contains a {@literal wildcard} expression. + * + * @return {@literal true} if {@link IndexField} contains a wildcard {@literal $**}. + * @since 3.3 */ + public boolean isWildcard() { + return Type.WILDCARD.equals(type); + } + @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof IndexField)) { + if (!(obj instanceof IndexField other)) { return false; } - IndexField that = (IndexField) obj; - - return this.key.equals(that.key) && ObjectUtils.nullSafeEquals(this.direction, that.direction) - && this.type == that.type; + return this.key.equals(other.key) && ObjectUtils.nullSafeEquals(this.direction, other.direction) + && this.type == other.type; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -173,10 +209,6 @@ return result; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("IndexField [ key: %s, direction: %s, type: %s, weight: %s]", key, direction, type, weight); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java new file mode 100644 index 0000000000..097075811b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java @@ -0,0 +1,36 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.bson.Document; + +/** + * Use {@link IndexFilter} to create the partial filter expression used when creating + * Partial Indexes. 
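The IndexFilter contract being introduced here is what Index.partial(...) and GeospatialIndex.partial(...) consume. A sketch combining it with the fluent Index API, using the PartialIndexFilter implementation that ships with Spring Data MongoDB (not part of this hunk); the "users" collection and field names are illustrative:

```java
import org.bson.Document;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;

class PartialIndexSketch {

	// Unique index on "email" that only covers documents where "active" is true.
	void createActiveEmailIndex(MongoOperations operations) {

		Index index = new Index("email", Direction.ASC) //
				.named("active_email_idx") //
				.unique() //
				.partial(PartialIndexFilter.of(new Document("active", true)));

		operations.indexOps("users").ensureIndex(index);
	}
}
```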
+ * + * @author Christoph Strobl + * @since 1.10 + */ +public interface IndexFilter { + + /** + * Get the raw (unmapped) filter expression. + * + * @return never {@literal null}. + */ + Document getFilterObject(); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java index 2073fc1c28..de7153bfb5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2014 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,56 +15,165 @@ */ package org.springframework.data.mongodb.core.index; +import static org.springframework.data.domain.Sort.Direction.*; + +import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.bson.Document; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; /** + * Index information for a MongoDB index. + * * @author Mark Pollack * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ public class IndexInfo { + private static final Double ONE = 1.0; + private static final Double MINUS_ONE = (double) -1; + private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); + private final List indexFields; private final String name; private final boolean unique; - private final boolean dropDuplicates; private final boolean sparse; private final String language; + private final boolean hidden; + private @Nullable Duration expireAfter; + private @Nullable String partialFilterExpression; + private @Nullable Document collation; + private @Nullable Document wildcardProjection; - /** - * @deprecated Will be removed in 1.7. 
Please use {@link #IndexInfo(List, String, boolean, boolean, boolean, String)} - * @param indexFields - * @param name - * @param unique - * @param dropDuplicates - * @param sparse - */ - @Deprecated - public IndexInfo(List indexFields, String name, boolean unique, boolean dropDuplicates, boolean sparse) { - this(indexFields, name, unique, dropDuplicates, sparse, ""); + public IndexInfo(List indexFields, String name, boolean unique, boolean sparse, String language) { + + this.indexFields = Collections.unmodifiableList(indexFields); + this.name = name; + this.unique = unique; + this.sparse = sparse; + this.language = language; + this.hidden = false; } - public IndexInfo(List indexFields, String name, boolean unique, boolean dropDuplicates, boolean sparse, - String language) { + public IndexInfo(List indexFields, String name, boolean unique, boolean sparse, String language, + boolean hidden) { this.indexFields = Collections.unmodifiableList(indexFields); this.name = name; this.unique = unique; - this.dropDuplicates = dropDuplicates; this.sparse = sparse; this.language = language; + this.hidden = hidden; + } + + /** + * Creates new {@link IndexInfo} parsing required properties from the given {@literal sourceDocument}. + * + * @param sourceDocument never {@literal null}. + * @return new instance of {@link IndexInfo}. + * @since 1.10 + */ + public static IndexInfo indexInfoOf(Document sourceDocument) { + + Document keyDbObject = (Document) sourceDocument.get("key"); + int numberOfElements = keyDbObject.keySet().size(); + + List indexFields = new ArrayList(numberOfElements); + + for (String key : keyDbObject.keySet()) { + + Object value = keyDbObject.get(key); + + if (TWO_D_IDENTIFIERS.contains(value)) { + + indexFields.add(IndexField.geo(key)); + + } else if ("text".equals(value)) { + + Document weights = (Document) sourceDocument.get("weights"); + + for (String fieldName : weights.keySet()) { + indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString()))); + } + + } else { + + if (ObjectUtils.nullSafeEquals("hashed", value)) { + indexFields.add(IndexField.hashed(key)); + } else if (key.endsWith("$**")) { + indexFields.add(IndexField.wildcard(key)); + } else { + + Double keyValue = Double.valueOf(value.toString()); + + if (ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, ASC)); + } else if (MINUS_ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, DESC)); + } + } + } + } + + String name = sourceDocument.get("name").toString(); + + boolean unique = sourceDocument.get("unique", false); + boolean sparse = sourceDocument.get("sparse", false); + boolean hidden = sourceDocument.getBoolean("hidden", false); + String language = sourceDocument.containsKey("default_language") ? 
sourceDocument.getString("default_language") + : ""; + + String partialFilter = extractPartialFilterString(sourceDocument); + + IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language, hidden); + info.partialFilterExpression = partialFilter; + info.collation = sourceDocument.get("collation", Document.class); + + if (sourceDocument.containsKey("expireAfterSeconds")) { + + Number expireAfterSeconds = sourceDocument.get("expireAfterSeconds", Number.class); + info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class)); + } + + if (sourceDocument.containsKey("wildcardProjection")) { + info.wildcardProjection = sourceDocument.get("wildcardProjection", Document.class); + } + + return info; + } + + /** + * @param sourceDocument never {@literal null}. + * @return the {@link String} representation of the partial filter {@link Document}. + * @since 2.1.11 + */ + @Nullable + private static String extractPartialFilterString(Document sourceDocument) { + + if (!sourceDocument.containsKey("partialFilterExpression")) { + return null; + } + + return BsonUtils.toJson(sourceDocument.get("partialFilterExpression", Document.class)); } /** * Returns the individual index fields of the index. - * + * * @return */ public List getIndexFields() { @@ -73,20 +182,15 @@ public List getIndexFields() { /** * Returns whether the index is covering exactly the fields given independently of the order. - * + * * @param keys must not be {@literal null}. * @return */ public boolean isIndexForFields(Collection keys) { - Assert.notNull(keys); - List indexKeys = new ArrayList(indexFields.size()); - - for (IndexField field : indexFields) { - indexKeys.add(field.getKey()); - } + Assert.notNull(keys, "Collection of keys must not be null"); - return indexKeys.containsAll(keys); + return this.indexFields.stream().map(IndexField::getKey).collect(Collectors.toSet()).containsAll(keys); } public String getName() { @@ -97,10 +201,6 @@ public boolean isUnique() { return unique; } - public boolean isDropDuplicates() { - return dropDuplicates; - } - public boolean isSparse() { return sparse; } @@ -113,28 +213,91 @@ public String getLanguage() { return language; } + /** + * @return + * @since 1.0 + */ + @Nullable + public String getPartialFilterExpression() { + return partialFilterExpression; + } + + /** + * Get collation information. + * + * @return + * @since 2.0 + */ + public Optional getCollation() { + return Optional.ofNullable(collation); + } + + /** + * Get {@literal wildcardProjection} information. + * + * @return {@link Optional#empty() empty} if not set. + * @since 3.3 + */ + public Optional getWildcardProjection() { + return Optional.ofNullable(wildcardProjection); + } + + /** + * Get the duration after which documents within the index expire. + * + * @return the expiration time if set, {@link Optional#empty()} otherwise. + * @since 2.2 + */ + public Optional getExpireAfter() { + return Optional.ofNullable(expireAfter); + } + + /** + * @return {@literal true} if a hashed index field is present. + * @since 2.2 + */ + public boolean isHashed() { + return getIndexFields().stream().anyMatch(IndexField::isHashed); + } + + /** + * @return {@literal true} if a wildcard index field is present. 
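// A hedged sketch of feeding indexInfoOf(...) a raw index document, shaped like one entry of
// db.collection.getIndexes(); the field names and values here are illustrative only:
org.bson.Document raw = new org.bson.Document("key", new org.bson.Document("lastName", 1).append("firstName", -1))
		.append("name", "lastName_1_firstName_-1")
		.append("unique", true)
		.append("expireAfterSeconds", 3600L);
IndexInfo info = IndexInfo.indexInfoOf(raw);
// info.getIndexFields() -> [lastName: ASC, firstName: DESC]
// info.isUnique() -> true
// info.getExpireAfter() -> Optional[PT1H]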
+ * @since 3.3 + */ + public boolean isWildcard() { + return getIndexFields().stream().anyMatch(IndexField::isWildcard); + } + + public boolean isHidden() { + return hidden; + } + @Override public String toString() { - return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", dropDuplicates=" - + dropDuplicates + ", sparse=" + sparse + ", language=" + language + "]"; + + return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", sparse=" + sparse + + ", language=" + language + ", partialFilterExpression=" + partialFilterExpression + ", collation=" + collation + + ", expireAfterSeconds=" + ObjectUtils.nullSafeToString(expireAfter) + ", hidden=" + hidden + "]"; } @Override public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (dropDuplicates ? 1231 : 1237); - result = prime * result + ObjectUtils.nullSafeHashCode(indexFields); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + (sparse ? 1231 : 1237); - result = prime * result + (unique ? 1231 : 1237); - result = prime * result + ObjectUtils.nullSafeHashCode(language); + int result = 17; + result += 31 * ObjectUtils.nullSafeHashCode(indexFields); + result += 31 * ObjectUtils.nullSafeHashCode(name); + result += 31 * ObjectUtils.nullSafeHashCode(unique); + result += 31 * ObjectUtils.nullSafeHashCode(sparse); + result += 31 * ObjectUtils.nullSafeHashCode(language); + result += 31 * ObjectUtils.nullSafeHashCode(partialFilterExpression); + result += 31 * ObjectUtils.nullSafeHashCode(collation); + result += 31 * ObjectUtils.nullSafeHashCode(expireAfter); + result += 31 * ObjectUtils.nullSafeHashCode(hidden); return result; } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } @@ -145,9 +308,6 @@ public boolean equals(Object obj) { return false; } IndexInfo other = (IndexInfo) obj; - if (dropDuplicates != other.dropDuplicates) { - return false; - } if (indexFields == null) { if (other.indexFields != null) { return false; @@ -171,6 +331,19 @@ public boolean equals(Object obj) { if (!ObjectUtils.nullSafeEquals(language, other.language)) { return false; } + if (!ObjectUtils.nullSafeEquals(partialFilterExpression, other.partialFilterExpression)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(collation, other.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(expireAfter, other.expireAfter)) { + return false; + } + if (hidden != other.hidden) { + return false; + } return true; } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java similarity index 56% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperations.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java index cc97d7bfb5..88e6d7a815 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,33 +13,55 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.core; +package org.springframework.data.mongodb.core.index; import java.util.List; -import org.springframework.data.mongodb.core.index.IndexDefinition; -import org.springframework.data.mongodb.core.index.IndexInfo; - /** * Index operations on a collection. - * + * * @author Mark Pollack * @author Oliver Gierke * @author Christoph Strobl + * @author Jens Schauder */ public interface IndexOperations { /** * Ensure that an index for the provided {@link IndexDefinition} exists for the collection indicated by the entity * class. If not it will be created. - * + * + * @param indexDefinition must not be {@literal null}. + * @return the index name. + * @deprecated since 4.5, in favor of {@link #createIndex(IndexDefinition)}. + */ + @Deprecated(since = "4.5", forRemoval = true) + String ensureIndex(IndexDefinition indexDefinition); + + /** + * Create the index for the provided {@link IndexDefinition} exists for the collection indicated by the entity class. + * If not it will be created. + * * @param indexDefinition must not be {@literal null}. + * @return the index name. + * @since 4.5 */ - void ensureIndex(IndexDefinition indexDefinition); + default String createIndex(IndexDefinition indexDefinition) { + return ensureIndex(indexDefinition); + } + + /** + * Alters the index with given {@literal name}. + * + * @param name name of index to change. + * @param options index options. + * @since 4.1 + */ + void alterIndex(String name, IndexOptions options); /** * Drops an index from this collection. - * + * * @param name name of index to drop */ void dropIndex(String name); @@ -49,18 +71,9 @@ public interface IndexOperations { */ void dropAllIndexes(); - /** - * Clears all indices that have not yet been applied to this collection. - * - * @deprecated since 1.7. The MongoDB Java driver version 3.0 does no longer support reseting the index cache. - * @throws {@link UnsupportedOperationException} when used with MongoDB Java driver version 3.0. - */ - @Deprecated - void resetIndexCache(); - /** * Returns the index information on the collection. - * + * * @return index information on the collection */ List getIndexInfo(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java new file mode 100644 index 0000000000..613a3dc4f4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java @@ -0,0 +1,69 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.index; + +import java.util.List; + +import org.springframework.util.Assert; + +/** + * Adapter for creating synchronous {@link IndexOperations}. + * + * @author Christoph Strobl + * @since 2.0 + */ +public interface IndexOperationsAdapter extends IndexOperations { + + /** + * Obtain a blocking variant of {@link IndexOperations} wrapping {@link ReactiveIndexOperations}. + * + * @param reactiveIndexOperations must not be {@literal null}. + * @return never {@literal null} + */ + static IndexOperationsAdapter blocking(ReactiveIndexOperations reactiveIndexOperations) { + + Assert.notNull(reactiveIndexOperations, "ReactiveIndexOperations must not be null"); + + return new IndexOperationsAdapter() { + + @Override + public String ensureIndex(IndexDefinition indexDefinition) { + return reactiveIndexOperations.ensureIndex(indexDefinition).block(); + } + + @Override + public void dropIndex(String name) { + reactiveIndexOperations.dropIndex(name).block(); + } + + @Override + public void alterIndex(String name, IndexOptions options) { + reactiveIndexOperations.alterIndex(name, options).block(); + } + + @Override + public void dropAllIndexes() { + reactiveIndexOperations.dropAllIndexes().block(); + } + + @Override + public List<IndexInfo> getIndexInfo() { + return reactiveIndexOperations.getIndexInfo().collectList().block(); + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java new file mode 100644 index 0000000000..ca3d951c94 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.springframework.lang.Nullable; + +/** + * Provider interface to obtain {@link IndexOperations} by MongoDB collection name or entity type. + * + * @author Mark Paluch + * @author Jens Schauder + * @since 2.0 + */ +@FunctionalInterface +public interface IndexOperationsProvider { + + /** + * Returns the operations that can be performed on indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}.
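// A sketch of bridging the reactive variant into blocking code via the adapter above;
// reactiveTemplate is an assumed ReactiveMongoTemplate and "person" an illustrative collection:
IndexOperationsAdapter blockingOps = IndexOperationsAdapter.blocking(reactiveTemplate.indexOps("person"));
String name = blockingOps.createIndex(new Index("lastName", Sort.Direction.ASC).unique());
blockingOps.alterIndex(name, IndexOptions.hidden());
List<IndexInfo> indexes = blockingOps.getIndexInfo();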
+ * @return index operations on the named collection + */ + default IndexOperations indexOps(String collectionName) { + return indexOps(collectionName, null); + } + + /** + * Returns the operations that can be performed on indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @param type the type used for field mapping. Can be {@literal null}. + * @return index operations on the named collection + * @since 3.2 + */ + IndexOperations indexOps(String collectionName, @Nullable Class type); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOptions.java new file mode 100644 index 0000000000..887542cb0c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOptions.java @@ -0,0 +1,160 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.time.Duration; + +import org.bson.Document; +import org.springframework.lang.Nullable; + +/** + * Changeable properties of an index. Can be used for index creation and modification. + * + * @author Christoph Strobl + * @since 4.1 + */ +public class IndexOptions { + + @Nullable + private Duration expire; + + @Nullable + private Boolean hidden; + + @Nullable + private Unique unique; + + public enum Unique { + + NO, + + /** + * When unique is true the index rejects duplicate entries. + */ + YES, + + /** + * An existing index is not checked for pre-existing, duplicate index entries but inserting new duplicate entries + * fails. + */ + PREPARE + } + + /** + * @return new empty instance of {@link IndexOptions}. + */ + public static IndexOptions none() { + return new IndexOptions(); + } + + /** + * @return new instance of {@link IndexOptions} having the {@link Unique#YES} flag set. + */ + public static IndexOptions unique() { + + IndexOptions options = new IndexOptions(); + options.unique = Unique.YES; + return options; + } + + /** + * @return new instance of {@link IndexOptions} having the hidden flag set. + */ + public static IndexOptions hidden() { + + IndexOptions options = new IndexOptions(); + options.hidden = true; + return options; + } + + /** + * @return new instance of {@link IndexOptions} with given expiration. + */ + public static IndexOptions expireAfter(Duration duration) { + + IndexOptions options = new IndexOptions(); + options.expire = duration; + return options; + } + + /** + * @return the expiration time. A {@link Duration#isNegative() negative value} represents no expiration, {@literal null} if not set. + */ + @Nullable + public Duration getExpire() { + return expire; + } + + /** + * @param expire must not be {@literal null}. 
+ */ + public void setExpire(Duration expire) { + this.expire = expire; + } + + /** + * @return {@literal true} if hidden, {@literal null} if not set. + */ + @Nullable + public Boolean isHidden() { + return hidden; + } + + /** + * @param hidden + */ + public void setHidden(boolean hidden) { + this.hidden = hidden; + } + + /** + * @return the unique property value, {@literal null} if not set. + */ + @Nullable + public Unique getUnique() { + return unique; + } + + /** + * @param unique must not be {@literal null}. + */ + public void setUnique(Unique unique) { + this.unique = unique; + } + + /** + * @return the store native representation + */ + public Document toDocument() { + + Document document = new Document(); + if(unique != null) { + switch (unique) { + case NO -> document.put("unique", false); + case YES -> document.put("unique", true); + case PREPARE -> document.put("prepareUnique", true); + } + } + if(hidden != null) { + document.put("hidden", hidden); + } + + if (expire != null && !expire.isNegative()) { + document.put("expireAfterSeconds", expire.getSeconds()); + } + return document; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java index ea17d03c44..362247725f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,18 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.index; +import org.springframework.lang.Nullable; + /** * @author Jon Brisbin */ public abstract class IndexPredicate { - private String name; + private @Nullable String name; private IndexDirection direction = IndexDirection.ASCENDING; private boolean unique = false; + @Nullable public String getName() { return name; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java index 132616ef78..8428941474 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,67 @@ */ package org.springframework.data.mongodb.core.index; -import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; +import org.springframework.util.Assert; /** * {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class. - * + *

+ * The {@link IndexResolver} considers index annotations like {@link Indexed}, {@link GeoSpatialIndexed},
+ * {@link HashIndexed}, {@link TextIndexed} and {@link WildcardIndexed} on properties as well as {@link CompoundIndex}
+ * and {@link WildcardIndexed} on types. <br />
+ * Unless specified otherwise the index name will be created out of the keys/path involved in the index. <br />
+ * {@link TextIndexed} properties are collected into a single index that covers the detected fields. <br />
+ * {@link java.util.Map} like structures, unless annotated with {@link WildcardIndexed}, are skipped because the + * {@link java.util.Map.Entry#getKey() map key}, which cannot be resolved from static metadata, needs to be part of the + * index. + * * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch * @since 1.5 */ -interface IndexResolver { + + /** + * Creates a new {@link IndexResolver} given {@link MongoMappingContext}. + * + * @param mappingContext must not be {@literal null}. + * @return the new {@link IndexResolver}. + * @since 2.2 + */ + static IndexResolver create( + MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) { + + Assert.notNull(mappingContext, "MongoMappingContext must not be null"); + + return new MongoPersistentEntityIndexResolver(mappingContext); + } + + /** + * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s + * are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}. + * + * @param typeInformation must not be {@literal null}. + * @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type. + */ + Iterable<? extends IndexDefinition> resolveIndexFor(TypeInformation<?> typeInformation); /** - * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s are created - * for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}. - * - * @param typeInformation + * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s + * are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}. + * + * @param entityType must not be {@literal null}. * @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type. + * @since 2.2 */ - Iterable<? extends IndexDefinition> resolveIndexFor(TypeInformation<?> typeInformation); + default Iterable<? extends IndexDefinition> resolveIndexFor(Class<?> entityType) { + return resolveIndexFor(TypeInformation.of(entityType)); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java index a322622426..0866556c10 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,9 +20,13 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.annotation.Collation; +import org.springframework.data.mongodb.core.mapping.Document; + /** * Mark a field to be indexed using MongoDB's indexing feature.
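// A hedged sketch tying the IndexResolver API above to index creation; mappingContext,
// template (a MongoTemplate, which implements IndexOperationsProvider) and the Person
// entity are assumed to exist:
IndexResolver resolver = IndexResolver.create(mappingContext);
for (IndexDefinition definition : resolver.resolveIndexFor(Person.class)) {
	template.indexOps(Person.class).createIndex(definition);
}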
- * + * * @author Jon Brisbin * @author Oliver Gierke * @author Philipp Schneider @@ -30,43 +34,49 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Jordi Llach + * @author Mark Paluch + * @author Stefan Tirea */ -@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD}) +@Collation +@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) public @interface Indexed { /** * If set to true reject all documents that contain a duplicate value for the indexed field. - * - * @see http://docs.mongodb.org/manual/core/index-unique/ - * @return + * + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-unique/ */ boolean unique() default false; - IndexDirection direction() default IndexDirection.ASCENDING; - /** - * If set to true index will skip over any document that is missing the indexed field. - * - * @see http://docs.mongodb.org/manual/core/index-sparse/ - * @return + * The index sort direction. + * + * @return {@link IndexDirection#ASCENDING} by default. */ - boolean sparse() default false; + IndexDirection direction() default IndexDirection.ASCENDING; /** - * @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping - * @return + * If set to true index will skip over any document that is missing the indexed field.
                    + * Must not be used with {@link #partialFilter()}. + * + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-sparse/ */ - boolean dropDups() default false; + boolean sparse() default false; /** - * Index name.
+	 * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
+	 * expression}. <br />
+	 * <br />
 	 * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
 	 * provided name will be prefixed with the path leading to the entity. <br />
 	 * <br />
 	 * The structure below
-	 *
+	 *

	 * <pre class="code">
                     	 * @Document
                    @@ -74,64 +84,112 @@
                     	 *   Hybrid hybrid;
                     	 *   Nested nested;
                     	 * }
                    -	 * 
                    +	 *
                     	 * @Document
                     	 * class Hybrid {
                     	 *   @Indexed(name="index") String h1;
                    +	 *   @Indexed(name="#{@myBean.indexName}") String h2;
                     	 * }
                    -	 * 
                    +	 *
                     	 * class Nested {
                     	 *   @Indexed(name="index") String n1;
                     	 * }
	 * </pre>
                     	 * 
-	 *
+	 *
 	 * resolves in the following index structures
-	 *
+	 *
                     	 * 
	 * <pre class="code">
                     	 * db.root.createIndex( { nested.n1: 1 } , { name: "nested.index" } )
                     	 * db.hybrid.createIndex( { h1: 1} , { name: "index" } )
                    +	 * db.hybrid.createIndex( { h2: 1} , { name: the value myBean.getIndexName() returned } )
	 * </pre>
                     	 * 
                    - * - * @return + * + * @return empty String by default. */ String name() default ""; /** * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults * to {@literal false}. - * - * @return + * + * @return {@literal false} by default. * @since 1.5 */ boolean useGeneratedName() default false; - /** - * Collection name for index to be created on. - * - * @return - * @deprecated The collection name is derived from the domain type. Fixing the collection via this attribute might - * result in broken definitions. Will be removed in 1.7. - */ - @Deprecated - String collection() default ""; - /** * If {@literal true} the index will be created in the background. - * - * @see http://docs.mongodb.org/manual/core/indexes/#background-construction - * @return + * + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/indexes/#background-construction */ boolean background() default false; /** * Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry. - * - * @see http://docs.mongodb.org/manual/tutorial/expire-data/ - * @return + * + * @return {@literal -1} by default. + * @see https://docs.mongodb.org/manual/tutorial/expire-data/ + * @deprecated since 4.4 - Please use {@link #expireAfter()} instead. */ + @Deprecated(since="4.4", forRemoval = true) int expireAfterSeconds() default -1; + + /** + * Alternative for {@link #expireAfterSeconds()} to configure the timeout after which the document should expire. + * Defaults to an empty {@link String} for no expiry. Accepts numeric values followed by their unit of measure: + *
                      + *
+	 * <ul>
+	 * <li><b>d</b>: Days</li>
+	 * <li><b>h</b>: Hours</li>
+	 * <li><b>m</b>: Minutes</li>
+	 * <li><b>s</b>: Seconds</li>
+	 * <li>Alternatively: A Spring {@literal template expression}. The expression can result in a
+	 * {@link java.time.Duration} or a valid expiration {@link String} according to the already mentioned
+	 * conventions.</li>
+	 * </ul>
+	 * Supports ISO-8601 style.
+	 *
+	 * <pre class="code">
                    +	 * @Indexed(expireAfter = "10s") String expireAfterTenSeconds;
                    +	 * @Indexed(expireAfter = "1d") String expireAfterOneDay;
                    +	 * @Indexed(expireAfter = "P2D") String expireAfterTwoDays;
                    +	 * @Indexed(expireAfter = "#{@mySpringBean.timeout}") String expireAfterTimeoutObtainedFromSpringBean;
+	 * </pre>
                    + * + * @return empty by default. + * @since 2.2 + */ + String expireAfter() default ""; + + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
+ * Must not be used with {@link #sparse() sparse = true}. + * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 3.1 + */ + String partialFilter() default ""; + + /** + * The actual collation definition in JSON format or a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression} resolving to either a JSON + * String or a {@link org.bson.Document}. The keys of the JSON document are configuration options for the collation + * (language-specific rules for string comparison) applied to the index based on the field value. + * <br />

                    + * NOTE: Overrides {@link Document#collation()}. + * + * @return empty by default. + * @see https://www.mongodb.com/docs/manual/reference/collation/ + * @since 4.0 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java index 26ce216809..940d676fdc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,9 @@ import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationListener; import org.springframework.data.mapping.context.MappingContextEvent; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.util.Assert; @@ -31,39 +30,46 @@ * creator when MongoTemplate is used 'stand-alone', that is not declared inside a Spring {@link ApplicationContext}. * Declare {@link MongoTemplate} inside an {@link ApplicationContext} to enable the publishing of all persistence events * such as {@link AfterLoadEvent}, {@link AfterSaveEvent}, etc. - * + * * @author Jon Brisbin * @author Oliver Gierke + * @author Mark Paluch */ public class MongoMappingEventPublisher implements ApplicationEventPublisher { - private final MongoPersistentEntityIndexCreator indexCreator; + private final ApplicationListener> indexCreator; + + /** + * Creates a new {@link MongoMappingEventPublisher} for the given {@link ApplicationListener}. + * + * @param indexCreator must not be {@literal null}. + * @since 2.1 + */ + public MongoMappingEventPublisher(ApplicationListener> indexCreator) { + + Assert.notNull(indexCreator, "ApplicationListener must not be null"); + + this.indexCreator = indexCreator; + } /** * Creates a new {@link MongoMappingEventPublisher} for the given {@link MongoPersistentEntityIndexCreator}. - * + * * @param indexCreator must not be {@literal null}. 
*/ public MongoMappingEventPublisher(MongoPersistentEntityIndexCreator indexCreator) { - Assert.notNull(indexCreator); + Assert.notNull(indexCreator, "MongoPersistentEntityIndexCreator must not be null"); + this.indexCreator = indexCreator; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationEventPublisher#publishEvent(org.springframework.context.ApplicationEvent) - */ @SuppressWarnings("unchecked") public void publishEvent(ApplicationEvent event) { - if (event instanceof MappingContextEvent) { - indexCreator.onApplicationEvent((MappingContextEvent, MongoPersistentProperty>) event); + if (event instanceof MappingContextEvent mappingContextEvent) { + indexCreator.onApplicationEvent(mappingContextEvent); } } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object) - */ public void publishEvent(Object event) {} } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java index 41018a8c8c..e20b0704cc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,75 +15,79 @@ */ package org.springframework.data.mongodb.core.index; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.context.ApplicationListener; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.context.MappingContextEvent; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.util.MongoDbErrorCodes; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; -import com.mongodb.DBObject; import com.mongodb.MongoException; /** * Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext} * for indexing metadata and ensures the indexes to be available. 
- * + * * @author Jon Brisbin * @author Oliver Gierke * @author Philipp Schneider * @author Johno Crawford * @author Laurent Canet * @author Christoph Strobl + * @author Mark Paluch */ public class MongoPersistentEntityIndexCreator implements ApplicationListener> { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class); + private static final Log LOGGER = LogFactory.getLog(MongoPersistentEntityIndexCreator.class); private final Map, Boolean> classesSeen = new ConcurrentHashMap, Boolean>(); - private final MongoDbFactory mongoDbFactory; + private final IndexOperationsProvider indexOperationsProvider; private final MongoMappingContext mappingContext; private final IndexResolver indexResolver; /** * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and - * {@link MongoDbFactory}. - * + * {@link MongoDatabaseFactory}. + * * @param mappingContext must not be {@literal null}. - * @param mongoDbFactory must not be {@literal null}. + * @param indexOperationsProvider must not be {@literal null}. */ - public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory) { - this(mappingContext, mongoDbFactory, new MongoPersistentEntityIndexResolver(mappingContext)); + public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, + IndexOperationsProvider indexOperationsProvider) { + this(mappingContext, indexOperationsProvider, IndexResolver.create(mappingContext)); } /** * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and - * {@link MongoDbFactory}. - * + * {@link MongoDatabaseFactory}. + * * @param mappingContext must not be {@literal null}. - * @param mongoDbFactory must not be {@literal null}. + * @param indexOperationsProvider must not be {@literal null}. * @param indexResolver must not be {@literal null}. 
*/ - public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory, - IndexResolver indexResolver) { + public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, + IndexOperationsProvider indexOperationsProvider, IndexResolver indexResolver) { - Assert.notNull(mongoDbFactory); - Assert.notNull(mappingContext); - Assert.notNull(indexResolver); + Assert.notNull(mappingContext, "MongoMappingContext must not be null"); + Assert.notNull(indexOperationsProvider, "IndexOperationsProvider must not be null"); + Assert.notNull(indexResolver, "IndexResolver must not be null"); - this.mongoDbFactory = mongoDbFactory; + this.indexOperationsProvider = indexOperationsProvider; this.mappingContext = mappingContext; this.indexResolver = indexResolver; @@ -92,10 +96,6 @@ public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, Mon } } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent) - */ public void onApplicationEvent(MappingContextEvent event) { if (!event.wasEmittedBy(mappingContext)) { @@ -105,8 +105,9 @@ public void onApplicationEvent(MappingContextEvent event) { PersistentEntity entity = event.getPersistentEntity(); // Double check type as Spring infrastructure does not consider nested generics - if (entity instanceof MongoPersistentEntity) { - checkForIndexes((MongoPersistentEntity) entity); + if (entity instanceof MongoPersistentEntity mongoPersistentEntity) { + + checkForIndexes(mongoPersistentEntity); } } @@ -119,7 +120,7 @@ private void checkForIndexes(final MongoPersistentEntity entity) { this.classesSeen.put(type, Boolean.TRUE); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Analyzing class " + type + " for index information."); + LOGGER.debug("Analyzing class " + type + " for index information"); } checkForAndCreateIndexes(entity); @@ -128,8 +129,16 @@ private void checkForIndexes(final MongoPersistentEntity entity) { private void checkForAndCreateIndexes(MongoPersistentEntity entity) { - if (entity.findAnnotation(Document.class) != null) { - for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) { + if (entity.isAnnotationPresent(Document.class)) { + + String collection = entity.getCollection(); + + for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) { + + IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder definitionHolder + ? 
definitionHolder + : new IndexDefinitionHolder("", indexDefinition, collection); + createIndex(indexToCreate); } } @@ -139,35 +148,34 @@ void createIndex(IndexDefinitionHolder indexDefinition) { try { - mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(), - indexDefinition.getIndexOptions()); + IndexOperations indexOperations = indexOperationsProvider.indexOps(indexDefinition.getCollection()); + indexOperations.ensureIndex(indexDefinition); - } catch (MongoException ex) { + } catch (DataIntegrityViolationException ex) { - if (MongoDbErrorCodes.isDataIntegrityViolationCode(ex.getCode())) { + if (ex.getCause() instanceof MongoException mongoException + && MongoDbErrorCodes.isDataIntegrityViolationCode(mongoException.getCode())) { - DBObject existingIndex = fetchIndexInformation(indexDefinition); - String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'."; + IndexInfo existingIndex = fetchIndexInformation(indexDefinition); + String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'"; if (existingIndex != null) { - message += " Index already defined as '%s'."; + message += " Index already defined as '%s'"; } throw new DataIntegrityViolationException( String.format(message, indexDefinition.getPath(), indexDefinition.getCollection(), indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions(), existingIndex), - ex); + ex.getCause()); } - RuntimeException exceptionToThrow = mongoDbFactory.getExceptionTranslator().translateExceptionIfPossible(ex); - - throw exceptionToThrow != null ? exceptionToThrow : ex; + throw ex; } } /** * Returns whether the current index creator was registered for the given {@link MappingContext}. 
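// A sketch of the standalone wiring this creator supports, mirroring what MongoTemplate does
// outside an ApplicationContext; mappingContext and template (an IndexOperationsProvider,
// e.g. MongoTemplate) are assumed to exist:
MongoPersistentEntityIndexCreator creator =
		new MongoPersistentEntityIndexCreator(mappingContext, template);
mappingContext.setApplicationEventPublisher(new MongoMappingEventPublisher(creator));
// entities added to the mapping context from now on get their indexes created eagerly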
- * + * * @param context * @return */ @@ -175,7 +183,8 @@ public boolean isIndexCreatorFor(MappingContext context) { return this.mappingContext.equals(context); } - private DBObject fetchIndexInformation(IndexDefinitionHolder indexDefinition) { + @Nullable + private IndexInfo fetchIndexInformation(@Nullable IndexDefinitionHolder indexDefinition) { if (indexDefinition == null) { return null; @@ -183,17 +192,21 @@ private DBObject fetchIndexInformation(IndexDefinitionHolder indexDefinition) { try { + IndexOperations indexOperations = indexOperationsProvider.indexOps(indexDefinition.getCollection()); Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name"); - for (DBObject index : mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).getIndexInfo()) { - if (ObjectUtils.nullSafeEquals(indexNameToLookUp, index.get("name"))) { - return index; - } - } + List existingIndexes = indexOperations.getIndexInfo(); + + return existingIndexes.stream().// + filter(indexInfo -> ObjectUtils.nullSafeEquals(indexNameToLookUp, indexInfo.getName())).// + findFirst().// + orElse(null); } catch (Exception e) { - LOGGER.debug( - String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Failed to load index information for collection '%s'", indexDefinition.getCollection()), e); + } } return null; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java index 074872ba5b..a5988b8c1d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,179 +15,266 @@ */ package org.springframework.data.mongodb.core.index; +import java.lang.annotation.Annotation; +import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.LinkedHashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.annotation.MergedAnnotation; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.domain.Sort; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.AssociationHandler; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.PropertyHandler; -import org.springframework.data.mapping.model.MappingException; -import org.springframework.data.mongodb.core.index.Index.Duplicates; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy; import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder; import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexedFieldSpec; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DotPath; +import org.springframework.data.mongodb.util.DurationUtil; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.data.spel.EvaluationContextProvider; import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; -import com.mongodb.util.JSON; - /** * {@link IndexResolver} implementation inspecting {@link 
MongoPersistentEntity} for {@link MongoPersistentEntity} to be * indexed.
                    * All {@link MongoPersistentProperty} of the {@link MongoPersistentEntity} are inspected for potential indexes by * scanning related annotations. - * + * * @author Christoph Strobl * @author Thomas Darimont + * @author Martin Macko + * @author Mark Paluch + * @author Dave Perryman + * @author Stefan Tirea * @since 1.5 */ public class MongoPersistentEntityIndexResolver implements IndexResolver { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class); + private static final Log LOGGER = LogFactory.getLog(MongoPersistentEntityIndexResolver.class); + private static final SpelExpressionParser PARSER = new SpelExpressionParser(); - private final MongoMappingContext mappingContext; + private final MappingContext, MongoPersistentProperty> mappingContext; + private EvaluationContextProvider evaluationContextProvider = EvaluationContextProvider.DEFAULT; /** * Create new {@link MongoPersistentEntityIndexResolver}. - * + * * @param mappingContext must not be {@literal null}. */ - public MongoPersistentEntityIndexResolver(MongoMappingContext mappingContext) { + public MongoPersistentEntityIndexResolver( + MappingContext, MongoPersistentProperty> mappingContext) { Assert.notNull(mappingContext, "Mapping context must not be null in order to resolve index definitions"); this.mappingContext = mappingContext; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(org.springframework.data.util.TypeInformation) - */ @Override public Iterable resolveIndexFor(TypeInformation typeInformation) { - return resolveIndexForEntity(mappingContext.getPersistentEntity(typeInformation)); + return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(typeInformation)); } /** - * Resolve the {@link IndexDefinition}s for given {@literal root} entity by traversing {@link MongoPersistentProperty} - * scanning for index annotations {@link Indexed}, {@link CompoundIndex} and {@link GeospatialIndex}. The given - * {@literal root} has therefore to be annotated with {@link Document}. - * + * Resolve the {@link IndexDefinition}s for a given {@literal root} entity by traversing + * {@link MongoPersistentProperty} scanning for index annotations {@link Indexed}, {@link CompoundIndex} and + * {@link GeospatialIndex}. The given {@literal root} has therefore to be annotated with {@link Document}. + * * @param root must not be null. * @return List of {@link IndexDefinitionHolder}. Will never be {@code null}. * @throws IllegalArgumentException in case of missing {@link Document} annotation marking root entities. 
*/ - public List resolveIndexForEntity(final MongoPersistentEntity root) { + public List resolveIndexForEntity(MongoPersistentEntity root) { - Assert.notNull(root, "Index cannot be resolved for given 'null' entity."); + Assert.notNull(root, "MongoPersistentEntity must not be null"); Document document = root.findAnnotation(Document.class); - Assert.notNull(document, "Given entity is not collection root."); + Assert.notNull(document, () -> String + .format("Entity %s is not a collection root; Make sure to annotate it with @Document", root.getName())); - final List indexInformation = new ArrayList(); - indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root)); - indexInformation.addAll(potentiallyCreateTextIndexDefinition(root)); + verifyWildcardIndexedProjection(root); - final CycleGuard guard = new CycleGuard(); + List indexInformation = new ArrayList<>(); + String collection = root.getCollection(); + indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions("", collection, root)); + indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection)); - root.doWithProperties(new PropertyHandler() { + root.doWithProperties((PropertyHandler) property -> this + .potentiallyAddIndexForProperty(root, property, indexInformation, new CycleGuard())); - @Override - public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) { + indexInformation.addAll(resolveIndexesForDbrefs("", collection, root)); - try { - if (persistentProperty.isEntity()) { - indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(), - persistentProperty.getFieldName(), root.getCollection(), guard)); - } + return indexInformation; + } - IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty( - persistentProperty.getFieldName(), root.getCollection(), persistentProperty); - if (indexDefinitionHolder != null) { - indexInformation.add(indexDefinitionHolder); - } - } catch (CyclicPropertyReferenceException e) { - LOGGER.info(e.getMessage()); + private void verifyWildcardIndexedProjection(MongoPersistentEntity entity) { + + entity.doWithAll(it -> { + + if (it.isAnnotationPresent(WildcardIndexed.class)) { + + WildcardIndexed indexed = it.getRequiredAnnotation(WildcardIndexed.class); + + if (!ObjectUtils.isEmpty(indexed.wildcardProjection())) { + + throw new MappingException(String.format( + "WildcardIndexed.wildcardProjection cannot be used on nested paths; Offending property: %s.%s", + entity.getName(), it.getName())); } } }); + } - indexInformation.addAll(resolveIndexesForDbrefs("", root.getCollection(), root)); + private void potentiallyAddIndexForProperty(MongoPersistentEntity root, MongoPersistentProperty persistentProperty, + List indexes, CycleGuard guard) { - return indexInformation; + try { + if (isMapWithoutWildcardIndex(persistentProperty)) { + return; + } + + if (persistentProperty.isEntity()) { + indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), + persistentProperty.isUnwrapped() ? 
"" : persistentProperty.getFieldName(), Path.of(persistentProperty), + root.getCollection(), guard)); + } + + List indexDefinitions = createIndexDefinitionHolderForProperty( + persistentProperty.getFieldName(), root.getCollection(), persistentProperty); + if (!indexDefinitions.isEmpty()) { + indexes.addAll(indexDefinitions); + } + } catch (CyclicPropertyReferenceException e) { + if (LOGGER.isInfoEnabled()) { + LOGGER.info(e.getMessage()); + } + } } /** * Recursively resolve and inspect properties of given {@literal type} for indexes to be created. - * + * * @param type - * @param path The {@literal "dot} path. + * @param dotPath The {@literal "dot} path. + * @param path {@link PersistentProperty} path for cycle detection. * @param collection + * @param guard * @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property * types. Will never be {@code null}. */ - private List resolveIndexForClass(final TypeInformation type, final String path, - final String collection, final CycleGuard guard) { + private List resolveIndexForClass(TypeInformation type, String dotPath, Path path, + String collection, CycleGuard guard) { - MongoPersistentEntity entity = mappingContext.getPersistentEntity(type); + return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type), dotPath, path, collection, guard); + } - final List indexInformation = new ArrayList(); - indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(path, collection, entity)); + private List resolveIndexForEntity(MongoPersistentEntity entity, String dotPath, Path path, + String collection, CycleGuard guard) { - entity.doWithProperties(new PropertyHandler() { + List indexInformation = new ArrayList<>(); + indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions(dotPath, collection, entity)); - @Override - public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) { + entity.doWithProperties((PropertyHandler) property -> this + .guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard)); - String propertyDotPath = (StringUtils.hasText(path) ? path + "." 
: "") + persistentProperty.getFieldName(); - guard.protect(persistentProperty, path); + indexInformation.addAll(resolveIndexesForDbrefs(dotPath, collection, entity)); - if (persistentProperty.isEntity()) { - try { - indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(), - propertyDotPath, collection, guard)); - } catch (CyclicPropertyReferenceException e) { - LOGGER.info(e.getMessage()); - } - } + return indexInformation; + } - IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, - collection, persistentProperty); - if (indexDefinitionHolder != null) { - indexInformation.add(indexDefinitionHolder); - } + private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath, + Path path, String collection, List indexes, CycleGuard guard) { + + DotPath propertyDotPath = DotPath.from(dotPath); + + if (!persistentProperty.isUnwrapped()) { + propertyDotPath = propertyDotPath.append(persistentProperty.getFieldName()); + } + + Path propertyPath = path.append(persistentProperty); + guard.protect(persistentProperty, propertyPath); + + if (isMapWithoutWildcardIndex(persistentProperty)) { + return; + } + + if (persistentProperty.isEntity()) { + try { + indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), + propertyDotPath.toString(), propertyPath, collection, guard)); + } catch (CyclicPropertyReferenceException e) { + LOGGER.info(e.getMessage()); } - }); + } - indexInformation.addAll(resolveIndexesForDbrefs(path, collection, entity)); + List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, persistentProperty); - return indexInformation; + if (!indexDefinitions.isEmpty()) { + indexes.addAll(indexDefinitions); + } } - private IndexDefinitionHolder createIndexDefinitionHolderForProperty(String dotPath, String collection, + private List createIndexDefinitionHolderForProperty(String dotPath, String collection, MongoPersistentProperty persistentProperty) { + List indices = new ArrayList<>(2); + + if (persistentProperty.isUnwrapped() && (persistentProperty.isAnnotationPresent(Indexed.class) + || persistentProperty.isAnnotationPresent(HashIndexed.class) + || persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class))) { + throw new InvalidDataAccessApiUsageException( + String.format("Index annotation not allowed on unwrapped object for path '%s'", dotPath)); + } + if (persistentProperty.isAnnotationPresent(Indexed.class)) { - return createIndexDefinition(dotPath, collection, persistentProperty); + indices.add(createIndexDefinition(dotPath, collection, persistentProperty)); } else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) { - return createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty); + indices.add(createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty)); + } + + if (persistentProperty.isAnnotationPresent(HashIndexed.class)) { + indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty)); + } + if (persistentProperty.isAnnotationPresent(WildcardIndexed.class)) { + indices.add(createWildcardIndexDefinition(dotPath, collection, + persistentProperty.getRequiredAnnotation(WildcardIndexed.class), + mappingContext.getPersistentEntity(persistentProperty))); } - return null; + return indices; } private List potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection, @@ -200,77 +287,113 
@@ private List potentiallyCreateCompoundIndexDefinitions(St return createCompoundIndexDefinitions(dotPath, collection, entity); } + private List potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection, + MongoPersistentEntity entity) { + + if (!entity.isAnnotationPresent(WildcardIndexed.class)) { + return Collections.emptyList(); + } + + return Collections.singletonList(new IndexDefinitionHolder(dotPath, + createWildcardIndexDefinition(dotPath, collection, entity.getRequiredAnnotation(WildcardIndexed.class), entity), + collection)); + } + private Collection potentiallyCreateTextIndexDefinition( - MongoPersistentEntity root) { + MongoPersistentEntity root, String collection) { + + String name = root.getType().getSimpleName() + "_TextIndex"; + if (name.getBytes().length > 127) { + String[] args = ClassUtils.getShortNameAsProperty(root.getType()).split("\\."); + name = ""; + Iterator it = Arrays.asList(args).iterator(); + while (it.hasNext()) { + + String element = it.next(); + if (!it.hasNext()) { + name += element + "_TextIndex"; + } else { + name += (element.charAt(0) + "."); + } + } - TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder() - .named(root.getType().getSimpleName() + "_TextIndex"); + } + TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder().named(name); if (StringUtils.hasText(root.getLanguage())) { indexDefinitionBuilder.withDefaultLanguage(root.getLanguage()); } try { - appendTextIndexInformation("", indexDefinitionBuilder, root, new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), - new CycleGuard()); + appendTextIndexInformation(DotPath.empty(), Path.empty(), indexDefinitionBuilder, root, + new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), new CycleGuard()); } catch (CyclicPropertyReferenceException e) { LOGGER.info(e.getMessage()); } + if (root.hasCollation()) { + indexDefinitionBuilder.withSimpleCollation(); + } + TextIndexDefinition indexDefinition = indexDefinitionBuilder.build(); if (!indexDefinition.hasFieldSpec()) { return Collections.emptyList(); } - IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, root.getCollection()); + IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, collection); return Collections.singletonList(holder); } - private void appendTextIndexInformation(final String dotPath, final TextIndexDefinitionBuilder indexDefinitionBuilder, - final MongoPersistentEntity entity, final TextIndexIncludeOptions includeOptions, final CycleGuard guard) { + private void appendTextIndexInformation(DotPath dotPath, Path path, TextIndexDefinitionBuilder indexDefinitionBuilder, + MongoPersistentEntity entity, TextIndexIncludeOptions includeOptions, CycleGuard guard) { entity.doWithProperties(new PropertyHandler() { @Override public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) { - guard.protect(persistentProperty, dotPath); + guard.protect(persistentProperty, path); - if (persistentProperty.isExplicitLanguageProperty() && !StringUtils.hasText(dotPath)) { + if (persistentProperty.isExplicitLanguageProperty() && dotPath.isEmpty()) { indexDefinitionBuilder.withLanguageOverride(persistentProperty.getFieldName()); } + if (persistentProperty.isMap()) { + return; + } + TextIndexed indexed = persistentProperty.findAnnotation(TextIndexed.class); if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) { - String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "."
: "") - + persistentProperty.getFieldName(); + DotPath propertyDotPath = dotPath.append(persistentProperty.getFieldName()); + Path propertyPath = path.append(persistentProperty); + + TextIndexedFieldSpec parentFieldSpec = includeOptions.getParentFieldSpec(); Float weight = indexed != null ? indexed.weight() - : (includeOptions.getParentFieldSpec() != null ? includeOptions.getParentFieldSpec().getWeight() : 1.0F); + : (parentFieldSpec != null ? parentFieldSpec.getWeight() : 1.0F); if (persistentProperty.isEntity()) { TextIndexIncludeOptions optionsForNestedType = includeOptions; if (!IncludeStrategy.FORCE.equals(includeOptions.getStrategy()) && indexed != null) { optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE, - new TextIndexedFieldSpec(propertyDotPath, weight)); + new TextIndexedFieldSpec(propertyDotPath.toString(), weight)); } try { - appendTextIndexInformation(propertyDotPath, indexDefinitionBuilder, + appendTextIndexInformation(propertyDotPath, propertyPath, indexDefinitionBuilder, mappingContext.getPersistentEntity(persistentProperty.getActualType()), optionsForNestedType, guard); } catch (CyclicPropertyReferenceException e) { - LOGGER.info(e.getMessage(), e); + LOGGER.info(e.getMessage()); } catch (InvalidDataAccessApiUsageException e) { - LOGGER.info(String.format("Potentially invalid index structure discovered. Breaking operation for %s.", + LOGGER.info(String.format("Potentially invalid index structure discovered; Breaking operation for %s", entity.getName()), e); } } else if (includeOptions.isForce() || indexed != null) { - indexDefinitionBuilder.onField(propertyDotPath, weight); + indexDefinitionBuilder.onField(propertyDotPath.toString(), weight); } } @@ -280,23 +403,24 @@ public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) } /** - * Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of given type. - * + * Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of a given + * type. + * * @param dotPath The properties {@literal "dot"} path representation from its document root. 
* @param fallbackCollection - * @param type + * @param entity * @return */ protected List createCompoundIndexDefinitions(String dotPath, String fallbackCollection, MongoPersistentEntity entity) { - List indexDefinitions = new ArrayList(); + List indexDefinitions = new ArrayList<>(); CompoundIndexes indexes = entity.findAnnotation(CompoundIndexes.class); if (indexes != null) { - for (CompoundIndex index : indexes.value()) { - indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity)); - } + indexDefinitions = Arrays.stream(indexes.value()) + .map(index -> createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity)) + .collect(Collectors.toList()); } CompoundIndex index = entity.findAnnotation(CompoundIndex.class); @@ -308,19 +432,18 @@ protected List createCompoundIndexDefinitions(String dotP return indexDefinitions; } - @SuppressWarnings("deprecation") - protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String fallbackCollection, - CompoundIndex index, MongoPersistentEntity entity) { + protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String collection, CompoundIndex index, + MongoPersistentEntity entity) { CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition( - resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def())); + resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def(), entity)); if (!index.useGeneratedName()) { - indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null)); + indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null)); } if (index.unique()) { - indexDefinition.unique(index.dropDups() ? Duplicates.DROP : Duplicates.RETAIN); + indexDefinition.unique(); } if (index.sparse()) { @@ -331,57 +454,92 @@ protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, St indexDefinition.background(); } - String collection = StringUtils.hasText(index.collection()) ? 
index.collection() : fallbackCollection; + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity)); + } + + indexDefinition.collation(resolveCollation(index, entity)); return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } - private DBObject resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString) { + protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, String collection, + WildcardIndexed index, @Nullable MongoPersistentEntity entity) { + + WildcardIndex indexDefinition = new WildcardIndex(dotPath); + + if (StringUtils.hasText(index.wildcardProjection()) && ObjectUtils.isEmpty(dotPath)) { + indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity)); + } + + if (!index.useGeneratedName()) { + indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null)); + } + + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity)); + } + + indexDefinition.collation(resolveCollation(index, entity)); + return new IndexDefinitionHolder(dotPath, indexDefinition, collection); + } + + private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString, + PersistentEntity entity) { if (!StringUtils.hasText(dotPath) && !StringUtils.hasText(keyDefinitionString)) { - throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys."); + throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys"); } if (!StringUtils.hasText(keyDefinitionString)) { - return new BasicDBObject(dotPath, 1); + return new org.bson.Document(dotPath, 1); } - DBObject dbo = (DBObject) JSON.parse(keyDefinitionString); + Object keyDefToUse = ExpressionUtils.evaluate(keyDefinitionString, () -> getEvaluationContextForProperty(entity)); + + org.bson.Document dbo = (keyDefToUse instanceof org.bson.Document document) ? document + : org.bson.Document.parse(ObjectUtils.nullSafeToString(keyDefToUse)); + if (!StringUtils.hasText(dotPath)) { return dbo; } - BasicDBObjectBuilder dboBuilder = new BasicDBObjectBuilder(); + org.bson.Document document = new org.bson.Document(); for (String key : dbo.keySet()) { - dboBuilder.add(dotPath + "." + key, dbo.get(key)); + document.put(dotPath + "." + key, dbo.get(key)); } - return dboBuilder.get(); + return document; } /** - * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for given + * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for a given * {@link MongoPersistentProperty}. - * + * * @param dotPath The properties {@literal "dot"} path representation from its document root. * @param collection - * @param persitentProperty + * @param persistentProperty * @return */ - protected IndexDefinitionHolder createIndexDefinition(String dotPath, String fallbackCollection, - MongoPersistentProperty persitentProperty) { + @Nullable + protected IndexDefinitionHolder createIndexDefinition(String dotPath, String collection, + MongoPersistentProperty persistentProperty) { - Indexed index = persitentProperty.findAnnotation(Indexed.class); - String collection = StringUtils.hasText(index.collection()) ? 
index.collection() : fallbackCollection; + Indexed index = persistentProperty.findAnnotation(Indexed.class); + + if (index == null) { + return null; + } Index indexDefinition = new Index().on(dotPath, IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC); if (!index.useGeneratedName()) { - indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty)); + indexDefinition + .named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty)); } if (index.unique()) { - indexDefinition.unique(index.dropDups() ? Duplicates.DROP : Duplicates.RETAIN); + indexDefinition.unique(); } if (index.sparse()) { @@ -396,40 +554,197 @@ protected IndexDefinitionHolder createIndexDefinition(String dotPath, String fal indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS); } + if (StringUtils.hasText(index.expireAfter())) { + + if (index.expireAfterSeconds() >= 0) { + throw new IllegalStateException(String.format( + "@Indexed already defines an expiration timeout of %s seconds via Indexed#expireAfterSeconds; Please make sure to use either expireAfterSeconds or expireAfter", + index.expireAfterSeconds())); + } + + Duration timeout = computeIndexTimeout(index.expireAfter(), + () -> getEvaluationContextForProperty(persistentProperty.getOwner())); + if (!timeout.isNegative()) { + indexDefinition.expire(timeout); + } + } + + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), persistentProperty.getOwner())); + } + + indexDefinition.collation(resolveCollation(index, persistentProperty.getOwner())); return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } + private PartialIndexFilter evaluatePartialFilter(String filterExpression, PersistentEntity entity) { + + Object result = ExpressionUtils.evaluate(filterExpression, () -> getEvaluationContextForProperty(entity)); + + if (result instanceof org.bson.Document document) { + return PartialIndexFilter.of(document); + } + + return PartialIndexFilter.of(BsonUtils.parse(filterExpression, null)); + } + + private org.bson.Document evaluateWildcardProjection(String projectionExpression, PersistentEntity entity) { + + Object result = ExpressionUtils.evaluate(projectionExpression, () -> getEvaluationContextForProperty(entity)); + + if (result instanceof org.bson.Document document) { + return document; + } + + return BsonUtils.parse(projectionExpression, null); + } + + private Collation evaluateCollation(String collationExpression, PersistentEntity entity) { + + Object result = ExpressionUtils.evaluate(collationExpression, () -> getEvaluationContextForProperty(entity)); + if (result instanceof org.bson.Document document) { + return Collation.from(document); + } + if (result instanceof Collation collation) { + return collation; + } + if (result instanceof String stringValue) { + return Collation.parse(stringValue); + } + if (result instanceof Map) { + return Collation.from(new org.bson.Document((Map) result)); + } + throw new IllegalStateException("Cannot parse collation " + result); + + } + + /** + * Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given + * {@link MongoPersistentProperty}. + * + * @param dotPath The properties {@literal "dot"} path representation from its document root. 
+ * @param collection + * @param persistentProperty + * @return + * @since 2.2 + */ + @Nullable + protected IndexDefinitionHolder createHashedIndexDefinition(String dotPath, String collection, + MongoPersistentProperty persistentProperty) { + + HashIndexed index = persistentProperty.findAnnotation(HashIndexed.class); + + if (index == null) { + return null; + } + + return new IndexDefinitionHolder(dotPath, HashedIndex.hashed(dotPath), collection); + } + + /** + * Get the default {@link EvaluationContext}. + * + * @return never {@literal null}. + * @since 2.2 + */ + protected EvaluationContext getEvaluationContext() { + return evaluationContextProvider.getEvaluationContext(null); + } + + /** + * Get the {@link EvaluationContext} for a given {@link PersistentEntity entity}, falling back to the default one. + * + * @param persistentEntity can be {@literal null} + * @return + */ + private EvaluationContext getEvaluationContextForProperty(@Nullable PersistentEntity persistentEntity) { + + if (persistentEntity == null || !(persistentEntity instanceof BasicMongoPersistentEntity)) { + return getEvaluationContext(); + } + + EvaluationContext contextFromEntity = ((BasicMongoPersistentEntity) persistentEntity).getEvaluationContext(null); + + if (contextFromEntity != null && !EvaluationContextProvider.DEFAULT.equals(contextFromEntity)) { + return contextFromEntity; + } + + return getEvaluationContext(); + } + + /** + * Set the {@link EvaluationContextProvider} used for obtaining the {@link EvaluationContext} used to compute + * {@link org.springframework.expression.spel.standard.SpelExpression expressions}. + * + * @param evaluationContextProvider must not be {@literal null}. + * @since 2.2 + */ + public void setEvaluationContextProvider(EvaluationContextProvider evaluationContextProvider) { + this.evaluationContextProvider = evaluationContextProvider; + } + /** * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link GeoSpatialIndexed} for * {@link MongoPersistentProperty}. - * + * * @param dotPath The properties {@literal "dot"} path representation from its document root. * @param collection * @param persistentProperty * @return */ - protected IndexDefinitionHolder createGeoSpatialIndexDefinition(String dotPath, String fallbackCollection, + @Nullable + protected IndexDefinitionHolder createGeoSpatialIndexDefinition(String dotPath, String collection, MongoPersistentProperty persistentProperty) { GeoSpatialIndexed index = persistentProperty.findAnnotation(GeoSpatialIndexed.class); - String collection = StringUtils.hasText(index.collection()) ?
index.collection() : fallbackCollection; + + if (index == null) { + return null; + } GeospatialIndex indexDefinition = new GeospatialIndex(dotPath); indexDefinition.withBits(index.bits()); indexDefinition.withMin(index.min()).withMax(index.max()); if (!index.useGeneratedName()) { - indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty)); + indexDefinition + .named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty)); } - indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField()); + if (MongoClientVersion.isVersion5orNewer()) { + + Optional defaultBucketSize = MergedAnnotation.of(GeoSpatialIndexed.class).getDefaultValue("bucketSize", + Double.class); + if (!defaultBucketSize.isPresent() || index.bucketSize() != defaultBucketSize.get()) { + indexDefinition.withBucketSize(index.bucketSize()); + } else { + if (LOGGER.isInfoEnabled()) { + LOGGER.info( + "GeoSpatialIndexed.bucketSize no longer supported by Mongo Client 5 or newer. Ignoring bucketSize for path %s." + .formatted(dotPath)); + } + } + } else { + indexDefinition.withBucketSize(index.bucketSize()); + } + + indexDefinition.typed(index.type()).withAdditionalField(index.additionalField()); return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } - private String pathAwareIndexName(String indexName, String dotPath, MongoPersistentProperty property) { + private String pathAwareIndexName(String indexName, String dotPath, @Nullable PersistentEntity entity, + @Nullable MongoPersistentProperty property) { + + String nameToUse = ""; + if (StringUtils.hasText(indexName)) { + + Object result = ExpressionUtils.evaluate(indexName, () -> getEvaluationContextForProperty(entity)); - String nameToUse = StringUtils.hasText(indexName) ? indexName : ""; + if (result != null) { + nameToUse = ObjectUtils.nullSafeToString(result); + } + } if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) { return StringUtils.hasText(nameToUse) ? nameToUse : dotPath; @@ -448,139 +763,261 @@ private String pathAwareIndexName(String indexName, String dotPath, MongoPersist private List resolveIndexesForDbrefs(final String path, final String collection, MongoPersistentEntity entity) { - final List indexes = new ArrayList(0); - entity.doWithAssociations(new AssociationHandler() { + final List indexes = new ArrayList<>(0); + entity.doWithAssociations((AssociationHandler) association -> this + .resolveAndAddIndexesForAssociation(association, indexes, path, collection)); + return indexes; + } - @Override - public void doWithAssociation(Association association) { + private void resolveAndAddIndexesForAssociation(Association association, + List indexes, String path, String collection) { - MongoPersistentProperty property = association.getInverse(); + MongoPersistentProperty property = association.getInverse(); - String propertyDotPath = (StringUtils.hasText(path) ? path + "." 
: "") + property.getFieldName(); + DotPath propertyDotPath = DotPath.from(path).append(property.getFieldName()); - if (property.isAnnotationPresent(GeoSpatialIndexed.class) || property.isAnnotationPresent(TextIndexed.class)) { - throw new MappingException( - String.format("Cannot create geospatial-/text- index on DBRef in collection '%s' for path '%s'.", - collection, propertyDotPath)); - } + if (property.isAnnotationPresent(GeoSpatialIndexed.class) || property.isAnnotationPresent(TextIndexed.class)) { + throw new MappingException( + String.format("Cannot create geospatial-/text- index on DBRef in collection '%s' for path '%s'", collection, + propertyDotPath)); + } - IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, - collection, property); + List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, property); - if (indexDefinitionHolder != null) { - indexes.add(indexDefinitionHolder); - } - } - }); + if (!indexDefinitions.isEmpty()) { + indexes.addAll(indexDefinitions); + } + } - return indexes; + /** + * Compute the index timeout value by evaluating a potential + * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value. + * + * @param timeoutValue must not be {@literal null}. + * @param evaluationContext must not be {@literal null}. + * @return never {@literal null} + * @since 2.2 + * @throws IllegalArgumentException for invalid duration values. + */ + private static Duration computeIndexTimeout(String timeoutValue, Supplier evaluationContext) { + return DurationUtil.evaluate(timeoutValue, evaluationContext); + } + + /** + * Resolve the "collation" attribute from a given {@link Annotation} if present. + * + * @param annotation + * @param entity + * @return the collation present on either the annotation or the entity as a fallback. Might be {@literal null}. + * @since 4.0 + */ + @Nullable + private Collation resolveCollation(Annotation annotation, @Nullable PersistentEntity entity) { + return MergedAnnotation.from(annotation).getValue("collation", String.class).filter(StringUtils::hasText) + .map(it -> evaluateCollation(it, entity)).orElseGet(() -> { + + if (entity instanceof MongoPersistentEntity mongoPersistentEntity + && mongoPersistentEntity.hasCollation()) { + return mongoPersistentEntity.getCollation(); + } + return null; + }); + } + + private static boolean isMapWithoutWildcardIndex(MongoPersistentProperty property) { + return property.isMap() && !property.isAnnotationPresent(WildcardIndexed.class); } /** * {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used * to detect potential cycles within the references. - * + * * @author Christoph Strobl + * @author Mark Paluch */ static class CycleGuard { - private final Map> propertyTypeMap; - - CycleGuard() { - this.propertyTypeMap = new LinkedHashMap>(); - } + private final Set seenProperties = new HashSet<>(); /** + * Detect a cycle in a property path if the property was seen at least once. + * * @param property The property to inspect - * @param path The path under which the property can be reached. + * @param path The type path under which the property can be reached. * @throws CyclicPropertyReferenceException in case a potential cycle is detected. 
- * @see Path#cycles(MongoPersistentProperty, String) + * @see Path#isCycle() */ - void protect(MongoPersistentProperty property, String path) throws CyclicPropertyReferenceException { + void protect(MongoPersistentProperty property, Path path) throws CyclicPropertyReferenceException { String propertyTypeKey = createMapKey(property); - if (propertyTypeMap.containsKey(propertyTypeKey)) { - - List paths = propertyTypeMap.get(propertyTypeKey); - - for (Path existingPath : paths) { - - if (existingPath.cycles(property, path) && property.isEntity()) { - paths.add(new Path(property, path)); + if (!seenProperties.add(propertyTypeKey)) { - throw new CyclicPropertyReferenceException(property.getFieldName(), property.getOwner().getType(), - existingPath.getPath()); - } + if (path.isCycle()) { + throw new CyclicPropertyReferenceException(property.getFieldName(), property.getOwner().getType(), + path.toCyclePath()); } - - paths.add(new Path(property, path)); - } else { - - ArrayList paths = new ArrayList(); - paths.add(new Path(property, path)); - propertyTypeMap.put(propertyTypeKey, paths); } } private String createMapKey(MongoPersistentProperty property) { - return property.getOwner().getType().getSimpleName() + ":" + property.getFieldName(); + return ClassUtils.getShortName(property.getOwner().getType()) + ":" + property.getFieldName(); } /** - * Path defines the property and its full path from the document root.
                    + * Path defines the full property path from the document root.
                    * A {@link Path} with {@literal spring.data.mongodb} would be created for the property {@code Three.mongodb}. - * + * *
                     		 * 
                     		 * @Document
                     		 * class One {
                     		 *   Two spring;
                     		 * }
                    -		 * 
                    +		 *
                     		 * class Two {
                     		 *   Three data;
                     		 * }
                    -		 * 
                    +		 *
                     		 * class Three {
                     		 *   String mongodb;
                     		 * }
                     		 * 
                     		 * 
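To make the Javadoc example above concrete, assume a hypothetical @Indexed on Three.mongodb: the resolver then emits an index definition for the dot path spring.data.mongodb, while the Path breadcrumbs (spring -> data -> mongodb) feed only the cycle detection.

@Document
class One {
  Two spring;
}

class Two {
  Three data;
}

class Three {
  @Indexed String mongodb; // resolved as an index on "spring.data.mongodb"
}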
                    - * + * * @author Christoph Strobl + * @author Mark Paluch */ static class Path { - private final MongoPersistentProperty property; - private final String path; + private static final Path EMPTY = new Path(Collections.emptyList(), false); - Path(MongoPersistentProperty property, String path) { + private final List> elements; + private final boolean cycle; - this.property = property; - this.path = path; + private Path(List> elements, boolean cycle) { + this.elements = elements; + this.cycle = cycle; } - public String getPath() { - return path; + /** + * @return an empty {@link Path}. + * @since 1.10.8 + */ + static Path empty() { + return EMPTY; + } + + /** + * Creates a new {@link Path} from the initial {@link PersistentProperty}. + * + * @param initial must not be {@literal null}. + * @return the new {@link Path}. + * @since 1.10.8 + */ + static Path of(PersistentProperty initial) { + return new Path(Collections.singletonList(initial), false); } /** - * Checks whether the given property is owned by the same entity and if it has been already visited by a subset of - * the current path. Given {@literal foo.bar.bar} cycles if {@literal foo.bar} has already been visited and - * {@code class Bar} contains a property of type {@code Bar}. The previously mentioned path would not cycle if - * {@code class Bar} contained a property of type {@code SomeEntity} named {@literal bar}. - * - * @param property - * @param path - * @return + * Creates a new {@link Path} by appending a {@link PersistentProperty breadcrumb} to the path. + * + * @param breadcrumb must not be {@literal null}. + * @return the new {@link Path}. + * @since 1.10.8 */ - boolean cycles(MongoPersistentProperty property, String path) { + Path append(PersistentProperty breadcrumb) { + + List> elements = new ArrayList<>(this.elements.size() + 1); + elements.addAll(this.elements); + elements.add(breadcrumb); + + return new Path(elements, this.elements.contains(breadcrumb)); + } + + /** + * @return {@literal true} if a cycle was detected. + * @since 1.10.8 + */ + public boolean isCycle() { + return cycle; + } + + @Override + public String toString() { + return this.elements.isEmpty() ? "(empty)" : toPath(this.elements.iterator()); + } + + /** + * Returns the cycle path truncated to the first discovered cycle. The result for the path + * {@literal foo.bar.baz.bar} is {@literal bar -> baz -> bar}. + * + * @return the cycle path truncated to the first discovered cycle. 
+ * @since 1.10.8 + */ + String toCyclePath() { + + if (!cycle) { + return ""; + } + + for (int i = 0; i < this.elements.size(); i++) { + + int index = indexOf(this.elements, this.elements.get(i), i + 1); - if (!property.getOwner().equals(this.property.getOwner())) { + if (index != -1) { + return toPath(this.elements.subList(i, index + 1).iterator()); + } + } + + return toString(); + } + + private static int indexOf(List haystack, T needle, int offset) { + + for (int i = offset; i < haystack.size(); i++) { + if (haystack.get(i).equals(needle)) { + return i; + } + } + + return -1; + } + + private static String toPath(Iterator> iterator) { + + StringBuilder builder = new StringBuilder(); + while (iterator.hasNext()) { + + builder.append(iterator.next().getName()); + if (iterator.hasNext()) { + builder.append(" -> "); + } + } + + return builder.toString(); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + Path that = (Path) o; + + if (this.cycle != that.cycle) { return false; } + return ObjectUtils.nullSafeEquals(this.elements, that.elements); + } - return path.equals(this.path) || path.contains(this.path + ".") || path.contains("." + this.path); + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(elements); + result = 31 * result + (cycle ? 1 : 0); + return result; } } } @@ -594,20 +1031,16 @@ public static class CyclicPropertyReferenceException extends RuntimeException { private static final long serialVersionUID = -3762979307658772277L; private final String propertyName; - private final Class type; + private final @Nullable Class type; private final String dotPath; - public CyclicPropertyReferenceException(String propertyName, Class type, String dotPath) { + public CyclicPropertyReferenceException(String propertyName, @Nullable Class type, String dotPath) { this.propertyName = propertyName; this.type = type; this.dotPath = dotPath; } - /* - * (non-Javadoc) - * @see java.lang.Throwable#getMessage() - */ @Override public String getMessage() { return String.format("Found cycle for field '%s' in type '%s' for path '%s'", propertyName, @@ -618,7 +1051,7 @@ public String getMessage() { /** * Implementation of {@link IndexDefinition} holding additional (property)path information used for creating the * index. The path itself is the properties {@literal "dot"} path representation from its root document. - * + * * @author Christoph Strobl * @since 1.5 */ @@ -630,7 +1063,7 @@ public static class IndexDefinitionHolder implements IndexDefinition { /** * Create - * + * * @param path */ public IndexDefinitionHolder(String path, IndexDefinition definition, String collection) { @@ -646,7 +1079,7 @@ public String getCollection() { /** * Get the {@literal "dot"} path used to create the index. - * + * * @return */ public String getPath() { @@ -655,30 +1088,27 @@ public String getPath() { /** * Get the {@literal raw} {@link IndexDefinition}. 
- * + * * @return */ public IndexDefinition getIndexDefinition() { return indexDefinition; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys() - */ @Override - public DBObject getIndexKeys() { + public org.bson.Document getIndexKeys() { return indexDefinition.getIndexKeys(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions() - */ @Override - public DBObject getIndexOptions() { + public org.bson.Document getIndexOptions() { return indexDefinition.getIndexOptions(); } + + @Override + public String toString() { + return "IndexDefinitionHolder{" + "indexKeys=" + getIndexKeys() + '}'; + } } /** @@ -688,14 +1118,14 @@ public DBObject getIndexOptions() { static class TextIndexIncludeOptions { enum IncludeStrategy { - FORCE, DEFAULT; + FORCE, DEFAULT } private final IncludeStrategy strategy; - private final TextIndexedFieldSpec parentFieldSpec; + private final @Nullable TextIndexedFieldSpec parentFieldSpec; - public TextIndexIncludeOptions(IncludeStrategy strategy, TextIndexedFieldSpec parentFieldSpec) { + public TextIndexIncludeOptions(IncludeStrategy strategy, @Nullable TextIndexedFieldSpec parentFieldSpec) { this.strategy = strategy; this.parentFieldSpec = parentFieldSpec; } @@ -708,6 +1138,7 @@ public IncludeStrategy getStrategy() { return strategy; } + @Nullable public TextIndexedFieldSpec getParentFieldSpec() { return parentFieldSpec; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java new file mode 100644 index 0000000000..8b835f72c5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java @@ -0,0 +1,73 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.util.Assert; + +/** + * {@link IndexFilter} implementation for usage with plain {@link Document} as well as {@link CriteriaDefinition} filter + * expressions. + * + * @author Christoph Strobl + * @since 1.10 + */ +public class PartialIndexFilter implements IndexFilter { + + private final Object filterExpression; + + private PartialIndexFilter(Object filterExpression) { + + Assert.notNull(filterExpression, "FilterExpression must not be null"); + + this.filterExpression = filterExpression; + } + + /** + * Create new {@link PartialIndexFilter} for given {@link Document filter expression}. + * + * @param where must not be {@literal null}. 
+ * @return + */ + public static PartialIndexFilter of(Document where) { + return new PartialIndexFilter(where); + } + + /** + * Create new {@link PartialIndexFilter} for given {@link CriteriaDefinition filter expression}. + * + * @param where must not be {@literal null}. + * @return + */ + public static PartialIndexFilter of(CriteriaDefinition where) { + return new PartialIndexFilter(where); + } + + public Document getFilterObject() { + + if (filterExpression instanceof Document document) { + return document; + } + + if (filterExpression instanceof CriteriaDefinition criteriaDefinition) { + return criteriaDefinition.getCriteriaObject(); + } + + throw new IllegalArgumentException( + String.format("Unknown type %s used as filter expression", filterExpression.getClass())); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java new file mode 100644 index 0000000000..15b110c08a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java @@ -0,0 +1,80 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Index operations on a collection. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public interface ReactiveIndexOperations { + + /** + * Ensure that an index for the provided {@link IndexDefinition} exists for the collection indicated by the entity + * class. If not it will be created. + * + * @param indexDefinition must not be {@literal null}. + * @return a {@link Mono} emitting the name of the index on completion. + * @deprecated since 4.5, in favor of {@link #createIndex(IndexDefinition)}. + */ + @Deprecated(since = "4.5", forRemoval = true) + Mono ensureIndex(IndexDefinition indexDefinition); + + /** + * Create the index for the provided {@link IndexDefinition} for the collection indicated by the entity class, + * if it does not exist yet. + * + * @param indexDefinition must not be {@literal null}. + * @return the index name. + * @since 4.5 + */ + default Mono createIndex(IndexDefinition indexDefinition) { + return ensureIndex(indexDefinition); + } + + /** + * Alters the index with given {@literal name}. + * + * @param name name of index to change. + * @param options index options. + * @since 4.1 + */ + Mono alterIndex(String name, IndexOptions options); + + /** + * Drops an index from this collection. + * + * @param name name of index to drop + */ + Mono dropIndex(String name); + + /** + * Drops all indices from this collection. + */ + Mono dropAllIndexes(); + + /** + * Returns the index information on the collection. 
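A usage sketch under stated assumptions (the template variable and collection name are illustrative, not part of this interface): a ReactiveIndexOperations instance is typically obtained via ReactiveMongoTemplate.indexOps(String), and createIndex(...) delegates to the deprecated ensureIndex(...) by default, as shown above.

ReactiveIndexOperations indexOps = template.indexOps("persons");

Mono<String> created = indexOps.createIndex(
    new Index().on("lastname", Sort.Direction.ASC).unique());

created.subscribe(name -> System.out.println("created index: " + name));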
+ * + * @return index information on the collection + */ + Flux getIndexInfo(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperationsProvider.java new file mode 100644 index 0000000000..70dcfa0fbb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperationsProvider.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +/** + * Provider interface to obtain {@link ReactiveIndexOperations} by MongoDB collection name. + * + * @author Mark Paluch + * @since 2.1 + */ +@FunctionalInterface +public interface ReactiveIndexOperationsProvider { + + /** + * Returns the operations that can be performed on indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @return index operations on the named collection + */ + ReactiveIndexOperations indexOps(String collectionName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreator.java new file mode 100644 index 0000000000..0d818e19d9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreator.java @@ -0,0 +1,196 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.util.MongoDbErrorCodes; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +import com.mongodb.MongoException; + +/** + * Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext} + * for indexing metadata and ensures the indexes to be available using reactive infrastructure. + * + * @author Mark Paluch + * @since 2.1 + */ +public class ReactiveMongoPersistentEntityIndexCreator { + + private static final Log LOGGER = LogFactory.getLog(ReactiveMongoPersistentEntityIndexCreator.class); + + private final Map, Boolean> classesSeen = new ConcurrentHashMap, Boolean>(); + private final MongoMappingContext mappingContext; + private final ReactiveIndexOperationsProvider operationsProvider; + private final IndexResolver indexResolver; + + /** + * Creates a new {@link ReactiveMongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext}, + * {@link ReactiveIndexOperationsProvider}. + * + * @param mappingContext must not be {@literal null}. + * @param operationsProvider must not be {@literal null}. + */ + public ReactiveMongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, + ReactiveIndexOperationsProvider operationsProvider) { + this(mappingContext, operationsProvider, IndexResolver.create(mappingContext)); + } + + /** + * Creates a new {@link ReactiveMongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext}, + * {@link ReactiveIndexOperationsProvider}, and {@link IndexResolver}. + * + * @param mappingContext must not be {@literal null}. + * @param operationsProvider must not be {@literal null}. + * @param indexResolver must not be {@literal null}. + */ + public ReactiveMongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, + ReactiveIndexOperationsProvider operationsProvider, IndexResolver indexResolver) { + + Assert.notNull(mappingContext, "MongoMappingContext must not be null"); + Assert.notNull(operationsProvider, "ReactiveIndexOperations must not be null"); + Assert.notNull(indexResolver, "IndexResolver must not be null"); + + this.mappingContext = mappingContext; + this.operationsProvider = operationsProvider; + this.indexResolver = indexResolver; + } + + /** + * Returns whether the current index creator was registered for the given {@link MappingContext}. + * + * @param context + * @return + */ + public boolean isIndexCreatorFor(MappingContext context) { + return this.mappingContext.equals(context); + } + + /** + * Inspect entities for index creation. 
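A wiring sketch (the template and the Person entity are assumptions for illustration, not part of this class): because ReactiveIndexOperationsProvider is a functional interface keyed by collection name, a method reference to ReactiveMongoTemplate.indexOps is sufficient.

ReactiveMongoPersistentEntityIndexCreator creator =
    new ReactiveMongoPersistentEntityIndexCreator(mappingContext, template::indexOps);

creator.checkForIndexes(mappingContext.getRequiredPersistentEntity(Person.class))
    .subscribe(); // first call per type resolves @Document metadata and creates missing indexes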
+ * + * @return a {@link Mono} that completes without value after indexes were created. + */ + public Mono checkForIndexes(MongoPersistentEntity entity) { + + Class type = entity.getType(); + + if (!classesSeen.containsKey(type)) { + + if (this.classesSeen.put(type, Boolean.TRUE) == null) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Analyzing class " + type + " for index information"); + } + + return checkForAndCreateIndexes(entity); + } + } + + return Mono.empty(); + } + + private Mono checkForAndCreateIndexes(MongoPersistentEntity entity) { + + List> publishers = new ArrayList<>(); + + if (entity.isAnnotationPresent(Document.class)) { + + String collection = entity.getCollection(); + for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) { + + IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder definitionHolder + ? definitionHolder + : new IndexDefinitionHolder("", indexDefinition, collection); + + publishers.add(createIndex(indexToCreate)); + } + } + + return publishers.isEmpty() ? Mono.empty() : Flux.merge(publishers).then(); + } + + Mono createIndex(IndexDefinitionHolder indexDefinition) { + + return operationsProvider.indexOps(indexDefinition.getCollection()).ensureIndex(indexDefinition) // + .onErrorResume(ReactiveMongoPersistentEntityIndexCreator::isDataIntegrityViolation, + e -> translateException(e, indexDefinition)); + + } + + private Mono translateException(Throwable e, IndexDefinitionHolder indexDefinition) { + + Mono existingIndex = fetchIndexInformation(indexDefinition); + + Mono defaultError = Mono.error(new DataIntegrityViolationException( + String.format("Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'", + indexDefinition.getPath(), indexDefinition.getCollection(), indexDefinition.getIndexKeys(), + indexDefinition.getIndexOptions()), + e.getCause())); + + return existingIndex.flatMap(it -> { + return Mono. error(new DataIntegrityViolationException( + String.format("Index already defined as '%s'", indexDefinition.getPath()), e.getCause())); + }).switchIfEmpty(defaultError); + } + + private Mono fetchIndexInformation(IndexDefinitionHolder indexDefinition) { + + Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name"); + + Flux existingIndexes = operationsProvider.indexOps(indexDefinition.getCollection()).getIndexInfo(); + + return existingIndexes // + .filter(indexInfo -> ObjectUtils.nullSafeEquals(indexNameToLookUp, indexInfo.getName())) // + .next() // + .doOnError(e -> { + if(LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Failed to load index information for collection '%s'", indexDefinition.getCollection()), + e); + } + }); + } + + private static boolean isDataIntegrityViolation(Throwable t) { + + if (t instanceof UncategorizedMongoDbException) { + + return t.getCause() instanceof MongoException mongoException + && MongoDbErrorCodes.isDataIntegrityViolationCode(mongoException.getCode()); + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexDefinition.java new file mode 100644 index 0000000000..9d4315beae --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexDefinition.java @@ -0,0 +1,87 @@ +/* + * Copyright 2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * Definition for an Atlas Search Index (Search Index or Vector Index). + * + * @author Marcin Grzejszczak + * @author Mark Paluch + * @since 4.5 + */ +public interface SearchIndexDefinition { + + /** + * @return the name of the index. + */ + String getName(); + + /** + * @return the type of the index. Typically, {@code search} or {@code vectorSearch}. + */ + String getType(); + + /** + * Returns the index document for this index without any potential entity context resolving field name mappings. The + * resulting document contains the index name, type and {@link #getDefinition(TypeInformation, MappingContext) + * definition}. + * + * @return never {@literal null}. + */ + default Document getRawIndexDocument() { + return getIndexDocument(null, null); + } + + /** + * Returns the index document for this index in the context of a potential entity to resolve field name mappings. The + * resulting document contains the index name, type and {@link #getDefinition(TypeInformation, MappingContext) + * definition}. + * + * @param entity can be {@literal null}. + * @param mappingContext + * @return never {@literal null}. + */ + default Document getIndexDocument(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + + Document document = new Document(); + document.put("name", getName()); + document.put("type", getType()); + document.put("definition", getDefinition(entity, mappingContext)); + + return document; + } + + /** + * Returns the actual index definition for this index in the context of a potential entity to resolve field name + * mappings. Entity and context can be {@literal null} to create a generic index definition without applying field + * name mapping. + * + * @param entity can be {@literal null}. + * @param mappingContext can be {@literal null}. + * @return never {@literal null}. + */ + Document getDefinition(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexInfo.java new file mode 100644 index 0000000000..1a657ecf0b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexInfo.java @@ -0,0 +1,129 @@ +/* + * Copyright 2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.util.function.Supplier; + +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * Index information for a MongoDB Search Index. + * + * @author Christoph Strobl + * @since 4.5 + */ +public class SearchIndexInfo { + + private final @Nullable Object id; + private final SearchIndexStatus status; + private final Lazy indexDefinition; + + SearchIndexInfo(@Nullable Object id, SearchIndexStatus status, Supplier indexDefinition) { + this.id = id; + this.status = status; + this.indexDefinition = Lazy.of(indexDefinition); + } + + /** + * Parse a JSON document describing an index into a {@link SearchIndexInfo}. + * + * @param source JSON document describing the index. + * @return a new {@link SearchIndexInfo} instance. + */ + public static SearchIndexInfo parse(String source) { + return of(Document.parse(source)); + } + + /** + * Create a {@link SearchIndexInfo} from the BSON {@link Document} representation of an index. + * + * @param indexDocument BSON document describing the index. + * @return a new {@link SearchIndexInfo} instance. + */ + public static SearchIndexInfo of(Document indexDocument) { + + Object id = indexDocument.get("id"); + SearchIndexStatus status = SearchIndexStatus + .valueOf(indexDocument.get("status", SearchIndexStatus.DOES_NOT_EXIST.name())); + + return new SearchIndexInfo(id, status, () -> readIndexDefinition(indexDocument)); + } + + /** + * The id of the index. Can be {@literal null}, e.g. for an index not yet created. + * + * @return can be {@literal null}. + */ + @Nullable + public Object getId() { + return id; + } + + /** + * @return the current status of the index. + */ + public SearchIndexStatus getStatus() { + return status; + } + + /** + * @return the current index definition. 
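To illustrate parsing and the status check (the JSON below is illustrative, not a real server response, and READY is assumed to be one of the SearchIndexStatus constants alongside the DOES_NOT_EXIST referenced above):

SearchIndexInfo info = SearchIndexInfo.parse(
    "{ 'id': '0', 'name': 'vector-index', 'type': 'vectorSearch', 'status': 'READY' }");

if (info.getStatus() == SearchIndexStatus.READY) {
  // the index is built and queryable
}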
+ */ + public SearchIndexDefinition getIndexDefinition() { + return indexDefinition.get(); + } + + private static SearchIndexDefinition readIndexDefinition(Document document) { + + String type = document.get("type", "search"); + if (type.equals("vectorSearch")) { + return VectorIndex.of(document); + } + + return new SearchIndexDefinition() { + + @Override + public String getName() { + return document.getString("name"); + } + + @Override + public String getType() { + return type; + } + + @Override + public Document getDefinition(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + if (document.containsKey("latestDefinition")) { + return document.get("latestDefinition", new Document()); + } + return document.get("definition", new Document()); + } + + @Override + public String toString() { + return getDefinition(null, null).toJson(); + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperations.java new file mode 100644 index 0000000000..ee3f59cf95 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperations.java @@ -0,0 +1,75 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.springframework.dao.DataAccessException; + +/** + * Search Index operations on a collection for Atlas Search. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + * @see VectorIndex + */ +public interface SearchIndexOperations { + + /** + * Create the index for the given {@link SearchIndexDefinition} in the collection indicated by the entity class. + * + * @param indexDefinition must not be {@literal null}. + * @return the index name. + */ + String createIndex(SearchIndexDefinition indexDefinition); + + /** + * Alters the search index matching the index {@link SearchIndexDefinition#getName() name}. + *

                    + * Atlas Search might not support updating indices which raises a {@link DataAccessException}. + * + * @param indexDefinition the index definition. + */ + void updateIndex(SearchIndexDefinition indexDefinition); + + /** + * Check whether an index with the given {@code indexName} exists for the collection indicated by the entity class. To + * ensure an existing index is queryable it is recommended to check its {@link #status(String) status}. + * + * @param indexName name of index to check for presence. + * @return {@literal true} if the index exists; {@literal false} otherwise. + */ + boolean exists(String indexName); + + /** + * Check the actual {@link SearchIndexStatus status} of an index. + * + * @param indexName name of index to get the status for. + * @return the current status of the index or {@link SearchIndexStatus#DOES_NOT_EXIST} if the index cannot be found. + */ + SearchIndexStatus status(String indexName); + + /** + * Drops an index from the collection indicated by the entity class. + * + * @param indexName name of index to drop. + */ + void dropIndex(String indexName); + + /** + * Drops all search indices from the collection indicated by the entity class. + */ + void dropAllIndexes(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperationsProvider.java new file mode 100644 index 0000000000..ee87c8d61e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperationsProvider.java @@ -0,0 +1,51 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +/** + * Provider interface to obtain {@link SearchIndexOperations} by MongoDB collection name or entity type. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public interface SearchIndexOperationsProvider { + + /** + * Returns the operations that can be performed on search indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @return index operations on the named collection + */ + SearchIndexOperations searchIndexOps(String collectionName); + + /** + * Returns the operations that can be performed on search indexes. + * + * @param type the type used for field mapping. + * @return index operations on the named collection + */ + SearchIndexOperations searchIndexOps(Class type); + + /** + * Returns the operations that can be performed on search indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @param type the type used for field mapping. Can be {@literal null}. 
+ * @return index operations on the named collection + */ + SearchIndexOperations searchIndexOps(Class type, String collectionName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexStatus.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexStatus.java new file mode 100644 index 0000000000..91143d73c6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexStatus.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025. the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +/** + * Representation of different conditions a search index can be in. + * + * @author Christoph Strobl + * @since 4.5 + */ +public enum SearchIndexStatus { + + /** building or re-building the index - might be queryable */ + BUILDING, + + /** nothing to be seen here - not queryable */ + DOES_NOT_EXIST, + + /** will cease to exist - no longer queryable */ + DELETING, + + /** well, this one is broken - not queryable */ + FAILED, + + /** busy with other things, check back later - not queryable */ + PENDING, + + /** ask me anything - queryable */ + READY, + + /** ask me anything about outdated data - still queryable */ + STALE +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java index 9fb64be3fb..a87b15de45 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,34 +19,37 @@ import java.util.LinkedHashSet; import java.util.Set; +import org.bson.Document; import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * {@link IndexDefinition} to span multiple keys for text search. 
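To tie the pieces together, a sketch (under assumptions) of the index lifecycle: `provider` is any SearchIndexOperationsProvider implementation, the collection and field names are invented, and VectorIndex is the concrete definition type added later in this patch.

// Illustrative only: create a search index, wait until it is queryable, then drop it.
static void manageIndex(SearchIndexOperationsProvider provider) throws InterruptedException {

	SearchIndexOperations indexOps = provider.searchIndexOps("movies");

	String name = indexOps.createIndex(new VectorIndex("vector_index")
			.addVector("plotEmbedding", vector -> vector.dimensions(1536).cosine()));

	// index creation is asynchronous; poll until the index reports as queryable
	while (indexOps.status(name) != SearchIndexStatus.READY) {
		Thread.sleep(500);
	}

	indexOps.dropIndex(name); // or indexOps.dropAllIndexes() to clear the collection
}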
- * + * * @author Christoph Strobl + * @author Mark Paluch * @since 1.6 */ public class TextIndexDefinition implements IndexDefinition { - private String name; + private @Nullable String name; private Set fieldSpecs; - private String defaultLanguage; - private String languageOverride; + private @Nullable String defaultLanguage; + private @Nullable String languageOverride; + private @Nullable IndexFilter filter; + private @Nullable Collation collation; TextIndexDefinition() { - fieldSpecs = new LinkedHashSet(); + fieldSpecs = new LinkedHashSet<>(); } /** * Creates a {@link TextIndexDefinition} for all fields in the document. - * + * * @return */ public static TextIndexDefinition forAllFields() { @@ -55,7 +58,7 @@ public static TextIndexDefinition forAllFields() { /** * Get {@link TextIndexDefinitionBuilder} to create {@link TextIndexDefinition}. - * + * * @return */ public static TextIndexDefinitionBuilder builder() { @@ -78,21 +81,17 @@ public void addFieldSpecs(Collection fieldSpecs) { /** * Returns if the {@link TextIndexDefinition} has fields assigned. - * + * * @return */ public boolean hasFieldSpec() { return !fieldSpecs.isEmpty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys() - */ @Override - public DBObject getIndexKeys() { + public Document getIndexKeys() { - DBObject keys = new BasicDBObject(); + Document keys = new Document(); for (TextIndexedFieldSpec fieldSpec : fieldSpecs) { keys.put(fieldSpec.fieldname, "text"); } @@ -100,14 +99,10 @@ public DBObject getIndexKeys() { return keys; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions() - */ @Override - public DBObject getIndexOptions() { + public Document getIndexOptions() { - DBObject options = new BasicDBObject(); + Document options = new Document(); if (StringUtils.hasText(name)) { options.put("name", name); } @@ -115,20 +110,28 @@ public DBObject getIndexOptions() { options.put("default_language", defaultLanguage); } - BasicDBObject weightsDbo = new BasicDBObject(); + if (collation != null) { + options.put("collation", collation.toDocument()); + } + + Document weightsDocument = new Document(); for (TextIndexedFieldSpec fieldSpec : fieldSpecs) { if (fieldSpec.isWeighted()) { - weightsDbo.put(fieldSpec.getFieldname(), fieldSpec.getWeight()); + weightsDocument.put(fieldSpec.getFieldname(), fieldSpec.getWeight()); } } - if (!weightsDbo.isEmpty()) { - options.put("weights", weightsDbo); + if (!weightsDocument.isEmpty()) { + options.put("weights", weightsDocument); } if (StringUtils.hasText(languageOverride)) { options.put("language_override", languageOverride); } + if (filter != null) { + options.put("partialFilterExpression", filter.getFilterObject()); + } + return options; } @@ -143,7 +146,7 @@ public static class TextIndexedFieldSpec { /** * Create new {@link TextIndexedFieldSpec} for given fieldname without any weight. - * + * * @param fieldname */ public TextIndexedFieldSpec(String fieldname) { @@ -152,20 +155,20 @@ public TextIndexedFieldSpec(String fieldname) { /** * Create new {@link TextIndexedFieldSpec} for given fieldname and weight. - * + * * @param fieldname * @param weight */ - public TextIndexedFieldSpec(String fieldname, Float weight) { + public TextIndexedFieldSpec(String fieldname, @Nullable Float weight) { - Assert.hasText(fieldname, "Text index field cannot be blank."); + Assert.hasText(fieldname, "Text index field cannot be blank"); this.fieldname = fieldname; this.weight = weight != null ? 
weight : 1.0F; } /** * Get the fieldname associated with the {@link TextIndexedFieldSpec}. - * + * * @return */ public String getFieldname() { @@ -174,7 +177,7 @@ public String getFieldname() { /** * Get the weight associated with the {@link TextIndexedFieldSpec}. - * + * * @return */ public Float getWeight() { @@ -185,7 +188,7 @@ public Float getWeight() { * @return true if {@link #weight} has a value that is a valid number. */ public boolean isWeighted() { - return this.weight != null && this.weight.compareTo(1.0F) != 0; + return this.weight.compareTo(1.0F) != 0; } @Override @@ -194,7 +197,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -202,12 +205,10 @@ public boolean equals(Object obj) { if (obj == null) { return false; } - if (!(obj instanceof TextIndexedFieldSpec)) { + if (!(obj instanceof TextIndexedFieldSpec other)) { return false; } - TextIndexedFieldSpec other = (TextIndexedFieldSpec) obj; - return ObjectUtils.nullSafeEquals(this.fieldname, other.fieldname); } @@ -215,7 +216,7 @@ public boolean equals(Object obj) { /** * {@link TextIndexDefinitionBuilder} helps defining options for creating {@link TextIndexDefinition}. - * + * * @author Christoph Strobl * @since 1.6 */ @@ -230,7 +231,7 @@ public TextIndexDefinitionBuilder() { /** * Define the name to be used when creating the index in the store. - * + * * @param name * @return */ @@ -240,15 +241,15 @@ public TextIndexDefinitionBuilder named(String name) { } /** - * Define the index to span all fields using wilcard.
                    - * NOTE {@link TextIndexDefinition} cannot contain any other fields when defined with wildcard. - * + * Define the index to span all fields using wildcard.
                    + * NOTE: {@link TextIndexDefinition} cannot contain any other fields when defined with wildcard. + * * @return */ public TextIndexDefinitionBuilder onAllFields() { if (!instance.fieldSpecs.isEmpty()) { - throw new InvalidDataAccessApiUsageException("Cannot add wildcard fieldspect to non empty."); + throw new InvalidDataAccessApiUsageException("Cannot add wildcard fieldspect to non empty"); } this.instance.fieldSpecs.add(ALL_FIELDS); @@ -257,7 +258,7 @@ public TextIndexDefinitionBuilder onAllFields() { /** * Include given fields with default weight. - * + * * @param fieldnames * @return */ @@ -271,7 +272,7 @@ public TextIndexDefinitionBuilder onFields(String... fieldnames) { /** * Include given field with default weight. - * + * * @param fieldname * @return */ @@ -281,15 +282,15 @@ public TextIndexDefinitionBuilder onField(String fieldname) { /** * Include given field with weight. - * + * * @param fieldname * @return */ public TextIndexDefinitionBuilder onField(String fieldname, Float weight) { if (this.instance.fieldSpecs.contains(ALL_FIELDS)) { - throw new InvalidDataAccessApiUsageException(String.format("Cannot add %s to field spec for all fields.", - fieldname)); + throw new InvalidDataAccessApiUsageException( + String.format("Cannot add %s to field spec for all fields", fieldname)); } this.instance.fieldSpecs.add(new TextIndexedFieldSpec(fieldname, weight)); @@ -298,10 +299,11 @@ public TextIndexDefinitionBuilder onField(String fieldname, Float weight) { /** * Define the default language to be used when indexing documents. - * + * * @param language - * @see http://docs.mongodb.org/manual/tutorial/specify-language-for-text-index/#specify-default-language-text-index * @return + * @see https://docs.mongodb.org/manual/tutorial/specify-language-for-text-index/#specify-default-language-text-index */ public TextIndexDefinitionBuilder withDefaultLanguage(String language) { @@ -311,22 +313,48 @@ public TextIndexDefinitionBuilder withDefaultLanguage(String language) { /** * Define field for language override. - * + * * @param fieldname * @return */ public TextIndexDefinitionBuilder withLanguageOverride(String fieldname) { if (StringUtils.hasText(this.instance.languageOverride)) { - throw new InvalidDataAccessApiUsageException(String.format( - "Cannot set language override on %s as it is already defined on %s.", fieldname, - this.instance.languageOverride)); + throw new InvalidDataAccessApiUsageException( + String.format("Cannot set language override on %s as it is already defined on %s", fieldname, + this.instance.languageOverride)); } this.instance.languageOverride = fieldname; return this; } + /** + * Only index the documents that meet the specified {@link IndexFilter filter expression}. + * + * @param filter can be {@literal null}. + * @return + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 1.10 + */ + public TextIndexDefinitionBuilder partial(@Nullable IndexFilter filter) { + + this.instance.filter = filter; + return this; + } + + /** + * Configure to use simple {@link Collation}. Required if the collection uses a non-simple collation. 
+ * + * @since 2.2 + */ + public TextIndexDefinitionBuilder withSimpleCollation() { + + this.instance.collation = Collation.simple(); + return this; + } + public TextIndexDefinition build() { return this.instance; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java index 4f8fd83df1..61fc4c05f3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2016 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,7 @@ /** * {@link TextIndexed} marks a field to be part of the text index. As there can be only one text index per collection * all fields marked with {@link TextIndexed} are combined into one single index.
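A hedged sketch of the builder just shown; the field names, weight and language are illustrative, not part of the patch.

TextIndexDefinition textIndex = TextIndexDefinition.builder()
		.named("post_text_idx")
		.onField("title", 3F)       // weighted field
		.onField("body")            // default weight 1.0
		.withDefaultLanguage("english")
		.withSimpleCollation()      // required if the collection uses a non-simple collation
		.build();

// getIndexKeys()    -> { "title" : "text", "body" : "text" }
// getIndexOptions() -> { "name" : "post_text_idx", "default_language" : "english",
//                        "collation" : { "locale" : "simple" }, "weights" : { "title" : 3.0 } }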
                    - * + * * @author Christoph Strobl * @author Mark Paluch * @since 1.6 @@ -38,7 +38,7 @@ * Defines the significance of the filed relative to other indexed fields. The value directly influences the documents * score.
                    * Defaulted to {@literal 1.0}. - * + * * @return */ float weight() default 1.0F; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/VectorIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/VectorIndex.java new file mode 100644 index 0000000000..b46dbf4d0c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/VectorIndex.java @@ -0,0 +1,349 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Contract; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * {@link SearchIndexDefinition} for creating MongoDB + * Vector Index required to + * run {@code $vectorSearch} queries. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public class VectorIndex implements SearchIndexDefinition { + + private final String name; + private final List fields = new ArrayList<>(); + + /** + * Create a new {@link VectorIndex} instance. + * + * @param name The name of the index. + */ + public VectorIndex(String name) { + this.name = name; + } + + /** + * Add a filter field. + * + * @param path dot notation to field/property used for filtering. + * @return this. + */ + @Contract("_ -> this") + public VectorIndex addFilter(String path) { + + Assert.hasText(path, "Path must not be null or empty"); + + return addField(new VectorFilterField(path, "filter")); + } + + /** + * Add a vector field and accept a {@link VectorFieldBuilder} customizer. + * + * @param path dot notation to field/property used for filtering. + * @param customizer customizer function. + * @return this. + */ + @Contract("_, _ -> this") + public VectorIndex addVector(String path, Consumer customizer) { + + Assert.hasText(path, "Path must not be null or empty"); + + VectorFieldBuilder builder = new VectorFieldBuilder(path, "vector"); + customizer.accept(builder); + return addField(builder.build()); + } + + @Override + public String getName() { + return name; + } + + @Override + public String getType() { + return "vectorSearch"; + } + + @Override + public Document getDefinition(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + + MongoPersistentEntity persistentEntity = entity != null + ? (mappingContext != null ? 
mappingContext.getPersistentEntity(entity) : null) + : null; + + Document definition = new Document(); + List fields = new ArrayList<>(); + definition.put("fields", fields); + + for (SearchField field : this.fields) { + + Document filter = new Document("type", field.type()); + filter.put("path", resolvePath(field.path(), persistentEntity, mappingContext)); + + if (field instanceof VectorIndexField vif) { + + filter.put("numDimensions", vif.dimensions()); + filter.put("similarity", vif.similarity()); + if (StringUtils.hasText(vif.quantization)) { + filter.put("quantization", vif.quantization()); + } + } + fields.add(filter); + } + + return definition; + } + + @Contract("_ -> this") + private VectorIndex addField(SearchField filterField) { + + fields.add(filterField); + return this; + } + + @Override + public String toString() { + return "VectorIndex{" + "name='" + name + '\'' + ", fields=" + fields + ", type='" + getType() + '\'' + '}'; + } + + /** + * Parse the {@link Document} into a {@link VectorIndex}. + */ + static VectorIndex of(Document document) { + + VectorIndex index = new VectorIndex(document.getString("name")); + + String definitionKey = document.containsKey("latestDefinition") ? "latestDefinition" : "definition"; + Document definition = document.get(definitionKey, Document.class); + + for (Object entry : definition.get("fields", List.class)) { + if (entry instanceof Document field) { + if (field.get("type").equals("vector")) { + index.addField(new VectorIndexField(field.getString("path"), "vector", field.getInteger("numDimensions"), + field.getString("similarity"), field.getString("quantization"))); + } else { + index.addField(new VectorFilterField(field.getString("path"), "filter")); + } + } + } + + return index; + } + + private String resolvePath(String path, @Nullable MongoPersistentEntity persistentEntity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + + if (persistentEntity == null || mappingContext == null) { + return path; + } + + QueryMapper.MetadataBackedField mbf = new QueryMapper.MetadataBackedField(path, persistentEntity, mappingContext); + + return mbf.getMappedKey(); + } + + interface SearchField { + + String path(); + + String type(); + } + + record VectorFilterField(String path, String type) implements SearchField { + } + + record VectorIndexField(String path, String type, int dimensions, @Nullable String similarity, + @Nullable String quantization) implements SearchField { + } + + /** + * Builder to create a vector field + */ + public static class VectorFieldBuilder { + + private final String path; + private final String type; + + private int dimensions; + private @Nullable String similarity; + private @Nullable String quantization; + + VectorFieldBuilder(String path, String type) { + + this.path = path; + this.type = type; + } + + /** + * Number of vector dimensions enforced at index- & query-time. + * + * @param dimensions value between {@code 0} and {@code 4096}. + * @return this. + */ + @Contract("_ -> this") + public VectorFieldBuilder dimensions(int dimensions) { + this.dimensions = dimensions; + return this; + } + + /** + * Use similarity based on the angle between vectors. + * + * @return new instance of {@link VectorIndex}. + */ + @Contract(" -> this") + public VectorFieldBuilder cosine() { + return similarity(SimilarityFunction.COSINE); + } + + /** + * Use similarity based the distance between vector ends. 
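A sketch of the fluent API from this file; the path names, dimension count and quantization choice are assumptions.

VectorIndex index = new VectorIndex("backdrop_index")
		.addVector("imageEmbedding", vector -> vector.dimensions(512)
				.euclidean()
				.quantization(Quantization.SCALAR))
		.addFilter("metadata.genre");

// index.getDefinition(null, null) renders:
// { "fields" : [
//     { "type" : "vector", "path" : "imageEmbedding", "numDimensions" : 512,
//       "similarity" : "euclidean", "quantization" : "scalar" },
//     { "type" : "filter", "path" : "metadata.genre" } ] }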
+ */ + @Contract(" -> this") + public VectorFieldBuilder euclidean() { + return similarity(SimilarityFunction.EUCLIDEAN); + } + + /** + * Use similarity based on both angle and magnitude of the vectors. + * + * @return new instance of {@link VectorIndex}. + */ + @Contract(" -> this") + public VectorFieldBuilder dotProduct() { + return similarity(SimilarityFunction.DOT_PRODUCT); + } + + /** + * Similarity function used. + * + * @param similarity should be one of {@literal euclidean | cosine | dotProduct}. + * @return this. + * @see SimilarityFunction + * @see #similarity(SimilarityFunction) + */ + @Contract("_ -> this") + public VectorFieldBuilder similarity(String similarity) { + + this.similarity = similarity; + return this; + } + + /** + * Similarity function used. + * + * @param similarity must not be {@literal null}. + * @return this. + */ + @Contract("_ -> this") + public VectorFieldBuilder similarity(SimilarityFunction similarity) { + + return similarity(similarity.getFunctionName()); + } + + /** + * Quantization used. + * + * @param quantization should be one of {@literal none | scalar | binary}. + * @return this. + * @see Quantization + * @see #quantization(Quantization) + */ + public VectorFieldBuilder quantization(String quantization) { + + this.quantization = quantization; + return this; + } + + /** + * Quantization used. + * + * @param quantization must not be {@literal null}. + * @return this. + */ + public VectorFieldBuilder quantization(Quantization quantization) { + return quantization(quantization.getQuantizationName()); + } + + VectorIndexField build() { + return new VectorIndexField(this.path, this.type, this.dimensions, this.similarity, this.quantization); + } + } + + /** + * Similarity function used to calculate vector distance. + */ + public enum SimilarityFunction { + + DOT_PRODUCT("dotProduct"), COSINE("cosine"), EUCLIDEAN("euclidean"); + + final String functionName; + + SimilarityFunction(String functionName) { + this.functionName = functionName; + } + + public String getFunctionName() { + return functionName; + } + } + + /** + * Vector quantization. Quantization reduce vector sizes while preserving performance. + */ + public enum Quantization { + + NONE("none"), + + /** + * Converting a float point into an integer. + */ + SCALAR("scalar"), + + /** + * Converting a float point into a single bit. + */ + BINARY("binary"); + + final String quantizationName; + + Quantization(String quantizationName) { + this.quantizationName = quantizationName; + } + + public String getQuantizationName() { + return quantizationName; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java new file mode 100644 index 0000000000..dcd2b7c022 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java @@ -0,0 +1,199 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.time.Duration; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * {@link WildcardIndex} is a specific {@link Index} that can be used to include all fields into an index based on the + * {@code $**" : 1} pattern on a root object (the one typically carrying the + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation). On those it is possible to use + * {@link #wildcardProjectionInclude(String...)} and {@link #wildcardProjectionExclude(String...)} to define specific + * paths for in-/exclusion. + *
                    + * It can also be used to define an index on a specific field path and its subfields, e.g. + * {@code "path.to.field.$**" : 1}.
                    + * Note that {@literal wildcardProjections} are not allowed in this case. + *
                    + * LIMITATIONS
                    + *

                      + *
+ * - {@link #unique() Unique} and {@link #expire(long) ttl} options are not supported.
+ * - Keys used for sharding must not be included.
+ * - Cannot be used to generate any type of geo index.
                    + * + * @author Christoph Strobl + * @see MongoDB Reference Documentation: Wildcard + * Indexes/ + * @since 3.3 + */ +public class WildcardIndex extends Index { + + private @Nullable String fieldName; + private final Map wildcardProjection = new LinkedHashMap<>(); + + /** + * Create a new instance of {@link WildcardIndex} using {@code $**}. + */ + public WildcardIndex() {} + + /** + * Create a new instance of {@link WildcardIndex} for the given {@literal path}. If no {@literal path} is provided the + * index will be considered a root one using {@code $**}.
                    + * NOTE: {@link #wildcardProjectionInclude(String...)}, {@link #wildcardProjectionExclude(String...)} + * can only be used for top level index definitions having an {@literal empty} or {@literal null} path. + * + * @param path can be {@literal null}. If {@literal null} all fields will be indexed. + */ + public WildcardIndex(@Nullable String path) { + this.fieldName = path; + } + + /** + * Include the {@code _id} field in {@literal wildcardProjection}. + * + * @return this. + */ + public WildcardIndex includeId() { + + wildcardProjection.put(FieldName.ID.name(), 1); + return this; + } + + /** + * Set the index name to use. + * + * @param name + * @return this. + */ + @Override + public WildcardIndex named(String name) { + + super.named(name); + return this; + } + + /** + * Unique option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index unique() { + throw new UnsupportedOperationException("Wildcard Index does not support 'unique'"); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index expire(long seconds) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'"); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index expire(long value, TimeUnit timeUnit) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'"); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index expire(Duration duration) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'"); + } + + /** + * Add fields to be included from indexing via {@code wildcardProjection}.
                    + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionInclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 1); + } + return this; + } + + /** + * Add fields to be excluded from indexing via {@code wildcardProjection}.
                    + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionExclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 0); + } + return this; + } + + /** + * Set the fields to be in-/excluded from indexing via {@code wildcardProjection}.
                    + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param includeExclude must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjection(Map includeExclude) { + + wildcardProjection.putAll(includeExclude); + return this; + } + + private String getTargetFieldName() { + return StringUtils.hasText(fieldName) ? (fieldName + ".$**") : "$**"; + } + + @Override + public Document getIndexKeys() { + return new Document(getTargetFieldName(), 1); + } + + @Override + public Document getIndexOptions() { + + Document options = new Document(super.getIndexOptions()); + if (!CollectionUtils.isEmpty(wildcardProjection)) { + options.put("wildcardProjection", new Document(wildcardProjection)); + } + return options; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java new file mode 100644 index 0000000000..e7eaf3bf15 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.annotation.Collation; + +/** + * Annotation for an entity or property that should be used as key for a + * Wildcard Index.
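Before the annotation model below, a brief sketch of the programmatic WildcardIndex API just defined; the projected paths are invented for illustration.

WildcardIndex rootIndex = new WildcardIndex() // root index on "$**"
		.includeId()
		.wildcardProjectionExclude("userMetadata.age");

// rootIndex.getIndexKeys()    -> { "$**" : 1 }
// rootIndex.getIndexOptions() -> { "wildcardProjection" : { "_id" : 1, "userMetadata.age" : 0 } }

// a path-scoped variant; wildcardProjection is not allowed here
WildcardIndex scoped = new WildcardIndex("userMetadata"); // keys: { "userMetadata.$**" : 1 }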
+ * If placed on a {@link ElementType#TYPE type} that is a root level domain entity (one having an + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation), it will advise the index creator to create a + * wildcard index for it. + * + *
                    + *
                    + * @Document
                    + * @WildcardIndexed
                    + * public class Product {
                    + *     ...
                    + * }
                    + *
                    + * db.product.createIndex({ "$**" : 1 } , {})
                    + * 
                    + * + * {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index. + * + *
                    + *
                    + * @Document
                    + * @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
                    + * public class User {
                    + *     private @Id String id;
                    + *     private UserMetadata userMetadata;
                    + * }
                    + *
                    + *
                    + * db.user.createIndex(
                    + *   { "$**" : 1 },
                    + *   { "wildcardProjection" :
                    + *     { "userMetadata.age" : 0 }
                    + *   }
                    + * )
                    + * 
                    + * + * Wildcard indexes can also be expressed by adding the annotation directly to the field. Please note that + * {@literal wildcardProjection} is not allowed on nested paths. + * + *
                    + * @Document
                    + * public class User {
                    + *
                    + *     private @Id String id;
                    + *
                    + *     @WildcardIndexed
                    + *     private UserMetadata userMetadata;
                    + * }
                    + *
                    + *
                    + * db.user.createIndex({ "userMetadata.$**" : 1 }, {})
                    + * 
                    + * + * @author Christoph Strobl + * @since 3.3 + */ +@Collation +@Documented +@Target({ ElementType.TYPE, ElementType.FIELD }) +@Retention(RetentionPolicy.RUNTIME) +public @interface WildcardIndexed { + + /** + * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
                    + *
                    + * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the + * provided name will be prefixed with the path leading to the entity. + * + * @return empty by default. + */ + String name() default ""; + + /** + * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults + * to {@literal false}. + * + * @return {@literal false} by default. + */ + boolean useGeneratedName() default false; + + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
+ * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + */ + String partialFilter() default ""; + + /** + * Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) parsable} String. + *
                    + * NOTE: Can only be applied on root level documents. + * + * @return empty by default. + */ + String wildcardProjection() default ""; + + /** + * Defines the collation to apply. + * + * @return an empty {@link String} by default. + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/package-info.java index 4f82c97826..c49f501d8d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/package-info.java @@ -1,5 +1,6 @@ /** * Support for MongoDB document indexing. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.index; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java index a1196fca46..3d68dbaac2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,57 +17,67 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Map; -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.context.expression.BeanFactoryAccessor; -import org.springframework.context.expression.BeanFactoryResolver; import org.springframework.data.annotation.Id; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.AssociationHandler; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.model.BasicPersistentEntity; -import org.springframework.data.mapping.model.MappingException; import org.springframework.data.mongodb.MongoCollectionUtils; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; import org.springframework.data.util.TypeInformation; +import 
org.springframework.expression.EvaluationContext; import org.springframework.expression.Expression; -import org.springframework.expression.ParserContext; -import org.springframework.expression.common.LiteralExpression; import org.springframework.expression.spel.standard.SpelExpressionParser; -import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** * MongoDB specific {@link MongoPersistentEntity} implementation that adds Mongo specific meta-data such as the * collection name and the like. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ -public class BasicMongoPersistentEntity extends BasicPersistentEntity implements - MongoPersistentEntity, ApplicationContextAware { +public class BasicMongoPersistentEntity extends BasicPersistentEntity + implements MongoPersistentEntity { - private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!"; - private static final SpelExpressionParser PARSER = new SpelExpressionParser(); + private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected; Both %s and %s map to the same field name %s; Disambiguate using @Field annotation"; + private static final ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); private final String collection; private final String language; - private final StandardEvaluationContext context; - private final Expression expression; + private final @Nullable ValueExpression expression; + + private final @Nullable String collation; + private final @Nullable ValueExpression collationExpression; + + private final ShardKey shardKey; /** * Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the * collection name to the entities simple type name. - * + * * @param typeInformation must not be {@literal null}. */ public BasicMongoPersistentEntity(TypeInformation typeInformation) { @@ -77,78 +87,127 @@ public BasicMongoPersistentEntity(TypeInformation typeInformation) { Class rawType = typeInformation.getType(); String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType); - Document document = this.findAnnotation(Document.class); - - this.expression = detectExpression(document); - this.context = new StandardEvaluationContext(); - - if (document != null) { + if (this.isAnnotationPresent(Document.class)) { + Document document = this.getRequiredAnnotation(Document.class); this.collection = StringUtils.hasText(document.collection()) ? document.collection() : fallback; this.language = StringUtils.hasText(document.language()) ? 
document.language() : ""; + this.expression = detectExpression(document.collection()); + this.collation = document.collation(); + this.collationExpression = detectExpression(document.collation()); } else { + this.collection = fallback; this.language = ""; + this.expression = null; + this.collation = null; + this.collationExpression = null; } + + this.shardKey = detectShardKey(); } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) - */ - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + private ShardKey detectShardKey() { + + if (!isAnnotationPresent(Sharded.class)) { + return ShardKey.none(); + } + + Sharded sharded = getRequiredAnnotation(Sharded.class); + + String[] keyProperties = sharded.shardKey(); + if (ObjectUtils.isEmpty(keyProperties)) { + keyProperties = new String[] { FieldName.ID.name() }; + } + + ShardKey shardKey = ShardingStrategy.HASH.equals(sharded.shardingStrategy()) ? ShardKey.hash(keyProperties) + : ShardKey.range(keyProperties); - context.addPropertyAccessor(new BeanFactoryAccessor()); - context.setBeanResolver(new BeanFactoryResolver(applicationContext)); - context.setRootObject(applicationContext); + return sharded.immutableKey() ? ShardKey.immutable(shardKey) : shardKey; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getCollection() - */ + @Override public String getCollection() { - return expression == null ? collection : expression.getValue(context, String.class); + + return expression == null // + ? collection // + : ObjectUtils.nullSafeToString(expression.evaluate(getValueEvaluationContext(null))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getLanguage() - */ @Override public String getLanguage() { return this.language; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getTextScoreProperty() - */ + @Nullable @Override public MongoPersistentProperty getTextScoreProperty() { return getPersistentProperty(TextScore.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#hasTextScoreProperty() - */ @Override public boolean hasTextScoreProperty() { return getTextScoreProperty() != null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.BasicPersistentEntity#verify() - */ + @Override + public org.springframework.data.mongodb.core.query.Collation getCollation() { + + Object collationValue = collationExpression != null + ? collationExpression.evaluate(getValueEvaluationContext(null)) + : this.collation; + + if (collationValue == null) { + return null; + } + + if (collationValue instanceof org.bson.Document document) { + return org.springframework.data.mongodb.core.query.Collation.from(document); + } + + if (collationValue instanceof org.springframework.data.mongodb.core.query.Collation collation) { + return collation; + } + + return StringUtils.hasText(collationValue.toString()) + ? 
org.springframework.data.mongodb.core.query.Collation.parse(collationValue.toString()) + : null; + } + + @Override + public ShardKey getShardKey() { + return shardKey; + } + @Override public void verify() { + super.verify(); + verifyFieldUniqueness(); verifyFieldTypes(); } + @Override + public EvaluationContext getEvaluationContext(Object rootObject) { + return super.getEvaluationContext(rootObject); + } + + @Override + public EvaluationContext getEvaluationContext(Object rootObject, ExpressionDependencies dependencies) { + return super.getEvaluationContext(rootObject, dependencies); + } + + @Override + public ValueEvaluationContext getValueEvaluationContext(Object rootObject) { + return super.getValueEvaluationContext(rootObject); + } + + @Override + public ValueEvaluationContext getValueEvaluationContext(Object rootObject, ExpressionDependencies dependencies) { + return super.getValueEvaluationContext(rootObject, dependencies); + } + private void verifyFieldUniqueness() { AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler(); @@ -163,24 +222,24 @@ private void verifyFieldTypes() { /** * {@link Comparator} implementation inspecting the {@link MongoPersistentProperty}'s order. - * + * * @author Oliver Gierke */ - static enum MongoPersistentPropertyComparator implements Comparator { + enum MongoPersistentPropertyComparator implements Comparator { INSTANCE; - /* - * (non-Javadoc) - * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) - */ - public int compare(MongoPersistentProperty o1, MongoPersistentProperty o2) { + public int compare(@Nullable MongoPersistentProperty o1, @Nullable MongoPersistentProperty o2) { - if (o1.getFieldOrder() == Integer.MAX_VALUE) { + if (o1 != null && o1.getFieldOrder() == Integer.MAX_VALUE) { return 1; } - if (o2.getFieldOrder() == Integer.MAX_VALUE) { + if (o2 != null && o2.getFieldOrder() == Integer.MAX_VALUE) { + return -1; + } + + if (o1 == null && o2 == null) { return -1; } @@ -193,14 +252,14 @@ public int compare(MongoPersistentProperty o1, MongoPersistentProperty o2) { * that is annotated with @see {@link Id}. The property id is updated according to the following rules: 1) An id * property which is defined explicitly takes precedence over an implicitly defined id property. 2) In case of any * ambiguity a @see {@link MappingException} is thrown. - * + * * @param property - the new id property candidate - * @return + * @return can be {@literal null}. */ @Override protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNull(MongoPersistentProperty property) { - Assert.notNull(property); + Assert.notNull(property, "MongoPersistentProperty must not be null"); if (!property.isIdProperty()) { return null; @@ -210,7 +269,7 @@ protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNul boolean currentIdPropertyIsSet = currentIdProperty != null; @SuppressWarnings("null") - boolean currentIdPropertyIsExplicit = currentIdPropertyIsSet ? 
currentIdProperty.isExplicitIdProperty() : false; + boolean currentIdPropertyIsExplicit = currentIdPropertyIsSet && currentIdProperty.isExplicitIdProperty(); boolean newIdPropertyIsExplicit = property.isExplicitIdProperty(); if (!currentIdPropertyIsSet) { @@ -222,9 +281,9 @@ protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNul Field currentIdPropertyField = currentIdProperty.getField(); if (newIdPropertyIsExplicit && currentIdPropertyIsExplicit) { - throw new MappingException(String.format( - "Attempt to add explicit id property %s but already have an property %s registered " - + "as explicit id. Check your mapping configuration!", property.getField(), currentIdPropertyField)); + throw new MappingException( + String.format("Attempt to add explicit id property %s but already have an property %s registered " + + "as explicit id; Check your mapping configuration", property.getField(), currentIdPropertyField)); } else if (newIdPropertyIsExplicit && !currentIdPropertyIsExplicit) { // explicit id property takes precedence over implicit id property @@ -234,47 +293,40 @@ protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNul // no id property override - current property is explicitly defined } else { - throw new MappingException(String.format( - "Attempt to add id property %s but already have an property %s registered " - + "as id. Check your mapping configuration!", property.getField(), currentIdPropertyField)); + throw new MappingException( + String.format("Attempt to add id property %s but already have an property %s registered " + + "as id; Check your mapping configuration", property.getField(), currentIdPropertyField)); } return null; } /** - * Returns a SpEL {@link Expression} frór the collection String expressed in the given {@link Document} annotation if - * present or {@literal null} otherwise. Will also return {@literal null} it the collection {@link String} evaluates - * to a {@link LiteralExpression} (indicating that no subsequent evaluation is necessary). - * - * @param document can be {@literal null} - * @return + * Returns a Value {@link Expression} if the given {@link String} is actually an expression that does not evaluate to + * a literal expression (indicating that no subsequent evaluation is necessary). + * + * @param potentialExpression can be {@literal null} + * @return can be {@literal null}. */ - private static Expression detectExpression(Document document) { + @Nullable + private static ValueExpression detectExpression(@Nullable String potentialExpression) { - if (document == null) { + if (!StringUtils.hasText(potentialExpression)) { return null; } - String collection = document.collection(); - - if (!StringUtils.hasText(collection)) { - return null; - } - - Expression expression = PARSER.parseExpression(document.collection(), ParserContext.TEMPLATE_EXPRESSION); - - return expression instanceof LiteralExpression ? null : expression; + ValueExpression expression = PARSER.parse(potentialExpression); + return expression.isLiteral() ? null : expression; } /** * Handler to collect {@link MongoPersistentProperty} instances and check that each of them is mapped to a distinct * field name. 
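A hypothetical entity illustrating the metadata this implementation derives from the annotations: the collection name is a template expression resolved by getCollection(), and @Sharded is turned into a ShardKey by detectShardKey(). The tenantProvider bean and the field names are assumptions.

@Document(collection = "#{@tenantProvider.tenantId()}_orders")
@Sharded(shardKey = { "country", "customerId" })
class Order {

	@Id String id;
	String country;
	String customerId;
}

// entity.getCollection() -> e.g. "acme_orders" (the expression is evaluated per lookup)
// entity.getShardKey()   -> a range shard key over "country" and "customerId"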
- * + * * @author Oliver Gierke */ - private static class AssertFieldNameUniquenessHandler implements PropertyHandler, - AssociationHandler { + private static class AssertFieldNameUniquenessHandler + implements PropertyHandler, AssociationHandler { private final Map properties = new HashMap(); @@ -292,24 +344,45 @@ private void assertUniqueness(MongoPersistentProperty property) { MongoPersistentProperty existingProperty = properties.get(fieldName); if (existingProperty != null) { - throw new MappingException(String.format(AMBIGUOUS_FIELD_MAPPING, property.toString(), - existingProperty.toString(), fieldName)); + throw new MappingException(String.format(AMBIGUOUS_FIELD_MAPPING, property, existingProperty, fieldName)); } properties.put(fieldName, property); } } + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getType().getSimpleName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } + /** * @author Christoph Strobl * @since 1.6 */ private static class PropertyTypeAssertionHandler implements PropertyHandler { - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.PropertyHandler#doWithPersistentProperty(org.springframework.data.mapping.PersistentProperty) - */ @Override public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) { @@ -336,9 +409,9 @@ private static void potentiallyAssertDBRefTargetType(MongoPersistentProperty per if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) { if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) { - throw new MappingException(String.format( - "Invalid lazy DBRef property for %s. Found %s which must not be an array nor a final class.", - persistentProperty.getField(), persistentProperty.getActualType())); + throw new MappingException( + String.format("Invalid lazy DBRef property for %s; Found %s which must not be an array nor a final class", + persistentProperty.getField(), persistentProperty.getActualType())); } } } @@ -352,7 +425,7 @@ private static void assertPropertyType(MongoPersistentProperty persistentPropert } throw new MappingException( - String.format("Missmatching types for %s. Found %s expected one of %s.", persistentProperty.getField(), + String.format("Mismatching types for %s; Found %s expected one of %s", persistentProperty.getField(), persistentProperty.getActualType(), StringUtils.arrayToCommaDelimitedString(validMatches))); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 4680791d1f..5c3b4e6532 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,82 +15,75 @@ */ package org.springframework.data.mongodb.core.mapping; -import java.beans.PropertyDescriptor; -import java.lang.reflect.Field; -import java.math.BigInteger; -import java.util.HashSet; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Set; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.data.annotation.Id; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueEvaluationContext; import org.springframework.data.mapping.Association; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty; import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.MongoField.MongoFieldBuilder; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.DBObject; - /** - * MongoDB specific {@link org.springframework.data.mapping.MongoPersistentProperty} implementation. - * + * MongoDB specific {@link org.springframework.data.mapping.PersistentProperty} implementation. 
+ * + * @author Oliver Gierke + * @author Patryk Wasik + * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava */ -public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty> implements - MongoPersistentProperty { +public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty> + implements MongoPersistentProperty { - private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class); + private static final Log LOG = LogFactory.getLog(BasicMongoPersistentProperty.class); - private static final String ID_FIELD_NAME = "_id"; + public static final String ID_FIELD_NAME = FieldName.ID.name(); private static final String LANGUAGE_FIELD_NAME = "language"; - private static final Set<Class<?>> SUPPORTED_ID_TYPES = new HashSet<Class<?>>(); - private static final Set<String> SUPPORTED_ID_PROPERTY_NAMES = new HashSet<String>(); - - static { - - SUPPORTED_ID_TYPES.add(ObjectId.class); - SUPPORTED_ID_TYPES.add(String.class); - SUPPORTED_ID_TYPES.add(BigInteger.class); - - SUPPORTED_ID_PROPERTY_NAMES.add("id"); - SUPPORTED_ID_PROPERTY_NAMES.add("_id"); - } + private static final Set<String> SUPPORTED_ID_PROPERTY_NAMES = Set.of("id", ID_FIELD_NAME); private final FieldNamingStrategy fieldNamingStrategy; /** * Creates a new {@link BasicMongoPersistentProperty}. - * - * @param field - * @param propertyDescriptor - * @param owner - * @param simpleTypeHolder - * @param fieldNamingStrategy + * + * @param property the source property. + * @param owner the owning entity. + * @param simpleTypeHolder must not be {@literal null}. + * @param fieldNamingStrategy can be {@literal null}. */ - public BasicMongoPersistentProperty(Field field, PropertyDescriptor propertyDescriptor, - MongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder, FieldNamingStrategy fieldNamingStrategy) { + public BasicMongoPersistentProperty(Property property, MongoPersistentEntity<?> owner, + SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) { - super(field, propertyDescriptor, owner, simpleTypeHolder); + super(property, owner, simpleTypeHolder); this.fieldNamingStrategy = fieldNamingStrategy == null ? PropertyNameFieldNamingStrategy.INSTANCE : fieldNamingStrategy; - - if (isIdProperty() && getFieldName() != ID_FIELD_NAME) { - LOG.warn("Customizing field name for id property not allowed! Custom name will not be considered!"); - } } /** * Also considers fields as id that are of supported id type and name. - * + * * @see #SUPPORTED_ID_PROPERTY_NAMES - * @see #SUPPORTED_ID_TYPES */ @Override public boolean isIdProperty() { @@ -104,49 +97,50 @@ public boolean isIdProperty() { return SUPPORTED_ID_PROPERTY_NAMES.contains(getName()) && !hasExplicitFieldName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitIdProperty() - */ @Override public boolean isExplicitIdProperty() { - return isAnnotationPresent(Id.class); + return super.isIdProperty(); } /** - * Returns the key to be used to store the value of the property inside a Mongo {@link DBObject}. - + + * Returns the key to be used to store the value of the property inside a Mongo {@link org.bson.Document}.
+ * * @return */ + @Override public String getFieldName() { + return getMongoField().getName().name(); + } - if (isIdProperty()) { + @Override + public Class getFieldType() { - if (owner == null) { - return ID_FIELD_NAME; - } + Field fieldAnnotation = findAnnotation(Field.class); - if (owner.getIdProperty() == null) { - return ID_FIELD_NAME; - } + if (!getOwner().isIdProperty(this)) { - if (owner.isIdProperty(this)) { - return ID_FIELD_NAME; + if (fieldAnnotation == null || fieldAnnotation.targetType() == FieldType.IMPLICIT) { + return getType(); } + + return fieldAnnotation.targetType().getJavaClass(); } - if (hasExplicitFieldName()) { - return getAnnotatedFieldName(); + if (fieldAnnotation == null) { + return FieldType.OBJECT_ID.getJavaClass(); } - String fieldName = fieldNamingStrategy.getFieldName(this); + FieldType fieldType = getMongoField().getFieldType(); + if (fieldType == FieldType.IMPLICIT) { - if (!StringUtils.hasText(fieldName)) { - throw new MappingException(String.format("Invalid (null or empty) field name returned for property %s by %s!", - this, fieldNamingStrategy.getClass())); + if (isEntity()) { + return org.bson.Document.class; + } + + return getType(); } - return fieldName; + return fieldType.getJavaClass(); } /** @@ -154,79 +148,205 @@ public String getFieldName() { * {@link org.springframework.data.mongodb.core.mapping.Field#value()} present. * @since 1.7 */ - protected boolean hasExplicitFieldName() { + @Override + public boolean hasExplicitFieldName() { return StringUtils.hasText(getAnnotatedFieldName()); } + @Nullable private String getAnnotatedFieldName() { - org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(org.springframework.data.mongodb.core.mapping.Field.class); - - if (annotation != null && StringUtils.hasText(annotation.value())) { - return annotation.value(); - } + org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation( + org.springframework.data.mongodb.core.mapping.Field.class); - return null; + return annotation != null ? annotation.value() : null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getFieldOrder() - */ + @Override public int getFieldOrder() { - org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(org.springframework.data.mongodb.core.mapping.Field.class); - return annotation != null ? 
annotation.order() : Integer.MAX_VALUE; + return getMongoField().getOrder(); + } + + @Override + public boolean writeNullValues() { + + org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation( + org.springframework.data.mongodb.core.mapping.Field.class); + + return annotation != null && annotation.write() == Field.Write.ALWAYS; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AbstractPersistentProperty#createAssociation() - */ @Override protected Association<MongoPersistentProperty> createAssociation() { - return new Association<MongoPersistentProperty>(this, null); + return new Association<>(this, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isDbReference() - */ + @Override public boolean isDbReference() { return isAnnotationPresent(DBRef.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDBRef() - */ + @Override + public boolean isDocumentReference() { + return isAnnotationPresent(DocumentReference.class); + } + + @Override + @Nullable public DBRef getDBRef() { return findAnnotation(DBRef.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isLanguageProperty() - */ + @Nullable + @Override + public DocumentReference getDocumentReference() { + return findAnnotation(DocumentReference.class); + } + @Override public boolean isLanguageProperty() { return getFieldName().equals(LANGUAGE_FIELD_NAME) || isExplicitLanguageProperty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitLanguageProperty() - */ @Override public boolean isExplicitLanguageProperty() { return isAnnotationPresent(Language.class); - }; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isTextScoreProperty() - */ @Override public boolean isTextScoreProperty() { return isAnnotationPresent(TextScore.class); } + + /** + * Obtain the {@link EvaluationContext} for a specific root object. + * + * @param rootObject can be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + public EvaluationContext getEvaluationContext(@Nullable Object rootObject) { + + if (getOwner() instanceof BasicMongoPersistentEntity<?> mongoPersistentEntity) { + return mongoPersistentEntity.getEvaluationContext(rootObject); + } + return rootObject != null ? new StandardEvaluationContext(rootObject) : new StandardEvaluationContext(); + } + + /** + * Obtain the {@link ValueEvaluationContext} for a specific root object. + * + * @param rootObject can be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + public ValueEvaluationContext getValueEvaluationContext(@Nullable Object rootObject) { + + if (getOwner() instanceof BasicMongoPersistentEntity<?> mongoPersistentEntity) { + return mongoPersistentEntity.getValueEvaluationContext(rootObject); + } + + StandardEvaluationContext standardEvaluationContext = rootObject != null ?
new StandardEvaluationContext(rootObject) + : new StandardEvaluationContext(); + + return ValueEvaluationContext.of(new StandardEnvironment(), standardEvaluationContext); + } + + @Override + public MongoField getMongoField() { + return doGetMongoField(); + } + + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getOwner().getType().getSimpleName() + "." + getName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } + + protected MongoField doGetMongoField() { + + MongoFieldBuilder builder = MongoField.builder(); + if (isAnnotationPresent(Field.class) && Type.KEY.equals(findAnnotation(Field.class).nameType())) { + builder.name(doGetFieldName()); + } else { + builder.path(doGetFieldName()); + } + builder.fieldType(doGetFieldType()); + builder.order(doGetFieldOrder()); + return builder.build(); + } + + private String doGetFieldName() { + + if (isIdProperty()) { + + if (getOwner().getIdProperty() == null) { + return ID_FIELD_NAME; + } + + if (getOwner().isIdProperty(this)) { + return ID_FIELD_NAME; + } + } + + if (hasExplicitFieldName()) { + return getAnnotatedFieldName(); + } + + String fieldName = fieldNamingStrategy.getFieldName(this); + + if (!StringUtils.hasText(fieldName)) { + throw new MappingException(String.format("Invalid (null or empty) field name returned for property %s by %s", + this, fieldNamingStrategy.getClass())); + } + + return fieldName; + } + + private FieldType doGetFieldType() { + + Field fieldAnnotation = findAnnotation(Field.class); + return fieldAnnotation != null ? fieldAnnotation.targetType() : FieldType.IMPLICIT; + } + + private int doGetFieldOrder() { + + Field annotation = findAnnotation(Field.class); + return annotation != null ? annotation.order() : Integer.MAX_VALUE; + } + + protected void validate() { + + if (isIdProperty() && hasExplicitFieldName()) { + + String annotatedName = getAnnotatedFieldName(); + if (!ID_FIELD_NAME.equals(annotatedName)) { + if (LOG.isWarnEnabled()) { + LOG.warn(String.format( + "Customizing field name for id property '%s.%s' is not allowed; Custom name ('%s') will not be considered", + getOwner().getName(), getName(), annotatedName)); + } + } + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java index e9389162bf..105c38b288 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,104 +15,134 @@ */ package org.springframework.data.mongodb.core.mapping; -import java.beans.PropertyDescriptor; -import java.lang.reflect.Field; - import org.springframework.data.mapping.model.FieldNamingStrategy; +import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; /** * {@link MongoPersistentProperty} caching access to {@link #isIdProperty()} and {@link #getFieldName()}. - * + * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty { - private Boolean isIdProperty; - private Boolean isAssociation; - private String fieldName; - private Boolean usePropertyAccess; - private Boolean isTransient; + private final Lazy<Boolean> isEntity = Lazy.of(super::isEntity); + private final Lazy<Boolean> isUnwrapped = Lazy.of(super::isUnwrapped); + private final Lazy<Boolean> isIdProperty = Lazy.of(super::isIdProperty); + private final Lazy<Boolean> isAssociation = Lazy.of(super::isAssociation); + private final Lazy<DBRef> dbref = Lazy.of(super::getDBRef); + private final Lazy<String> fieldName = Lazy.of(super::getFieldName); + private final Lazy<Boolean> hasExplicitFieldName = Lazy.of(super::hasExplicitFieldName); + private final Lazy<Boolean> writeNullValues = Lazy.of(super::writeNullValues); + private final Lazy<Class<?>> fieldType = Lazy.of(super::getFieldType); + private final Lazy<Boolean> usePropertyAccess = Lazy.of(super::usePropertyAccess); + private final Lazy<Boolean> isTransient = Lazy.of(super::isTransient); + private final Lazy<MongoField> mongoField = Lazy.of(super::getMongoField); + private final Lazy<Boolean> isTextScoreProperty = Lazy.of(super::isTextScoreProperty); + private final Lazy<Boolean> isLanguageProperty = Lazy.of(super::isLanguageProperty); + private final Lazy<Boolean> isExplicitLanguageProperty = Lazy.of(super::isExplicitLanguageProperty); + private final Lazy<DocumentReference> documentReference = Lazy.of(super::getDocumentReference); /** * Creates a new {@link CachingMongoPersistentProperty}. - * - * @param field - * @param propertyDescriptor - * @param owner - * @param simpleTypeHolder - * @param fieldNamingStrategy + * + * @param property must not be {@literal null}. + * @param owner must not be {@literal null}. + * @param simpleTypeHolder must not be {@literal null}. + * @param fieldNamingStrategy can be {@literal null}.
*/ - public CachingMongoPersistentProperty(Field field, PropertyDescriptor propertyDescriptor, - MongoPersistentEntity owner, SimpleTypeHolder simpleTypeHolder, FieldNamingStrategy fieldNamingStrategy) { - super(field, propertyDescriptor, owner, simpleTypeHolder, fieldNamingStrategy); + public CachingMongoPersistentProperty(Property property, MongoPersistentEntity owner, + SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) { + super(property, owner, simpleTypeHolder, fieldNamingStrategy); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#isIdProperty() - */ @Override - public boolean isIdProperty() { + public boolean isEntity() { + return isEntity.get(); + } - if (this.isIdProperty == null) { - this.isIdProperty = super.isIdProperty(); - } + @Override + public boolean isUnwrapped() { + return isUnwrapped.get(); + } - return this.isIdProperty; + @Override + public boolean isIdProperty() { + return isIdProperty.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#isAssociation() - */ @Override public boolean isAssociation() { - if (this.isAssociation == null) { - this.isAssociation = super.isAssociation(); - } - return this.isAssociation; + return isAssociation.get(); + } + + @Override + public boolean hasExplicitFieldName() { + return hasExplicitFieldName.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldName() - */ @Override public String getFieldName() { + return fieldName.get(); + } - if (this.fieldName == null) { - this.fieldName = super.getFieldName(); - } + @Override + public boolean writeNullValues() { + return writeNullValues.get(); + } - return this.fieldName; + @Override + public Class getFieldType() { + return fieldType.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess() - */ @Override public boolean usePropertyAccess() { + return usePropertyAccess.get(); + } - if (this.usePropertyAccess == null) { - this.usePropertyAccess = super.usePropertyAccess(); - } + @Override + public boolean isTransient() { + return isTransient.get(); + } - return this.usePropertyAccess; + @Override + public boolean isTextScoreProperty() { + return isTextScoreProperty.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#isTransient() - */ @Override - public boolean isTransient() { + public boolean isDbReference() { + return getDBRef() != null; + } + + @Override + public DBRef getDBRef() { + return dbref.getNullable(); + } - if (this.isTransient == null) { - this.isTransient = super.isTransient(); - } + @Override + public DocumentReference getDocumentReference() { + return documentReference.getNullable(); + } + + @Override + public boolean isLanguageProperty() { + return isLanguageProperty.get(); + } - return this.isTransient; + @Override + public boolean isExplicitLanguageProperty() { + return isExplicitLanguageProperty.get(); } + + @Override + public MongoField getMongoField() { + return mongoField.get(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java index 3272ae41f3..7a861829a4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java 
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,7 +25,7 @@ /** * An annotation that indicates the annotated field is to be stored using a {@link com.mongodb.DBRef}. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Thomas Darimont @@ -38,15 +38,15 @@ /** * The database the referred entity resides in. - * - * @return + * + * @return empty String by default. */ String db() default ""; /** * Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}. - * - * @return + * + * @return {@literal false} by default. */ boolean lazy() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java index 3ff03c639f..ef4980fab6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2014 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import java.lang.annotation.ElementType; @@ -22,29 +21,59 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.Persistent; +import org.springframework.data.mongodb.core.annotation.Collation; /** * Identifies a domain object to be persisted to MongoDB. - * - * @author Jon Brisbin - * @author Oliver Gierke ogierke@vmware.com + * + * @author Jon Brisbin + * @author Oliver Gierke * @author Christoph Strobl */ @Persistent +@Collation @Inherited @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE }) public @interface Document { + /** + * The collection the document representing the entity is supposed to be stored in. If not configured, a default + * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically + * calculate the collection name on a per-operation basis. + * + * @return the name of the collection to be used. + */ + @AliasFor("collection") + String value() default ""; + + /** + * The collection the document representing the entity is supposed to be stored in.
If not configured, a default + * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically + * calculate the collection name on a per-operation basis. + * + * @return the name of the collection to be used. + */ + @AliasFor("value") String collection() default ""; /** * Defines the default language to be used with this document. - * + * + * @return an empty String by default. * @since 1.6 - * @return */ String language() default ""; + /** + * Defines the collation to apply when executing a query or creating indexes. + * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java new file mode 100644 index 0000000000..90da5dd87d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +/** + * A custom pointer to a linked document to be used along with {@link DocumentReference} for storing the linkage value. + * + * @author Christoph Strobl + * @since 3.3 + */ +@FunctionalInterface +public interface DocumentPointer<T> { + + /** + * The actual pointer value. This can be any simple type, like a {@link String} or {@link org.bson.types.ObjectId} or + * a {@link org.bson.Document} holding more information like the target collection, multiple fields forming the key, + * etc. + * + * @return the value stored in MongoDB and used for constructing the {@link DocumentReference#lookup() lookup query}. + */ + T getPointer(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java new file mode 100644 index 0000000000..5b8a74f40e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java @@ -0,0 +1,132 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.Reference; +import org.springframework.data.mongodb.MongoDatabaseFactory; + +/** + * A {@link DocumentReference} allows referencing entities in MongoDB using a flexible schema. While the goal is the + * same as when using {@link DBRef}, the store representation is different. The reference can be anything, a single + * value, an entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the + * mapping layer will use the referenced entity's {@literal id} value for storage and retrieval. + * + *
                    + * public class Account {
                    + *   private String id;
                    + *   private Float total;
                    + * }
                    + *
                    + * public class Person {
                    + *   private String id;
                    + *   @DocumentReference
                    + *   private List<Account> accounts;
                    + * }
                    + *
                    + * Account account = ...
                    + *
                    + * mongoTemplate.insert(account);
                    + *
                    + * template.update(Person.class)
                    + *   .matching(where("id").is(...))
                    + *   .apply(new Update().push("accounts").value(account))
                    + *   .first();
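+ *
+ * // Person.accounts now stores only the Account id values; on read, each value is
+ * // resolved through the default lookup "{ '_id' : ?#{#target} }"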
                    + * 
                    + * + * {@link #lookup()} allows defining a query filter that is independent from the {@literal _id} field and in combination + * with {@link org.springframework.data.convert.WritingConverter writing converters} offers a flexible way of defining + * references between entities. + * + *
                    + * public class Book {
                    + * 	private ObjectId id;
                    + * 	private String title;
                    + *
                    + * 	@Field("publisher_ac") @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
                    + * }
                    + *
                    + * public class Publisher {
                    + *
                    + * 	private ObjectId id;
                    + * 	private String acronym;
                    + * 	private String name;
                    + *
                    + * 	@DocumentReference(lazy = true) private List<Book> books;
                    + * }
                    + *
                    + * @WritingConverter
                    + * public class PublisherReferenceConverter implements Converter<Publisher, DocumentPointer<String>> {
                    + *
                    + * 	public DocumentPointer<String> convert(Publisher source) {
                    + * 		return () -> source.getAcronym();
                    + * 	}
                    + * }
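+ *
+ * // the converter writes the acronym as the pointer value, which is what #target
+ * // resolves to in the "{ 'acronym' : ?#{#target} }" lookup on Book.publisher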
                    + * 
+ * + * @author Christoph Strobl + * @since 3.3 + * @see MongoDB + * Reference Documentation + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD }) +@Reference +public @interface DocumentReference { + + /** + * The database the referenced entity resides in. Uses the default database provided by + * {@link org.springframework.data.mongodb.MongoDatabaseFactory} if empty. + * + * @see MongoDatabaseFactory#getMongoDatabase() + * @see MongoDatabaseFactory#getMongoDatabase(String) + */ + String db() default ""; + + /** + * The collection the referenced entity resides in. Defaults to the collection of the referenced entity type. + * + * @see MongoPersistentEntity#getCollection() + */ + String collection() default ""; + + /** + * The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property the + * individual lookups are combined via an {@code $or} operator. {@code target} points to the source value (or + * document) stored at the reference property. Properties of {@code target} can be used to define the reference query. + * + * @return an {@literal _id} based lookup. + */ + String lookup() default "{ '_id' : ?#{#target} }"; + + /** + * A specific sort to apply when resolving the referenced entities. + */ + String sort() default ""; + + /** + * Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}. + * + * @return {@literal false} by default. + */ + boolean lazy() default false; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java new file mode 100644 index 0000000000..3e169026a9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@link Encrypted} provides data required for MongoDB Client Side Field Level Encryption that is applied during schema + * resolution. It can be applied on top level (typically those types annotated with {@link Document}) to provide the + * {@literal encryptMetadata}. + * + *
                    + * @Document
                    + * @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==")
                    + * public class Patient {
                    + * 	 private ObjectId id;
                    + * 	 private String name;
                    + *
                    + * 	 @Field("publisher_ac")
                    + * 	 @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
                    + * }
                    + *
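+ * // the type-level keyId above resolves to the following encryptMetadata entry
+ * // of the generated JSON schema: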
                    + * "encryptMetadata": {
                    + *    "keyId": [
                    + *      {
                    + *        "$binary": {
                    + *          "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
                    + *          "subType": "04"
                    + *        }
                    + *      }
                    + *    ]
                    + *  }
                    + * 
                    + * + *
                    + * On property level it is used for deriving field specific {@literal encrypt} settings. + * + *
                    + * public class Patient {
                    + * 	 private ObjectId id;
                    + * 	 private String name;
                    + *
                    + * 	 @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
                    + * 	 private String ssn;
                    + * }
                    + *
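+ * // the property-level annotation above derives the field-specific encrypt
+ * // settings below: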
                    + * "ssn" : {
                    + *   "encrypt": {
                    + *      "keyId": [
                    + *        {
                    + *          "$binary": {
                    + *            "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
                    + *            "subType": "04"
                    + *          }
                    + *        }
                    + *      ],
                    + *      "algorithm" : "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic",
                    + *      "bsonType" : "string"
                    + *    }
                    + *  }
                    + * 
                    + * + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.FIELD }) +public @interface Encrypted { + + /** + * Get the {@code keyId} to use. The value must resolve to either the UUID representation of the key or a base64 + * encoded value representing the UUID value. + *
                    + * On {@link ElementType#TYPE} level the {@link #keyId()} can be left empty if explicitly set for fields.
                    + * On {@link ElementType#FIELD} level the {@link #keyId()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the key id to use. May contain a parsable {@link org.springframework.expression.Expression expression}. In + * this case the {@code #target} variable will hold the target element name. + */ + String[] keyId() default {}; + + /** + * Set the algorithm to use. + *
                    + * On {@link ElementType#TYPE} level the {@link #algorithm()} can be left empty if explicitly set for fields.
                    + * On {@link ElementType#FIELD} level the {@link #algorithm()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the encryption algorithm. + * @see org.springframework.data.mongodb.core.EncryptionAlgorithms + */ + String algorithm() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java new file mode 100644 index 0000000000..37d1019f62 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.mongodb.core.convert.encryption.EncryptingConverter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; + +/** + * {@link ExplicitEncrypted} is a {@link ElementType#FIELD field} level {@link ValueConverter} annotation that indicates + * the target element is subject to encryption during the mapping process, in which a given domain type is converted + * into the store specific format. + *

+ * The {@link #value()} attribute defines the bean type to look up within the + * {@link org.springframework.context.ApplicationContext} to obtain the {@link EncryptingConverter} responsible for the + * actual {@literal en-/decryption}, while {@link #algorithm()} and {@link #keyAltName()} can be used to define aspects + * of the encryption process. + * + *

                    + * public class Patient {
                    + * 	private ObjectId id;
                    + * 	private String name;
                    + *
+ * 	@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "secret-key-alternative-name") //
                    + * 	private String ssn;
                    + * }
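+ *
+ * // en-/decryption of 'ssn' is performed by the EncryptingConverter resolved via
+ * // value(), the MongoEncryptionConverter unless configured otherwise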
                    + * 
                    + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + * @see ValueConverter + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Encrypted +@ValueConverter +public @interface ExplicitEncrypted { + + /** + * Define the algorithm to use. + *

                    + * A {@literal Deterministic} algorithm ensures that a given input value always encrypts to the same output while a + * {@literal randomized} one will produce different results every time. A {@literal range} algorithm allows for + * the value to be queried whilst encrypted. + *

                    + * Please make sure to use an algorithm that is in line with MongoDB's encryption rules for simple types, complex + * objects and arrays as well as the query limitations that come with each of them. + * + * @return the string representation of the encryption algorithm to use. + * @see org.springframework.data.mongodb.core.EncryptionAlgorithms + */ + @AliasFor(annotation = Encrypted.class, value = "algorithm") + String algorithm() default ""; + + /** + * Set the {@literal Key Alternate Name} that references the {@literal Data Encryption Key} to be used. + *

                    + * An empty String indicates that no alternative key name was configured. + *

                    + * It is possible to use the {@literal "/"} character as a prefix to access a particular field value in the same + * domain type. In this case {@code "/name"} references the value of the {@literal name} field. Please note that + * update operations will require the full object to resolve those values. + * + * @return the {@literal Key Alternate Name} if set or an empty {@link String}. + */ + String keyAltName() default ""; + + /** + * The {@link EncryptingConverter} type handling the {@literal en-/decryption} of the annotated property. + * + * @return the configured {@link EncryptingConverter}. A {@link MongoEncryptionConverter} by default. + */ + @AliasFor(annotation = ValueConverter.class, value = "value") + Class value() default MongoEncryptionConverter.class; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java index 19ffb6ebdf..68ff4bb976 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +21,15 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; + /** * Annotation to define custom metadata for document fields. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Divya Srivastava */ @Documented @Retention(RetentionPolicy.RUNTIME) @@ -32,16 +37,75 @@ public @interface Field { /** - * The key to be used to store the field inside the document. - * - * @return + * The key to be used to store the field inside the document. Alias for {@link #name()}. + * + * @return an empty {@link String} by default. + * @see #name() */ + @AliasFor("name") String value() default ""; + /** + * The key to be used to store the field inside the document. Alias for {@link #value()}. The name may contain MongoDB + * special characters like dot ({@literal .}). In this case the name is by default treated as a {@link Type#PATH + * path}. To preserve dots within the name set the {@link #nameType()} attribute to {@link Type#KEY}. + * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor("value") + String name() default ""; + + /** + * The used {@link Type type} has impact on how a given {@link #name()} is treated if it contains dot ({@literal .}) + * characters. + * + * @return {@link Type#PATH} by default. + * @since 4.2 + */ + Type nameType() default Type.PATH; + /** * The order in which various fields shall be stored. Has to be a positive integer. - * + * * @return the order the field shall have in the document or -1 if undefined. */ int order() default Integer.MAX_VALUE; + + /** + * The actual desired target type the field should be stored as. 
+ * + * @return {@link FieldType#IMPLICIT} by default. + * @since 2.2 + */ + FieldType targetType() default FieldType.IMPLICIT; + + /** + * Write rules when to include a property value upon conversion. If set to {@link Write#NON_NULL} (default) + * {@literal null} values are not written to the target {@code Document}. Setting the value to {@link Write#ALWAYS} + * explicitly adds an entry for the given field holding {@literal null} as a value {@code 'fieldName' : null }.
+ * NOTE: Setting the value to {@link Write#ALWAYS} may lead to increased document size. + * + * @return {@link Write#NON_NULL} by default. + * @since 3.3 + */ + Write write() default Write.NON_NULL; + + /** + * Enumeration of write strategies to define when a property is included for write conversion. + * + * @since 3.3 + */ + enum Write { + + /** + * Value that indicates that the property is to be always included, independent of the value of the property. + */ + ALWAYS, + + /** + * Value that indicates that only properties with non-{@literal null} values are to be included. + */ + NON_NULL + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldName.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldName.java new file mode 100644 index 0000000000..2efb50a42f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldName.java @@ -0,0 +1,140 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.springframework.util.ObjectUtils; + +/** + * Value Object representing a field name that should be used to read/write fields within the MongoDB document. + * {@link FieldName Field names} may contain special characters (such as dot ({@literal .})) but may be + * treated differently depending on their {@link Type type}. + * + * @author Christoph Strobl + * @since 4.2 + */ +public record FieldName(String name, Type type, String[] parts) { + + public FieldName(String name, Type type) { + this(name, type, name.split("\\.")); + } + + private static final String ID_KEY = "_id"; + + public static final FieldName ID = new FieldName(ID_KEY, Type.KEY); + + /** + * Create a new {@link FieldName} that treats the given {@literal value} as is. + * + * @param value must not be {@literal null}. + * @return new instance of {@link FieldName}. + */ + public static FieldName name(String value) { + return new FieldName(value, Type.KEY); + } + + /** + * Create a new {@link FieldName} that treats the given {@literal value} as a path. If the {@literal value} contains + * dot ({@literal .}) characters, they are considered delimiters in a path. + * + * @param value must not be {@literal null}. + * @return new instance of {@link FieldName}. + */ + public static FieldName path(String value) { + return new FieldName(value, Type.PATH); + } + + /** + * Get the parts the field name consists of. If the {@link FieldName} is a {@link Type#KEY} or a {@link Type#PATH} + * that does not contain dot ({@literal .}) characters, an array containing a single element is returned. Otherwise the + * {@link #name()} is split into segments using dot ({@literal .}) as a separator. + * + * @return never {@literal null}.
+ */ + public String[] parts() { + + if (isKey()) { + return new String[] { name }; + } + + return parts; + } + + /** + * @param type the {@link Type} to compare to {@link #type()}. + * @return {@literal true} if values are equal. + */ + public boolean isOfType(Type type) { + return ObjectUtils.nullSafeEquals(type(), type); + } + + /** + * @return whether the field name represents a key (i.e. as-is name). + */ + public boolean isKey() { + return isOfType(Type.KEY); + } + + /** + * @return whether the field name represents a path (i.e. dot-path). + */ + public boolean isPath() { + return isOfType(Type.PATH); + } + + @Override + public String toString() { + return "FieldName{%s=%s}".formatted(isKey() ? "key" : "path", name); + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + FieldName fieldName = (FieldName) o; + return ObjectUtils.nullSafeEquals(name, fieldName.name) && type == fieldName.type; + } + + @Override + public int hashCode() { + + int hashCode = ObjectUtils.nullSafeHashCode(name); + return 31 * hashCode + ObjectUtils.nullSafeHashCode(type); + } + + /** + * The {@link FieldName.Type type} defines how to treat a {@link FieldName} that contains special characters. + * + * @author Christoph Strobl + * @since 4.2 + */ + public enum Type { + + /** + * Dot ({@literal .}) characters are treated as separators for segments in a path. + */ + PATH, + + /** + * Values are used as is. + */ + KEY + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java new file mode 100644 index 0000000000..7fc4199dd9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java @@ -0,0 +1,86 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
                    + * Bson types are identified by a {@code byte} {@link #getBsonType() value}. This enumeration typically returns the + * according bson type value except for {@link #IMPLICIT} which is a marker to derive the field type from a property. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + * @see org.bson.BsonType + */ +public enum FieldType { + + /** + * Implicit type that is derived from the property value. + */ + IMPLICIT(-1, Object.class), // + DOUBLE(1, Double.class), // + STRING(2, String.class), // + ARRAY(4, Object[].class), // + BINARY(5, Binary.class), // + OBJECT_ID(7, ObjectId.class), // + BOOLEAN(8, Boolean.class), // + DATE_TIME(9, Date.class), // + PATTERN(11, Pattern.class), // + SCRIPT(13, Code.class), // + INT32(15, Integer.class), // + TIMESTAMP(16, BSONTimestamp.class), // + INT64(17, Long.class), // + DECIMAL128(18, Decimal128.class); + + private final int bsonType; + private final Class javaClass; + + FieldType(int bsonType, Class javaClass) { + + this.bsonType = bsonType; + this.javaClass = javaClass; + } + + /** + * Returns the BSON type identifier. Can be {@code -1} if {@link FieldType} maps to a synthetic Bson type. + * + * @return the BSON type identifier. Can be {@code -1} if {@link FieldType} maps to a synthetic Bson type. + */ + public int getBsonType() { + return bsonType; + } + + /** + * Returns the Java class used to represent the type. + * + * @return the Java class used to represent the type. + */ + public Class getJavaClass() { + return javaClass; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java index dc763264bd..db8cd1790d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,7 +23,7 @@ /** * Mark property as language field. - * + * * @author Christoph Strobl * @since 1.6 */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoField.java new file mode 100644 index 0000000000..6f0e1ae4c3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoField.java @@ -0,0 +1,211 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Value Object for representing a field to read/write within a MongoDB {@link org.bson.Document}. + * + * @author Christoph Strobl + * @since 4.2 + */ +public class MongoField { + + private final FieldName name; + private final FieldType fieldType; + private final int order; + + protected MongoField(FieldName name, Class<?> targetFieldType, int fieldOrder) { + this(name, FieldType.valueOf(targetFieldType.getSimpleName()), fieldOrder); + } + + protected MongoField(FieldName name, FieldType fieldType, int fieldOrder) { + + this.name = name; + this.fieldType = fieldType; + this.order = fieldOrder; + } + + /** + * Create a new {@link MongoField} with given {@literal name}. + * + * @param name the name to be used as is (with all its potentially special characters). + * @return new instance of {@link MongoField}. + */ + public static MongoField fromKey(String name) { + return builder().name(name).build(); + } + + /** + * Create a new {@link MongoField} with given {@literal name}. + * + * @param name the name to be used as a path expression. + * @return new instance of {@link MongoField}. + */ + public static MongoField fromPath(String name) { + return builder().path(name).build(); + } + + /** + * @return new instance of {@link MongoFieldBuilder}. + */ + public static MongoFieldBuilder builder() { + return new MongoFieldBuilder(); + } + + /** + * @return never {@literal null}. + */ + public FieldName getName() { + return name; + } + + /** + * Get the position of the field within the target document. + * + * @return {@link Integer#MAX_VALUE} if undefined. + */ + public int getOrder() { + return order; + } + + /** + * @param prefix a prefix to the current name. + * @return new instance of {@link MongoField} with prefix appended to current field name. + */ + MongoField withPrefix(String prefix) { + return new MongoField(new FieldName(prefix + name.name(), name.type()), fieldType, order); + } + + /** + * Get the field's target type if defined. + * + * @return never {@literal null}. + */ + public FieldType getFieldType() { + return fieldType; + } + + @Override + public boolean equals(Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + MongoField that = (MongoField) o; + + if (order != that.order) + return false; + if (!ObjectUtils.nullSafeEquals(name, that.name)) { + return false; + } + return fieldType == that.fieldType; + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(name); + result = 31 * result + ObjectUtils.nullSafeHashCode(fieldType); + result = 31 * result + order; + return result; + } + + @Override + public String toString() { + return name.toString(); + } + + /** + * Builder for {@link MongoField}. + */ + public static class MongoFieldBuilder { + + private String name; + private Type nameType = Type.PATH; + private FieldType type = FieldType.IMPLICIT; + private int order = Integer.MAX_VALUE; + + /** + * Configure the field type. + * + * @param fieldType the target {@link FieldType}. + * @return this {@link MongoFieldBuilder} instance. + */ + public MongoFieldBuilder fieldType(FieldType fieldType) { + + this.type = fieldType; + return this; + } + + /** + * Configure the field name as key.
Key field names are used as-is without applying path segmentation splitting + * rules. + * + * @param fieldName + * @return + */ + public MongoFieldBuilder name(String fieldName) { + + Assert.hasText(fieldName, "Field name must not be empty"); + + this.name = fieldName; + this.nameType = Type.KEY; + return this; + } + + /** + * Configure the field name as path. Path field names are applied as paths potentially pointing into subdocuments. + * + * @param path + * @return + */ + public MongoFieldBuilder path(String path) { + + Assert.hasText(path, "Field path (name) must not be empty"); + + this.name = path; + this.nameType = Type.PATH; + return this; + } + + /** + * Configure the field order, defaulting to {@link Integer#MAX_VALUE} (undefined). + * + * @param order + * @return + */ + public MongoFieldBuilder order(int order) { + + this.order = order; + return this; + } + + /** + * Build a new {@link MongoField}. + * + * @return a new {@link MongoField}. + */ + public MongoField build() { + return new MongoField(new FieldName(name, nameType), type, order); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java new file mode 100644 index 0000000000..6e1eb40324 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.Id; + +/** + * {@link MongoId} represents a MongoDB specific {@link Id} annotation that allows customizing {@literal id} conversion. + * Id properties use {@link org.springframework.data.mongodb.core.mapping.FieldType#IMPLICIT} as the default + * {@literal id's} target type. This means that the actual property value is used. No conversion attempts to any other + * type are made.
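+ *
+ * A hypothetical usage sketch ({@code Person} is illustrative only):
+ *
+ * public class Person {
+ *   @MongoId(FieldType.OBJECT_ID) String id; // the String value is converted to/from ObjectId
+ * }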
+ * In contrast to {@link Id @Id}, {@link String} {@literal id's} are stored as such even when the actual value
+ * represents a valid {@link org.bson.types.ObjectId#isValid(String) ObjectId hex String}. To trigger {@link String} to
+ * {@link org.bson.types.ObjectId} conversion use {@link MongoId#targetType() @MongoId(FieldType.OBJECT_ID)}.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 2.2
+ */
+@Id
+@Field
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE })
+public @interface MongoId {
+
+	/**
+	 * @return the preferred id type.
+	 * @see #targetType()
+	 */
+	@AliasFor(annotation = Field.class, attribute = "targetType")
+	FieldType value() default FieldType.IMPLICIT;
+
+	/**
+	 * Get the preferred {@literal _id} type to be used. Defaults to {@link FieldType#IMPLICIT} which uses the property's
+	 * type. If defined differently, the given value will be converted into the desired target type via
+	 * {@link org.springframework.data.mongodb.core.convert.MongoConverter#convertId(Object, Class)}.
+	 *
+	 * @return the preferred {@literal id} type. {@link FieldType#IMPLICIT} by default.
+	 */
+	@AliasFor(annotation = Field.class, attribute = "targetType")
+	FieldType targetType() default FieldType.IMPLICIT;
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java
index c878062856..76c0269861 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2014 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,8 +15,6 @@
 */
 package org.springframework.data.mongodb.core.mapping;

-import java.beans.PropertyDescriptor;
-import java.lang.reflect.Field;
 import java.util.AbstractMap;

 import org.springframework.beans.BeansException;
@@ -25,24 +23,31 @@
 import org.springframework.data.mapping.context.AbstractMappingContext;
 import org.springframework.data.mapping.context.MappingContext;
 import org.springframework.data.mapping.model.FieldNamingStrategy;
+import org.springframework.data.mapping.model.Property;
 import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
 import org.springframework.data.mapping.model.SimpleTypeHolder;
+import org.springframework.data.util.NullableWrapperConverters;
 import org.springframework.data.util.TypeInformation;
+import org.springframework.lang.Nullable;

 /**
 * Default implementation of a {@link MappingContext} for MongoDB using {@link BasicMongoPersistentEntity} and
 * {@link BasicMongoPersistentProperty} as primary abstractions.
- *
+ *
 * @author Jon Brisbin
 * @author Oliver Gierke
+ * @author Christoph Strobl
 */
-public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersistentEntity<?>, MongoPersistentProperty>
+public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersistentEntity<?>, MongoPersistentProperty>
		implements ApplicationContextAware {

	private static final FieldNamingStrategy DEFAULT_NAMING_STRATEGY = PropertyNameFieldNamingStrategy.INSTANCE;

	private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;

-	private ApplicationContext context;
+	private boolean autoIndexCreation = false;
+
+	@Nullable
+	private ApplicationContext applicationContext;

	/**
	 * Creates a new {@link MongoMappingContext}.
@@ -54,55 +59,83 @@ public MongoMappingContext() {

	/**
	 * Configures the {@link FieldNamingStrategy} to be used to determine the field name if no manual mapping is applied.
	 * Defaults to a strategy using the plain property name.
-	 *
+	 *
	 * @param fieldNamingStrategy the {@link FieldNamingStrategy} to be used to determine the field name if no manual
	 *          mapping is applied.
	 */
-	public void setFieldNamingStrategy(FieldNamingStrategy fieldNamingStrategy) {
+	public void setFieldNamingStrategy(@Nullable FieldNamingStrategy fieldNamingStrategy) {
		this.fieldNamingStrategy = fieldNamingStrategy == null ? DEFAULT_NAMING_STRATEGY : fieldNamingStrategy;
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mapping.context.AbstractMappingContext#shouldCreatePersistentEntityFor(org.springframework.data.util.TypeInformation)
-	 */
	@Override
	protected boolean shouldCreatePersistentEntityFor(TypeInformation<?> type) {
+
+		if (NullableWrapperConverters.supports(type.getType())) {
+			return false;
+		}
+
		return !MongoSimpleTypes.HOLDER.isSimpleType(type.getType()) && !AbstractMap.class.isAssignableFrom(type.getType());
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mapping.AbstractMappingContext#createPersistentProperty(java.lang.reflect.Field, java.beans.PropertyDescriptor, org.springframework.data.mapping.MutablePersistentEntity, org.springframework.data.mapping.SimpleTypeHolder)
-	 */
	@Override
-	public MongoPersistentProperty createPersistentProperty(Field field, PropertyDescriptor descriptor,
-			BasicMongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
-		return new CachingMongoPersistentProperty(field, descriptor, owner, simpleTypeHolder, fieldNamingStrategy);
+	public MongoPersistentProperty createPersistentProperty(Property property, MongoPersistentEntity<?> owner,
+			SimpleTypeHolder simpleTypeHolder) {
+
+		CachingMongoPersistentProperty cachingMongoPersistentProperty = new CachingMongoPersistentProperty(property, owner,
+				simpleTypeHolder, fieldNamingStrategy);
+		cachingMongoPersistentProperty.validate();
+		return cachingMongoPersistentProperty;
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mapping.BasicMappingContext#createPersistentEntity(org.springframework.data.util.TypeInformation, org.springframework.data.mapping.model.MappingContext)
-	 */
	@Override
	protected <T> BasicMongoPersistentEntity<T> createPersistentEntity(TypeInformation<T> typeInformation) {
+		return new BasicMongoPersistentEntity<>(typeInformation);
+	}

-		BasicMongoPersistentEntity<T> entity = new BasicMongoPersistentEntity<T>(typeInformation);
+	@Override
+	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {

-		if (context != null) {
-			entity.setApplicationContext(context);
-		}
+		this.applicationContext = applicationContext;
+		super.setApplicationContext(applicationContext);
+	}

-		return entity;
+	/**
+	 *
Returns whether auto-index creation is enabled or disabled.
                    + * NOTE: Index creation should happen at a well-defined time that is ideally controlled by the + * application itself. + * + * @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise.
+	 * INFO: As of 3.x the default is set to {@literal false}; it was {@literal true} in 2.x.
+	 * @since 2.2
+	 * @see org.springframework.data.mongodb.core.index.Indexed
+	 */
+	public boolean isAutoIndexCreation() {
+		return autoIndexCreation;
+	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
+	/**
+	 * Enables/disables auto-index creation.
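+	 * A hypothetical configuration sketch (setup illustrative only):
+	 *
+	 * MongoMappingContext mappingContext = new MongoMappingContext();
+	 * mappingContext.setAutoIndexCreation(true); // opt in; the default is false
+	 *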
+	 * NOTE: Index creation should happen at a well-defined time that is ideally controlled by the
+	 * application itself.
+	 *
+	 * @param autoCreateIndexes set to {@literal true} to enable auto-index creation.
+	 * @since 2.2
+	 * @see org.springframework.data.mongodb.core.index.Indexed
	 */
+	public void setAutoIndexCreation(boolean autoCreateIndexes) {
+		this.autoIndexCreation = autoCreateIndexes;
+	}
+
+	@Nullable
	@Override
-	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
-		this.context = applicationContext;
+	public MongoPersistentEntity<?> getPersistentEntity(MongoPersistentProperty persistentProperty) {
+
+		MongoPersistentEntity<?> entity = super.getPersistentEntity(persistentProperty);
+
+		if (entity == null || !persistentProperty.isUnwrapped()) {
+			return entity;
+		}
+
+		return new UnwrappedMongoPersistentEntity<>(entity, new UnwrapEntityContext(persistentProperty));
	}
+
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java
index 51fa695263..e02bd00c8d 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2014 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,46 +15,100 @@
 */
 package org.springframework.data.mongodb.core.mapping;

+import java.util.Collection;
+
 import org.springframework.data.mapping.PersistentEntity;
+import org.springframework.data.mapping.model.MutablePersistentEntity;
+import org.springframework.lang.Nullable;

 /**
 * MongoDB specific {@link PersistentEntity} abstraction.
- *
+ *
 * @author Oliver Gierke
 * @author Christoph Strobl
 */
-public interface MongoPersistentEntity<T> extends PersistentEntity<T, MongoPersistentProperty> {
+public interface MongoPersistentEntity<T> extends MutablePersistentEntity<T, MongoPersistentProperty> {

	/**
	 * Returns the collection the entity shall be persisted to.
-	 *
+	 *
	 * @return
	 */
	String getCollection();

	/**
	 * Returns the default language to be used for this entity.
-	 *
-	 * @since 1.6
+	 *
	 * @return
+	 * @since 1.6
	 */
	String getLanguage();

	/**
	 * Returns the property holding text score value.
-	 *
-	 * @since 1.6
-	 * @see #hasTextScoreProperty()
+	 *
	 * @return {@literal null} if not present.
+	 * @see #hasTextScoreProperty()
+	 * @since 1.6
	 */
+	@Nullable
	MongoPersistentProperty getTextScoreProperty();

	/**
	 * Returns whether the entity has a {@link TextScore} property.
-	 *
-	 * @since 1.6
+	 *
	 * @return true if property annotated with {@link TextScore} is present.
+	 * @since 1.6
	 */
	boolean hasTextScoreProperty();

+	/**
+	 * Returns the collation of the entity evaluating a potential SpEL expression within the current context.
+	 *
+	 * @return {@literal null} if not set.
+	 * @since 2.2
+	 */
+	@Nullable
+	org.springframework.data.mongodb.core.query.Collation getCollation();
+
+	/**
+	 * @return {@literal true} if the entity is annotated with
+	 *         {@link org.springframework.data.mongodb.core.query.Collation}.
+	 * @since 2.2
+	 */
+	default boolean hasCollation() {
+		return getCollation() != null;
+	}
+
+	/**
+	 * Get the entity's shard key if defined.
+	 *
+	 * @return {@link ShardKey#none()} if not set.
+	 * @since 3.0
+	 */
+	ShardKey getShardKey();
+
+	/**
+	 * @return {@literal true} if the {@link #getShardKey() shard key} is sharded.
+	 * @since 3.0
+	 */
+	default boolean isSharded() {
+		return getShardKey().isSharded();
+	}
+
+	/**
+	 * @return {@literal true} if the entity should be unwrapped.
+	 * @since 3.2
+	 */
+	default boolean isUnwrapped() {
+		return false;
+	}
+
+	/**
+	 * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified.
+	 *         {@literal null} if no {@link Encrypted} annotation is found.
+	 * @since 3.3
+	 */
+	@Nullable
+	Collection<Object> getEncryptionKeyIds();
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java
index dc18f162f2..e75ac015aa 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2014 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,47 +15,86 @@
 */
 package org.springframework.data.mongodb.core.mapping;

+import java.util.Collection;
+
 import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.annotation.Id;
 import org.springframework.data.mapping.PersistentEntity;
 import org.springframework.data.mapping.PersistentProperty;
+import org.springframework.lang.NonNull;
+import org.springframework.lang.Nullable;

 /**
 * MongoDB specific {@link org.springframework.data.mapping.PersistentProperty} extension.
- *
+ *
 * @author Oliver Gierke
 * @author Patryk Wasik
 * @author Thomas Darimont
 * @author Christoph Strobl
+ * @author Divya Srivastava
 */
 public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {

	/**
	 * Returns the name of the field a property is persisted to.
-	 *
+	 *
	 * @return
	 */
	String getFieldName();

+	/**
+	 * Returns whether the property uses an annotated field name through {@link Field}.
+	 *
+	 * @return
+	 */
+	boolean hasExplicitFieldName();
+
+	/**
+	 * Returns the {@link Class Java FieldType} of the field a property is persisted to.
+	 *
+	 * @return
+	 * @since 2.2
+	 * @see FieldType
+	 */
+	Class<?> getFieldType();
+
	/**
	 * Returns the order of the field if defined. Will return -1 if undefined.
-	 *
+	 *
	 * @return
	 */
	int getFieldOrder();

+	/**
+	 * Returns whether the property should be written to the database if its value is {@literal null}.
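+	 * A hypothetical mapping sketch ({@code nickname} is illustrative only):
+	 *
+	 * @Field(write = Field.Write.ALWAYS) // persist the field even if the value is null
+	 * String nickname;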
+	 *
+	 * @return {@literal true} if {@literal null} values should be written; {@literal false} otherwise.
+	 * @since 3.3
+	 * @see Field.Write
+	 */
+	boolean writeNullValues();
+
	/**
	 * Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
	 * {@link #getDBRef()} to return a non-{@literal null} value.
-	 *
+	 *
	 * @return
	 */
	boolean isDbReference();

+	/**
+	 * Returns whether the property is a {@link DocumentReference}. If this returns {@literal true} you can expect
+	 * {@link #getDocumentReference()} to return a non-{@literal null} value.
+	 *
+	 * @return
+	 * @since 3.3
+	 */
+	boolean isDocumentReference();
+
	/**
	 * Returns whether the property is explicitly marked as an identifier property of the owning {@link PersistentEntity}.
	 * A property is an explicit id property if it is annotated with {@link Id}.
-	 *
+	 *
	 * @return
	 */
	boolean isExplicitIdProperty();
@@ -63,7 +102,7 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {
	 * It's marked with {@link TextScore}.
-	 *
+	 *
	 * @return
	 * @since 1.6
	 */
@@ -88,35 +127,78 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {
+	/**
+	 * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified.
+	 *         {@literal null} if no {@link Encrypted} annotation is found.
+	 * @since 3.3
+	 */
+	@Nullable
+	Collection<Object> getEncryptionKeyIds();
+
+	/**
+	 * @return the {@link MongoField} representing the raw field to read/write in a MongoDB document.
+	 * @since 4.2
+	 */
+	MongoField getMongoField();
+
	/**
	 * Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name.
-	 *
+	 *
	 * @author Oliver Gierke
	 */
-	public enum PropertyToFieldNameConverter implements Converter<MongoPersistentProperty, String> {
+	enum PropertyToFieldNameConverter implements Converter<MongoPersistentProperty, String> {

		INSTANCE;

-		/*
-		 * (non-Javadoc)
-		 * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
-		 */
+		@NonNull
+		@Override
		public String convert(MongoPersistentProperty source) {
-			return source.getFieldName();
+			if (!source.isUnwrapped()) {
+				return source.getFieldName();
+			}
+			return "";
		}
	}
-
-	/**
-	 * Returns whether property access shall be used for reading the property value. This means it will use the getter
-	 * instead of field access.
-	 *
-	 * @return
-	 */
-	boolean usePropertyAccess();
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java
index c26f125164..3b3a520bc3 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java
@@ -1,11 +1,11 @@
 /*
- * Copyright (c) 2011 by the original author(s).
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,50 +16,67 @@ package org.springframework.data.mongodb.core.mapping; import java.math.BigInteger; -import java.util.Collections; -import java.util.HashSet; +import java.time.Instant; import java.util.Set; import java.util.UUID; import java.util.regex.Pattern; +import org.bson.*; import org.bson.types.Binary; +import org.bson.types.Code; import org.bson.types.CodeWScope; +import org.bson.types.CodeWithScope; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; +import org.bson.types.Symbol; import org.springframework.data.mapping.model.SimpleTypeHolder; -import com.mongodb.DBObject; import com.mongodb.DBRef; +import com.mongodb.client.model.geojson.Geometry; +import com.mongodb.client.model.geojson.GeometryCollection; +import com.mongodb.client.model.geojson.LineString; +import com.mongodb.client.model.geojson.MultiLineString; +import com.mongodb.client.model.geojson.MultiPoint; +import com.mongodb.client.model.geojson.MultiPolygon; +import com.mongodb.client.model.geojson.Point; +import com.mongodb.client.model.geojson.Polygon; /** * Simple constant holder for a {@link SimpleTypeHolder} enriched with Mongo specific simple types. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ public abstract class MongoSimpleTypes { - public static final Set> AUTOGENERATED_ID_TYPES; + public static final Set> AUTOGENERATED_ID_TYPES = Set.of(ObjectId.class, String.class, BigInteger.class); + private static final Set> MONGO_SIMPLE_TYPES = Set.of(Binary.class, DBRef.class, Decimal128.class, + org.bson.Document.class, Code.class, CodeWScope.class, CodeWithScope.class, ObjectId.class, Pattern.class, + Symbol.class, UUID.class, Instant.class, BinaryVector.class, BsonValue.class, BsonNumber.class, BsonType.class, + BsonArray.class, BsonSymbol.class, BsonUndefined.class, BsonMinKey.class, BsonMaxKey.class, BsonNull.class, + BsonBinary.class, BsonBoolean.class, BsonDateTime.class, BsonDbPointer.class, BsonDecimal128.class, + BsonDocument.class, BsonDouble.class, BsonInt32.class, BsonInt64.class, BsonJavaScript.class, + BsonJavaScriptWithScope.class, BsonObjectId.class, BsonRegularExpression.class, BsonString.class, + BsonTimestamp.class, Geometry.class, GeometryCollection.class, LineString.class, MultiLineString.class, + MultiPoint.class, MultiPolygon.class, Point.class, Polygon.class); + + public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(MONGO_SIMPLE_TYPES, true) { + + @Override + public boolean isSimpleType(Class type) { - static { - Set> classes = new HashSet>(); - classes.add(ObjectId.class); - classes.add(String.class); - classes.add(BigInteger.class); - AUTOGENERATED_ID_TYPES = Collections.unmodifiableSet(classes); + if (type.isEnum()) { + return true; + } - Set> simpleTypes = new HashSet>(); - simpleTypes.add(DBRef.class); - simpleTypes.add(ObjectId.class); - simpleTypes.add(CodeWScope.class); - simpleTypes.add(DBObject.class); - simpleTypes.add(Pattern.class); - simpleTypes.add(Binary.class); - simpleTypes.add(UUID.class); - MONGO_SIMPLE_TYPES = Collections.unmodifiableSet(simpleTypes); - } + if (type.getName().startsWith("java.time")) { + return false; + } - private static final Set> MONGO_SIMPLE_TYPES; - public static final SimpleTypeHolder HOLDER = new 
SimpleTypeHolder(MONGO_SIMPLE_TYPES, true); + return super.isSimpleType(type); + } + }; - private MongoSimpleTypes() { - } + private MongoSimpleTypes() {} } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoVector.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoVector.java new file mode 100644 index 0000000000..3b2e0a45f1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoVector.java @@ -0,0 +1,154 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.bson.BinaryVector; +import org.bson.Float32BinaryVector; +import org.bson.Int8BinaryVector; +import org.bson.PackedBitBinaryVector; + +import org.springframework.data.domain.Vector; +import org.springframework.util.ObjectUtils; + +/** + * MongoDB-specific extension to {@link Vector} based on Mongo's {@link BinaryVector}. Note that only float32 and int8 + * variants can be represented as floating-point numbers. int1 returns an all-zero array for {@link #toFloatArray()} and + * {@link #toDoubleArray()}. + * + * @author Mark Paluch + * @since 4.5 + */ +public class MongoVector implements Vector { + + private final BinaryVector v; + + MongoVector(BinaryVector v) { + this.v = v; + } + + /** + * Creates a new {@link MongoVector} from the given {@link BinaryVector}. + * + * @param v binary vector representation. + * @return the {@link MongoVector} for the given vector values. 
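+	 * A minimal creation sketch (assumes the driver's {@code BinaryVector.floatVector} factory; values illustrative only):
+	 *
+	 * MongoVector vector = MongoVector.of(BinaryVector.floatVector(new float[] { 0.1f, 0.2f }));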
+ */ + public static MongoVector of(BinaryVector v) { + return new MongoVector(v); + } + + @Override + public Class getType() { + + if (v instanceof Float32BinaryVector) { + return Float.class; + } + + if (v instanceof Int8BinaryVector) { + return Byte.class; + } + + if (v instanceof PackedBitBinaryVector) { + return Byte.class; + } + + return Number.class; + } + + @Override + public BinaryVector getSource() { + return v; + } + + @Override + public int size() { + + if (v instanceof Float32BinaryVector f) { + return f.getData().length; + } + + if (v instanceof Int8BinaryVector i) { + return i.getData().length; + } + + if (v instanceof PackedBitBinaryVector p) { + return p.getData().length; + } + + return 0; + } + + @Override + public float[] toFloatArray() { + + if (v instanceof Float32BinaryVector f) { + + float[] result = new float[f.getData().length]; + System.arraycopy(f.getData(), 0, result, 0, result.length); + return result; + } + + if (v instanceof Int8BinaryVector i) { + + float[] result = new float[i.getData().length]; + System.arraycopy(i.getData(), 0, result, 0, result.length); + return result; + } + + return new float[size()]; + } + + @Override + public double[] toDoubleArray() { + + if (v instanceof Float32BinaryVector f) { + + float[] data = f.getData(); + double[] result = new double[data.length]; + for (int i = 0; i < data.length; i++) { + result[i] = data[i]; + } + + return result; + } + + if (v instanceof Int8BinaryVector i) { + + double[] result = new double[i.getData().length]; + System.arraycopy(i.getData(), 0, result, 0, result.length); + return result; + } + + return new double[size()]; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof MongoVector that)) { + return false; + } + return ObjectUtils.nullSafeEquals(v, that.v); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(v); + } + + @Override + public String toString() { + return "MV[" + v + "]"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/PersistentPropertyTranslator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/PersistentPropertyTranslator.java new file mode 100644 index 0000000000..d78494d23b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/PersistentPropertyTranslator.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.util.function.Predicate; + +import org.springframework.data.util.Predicates; +import org.springframework.lang.Nullable; + +/** + * Utility to translate a {@link MongoPersistentProperty} into a corresponding property from a different + * {@link MongoPersistentEntity} by looking it up by name. + *

                    + * Mainly used within the framework. + * + * @author Mark Paluch + * @since 3.4 + */ +public class PersistentPropertyTranslator { + + /** + * Translate a {@link MongoPersistentProperty} into a corresponding property from a different + * {@link MongoPersistentEntity}. + * + * @param property must not be {@literal null}. + * @return the translated property. Can be the original {@code property}. + */ + public MongoPersistentProperty translate(MongoPersistentProperty property) { + return property; + } + + /** + * Create a new {@link PersistentPropertyTranslator}. + * + * @param targetEntity must not be {@literal null}. + * @return the property translator to use. + */ + public static PersistentPropertyTranslator create(@Nullable MongoPersistentEntity targetEntity) { + return create(targetEntity, Predicates.isTrue()); + } + + /** + * Create a new {@link PersistentPropertyTranslator} accepting a {@link Predicate filter predicate} whether the + * translation should happen at all. + * + * @param targetEntity must not be {@literal null}. + * @param translationFilter must not be {@literal null}. + * @return the property translator to use. + */ + public static PersistentPropertyTranslator create(@Nullable MongoPersistentEntity targetEntity, + Predicate translationFilter) { + return targetEntity != null ? new EntityPropertyTranslator(targetEntity, translationFilter) + : new PersistentPropertyTranslator(); + } + + private static class EntityPropertyTranslator extends PersistentPropertyTranslator { + + private final MongoPersistentEntity targetEntity; + private final Predicate translationFilter; + + EntityPropertyTranslator(MongoPersistentEntity targetEntity, + Predicate translationFilter) { + this.targetEntity = targetEntity; + this.translationFilter = translationFilter; + } + + @Override + public MongoPersistentProperty translate(MongoPersistentProperty property) { + + if (!translationFilter.test(property)) { + return property; + } + + MongoPersistentProperty targetProperty = targetEntity.getPersistentProperty(property.getName()); + return targetProperty != null ? targetProperty : property; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java new file mode 100644 index 0000000000..a0c67f7187 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java @@ -0,0 +1,48 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Christoph Strobl + * @since 4.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) +public @interface Queryable { + + /** + * @return empty {@link String} if not set. + */ + String queryType() default ""; + + /** + * @return empty {@link String} if not set. + */ + String queryAttributes() default ""; + + /** + * Set the contention factor + * + * @return the contention factor + */ + long contentionFactor() default -1; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java new file mode 100644 index 0000000000..8b2eccb6ca --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java @@ -0,0 +1,57 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Encrypted(algorithm = "Range") +@Queryable(queryType = "range") +public @interface RangeEncrypted { + + /** + * Set the contention factor. + * + * @return the contention factor + */ + @AliasFor(annotation = Queryable.class, value = "contentionFactor") + long contentionFactor() default -1; + + /** + * Set the {@literal range} options. + *

                    + * Should be valid extended {@link org.bson.Document#parse(String) JSON} representing the range options and including + * the following values: {@code min}, {@code max}, {@code trimFactor} and {@code sparsity}. + *
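+	 * A hypothetical mapping sketch (field name and bounds illustrative only):
+	 *
+	 * @RangeEncrypted(contentionFactor = 0L,
+	 *     rangeOptions = "{ \"min\": { \"$numberLong\" : \"0\" }, \"max\": { \"$numberLong\" : \"200\" } }")
+	 * Long encryptedValue;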

+	 * Please note that values are data type sensitive and may require proper identification via e.g.
+	 * {@code $numberLong}.
+	 *
+	 * @return the {@link org.bson.Document#parse(String) JSON} representation of range options.
+	 */
+	@AliasFor(annotation = Queryable.class, value = "queryAttributes")
+	String rangeOptions() default "";
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardKey.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardKey.java
new file mode 100644
index 0000000000..28a114a918
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardKey.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.bson.Document;
+import org.springframework.lang.Nullable;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Value object representing an entity's Shard Key used to distribute documents across a sharded MongoDB cluster.
+ *
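+ * A minimal creation sketch (property names illustrative only):
+ *
+ * ShardKey key = ShardKey.range("country", "userId"); // getDocument() yields { "country" : 1, "userId" : 1 }
+ *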
+ * An {@link ShardKey#isImmutable() immutable} shard key indicates a fixed value that is not updated (see the
+ * MongoDB Reference: Change a Document's Shard Key Value), which allows skipping server round trips in cases where a
+ * potential shard key change might have occurred.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 3.0
+ */
+public class ShardKey {
+
+	private static final ShardKey NONE = new ShardKey(Collections.emptyList(), null, true);
+
+	private final List<String> propertyNames;
+	private final @Nullable ShardingStrategy shardingStrategy;
+	private final boolean immutable;
+
+	private ShardKey(List<String> propertyNames, @Nullable ShardingStrategy shardingStrategy, boolean immutable) {
+
+		this.propertyNames = propertyNames;
+		this.shardingStrategy = shardingStrategy;
+		this.immutable = immutable;
+	}
+
+	/**
+	 * @return the number of properties used to form the shard key.
+	 */
+	public int size() {
+		return propertyNames.size();
+	}
+
+	/**
+	 * @return the unmodifiable collection of property names forming the shard key.
+	 */
+	public Collection<String> getPropertyNames() {
+		return propertyNames;
+	}
+
+	/**
+	 * @return {@literal true} if the shard key of a document does not change.
+	 * @see MongoDB Reference: Change a Document's Shard Key Value
+	 */
+	public boolean isImmutable() {
+		return immutable;
+	}
+
+	/**
+	 * Returns whether the shard key represents a sharded key. Returns {@literal false} if the key is not sharded.
+	 *
+	 * @return {@literal true} if the key is sharded; {@literal false} otherwise.
+	 */
+	public boolean isSharded() {
+		return !propertyNames.isEmpty();
+	}
+
+	/**
+	 * Get the raw MongoDB representation of the {@link ShardKey}.
+	 *
+	 * @return never {@literal null}.
+	 */
+	public Document getDocument() {
+
+		Document doc = new Document();
+		for (String field : propertyNames) {
+			doc.append(field, shardingValue());
+		}
+		return doc;
+	}
+
+	private Object shardingValue() {
+		return ObjectUtils.nullSafeEquals(ShardingStrategy.HASH, shardingStrategy) ? "hashed" : 1;
+	}
+
+	/**
+	 * {@link ShardKey} indicating no shard key has been defined.
+	 *
+	 * @return {@link #NONE}
+	 */
+	public static ShardKey none() {
+		return NONE;
+	}
+
+	/**
+	 * Create a new {@link ShardingStrategy#RANGE} shard key.
+	 *
+	 * @param propertyNames must not be {@literal null}.
+	 * @return new instance of {@link ShardKey}.
+	 */
+	public static ShardKey range(String... propertyNames) {
+		return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.RANGE, false);
+	}
+
+	/**
+	 * Create a new {@link ShardingStrategy#HASH} shard key.
+	 *
+	 * @param propertyNames must not be {@literal null}.
+	 * @return new instance of {@link ShardKey}.
+	 */
+	public static ShardKey hash(String... propertyNames) {
+		return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.HASH, false);
+	}
+
+	/**
+	 * Turn the given {@link ShardKey} into an {@link #isImmutable() immutable} one.
+	 *
+	 * @param shardKey must not be {@literal null}.
+	 * @return new instance of {@link ShardKey} if the given shard key is not already immutable.
+ */ + public static ShardKey immutable(ShardKey shardKey) { + + if (shardKey.isImmutable()) { + return shardKey; + } + + return new ShardKey(shardKey.propertyNames, shardKey.shardingStrategy, true); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Sharded.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Sharded.java new file mode 100644 index 0000000000..da537f7948 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Sharded.java @@ -0,0 +1,95 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.Persistent; + +/** + * The {@link Sharded} annotation provides meta information about the actual distribution of data. The + * {@link #shardKey()} is used to distribute documents across shards.
                    + * Please see the MongoDB Documentation for more information + * about requirements and limitations of sharding. + *
                    + * Spring Data adds the shard key to filter queries used for + * {@link com.mongodb.client.MongoCollection#replaceOne(org.bson.conversions.Bson, Object)} operations triggered by + * {@code save} operations on {@link org.springframework.data.mongodb.core.MongoOperations} and + * {@link org.springframework.data.mongodb.core.ReactiveMongoOperations} as well as {@code update/upsert} operations + * replacing/upserting a single existing document as long as the given + * {@link org.springframework.data.mongodb.core.query.UpdateDefinition} holds a full copy of the entity. + *
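+ * A hypothetical domain type (names illustrative only):
+ *
+ * @Document
+ * @Sharded(shardKey = { "country", "userId" })
+ * public class User {
+ *   String country;
+ *   String userId;
+ * }
+ *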
                    + * All other operations that require the presence of the {@literal shard key} in the filter query need to provide the + * information via the {@link org.springframework.data.mongodb.core.query.Query} parameter when invoking the method. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +@Persistent +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.ANNOTATION_TYPE }) +public @interface Sharded { + + /** + * Alias for {@link #shardKey()}. + * + * @return {@literal _id} by default. + * @see #shardKey() + */ + @AliasFor("shardKey") + String[] value() default {}; + + /** + * The shard key determines the distribution of the collection's documents among the cluster's shards. The shard key + * is either a single or multiple indexed properties that exist in every document in the collection. + *
                    + * By default the {@literal id} property is used for sharding.
+	 * NOTE: Required indexes are not created automatically. Create these either externally, via
+	 * {@link org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)}
+	 * or by annotating your domain model with {@link org.springframework.data.mongodb.core.index.Indexed}/
+	 * {@link org.springframework.data.mongodb.core.index.CompoundIndex} along with enabled
+	 * {@link org.springframework.data.mongodb.config.MongoConfigurationSupport#autoIndexCreation() auto index creation}.
+	 *
+	 * @return an empty key by default, which indicates that the entity's {@literal id} property is used.
+	 */
+	@AliasFor("value")
+	String[] shardKey() default {};
+
+	/**
+	 * The sharding strategy to use for distributing data across sharded clusters.
+	 *
+	 * @return {@link ShardingStrategy#RANGE} by default.
+	 */
+	ShardingStrategy shardingStrategy() default ShardingStrategy.RANGE;
+
+	/**
+	 * As of MongoDB 4.2 it is possible to change the shard key using update. Using immutable shard keys avoids server
+	 * round trips to obtain an entity's actual shard key from the database.
+	 *
+	 * @return {@literal false} by default.
+	 * @see MongoDB Reference: Change a Document's Shard Key Value
+	 */
+	boolean immutableKey() default false;
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardingStrategy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardingStrategy.java
new file mode 100644
index 0000000000..6fefbf6913
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardingStrategy.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping;
+
+/**
+ * @author Christoph Strobl
+ * @since 3.0
+ */
+public enum ShardingStrategy {
+
+	/**
+	 * Ranged sharding involves dividing data into ranges based on the shard key values. Each chunk is then assigned a
+	 * range based on the shard key values.
+	 */
+	RANGE,
+
+	/**
+	 * Hashed sharding involves computing a hash of the shard key field’s value. Each chunk is then assigned a range
+	 * based on the hashed shard key values.
+	 */
+	HASH
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java
index 4dafd56442..349cc191f9 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2014 the original author or authors.
+ * Copyright 2014-2025 the original author or authors.
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,9 +26,11 @@ /** * {@link TextScore} marks the property to be considered as the on server calculated {@literal textScore} when doing * full text search.
- * NOTE Property will not be written when saving entity.
- *
+ * NOTE Property will not be written when saving the entity and may be {@literal null} if the document is retrieved
+ * by a regular (i.e. non-{@literal $text}) query.
+ *
 * @author Christoph Strobl
+ * @author Mark Paluch
 * @since 1.6
 */
@ReadOnlyProperty
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java
new file mode 100644
index 0000000000..efe0cd8703
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2021-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.springframework.core.annotation.AliasFor;
+import org.springframework.data.mongodb.core.timeseries.Granularity;
+
+/**
+ * Identifies a domain object to be persisted to a MongoDB Time Series collection.
+ *
+ * @author Christoph Strobl
+ * @author Ben Foster
+ * @since 3.3
+ * @see https://docs.mongodb.com/manual/core/timeseries-collections
+ */
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE })
+@Document
+public @interface TimeSeries {
+
+	/**
+	 * The collection the document representing the entity is supposed to be stored in. If not configured, a default
+	 * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically
+	 * calculate the collection name on a per-operation basis.
+	 *
+	 * @return the name of the collection to be used.
+	 * @see Document#collection()
+	 */
+	@AliasFor(annotation = Document.class, attribute = "collection")
+	String collection() default "";
+
+	/**
+	 * Name of the property which contains the date in each time series document.
                    + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. + * + * @return never {@literal null}. + */ + String timeField(); + + /** + * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor + * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}.
                    + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. + * + * @return empty {@link String} by default. + */ + String metaField() default ""; + + /** + * Select the {@link Granularity granularity} parameter to define how data in the time series collection is organized. + * + * @return {@link Granularity#DEFAULT server default} by default. + */ + Granularity granularity() default Granularity.DEFAULT; + + /** + * Defines the collation to apply when executing a query or creating indexes. + * + * @return an empty {@link String} by default. + * @see Document#collation() + */ + @AliasFor(annotation = Document.class, attribute = "collation") + String collation() default ""; + + /** + * Configure the timeout after which the document should expire. + * Defaults to an empty {@link String} for no expiry. Accepts numeric values followed by their unit of measure: + *

+	 * - d: Days
+	 * - h: Hours
+	 * - m: Minutes
+	 * - s: Seconds
+	 * - Alternatively: A Spring {@literal template expression}. The expression can result in a
+	 *   {@link java.time.Duration} or a valid expiration {@link String} according to the already mentioned
+	 *   conventions.
+	 * Supports ISO-8601 style.
+	 *
+	 * @TimeSeries(expireAfter = "10s") String expireAfterTenSeconds;
+	 * @TimeSeries(expireAfter = "1d") String expireAfterOneDay;
+	 * @TimeSeries(expireAfter = "P2D") String expireAfterTwoDays;
+	 * @TimeSeries(expireAfter = "#{@mySpringBean.timeout}") String expireAfterTimeoutObtainedFromSpringBean;
+	 * @TimeSeries(expireAfter = "${my.property.timeout}") String expireAfterTimeoutObtainedFromProperty;
+	 *
                    + * + * @return empty by default. + * @since 4.4 + */ + String expireAfter() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java new file mode 100644 index 0000000000..b3b73397ff --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java @@ -0,0 +1,57 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * @author Christoph Strobl + * @author Rogério Meneguelli Gatto + * @since 3.2 + */ +class UnwrapEntityContext { + + private final MongoPersistentProperty property; + + public UnwrapEntityContext(MongoPersistentProperty property) { + this.property = property; + } + + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + public boolean equals(@Nullable Object obj) { + + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnwrapEntityContext that = (UnwrapEntityContext) obj; + return ObjectUtils.nullSafeEquals(property, that.property); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(property); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Unwrapped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Unwrapped.java new file mode 100644 index 0000000000..10a0639fb6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Unwrapped.java @@ -0,0 +1,136 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import javax.annotation.meta.When; + +import org.springframework.core.annotation.AliasFor; + +/** + * The annotation to configure a value object as flattened out in the target document. + *
+ * Depending on the {@link OnEmpty value} of {@link #onEmpty()} the property is set to {@literal null} or an empty
+ * instance in case all unwrapped values are {@literal null} when reading from the result set.
+ *
+ * @author Christoph Strobl
+ * @since 3.2
+ */
+@Documented
+@Retention(value = RetentionPolicy.RUNTIME)
+@Target(value = { ElementType.ANNOTATION_TYPE, ElementType.FIELD, ElementType.METHOD })
+public @interface Unwrapped {
+
+	/**
+	 * Set the load strategy for the unwrapped object if all contained fields yield {@literal null} values.
+	 *
+	 * {@link Nullable @Unwrapped.Nullable} and {@link Empty @Unwrapped.Empty} offer shortcuts for this.
+	 *
+	 * @return never {@literal null}.
+	 */
+	OnEmpty onEmpty();
+
+	/**
+	 * @return prefix for columns in the unwrapped value object. An empty {@link String} by default.
+	 */
+	String prefix() default "";
+
+	/**
+	 * Load strategy to be used for {@link Unwrapped#onEmpty()}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	enum OnEmpty {
+		USE_NULL, USE_EMPTY
+	}
+
+	/**
+	 * Shortcut for a nullable unwrapped property.
+	 *
+	 *
                    +	 * @Unwrapped.Nullable private Address address;
                    +	 * 
+ * + * as an alternative to the more verbose + *
                    +	 * @Unwrapped(onEmpty = USE_NULL) @javax.annotation.Nonnull(when = When.MAYBE) private Address address;
                    +	 * 
+ * + * @author Christoph Strobl + * @see Unwrapped#onEmpty() + */ + @Unwrapped(onEmpty = OnEmpty.USE_NULL) + @Documented + @Retention(RetentionPolicy.RUNTIME) + @Target({ ElementType.FIELD, ElementType.METHOD }) + @javax.annotation.Nonnull(when = When.MAYBE) + @interface Nullable { + + /** + * @return prefix for fields in the unwrapped value object. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String prefix() default ""; + + /** + * @return prefix for fields in the unwrapped value object; alias for {@link #prefix()}. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String value() default ""; + } + + /** + * Shortcut for an empty unwrapped property. + * + *
                    +	 * @Unwrapped.Empty private Address address;
                    +	 * 
+ * + * as an alternative to the more verbose + *
                    +	 * @Unwrapped(onEmpty = USE_EMPTY) @javax.annotation.Nonnull(when = When.NEVER) private Address address;
                    +	 * 
+ * + * @author Christoph Strobl + * @see Unwrapped#onEmpty() + */ + @Unwrapped(onEmpty = OnEmpty.USE_EMPTY) + @Documented + @Retention(RetentionPolicy.RUNTIME) + @Target({ ElementType.FIELD, ElementType.METHOD }) + @javax.annotation.Nonnull(when = When.NEVER) + @interface Empty { + + /** + * @return prefix for fields in the unwrapped value object. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String prefix() default ""; + + /** + * @return prefix for fields in the unwrapped value object; alias for {@link #prefix()}. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String value() default ""; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java new file mode 100644 index 0000000000..fed08815b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java @@ -0,0 +1,344 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Annotation; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Spliterator; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import org.springframework.core.env.Environment; +import org.springframework.data.mapping.*; +import org.springframework.data.mapping.model.PersistentPropertyAccessorFactory; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.util.Streamable; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * Unwrapped variant of {@link MongoPersistentEntity}.
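Before moving on to the wrapping infrastructure introduced below, a hedged usage sketch of the @Unwrapped shortcuts defined above; the Customer and Address types, their fields, and the "shipping_" prefix are hypothetical:

    // Hypothetical domain model: Address fields are flattened into the
    // Customer document under the "shipping_" prefix. If all unwrapped
    // values read back as null, the property is set to null (USE_NULL).
    class Address {
        String street; // persisted as "shipping_street"
        String city; // persisted as "shipping_city"
    }

    class Customer {
        @Id String id;
        @Unwrapped.Nullable(prefix = "shipping_") Address shipping;
    }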
+ * + * @author Christoph Strobl + * @since 3.2 + * @see Unwrapped + */ +class UnwrappedMongoPersistentEntity implements MongoPersistentEntity { + + private final UnwrapEntityContext context; + private final MongoPersistentEntity delegate; + + public UnwrappedMongoPersistentEntity(MongoPersistentEntity delegate, UnwrapEntityContext context) { + + this.context = context; + this.delegate = delegate; + } + + @Override + public String getCollection() { + return delegate.getCollection(); + } + + @Override + public String getLanguage() { + return delegate.getLanguage(); + } + + @Override + @Nullable + public MongoPersistentProperty getTextScoreProperty() { + return delegate.getTextScoreProperty(); + } + + @Override + public boolean hasTextScoreProperty() { + return delegate.hasTextScoreProperty(); + } + + @Override + @Nullable + public Collation getCollation() { + return delegate.getCollation(); + } + + @Override + public boolean hasCollation() { + return delegate.hasCollation(); + } + + @Override + public ShardKey getShardKey() { + return delegate.getShardKey(); + } + + @Override + public boolean isSharded() { + return delegate.isSharded(); + } + + @Override + public String getName() { + return delegate.getName(); + } + + @Override + @Nullable + @Deprecated + public PreferredConstructor getPersistenceConstructor() { + return delegate.getPersistenceConstructor(); + } + + @Override + public InstanceCreatorMetadata getInstanceCreatorMetadata() { + return delegate.getInstanceCreatorMetadata(); + } + + @Override + public boolean isCreatorArgument(PersistentProperty property) { + return delegate.isCreatorArgument(property); + } + + @Override + public boolean isIdProperty(PersistentProperty property) { + return delegate.isIdProperty(property); + } + + @Override + public boolean isVersionProperty(PersistentProperty property) { + return delegate.isVersionProperty(property); + } + + @Override + @Nullable + public MongoPersistentProperty getIdProperty() { + return delegate.getIdProperty(); + } + + @Override + public MongoPersistentProperty getRequiredIdProperty() { + return delegate.getRequiredIdProperty(); + } + + @Override + @Nullable + public MongoPersistentProperty getVersionProperty() { + return delegate.getVersionProperty(); + } + + @Override + public MongoPersistentProperty getRequiredVersionProperty() { + return delegate.getRequiredVersionProperty(); + } + + @Override + @Nullable + public MongoPersistentProperty getPersistentProperty(String name) { + return wrap(delegate.getPersistentProperty(name)); + } + + @Override + public MongoPersistentProperty getRequiredPersistentProperty(String name) { + + MongoPersistentProperty persistentProperty = getPersistentProperty(name); + if (persistentProperty != null) { + return persistentProperty; + } + + throw new IllegalStateException(String.format("Required property %s not found for %s", name, getType())); + } + + @Override + @Nullable + public MongoPersistentProperty getPersistentProperty(Class annotationType) { + return wrap(delegate.getPersistentProperty(annotationType)); + } + + @Override + public Iterable getPersistentProperties(Class annotationType) { + return Streamable.of(delegate.getPersistentProperties(annotationType)).stream().map(this::wrap) + .collect(Collectors.toList()); + } + + @Override + public boolean hasIdProperty() { + return delegate.hasIdProperty(); + } + + @Override + public boolean hasVersionProperty() { + return delegate.hasVersionProperty(); + } + + @Override + public Class getType() { + return delegate.getType(); + } + + 
@Override + public Alias getTypeAlias() { + return delegate.getTypeAlias(); + } + + @Override + public TypeInformation getTypeInformation() { + return delegate.getTypeInformation(); + } + + @Override + public void doWithProperties(PropertyHandler handler) { + + delegate.doWithProperties((PropertyHandler) property -> { + handler.doWithPersistentProperty(wrap(property)); + }); + } + + @Override + public void doWithProperties(SimplePropertyHandler handler) { + + delegate.doWithProperties((SimplePropertyHandler) property -> { + if (property instanceof MongoPersistentProperty mongoPersistentProperty) { + handler.doWithPersistentProperty(wrap(mongoPersistentProperty)); + } else { + handler.doWithPersistentProperty(property); + } + }); + } + + @Override + public void doWithAssociations(AssociationHandler handler) { + delegate.doWithAssociations(handler); + } + + @Override + public void doWithAssociations(SimpleAssociationHandler handler) { + delegate.doWithAssociations(handler); + } + + @Override + @Nullable + public A findAnnotation(Class annotationType) { + return delegate.findAnnotation(annotationType); + } + + @Override + public A getRequiredAnnotation(Class annotationType) throws IllegalStateException { + return delegate.getRequiredAnnotation(annotationType); + } + + @Override + public boolean isAnnotationPresent(Class annotationType) { + return delegate.isAnnotationPresent(annotationType); + } + + @Override + public PersistentPropertyAccessor getPropertyAccessor(B bean) { + return delegate.getPropertyAccessor(bean); + } + + @Override + public PersistentPropertyPathAccessor getPropertyPathAccessor(B bean) { + return delegate.getPropertyPathAccessor(bean); + } + + @Override + public IdentifierAccessor getIdentifierAccessor(Object bean) { + return delegate.getIdentifierAccessor(bean); + } + + @Override + public boolean isNew(Object bean) { + return delegate.isNew(bean); + } + + @Override + public boolean isImmutable() { + return delegate.isImmutable(); + } + + @Override + public boolean requiresPropertyPopulation() { + return delegate.requiresPropertyPopulation(); + } + + @Override + public Iterator iterator() { + + List target = new ArrayList<>(); + delegate.iterator().forEachRemaining(it -> target.add(wrap(it))); + return target.iterator(); + } + + @Override + public void forEach(Consumer action) { + delegate.forEach(it -> action.accept(wrap(it))); + } + + @Override + public Spliterator spliterator() { + return delegate.spliterator(); + } + + private MongoPersistentProperty wrap(MongoPersistentProperty source) { + if (source == null) { + return source; + } + return new UnwrappedMongoPersistentProperty(source, context); + } + + @Override + public void addPersistentProperty(MongoPersistentProperty property) { + + } + + @Override + public void addAssociation(Association association) { + + } + + @Override + public void verify() throws MappingException { + + } + + @Override + public void setPersistentPropertyAccessorFactory(PersistentPropertyAccessorFactory factory) { + + } + + @Override + public void setEvaluationContextProvider(EvaluationContextProvider provider) { + + } + + @Override + public void setEnvironment(Environment environment) { + + } + + @Override + public boolean isUnwrapped() { + return context.getProperty().isUnwrapped(); + } + + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java new file mode 100644 index 0000000000..1d4877478f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -0,0 +1,387 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.lang.reflect.Method; +import java.util.Collection; + +import org.springframework.data.mapping.Association; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * Unwrapped variant of {@link MongoPersistentProperty}. + * + * @author Christoph Strobl + * @author Rogério Meneguelli Gatto + * @since 3.2 + * @see Unwrapped + */ +class UnwrappedMongoPersistentProperty implements MongoPersistentProperty { + + private final MongoPersistentProperty delegate; + private final UnwrapEntityContext context; + + public UnwrappedMongoPersistentProperty(MongoPersistentProperty delegate, UnwrapEntityContext context) { + + this.delegate = delegate; + this.context = context; + } + + @Override + public String getFieldName() { + + if (!context.getProperty().isUnwrapped()) { + return delegate.getFieldName(); + } + + return context.getProperty().findAnnotation(Unwrapped.class).prefix() + delegate.getFieldName(); + } + + @Override + public boolean hasExplicitFieldName() { + return delegate.hasExplicitFieldName() + || !ObjectUtils.isEmpty(context.getProperty().findAnnotation(Unwrapped.class).prefix()); + } + + @Override + public Class getFieldType() { + return delegate.getFieldType(); + } + + @Override + public int getFieldOrder() { + return delegate.getFieldOrder(); + } + + @Override + public boolean writeNullValues() { + return delegate.writeNullValues(); + } + + @Override + public boolean isDbReference() { + return delegate.isDbReference(); + } + + @Override + public boolean isDocumentReference() { + return delegate.isDocumentReference(); + } + + @Override + public boolean isExplicitIdProperty() { + return delegate.isExplicitIdProperty(); + } + + @Override + public boolean isLanguageProperty() { + return delegate.isLanguageProperty(); + } + + @Override + public boolean isExplicitLanguageProperty() { + return delegate.isExplicitLanguageProperty(); + } + + @Override + public boolean isTextScoreProperty() { + return delegate.isTextScoreProperty(); + } + + @Override + @Nullable + public DBRef getDBRef() { + return delegate.getDBRef(); + } + + @Override + @Nullable + public DocumentReference getDocumentReference() { + return delegate.getDocumentReference(); + } + + @Override + public boolean usePropertyAccess() 
{ + return delegate.usePropertyAccess(); + } + + @Override + public boolean hasExplicitWriteTarget() { + return delegate.hasExplicitWriteTarget(); + } + + @Override + public PersistentEntity getOwner() { + return delegate.getOwner(); + } + + @Override + public String getName() { + return delegate.getName(); + } + + @Override + public Class getType() { + return delegate.getType(); + } + + @Override + public MongoField getMongoField() { + + if (!context.getProperty().isUnwrapped()) { + return delegate.getMongoField(); + } + + return delegate.getMongoField().withPrefix(context.getProperty().findAnnotation(Unwrapped.class).prefix()); + } + + @Override + public TypeInformation getTypeInformation() { + return delegate.getTypeInformation(); + } + + @Override + public Iterable> getPersistentEntityTypeInformation() { + return delegate.getPersistentEntityTypeInformation(); + } + + @Override + @Nullable + public Method getGetter() { + return delegate.getGetter(); + } + + @Override + public Method getRequiredGetter() { + return delegate.getRequiredGetter(); + } + + @Override + @Nullable + public Method getSetter() { + return delegate.getSetter(); + } + + @Override + public Method getRequiredSetter() { + return delegate.getRequiredSetter(); + } + + @Override + @Nullable + public Method getWither() { + return delegate.getWither(); + } + + @Override + public Method getRequiredWither() { + return delegate.getRequiredWither(); + } + + @Override + @Nullable + public Field getField() { + return delegate.getField(); + } + + @Override + public Field getRequiredField() { + return delegate.getRequiredField(); + } + + @Override + @Nullable + public String getSpelExpression() { + return delegate.getSpelExpression(); + } + + @Override + @Nullable + public Association getAssociation() { + return delegate.getAssociation(); + } + + @Override + public Association getRequiredAssociation() { + return delegate.getRequiredAssociation(); + } + + @Override + public boolean isEntity() { + return delegate.isEntity(); + } + + @Override + public boolean isIdProperty() { + return delegate.isIdProperty(); + } + + @Override + public boolean isVersionProperty() { + return delegate.isVersionProperty(); + } + + @Override + public boolean isCollectionLike() { + return delegate.isCollectionLike(); + } + + @Override + public boolean isMap() { + return delegate.isMap(); + } + + @Override + public boolean isArray() { + return delegate.isArray(); + } + + @Override + public boolean isTransient() { + return delegate.isTransient(); + } + + @Override + public boolean isWritable() { + return delegate.isWritable(); + } + + @Override + public boolean isReadable() { + return delegate.isReadable(); + } + + @Override + public boolean isImmutable() { + return delegate.isImmutable(); + } + + @Override + public boolean isAssociation() { + return delegate.isAssociation(); + } + + @Override + public boolean isUnwrapped() { + return delegate.isUnwrapped(); + } + + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } + + @Override + @Nullable + public Class getComponentType() { + return delegate.getComponentType(); + } + + @Override + public Class getRawType() { + return delegate.getRawType(); + } + + @Override + @Nullable + public Class getMapValueType() { + return delegate.getMapValueType(); + } + + @Override + public Class getActualType() { + return delegate.getActualType(); + } + + @Override + @Nullable + public A findAnnotation(Class annotationType) { + return delegate.findAnnotation(annotationType); + } + 
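A side note on the delegation above: the wrapper forwards every call to its delegate, and only the name-resolving methods (getFieldName(), hasExplicitFieldName(), getMongoField()) consult the UnwrapEntityContext to prepend the configured prefix. A hedged sketch, where streetProperty and addressProperty are hypothetical properties resolved from a mapping context and the owning property carries @Unwrapped(prefix = "addr_"):

    // The decorated property resolves its field name by prepending the
    // prefix of the owning @Unwrapped property to the delegate's name.
    MongoPersistentProperty unwrapped = new UnwrappedMongoPersistentProperty(
            streetProperty, new UnwrapEntityContext(addressProperty));

    String name = unwrapped.getFieldName(); // "addr_" + "street" -> "addr_street"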
+ @Override + public A getRequiredAnnotation(Class annotationType) throws IllegalStateException { + return delegate.getRequiredAnnotation(annotationType); + } + + @Override + @Nullable + public A findPropertyOrOwnerAnnotation(Class annotationType) { + return delegate.findPropertyOrOwnerAnnotation(annotationType); + } + + @Override + public boolean isAnnotationPresent(Class annotationType) { + return delegate.isAnnotationPresent(annotationType); + } + + @Override + public boolean hasActualTypeAnnotation(Class annotationType) { + return delegate.hasActualTypeAnnotation(annotationType); + } + + @Override + @Nullable + public Class getAssociationTargetType() { + return delegate.getAssociationTargetType(); + } + + @Override + public TypeInformation getAssociationTargetTypeInformation() { + return delegate.getAssociationTargetTypeInformation(); + } + + @Override + public PersistentPropertyAccessor getAccessorForOwner(T owner) { + return delegate.getAccessorForOwner(owner); + } + + @Override + public boolean equals(@Nullable Object obj) { + + if (this == obj) { + return true; + } + + if (obj == delegate) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnwrappedMongoPersistentProperty that = (UnwrappedMongoPersistentProperty) obj; + if (!ObjectUtils.nullSafeEquals(delegate, that.delegate)) { + return false; + } + return ObjectUtils.nullSafeEquals(context, that.context); + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(delegate); + result = 31 * result + ObjectUtils.nullSafeHashCode(context); + return result; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java index 19b5228f07..73f4890dec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 by the original author(s). + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,50 +15,41 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.lang.Nullable; /** * Base class for delete events. - * + * * @author Martin Baumgartner * @author Christoph Strobl + * @author Mark Paluch */ -public abstract class AbstractDeleteEvent extends MongoMappingEvent { +public abstract class AbstractDeleteEvent extends MongoMappingEvent { private static final long serialVersionUID = 1L; - private final Class type; - - /** - * Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @deprecated since 1.8. Please use {@link #AbstractDeleteEvent(DBObject, Class, String)}. 
- */ - @Deprecated - public AbstractDeleteEvent(DBObject dbo, Class type) { - this(dbo, type, null); - } + private final @Nullable Class type; /** - * Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @param collectionName can be {@literal null}. + * Creates a new {@link AbstractDeleteEvent} for the given {@link Document} and type. + * + * @param document must not be {@literal null}. + * @param type may be {@literal null}. + * @param collectionName must not be {@literal null}. * @since 1.8 */ - public AbstractDeleteEvent(DBObject dbo, Class type, String collectionName) { + public AbstractDeleteEvent(Document document, @Nullable Class type, String collectionName) { - super(dbo, dbo, collectionName); + super(document, document, collectionName); this.type = type; } /** * Returns the type for which the {@link AbstractDeleteEvent} shall be invoked for. - * - * @return + * + * @return can be {@literal null}. */ + @Nullable public Class getType() { return type; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java index 944ec26490..4e1de58c7a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,16 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.context.ApplicationListener; import org.springframework.core.GenericTypeResolver; +import org.springframework.data.mongodb.core.query.SerializationUtils; /** * Base class to implement domain class specific {@link ApplicationListener}s. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Martin Baumgartner @@ -30,7 +32,7 @@ */ public abstract class AbstractMongoEventListener implements ApplicationListener> { - private static final Logger LOG = LoggerFactory.getLogger(AbstractMongoEventListener.class); + private static final Log LOG = LogFactory.getLog(AbstractMongoEventListener.class); private final Class domainClass; /** @@ -41,15 +43,11 @@ public AbstractMongoEventListener() { this.domainClass = typeArgument == null ? 
Object.class : typeArgument; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent) - */ @SuppressWarnings({ "rawtypes", "unchecked" }) + @Override public void onApplicationEvent(MongoMappingEvent event) { - if (event instanceof AfterLoadEvent) { - AfterLoadEvent afterLoadEvent = (AfterLoadEvent) event; + if (event instanceof AfterLoadEvent afterLoadEvent) { if (domainClass.isAssignableFrom(afterLoadEvent.getType())) { onAfterLoad((AfterLoadEvent) event); @@ -58,16 +56,16 @@ public void onApplicationEvent(MongoMappingEvent event) { return; } - if (event instanceof AbstractDeleteEvent) { + if (event instanceof AbstractDeleteEvent deleteEvent) { - Class eventDomainType = ((AbstractDeleteEvent) event).getType(); + Class eventDomainType = deleteEvent.getType(); if (eventDomainType != null && domainClass.isAssignableFrom(eventDomainType)) { - if (event instanceof BeforeDeleteEvent) { - onBeforeDelete((BeforeDeleteEvent) event); + if (event instanceof BeforeDeleteEvent beforeDeleteEvent) { + onBeforeDelete(beforeDeleteEvent); } - if (event instanceof AfterDeleteEvent) { - onAfterDelete((AfterDeleteEvent) event); + if (event instanceof AfterDeleteEvent afterDeleteEvent) { + onAfterDelete(afterDeleteEvent); } } @@ -82,105 +80,105 @@ public void onApplicationEvent(MongoMappingEvent event) { return; } - if (event instanceof BeforeConvertEvent) { - onBeforeConvert((BeforeConvertEvent) event); - } else if (event instanceof BeforeSaveEvent) { - onBeforeSave((BeforeSaveEvent) event); - } else if (event instanceof AfterSaveEvent) { - onAfterSave((AfterSaveEvent) event); - } else if (event instanceof AfterConvertEvent) { - onAfterConvert((AfterConvertEvent) event); + if (event instanceof BeforeConvertEvent beforeConvertEvent) { + onBeforeConvert(beforeConvertEvent); + } else if (event instanceof BeforeSaveEvent beforeSaveEvent) { + onBeforeSave(beforeSaveEvent); + } else if (event instanceof AfterSaveEvent afterSaveEvent) { + onAfterSave(afterSaveEvent); + } else if (event instanceof AfterConvertEvent afterConvertEvent) { + onAfterConvert(afterConvertEvent); } } /** * Captures {@link BeforeConvertEvent}. - * + * * @param event never {@literal null}. * @since 1.8 */ public void onBeforeConvert(BeforeConvertEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onBeforeConvert({})", event.getSource()); + LOG.debug(String.format("onBeforeConvert(%s)", SerializationUtils.serializeToJsonSafely(event.getSource()))); } } /** * Captures {@link BeforeSaveEvent}. - * + * * @param event will never be {@literal null}. * @since 1.8 */ public void onBeforeSave(BeforeSaveEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onBeforeSave({}, {})", event.getSource(), event.getDBObject()); + LOG.debug(String.format("onBeforeSave(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } /** * Captures {@link AfterSaveEvent}. - * + * * @param event will never be {@literal null}. * @since 1.8 */ public void onAfterSave(AfterSaveEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterSave({}, {})", event.getSource(), event.getDBObject()); + LOG.debug(String.format("onAfterSave(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } /** * Captures {@link AfterLoadEvent}. - * + * * @param event will never be {@literal null}. 
* @since 1.8 */ public void onAfterLoad(AfterLoadEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterLoad({})", event.getDBObject()); + LOG.debug(String.format("onAfterLoad(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } /** * Captures {@link AfterConvertEvent}. - * + * * @param event will never be {@literal null}. * @since 1.8 */ public void onAfterConvert(AfterConvertEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterConvert({}, {})", event.getDBObject(), event.getSource()); + LOG.debug(String.format("onAfterConvert(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getDocument()), SerializationUtils.serializeToJsonSafely(event.getSource()))); } } /** * Captures {@link AfterDeleteEvent}. - * + * * @param event will never be {@literal null}. * @since 1.8 */ public void onAfterDelete(AfterDeleteEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterDelete({})", event.getDBObject()); + LOG.debug(String.format("onAfterDelete(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } /** * Capture {@link BeforeDeleteEvent}. - * + * * @param event will never be {@literal null}. * @since 1.8 */ public void onBeforeDelete(BeforeDeleteEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onBeforeDelete({})", event.getDBObject()); + LOG.debug(String.format("onBeforeDelete(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertCallback.java new file mode 100644 index 0000000000..be6d6fb5e4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertCallback.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Callback being invoked after a domain object is materialized from a {@link Document} when reading results. + * + * @author Roman Puchkovskiy + * @author Mark Paluch + * @since 3.0 + * @see org.springframework.data.mapping.callback.EntityCallbacks + */ +@FunctionalInterface +public interface AfterConvertCallback extends EntityCallback { + + /** + * Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the + * same or a modified instance of the domain object. + * + * @param entity the domain object (the result of the conversion). + * @param document must not be {@literal null}. + * @param collection name of the collection. + * @return the domain object that is the result of reading it from the {@link Document}. 
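Two extension styles meet in this part of the diff: subclassing AbstractMongoEventListener, as reworked above, and implementing the new EntityCallback interfaces such as AfterConvertCallback. A hedged sketch of the listener style; the Person type and the added field are hypothetical:

    // Hypothetical listener: GenericTypeResolver resolves Person from the type
    // parameter, so only Person events are dispatched to these typed hooks.
    class PersonEventListener extends AbstractMongoEventListener<Person> {

        @Override
        public void onBeforeSave(BeforeSaveEvent<Person> event) {
            // The mapped Document can still be amended before it is written.
            event.getDocument().put("savedAt", new java.util.Date());
        }
    }

Registered as a Spring bean, such a listener receives the mapping events published during template operations.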
+ */ + T onAfterConvert(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java index 2ac7a55d87..9421e9184a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,14 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import com.mongodb.DBObject; +import org.bson.Document; /** * {@link MongoMappingEvent} thrown after convert of a document. - * - * @author Jon Brisbin + * + * @author Jon Brisbin * @author Christoph Strobl + * @author Mark Paluch */ public class AfterConvertEvent extends MongoMappingEvent { @@ -29,26 +30,14 @@ public class AfterConvertEvent extends MongoMappingEvent { /** * Creates new {@link AfterConvertEvent}. - * - * @param dbo can be {@literal null}. - * @param source must not be {@literal null}. - * @deprecated since 1.8. Please use {@link #AfterConvertEvent(DBObject, Object, String)}. - */ - @Deprecated - public AfterConvertEvent(DBObject dbo, E source) { - this(dbo, source, null); - } - - /** - * Creates new {@link AfterConvertEvent}. - * - * @param dbo can be {@literal null}. + * + * @param document must not be {@literal null}. * @param source must not be {@literal null}. - * @param collectionName can be {@literal null}. + * @param collectionName must not be {@literal null}. * @since 1.8 */ - public AfterConvertEvent(DBObject dbo, E source, String collectionName) { - super(source, dbo, collectionName); + public AfterConvertEvent(Document document, E source, String collectionName) { + super(source, document, collectionName); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java index ccbcdbd43e..55ccaa5f3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 by the original author(s). + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,30 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.lang.Nullable; /** - * Event being thrown after a single or a set of documents has/have been deleted. The {@link DBObject} held in the event - * will be the query document after it has been mapped onto the domain type handled. - * + * Event being thrown after a single or a set of documents has/have been deleted. The {@link Document} held in the event + * will be the query document after it has been mapped onto the domain type handled. + * * @author Martin Baumgartner * @author Christoph Strobl + * @author Mark Paluch */ public class AfterDeleteEvent extends AbstractDeleteEvent { private static final long serialVersionUID = 1L; /** - * Creates a new {@link AfterDeleteEvent} for the given {@link DBObject} and type. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @deprecated since 1.8. Please use {@link #AfterDeleteEvent(DBObject, Class, String)}. - */ - @Deprecated - public AfterDeleteEvent(DBObject dbo, Class type) { - this(dbo, type, null); - } - - /** - * Creates a new {@link AfterDeleteEvent} for the given {@link DBObject}, type and collectionName. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @param collectionName can be {@literal null}. + * Creates a new {@link AfterDeleteEvent} for the given {@link Document}, type and collectionName. + * + * @param document must not be {@literal null}. + * @param type may be {@literal null}. + * @param collectionName must not be {@literal null}. * @since 1.8 */ - public AfterDeleteEvent(DBObject dbo, Class type, String collectionName) { - super(dbo, type, collectionName); + public AfterDeleteEvent(Document document, @Nullable Class type, String collectionName) { + super(document, type, collectionName); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java index 2876b243ee..bd808bfecf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,58 +13,45 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.mapping.event; +import org.bson.Document; import org.springframework.util.Assert; -import com.mongodb.DBObject; - /** - * Event to be triggered after loading {@link DBObject}s to be mapped onto a given type. - * + * Event to be triggered after loading {@link Document}s to be mapped onto a given type. + * * @author Oliver Gierke * @author Jon Brisbin * @author Christoph Leiter * @author Christoph Strobl + * @author Mark Paluch */ -public class AfterLoadEvent extends MongoMappingEvent { +public class AfterLoadEvent extends MongoMappingEvent { private static final long serialVersionUID = 1L; private final Class type; /** - * Creates a new {@link AfterLoadEvent} for the given {@link DBObject} and type. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @deprecated since 1.8. Please use {@link #AfterLoadEvent(DBObject, Class, String)}. - */ - @Deprecated - public AfterLoadEvent(DBObject dbo, Class type) { - this(dbo, type, null); - } - - /** - * Creates a new {@link AfterLoadEvent} for the given {@link DBObject}, type and collectionName. - * - * @param dbo must not be {@literal null}. + * Creates a new {@link AfterLoadEvent} for the given {@link Document}, type and collectionName. + * + * @param document must not be {@literal null}. * @param type must not be {@literal null}. - * @param collectionName can be {@literal null}. + * @param collectionName must not be {@literal null}. * @since 1.8 */ - public AfterLoadEvent(DBObject dbo, Class type, String collectionName) { + public AfterLoadEvent(Document document, Class type, String collectionName) { - super(dbo, dbo, collectionName); + super(document, document, collectionName); - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); this.type = type; } /** * Returns the type for which the {@link AfterLoadEvent} shall be invoked for. - * - * @return + * + * @return never {@literal null}. */ public Class getType() { return type; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveCallback.java new file mode 100644 index 0000000000..3489cfce03 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveCallback.java @@ -0,0 +1,40 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Entity callback triggered after save of a {@link Document}. + * + * @author Roman Puchkovskiy + * @since 3.0 + */ +@FunctionalInterface +public interface AfterSaveCallback extends EntityCallback { + + /** + * Entity callback method invoked after a domain object is saved. 
Can return either the same or a modified instance of + * the domain object. + * + * @param entity the domain object that was saved. + * @param document {@link Document} representing the {@code entity}. + * @param collection name of the collection. + * @return the domain object that was persisted. + */ + T onAfterSave(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java index 1b892b64c2..a2786ff011 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,43 +13,31 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping.event; -import com.mongodb.DBObject; +import org.bson.Document; /** * {@link MongoMappingEvent} triggered after save of a document. - * - * @author Jon Brisbin + * + * @author Jon Brisbin * @author Christoph Strobl + * @author Mark Paluch */ public class AfterSaveEvent extends MongoMappingEvent { private static final long serialVersionUID = 1L; - /** - * Creates new {@link AfterSaveEvent} - * - * @param source must not be {@literal null}. - * @param dbo can be {@literal null}. - * @deprecated since 1.8. Please use {@link #AfterSaveEvent(Object, DBObject, String)}. - */ - @Deprecated - public AfterSaveEvent(E source, DBObject dbo) { - super(source, dbo); - } - /** * Creates new {@link AfterSaveEvent}. - * + * * @param source must not be {@literal null}. - * @param dbo can be {@literal null}. - * @param collectionName can be {@literal null}. + * @param document must not be {@literal null}. + * @param collectionName must not be {@literal null}. 
* @since 1.8 */ - public AfterSaveEvent(E source, DBObject dbo, String collectionName) { - super(source, dbo, collectionName); + public AfterSaveEvent(E source, Document document, String collectionName) { + super(source, document, collectionName); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallback.java similarity index 55% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListener.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallback.java index 6205d27a74..df5ecc1e92 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,49 +16,40 @@ package org.springframework.data.mongodb.core.mapping.event; import org.springframework.beans.factory.ObjectFactory; -import org.springframework.context.ApplicationListener; import org.springframework.core.Ordered; import org.springframework.data.auditing.AuditingHandler; import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.mapping.callback.EntityCallback; import org.springframework.data.mapping.context.MappingContext; import org.springframework.util.Assert; /** - * Event listener to populate auditing related fields on an entity about to be saved. - * - * @author Oliver Gierke - * @author Thomas Darimont + * {@link EntityCallback} to populate auditing related fields on an entity about to be saved. + * + * @author Mark Paluch + * @since 2.2 */ -public class AuditingEventListener implements ApplicationListener>, Ordered { +public class AuditingEntityCallback implements BeforeConvertCallback, Ordered { private final ObjectFactory auditingHandlerFactory; /** - * Creates a new {@link AuditingEventListener} using the given {@link MappingContext} and {@link AuditingHandler} + * Creates a new {@link AuditingEntityCallback} using the given {@link MappingContext} and {@link AuditingHandler} * provided by the given {@link ObjectFactory}. - * + * * @param auditingHandlerFactory must not be {@literal null}. 
*/ - public AuditingEventListener(ObjectFactory auditingHandlerFactory) { + public AuditingEntityCallback(ObjectFactory auditingHandlerFactory) { - Assert.notNull(auditingHandlerFactory, "IsNewAwareAuditingHandler must not be null!"); + Assert.notNull(auditingHandlerFactory, "IsNewAwareAuditingHandler must not be null"); this.auditingHandlerFactory = auditingHandlerFactory; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent) - */ - public void onApplicationEvent(BeforeConvertEvent event) { - - Object entity = event.getSource(); - auditingHandlerFactory.getObject().markAudited(entity); + @Override + public Object onBeforeConvert(Object entity, String collection) { + return auditingHandlerFactory.getObject().markAudited(entity); } - /* - * (non-Javadoc) - * @see org.springframework.core.Ordered#getOrder() - */ @Override public int getOrder() { return 100; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeanValidationDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeanValidationDelegate.java new file mode 100644 index 0000000000..91107834f3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeanValidationDelegate.java @@ -0,0 +1,72 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import jakarta.validation.ConstraintViolation; +import jakarta.validation.Validator; + +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.util.Assert; + +/** + * Delegate to handle common calls to Bean {@link Validator Validation}. + * + * @author Mark Paluch + * @since 4.5 + */ +class BeanValidationDelegate { + + private static final Log LOG = LogFactory.getLog(BeanValidationDelegate.class); + + private final Validator validator; + + /** + * Creates a new {@link BeanValidationDelegate} using the given {@link Validator}. + * + * @param validator must not be {@literal null}. + */ + public BeanValidationDelegate(Validator validator) { + Assert.notNull(validator, "Validator must not be null"); + this.validator = validator; + } + + /** + * Validate the given object. + * + * @param object + * @return set of constraint violations. 
+ */ + public Set<ConstraintViolation<Object>> validate(Object object) { + + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Validating object: %s", object)); + } + + Set<ConstraintViolation<Object>> violations = validator.validate(object); + + if (!violations.isEmpty()) { + if (LOG.isInfoEnabled()) { + LOG.info(String.format("Found constraint violations while validating %s: %s", object, violations)); + } + } + + return violations; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertCallback.java new file mode 100644 index 0000000000..3315a1d360 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertCallback.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Callback being invoked before a domain object is converted to be persisted. + * + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mapping.callback.EntityCallbacks + */ +@FunctionalInterface +public interface BeforeConvertCallback<T> extends EntityCallback<T> { + + /** + * Entity callback method invoked before a domain object is converted to be persisted. Can return either the same or a + * modified instance of the domain object. + * + * @param entity the domain object to save. + * @param collection name of the collection. + * @return the domain object to be persisted. + */ + T onBeforeConvert(T entity, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java index b894310e99..33d992d9ab 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,10 +17,11 @@ /** * Event being thrown before a domain object is converted to be persisted.
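Because BeforeConvertCallback above is a @FunctionalInterface, a registration can be a simple lambda bean. A hedged sketch; Person, its slug property, and the derivation logic are hypothetical:

    // Hypothetical callback bean: derive a value just before conversion.
    @Bean
    BeforeConvertCallback<Person> personBeforeConvertCallback() {
        return (entity, collection) -> {
            entity.setSlug(entity.getName().toLowerCase()); // entity change: applied
            return entity; // the returned instance is the one that gets converted
        };
    }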
- * + * * @author Jon Brisbin * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ public class BeforeConvertEvent extends MongoMappingEvent { @@ -28,20 +29,9 @@ public class BeforeConvertEvent extends MongoMappingEvent { /** * Creates new {@link BeforeConvertEvent}. - * - * @param source must not be {@literal null}. - * @deprecated since 1.8. Please use {@link #BeforeConvertEvent(Object, String)}. - */ - @Deprecated - public BeforeConvertEvent(T source) { - this(source, null); - } - - /** - * Creates new {@link BeforeConvertEvent}. - * + * * @param source must not be {@literal null}. - * @param collectionName can be {@literal null}. + * @param collectionName must not be {@literal null}. * @since 1.8 */ public BeforeConvertEvent(T source, String collectionName) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java index f1311961cd..49d509fb43 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 by the original author(s). + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,30 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.lang.Nullable; /** - * Event being thrown before a document is deleted. The {@link DBObject} held in the event will represent the query + * Event being thrown before a document is deleted. The {@link Document} held in the event will represent the query * document before being mapped based on the domain class handled. - * + * * @author Martin Baumgartner * @author Christoph Strobl + * @author Mark Paluch */ public class BeforeDeleteEvent extends AbstractDeleteEvent { private static final long serialVersionUID = -2627547705679734497L; /** - * Creates a new {@link BeforeDeleteEvent} for the given {@link DBObject} and type. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @deprecated since 1.8. Please use {@link #BeforeDeleteEvent(DBObject, Class, String)}. - */ - @Deprecated - public BeforeDeleteEvent(DBObject dbo, Class type) { - this(dbo, type, null); - } - - /** - * Creates a new {@link BeforeDeleteEvent} for the given {@link DBObject}, type and collectionName. - * - * @param dbo must not be {@literal null}. - * @param type can be {@literal null}. - * @param collectionName can be {@literal null}. + * Creates a new {@link BeforeDeleteEvent} for the given {@link Document}, type and collectionName. + * + * @param document must not be {@literal null}. + * @param type may be {@literal null}. + * @param collectionName must not be {@literal null}. 
	 * @since 1.8
	 */
-	public BeforeDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
-		super(dbo, type, collectionName);
+	public BeforeDeleteEvent(Document document, @Nullable Class<T> type, String collectionName) {
+		super(document, type, collectionName);
	}
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveCallback.java
new file mode 100644
index 0000000000..27ea5dce69
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveCallback.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import org.bson.Document;
+import org.springframework.data.mapping.callback.EntityCallback;
+
+/**
+ * Entity callback triggered before save of a document.
+ *
+ * @author Mark Paluch
+ * @author Michael J. Simons
+ * @since 2.2
+ * @see org.springframework.data.mapping.callback.EntityCallbacks
+ */
+@FunctionalInterface
+public interface BeforeSaveCallback<T> extends EntityCallback<T> {
+
+	/**
+	 * Entity callback method invoked before a domain object is saved. Can return either the same or a modified instance
+	 * of the domain object and can modify {@link Document} contents. This method is called after converting the
+	 * {@code entity} to a {@link Document}, so effectively the document is the outcome of invoking this callback.
+	 * Changes to the domain object are not taken into account for saving, only changes to the document. Only transient
+	 * fields of the entity should be changed in this callback. To change the persistent entity before it is converted,
+	 * use the {@link BeforeConvertCallback}.
+	 *
+	 * @param entity the domain object to save.
+	 * @param document {@link Document} representing the {@code entity}.
+	 * @param collection name of the collection.
+	 * @return the domain object to be persisted.
+	 */
+	T onBeforeSave(T entity, Document document, String collection);
+}
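An illustrative sketch (again with a hypothetical Person type): because the entity has already been converted at this point, changes must target the Document to affect what is persisted:

import org.bson.Document;
import org.springframework.stereotype.Component;

@Component
class PersonBeforeSaveCallback implements BeforeSaveCallback<Person> {

	@Override
	public Person onBeforeSave(Person entity, Document document, String collection) {
		// Changes to 'entity' are not persisted here; edit the Document instead.
		document.append("lastSavedBy", "import-job");
		return entity;
	}
}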
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java
index b55b52818a..2aa3317f9e 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java
@@ -1,11 +1,11 @@
/*
- * Copyright (c) 2011-2015 by the original author(s).
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,16 +13,16 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
package org.springframework.data.mongodb.core.mapping.event;

-import com.mongodb.DBObject;
+import org.bson.Document;

/**
 * {@link MongoMappingEvent} triggered before save of a document.
- *
- * @author Jon Brisbin
+ *
+ * @author Jon Brisbin
 * @author Christoph Strobl
+ * @author Mark Paluch
 */
public class BeforeSaveEvent<E> extends MongoMappingEvent<E> {
@@ -30,26 +30,14 @@ public class BeforeSaveEvent<E> extends MongoMappingEvent<E> {

	/**
	 * Creates a new {@link BeforeSaveEvent}.
-	 *
-	 * @param source must not be {@literal null}.
-	 * @param dbo can be {@literal null}.
-	 * @deprecated since 1.8. Please use {@link #BeforeSaveEvent(Object, DBObject, String)}.
-	 */
-	@Deprecated
-	public BeforeSaveEvent(E source, DBObject dbo) {
-		super(source, dbo);
-	}
-
-	/**
-	 * Creates a new {@link BeforeSaveEvent}.
-	 *
+	 *
	 * @param source must not be {@literal null}.
-	 * @param dbo can be {@literal null}.
-	 * @param collectionName can be {@literal null}.
+	 * @param document must not be {@literal null}.
+	 * @param collectionName must not be {@literal null}.
	 * @since 1.8
	 */
-	public BeforeSaveEvent(E source, DBObject dbo, String collectionName) {
-		super(source, dbo, collectionName);
+	public BeforeSaveEvent(E source, Document document, String collectionName) {
+		super(source, document, collectionName);
	}
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java
index 2e27551a83..c36e33e777 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java
@@ -1,11 +1,11 @@
/*
- * Copyright 2011-2016 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,81 +15,71 @@
 */
package org.springframework.data.mongodb.core.mapping.event;

-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
import org.springframework.context.ApplicationListener;

/**
 * {@link ApplicationListener} for Mongo mapping events logging the events.
- *
+ *
 * @author Jon Brisbin
 * @author Martin Baumgartner
 * @author Oliver Gierke
+ * @author Christoph Strobl
 */
public class LoggingEventListener extends AbstractMongoEventListener<Object> {

-	private static final Logger LOGGER = LoggerFactory.getLogger(LoggingEventListener.class);
+	private static final Log LOGGER = LogFactory.getLog(LoggingEventListener.class);

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeConvert(org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent)
-	 */
	@Override
	public void onBeforeConvert(BeforeConvertEvent<Object> event) {
-		LOGGER.info("onBeforeConvert: {}", event.getSource());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onBeforeConvert: %s", event.getSource()));
+		}
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent)
-	 */
	@Override
	public void onBeforeSave(BeforeSaveEvent<Object> event) {
-		LOGGER.info("onBeforeSave: {}, {}", event.getSource(), event.getDBObject());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onBeforeSave: %s, %s", event.getSource(), serializeToJsonSafely(event.getDocument())));
+		}
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterSave(org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent)
-	 */
	@Override
	public void onAfterSave(AfterSaveEvent<Object> event) {
-		LOGGER.info("onAfterSave: {}, {}", event.getSource(), event.getDBObject());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onAfterSave: %s, %s", event.getSource(), serializeToJsonSafely(event.getDocument())));
+		}
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterLoad(org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent)
-	 */
	@Override
	public void onAfterLoad(AfterLoadEvent<Object> event) {
-		LOGGER.info("onAfterLoad: {}", event.getDBObject());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onAfterLoad: %s", serializeToJsonSafely(event.getDocument())));
+		}
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterConvert(org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent)
-	 */
	@Override
	public void onAfterConvert(AfterConvertEvent<Object> event) {
-		LOGGER.info("onAfterConvert: {}, {}", event.getDBObject(), event.getSource());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onAfterConvert: %s, %s", serializeToJsonSafely(event.getDocument()), event.getSource()));
+		}
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterDelete(org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent)
-	 */
	@Override
	public void onAfterDelete(AfterDeleteEvent<Object> event) {
-		LOGGER.info("onAfterDelete: {}", event.getDBObject());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onAfterDelete: %s", serializeToJsonSafely(event.getDocument())));
+		}
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeDelete(org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent)
-	 */
	@Override
	public void onBeforeDelete(BeforeDeleteEvent<Object> event) {
-		LOGGER.info("onBeforeDelete: {}", event.getDBObject());
+		if (LOGGER.isInfoEnabled()) {
+			LOGGER.info(String.format("onBeforeDelete: %s", serializeToJsonSafely(event.getDocument())));
+		}
	}
}
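Registering the listener as a plain bean is enough to activate it; a minimal configuration sketch (the configuration class name is illustrative):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener;

@Configuration
class MongoEventLoggingConfiguration {

	@Bean
	LoggingEventListener mongoEventListener() {
		return new LoggingEventListener();
	}
}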
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java
index d286aea544..eec9a3edf1 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java
@@ -1,11 +1,11 @@
/*
- * Copyright (c) 2011-2015 by the original author(s).
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,75 +13,76 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
package org.springframework.data.mongodb.core.mapping.event;

-import org.springframework.context.ApplicationEvent;
+import java.util.function.Function;

-import com.mongodb.DBObject;
+import org.bson.Document;
+import org.springframework.context.ApplicationEvent;
+import org.springframework.lang.Nullable;

/**
 * Base {@link ApplicationEvent} triggered by Spring Data MongoDB.
- *
- * @author Jon Brisbin
+ *
+ * @author Jon Brisbin
 * @author Christoph Strobl
+ * @author Mark Paluch
 */
public class MongoMappingEvent<T> extends ApplicationEvent {

	private static final long serialVersionUID = 1L;

-	private final DBObject dbo;
-	private final String collectionName;
+	private final @Nullable Document document;
+	private final @Nullable String collectionName;

	/**
	 * Creates a new {@link MongoMappingEvent}.
-	 *
+	 *
	 * @param source must not be {@literal null}.
-	 * @param dbo can be {@literal null}.
-	 * @deprecated since 1.8. Please use {@link #MongoMappingEvent(Object, DBObject, String)}.
-	 */
-	@Deprecated
-	public MongoMappingEvent(T source, DBObject dbo) {
-		this(source, dbo, null);
-	}
-
-	/**
-	 * Creates a new {@link MongoMappingEvent}.
-	 *
-	 * @param source must not be {@literal null}.
-	 * @param dbo can be {@literal null}.
+	 * @param document can be {@literal null}.
	 * @param collectionName can be {@literal null}.
	 */
-	public MongoMappingEvent(T source, DBObject dbo, String collectionName) {
+	public MongoMappingEvent(T source, @Nullable Document document, @Nullable String collectionName) {

		super(source);
-		this.dbo = dbo;
+		this.document = document;
		this.collectionName = collectionName;
	}

	/**
	 * @return {@literal null} if not set.
	 */
-	public DBObject getDBObject() {
-		return dbo;
+	public @Nullable Document getDocument() {
+		return document;
	}

	/**
	 * Get the collection the event refers to.
-	 *
+	 *
	 * @return {@literal null} if not set.
	 * @since 1.8
	 */
-	public String getCollectionName() {
+	public @Nullable String getCollectionName() {
		return collectionName;
	}

-	/*
-	 * (non-Javadoc)
-	 * @see java.util.EventObject#getSource()
-	 */
	@SuppressWarnings({ "unchecked" })
	@Override
	public T getSource() {
		return (T) super.getSource();
	}
+
+	/**
+	 * Allows client code to change the underlying source instance by applying the given {@link Function}.
+	 *
+	 * @param mapper the {@link Function} to apply, will only be applied if the source is not {@literal null}.
+	 * @since 2.1
+	 */
+	final void mapSource(Function<T, T> mapper) {
+
+		if (source == null) {
+			return;
+		}
+
+		this.source = mapper.apply(getSource());
+	}
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterConvertCallback.java
new file mode 100644
index 0000000000..5dc1f7c69f
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterConvertCallback.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import org.bson.Document;
+import org.reactivestreams.Publisher;
+
+import org.springframework.data.mapping.callback.EntityCallback;
+
+/**
+ * Callback being invoked after a domain object is materialized from a {@link Document} when reading results.
+ *
+ * @author Roman Puchkovskiy
+ * @author Mark Paluch
+ * @since 3.0
+ * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks
+ */
+@FunctionalInterface
+public interface ReactiveAfterConvertCallback<T> extends EntityCallback<T> {
+
+	/**
+	 * Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the
+	 * same or a modified instance of the domain object.
+	 *
+	 * @param entity the domain object (the result of the conversion).
+	 * @param document must not be {@literal null}.
+	 * @param collection name of the collection.
+	 * @return a {@link Publisher} emitting the domain object that is the result of reading it from the {@link Document}.
+	 */
+	Publisher<T> onAfterConvert(T entity, Document document, String collection);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterSaveCallback.java
new file mode 100644
index 0000000000..4e8302d84c
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterSaveCallback.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import org.bson.Document;
+import org.reactivestreams.Publisher;
+
+import org.springframework.data.mapping.callback.EntityCallback;
+import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
+
+/**
+ * Entity callback triggered after save of a {@link Document}.
+ *
+ * @author Roman Puchkovskiy
+ * @since 3.0
+ * @see ReactiveEntityCallbacks
+ */
+@FunctionalInterface
+public interface ReactiveAfterSaveCallback<T> extends EntityCallback<T> {
+
+	/**
+	 * Entity callback method invoked after a domain object is saved. Can return either the same or a modified instance of
+	 * the domain object.
+	 *
+	 * @param entity the domain object that was saved.
+	 * @param document {@link Document} representing the {@code entity}.
+	 * @param collection name of the collection.
+	 * @return a {@link Publisher} emitting the domain object to be returned to the caller.
+	 */
+	Publisher<T> onAfterSave(T entity, Document document, String collection);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAuditingEntityCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAuditingEntityCallback.java
new file mode 100644
index 0000000000..62fe054145
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAuditingEntityCallback.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import org.reactivestreams.Publisher;
+
+import org.springframework.beans.factory.ObjectFactory;
+import org.springframework.core.Ordered;
+import org.springframework.data.auditing.AuditingHandler;
+import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler;
+import org.springframework.data.mapping.callback.EntityCallback;
+import org.springframework.data.mapping.context.MappingContext;
+import org.springframework.util.Assert;
+
+/**
+ * Reactive {@link EntityCallback} to populate auditing related fields on an entity about to be saved.
+ *
+ * @author Mark Paluch
+ * @since 2.2
+ */
+public class ReactiveAuditingEntityCallback implements ReactiveBeforeConvertCallback<Object>, Ordered {
+
+	private final ObjectFactory<ReactiveIsNewAwareAuditingHandler> auditingHandlerFactory;
+
+	/**
+	 * Creates a new {@link ReactiveAuditingEntityCallback} using the given {@link MappingContext} and
+	 * {@link AuditingHandler} provided by the given {@link ObjectFactory}.
+	 *
+	 * @param auditingHandlerFactory must not be {@literal null}.
+	 */
+	public ReactiveAuditingEntityCallback(ObjectFactory<ReactiveIsNewAwareAuditingHandler> auditingHandlerFactory) {
+
+		Assert.notNull(auditingHandlerFactory, "auditingHandlerFactory must not be null");
+		this.auditingHandlerFactory = auditingHandlerFactory;
+	}
+
+	@Override
+	public Publisher<Object> onBeforeConvert(Object entity, String collection) {
+		return auditingHandlerFactory.getObject().markAudited(entity);
+	}
+
+	@Override
+	public int getOrder() {
+		return 100;
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeConvertCallback.java
new file mode 100644
index 0000000000..842c734744
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeConvertCallback.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import org.reactivestreams.Publisher;
+
+import org.springframework.data.mapping.callback.EntityCallback;
+
+/**
+ * Callback being invoked before a domain object is converted to be persisted.
+ *
+ * @author Mark Paluch
+ * @since 2.2
+ * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks
+ */
+@FunctionalInterface
+public interface ReactiveBeforeConvertCallback<T> extends EntityCallback<T> {
+
+	/**
+	 * Entity callback method invoked before a domain object is converted to be persisted. Can return either the same or a
+	 * modified instance of the domain object.
+	 *
+	 * @param entity the domain object to save.
+	 * @param collection name of the collection.
+	 * @return a {@link Publisher} emitting the domain object to be persisted.
+	 */
+	Publisher<T> onBeforeConvert(T entity, String collection);
+}
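A reactive counterpart sketch (hypothetical Person type, not part of this change set); the callback returns a Publisher, so the entity can also be enriched asynchronously before conversion:

import org.reactivestreams.Publisher;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Mono;

@Component
class PersonReactiveBeforeConvertCallback implements ReactiveBeforeConvertCallback<Person> {

	@Override
	public Publisher<Person> onBeforeConvert(Person entity, String collection) {
		return Mono.just(entity); // replace with an async enrichment step as needed
	}
}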
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeSaveCallback.java
new file mode 100644
index 0000000000..e353cb8ecf
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeSaveCallback.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import org.bson.Document;
+import org.reactivestreams.Publisher;
+
+import org.springframework.data.mapping.callback.EntityCallback;
+
+/**
+ * Entity callback triggered before save of a document.
+ *
+ * @author Mark Paluch
+ * @since 2.2
+ * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks
+ */
+@FunctionalInterface
+public interface ReactiveBeforeSaveCallback<T> extends EntityCallback<T> {
+
+	/**
+	 * Entity callback method invoked before a domain object is saved. Can return either the same or a modified instance
+	 * of the domain object and can modify {@link Document} contents. This method is called after converting the
+	 * {@code entity} to a {@link Document}, so effectively the document is the outcome of invoking this callback.
+	 *
+	 * @param entity the domain object to save.
+	 * @param document {@link Document} representing the {@code entity}.
+	 * @param collection name of the collection.
+	 * @return a {@link Publisher} emitting the domain object to be persisted.
+	 */
+	Publisher<T> onBeforeSave(T entity, Document document, String collection);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallback.java
new file mode 100644
index 0000000000..7011da90b4
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallback.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.validation.Validator;
+import reactor.core.publisher.Mono;
+
+import java.util.Set;
+
+import org.bson.Document;
+
+import org.springframework.core.Ordered;
+
+/**
+ * Reactive variant of the JSR-303 dependent entity validator. When registered as a Spring component, it is
+ * automatically invoked after the object to {@link Document} conversion and before entities are saved to the database.
+ *
+ * @author Mark Paluch
+ * @author Rene Felgenträger
+ * @since 4.5
+ */
+public class ReactiveValidatingEntityCallback implements ReactiveBeforeSaveCallback<Object>, Ordered {
+
+	private final BeanValidationDelegate delegate;
+
+	/**
+	 * Creates a new {@link ReactiveValidatingEntityCallback} using the given {@link Validator}.
+	 *
+	 * @param validator must not be {@literal null}.
+	 */
+	public ReactiveValidatingEntityCallback(Validator validator) {
+		this.delegate = new BeanValidationDelegate(validator);
+	}
+
+	@Override
+	public Mono<Object> onBeforeSave(Object entity, Document document, String collection) {
+
+		Set<ConstraintViolation<Object>> violations = delegate.validate(entity);
+
+		if (!violations.isEmpty()) {
+			return Mono.error(new ConstraintViolationException(violations));
+		}
+
+		return Mono.just(entity);
+	}
+
+	@Override
+	public int getOrder() {
+		return 100;
+	}
+
+}
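Assuming a jakarta.validation.Validator bean is available (for example from a LocalValidatorFactoryBean), the reactive callback can be registered like this (configuration class name is illustrative):

import jakarta.validation.Validator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class ReactiveMongoValidationConfiguration {

	@Bean
	ReactiveValidatingEntityCallback reactiveValidatingEntityCallback(Validator validator) {
		return new ReactiveValidatingEntityCallback(validator);
	}
}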
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallback.java
new file mode 100644
index 0000000000..260652616e
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallback.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.validation.Validator;
+
+import java.util.Set;
+
+import org.bson.Document;
+
+import org.springframework.core.Ordered;
+
+/**
+ * JSR-303 dependent entity validator. When registered as a Spring component, it is automatically invoked after the
+ * object to {@link Document} conversion and before entities are saved to the database.
+ *
+ * @author Rene Felgenträger
+ * @author Mark Paluch
+ * @since 4.5
+ */
+public class ValidatingEntityCallback implements BeforeSaveCallback<Object>, Ordered {
+
+	private final BeanValidationDelegate delegate;
+
+	/**
+	 * Creates a new {@link ValidatingEntityCallback} using the given {@link Validator}.
+	 *
+	 * @param validator must not be {@literal null}.
+	 */
+	public ValidatingEntityCallback(Validator validator) {
+		this.delegate = new BeanValidationDelegate(validator);
+	}
+
+	@Override
+	public Object onBeforeSave(Object entity, Document document, String collection) {
+
+		Set<ConstraintViolation<Object>> violations = delegate.validate(entity);
+
+		if (!violations.isEmpty()) {
+			throw new ConstraintViolationException(violations);
+		}
+
+		return entity;
+	}
+
+	@Override
+	public int getOrder() {
+		return 100;
+	}
+
+}
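The imperative variant is registered the same way; entities failing validation abort the save with a ConstraintViolationException:

import jakarta.validation.Validator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class MongoValidationConfiguration {

	@Bean
	ValidatingEntityCallback validatingEntityCallback(Validator validator) {
		return new ValidatingEntityCallback(validator);
	}
}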
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java
index d98f4fe164..1854c486f8 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java
@@ -1,11 +1,11 @@
/*
- * Copyright 2012-2016 the original author or authors.
+ * Copyright 2012-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,56 +15,48 @@
 */
package org.springframework.data.mongodb.core.mapping.event;

-import java.util.Set;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.validation.Validator;

-import javax.validation.ConstraintViolationException;
-import javax.validation.Validator;
+import java.util.Set;

-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.util.Assert;
+import org.bson.Document;

/**
- * javax.validation dependant entities validator. When it is registered as Spring component its automatically invoked
- * before entities are saved in database.
- *
+ * JSR-303 dependent entity validator. When registered as a Spring component, it is automatically invoked after the
+ * object to {@link Document} conversion and before entities are saved to the database.
+ *
 * @author Maciej Walkowiak
 * @author Oliver Gierke
+ * @author Christoph Strobl
+ * @deprecated since 4.5, use {@link ValidatingEntityCallback} or {@link ReactiveValidatingEntityCallback} instead to
+ *             ensure ordering and interruption of saving when encountering validation constraint violations.
 */
+@Deprecated(since = "4.5")
public class ValidatingMongoEventListener extends AbstractMongoEventListener<Object> {

-	private static final Logger LOG = LoggerFactory.getLogger(ValidatingMongoEventListener.class);
-
-	private final Validator validator;
+	private final BeanValidationDelegate delegate;

	/**
	 * Creates a new {@link ValidatingMongoEventListener} using the given {@link Validator}.
-	 *
+	 *
	 * @param validator must not be {@literal null}.
	 */
	public ValidatingMongoEventListener(Validator validator) {
-
-		Assert.notNull(validator, "Validator must not be null!");
-		this.validator = validator;
+		this.delegate = new BeanValidationDelegate(validator);
	}

-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent)
-	 */
	@Override
-	@SuppressWarnings({ "rawtypes", "unchecked" })
	public void onBeforeSave(BeforeSaveEvent<Object> event) {

-		Object source = event.getSource();
-
-		LOG.debug("Validating object: {}", source);
-		Set violations = validator.validate(source);
+		Set<ConstraintViolation<Object>> violations = delegate.validate(event.getSource());

		if (!violations.isEmpty()) {
-
-			LOG.info("During object: {} validation violations found: {}", source, violations);
			throw new ConstraintViolationException(violations);
		}
	}
+
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/package-info.java
index 96c601da35..0cc9d071a3 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/package-info.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/package-info.java
@@ -1,5 +1,6 @@
/**
 * Mapping event callback infrastructure for the MongoDB document-to-object mapping subsystem.
 */
+@org.springframework.lang.NonNullApi
package org.springframework.data.mongodb.core.mapping.event;
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/package-info.java
index 5e451b0431..0a513f1a18 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/package-info.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/package-info.java
@@ -1,5 +1,6 @@
/**
 * Infrastructure for the MongoDB document-to-object mapping subsystem.
*/ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.mapping; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupBy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupBy.java deleted file mode 100644 index b1cbb49761..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupBy.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.mapreduce; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - -/** - * Collects the parameters required to perform a group operation on a collection. The query condition and the input - * collection are specified on the group method as method arguments to be consistent with other operations, e.g. - * map-reduce. - * - * @author Mark Pollack - */ -public class GroupBy { - - private DBObject dboKeys; - private String keyFunction; - private String initial; - private DBObject initialDbObject; - private String reduce; - private String finalize; - - public GroupBy(String... keys) { - DBObject dbo = new BasicDBObject(); - for (String key : keys) { - dbo.put(key, 1); - } - dboKeys = dbo; - } - - // NOTE GroupByCommand does not handle keyfunction. - - public GroupBy(String key, boolean isKeyFunction) { - DBObject dbo = new BasicDBObject(); - if (isKeyFunction) { - keyFunction = key; - } else { - dbo.put(key, 1); - dboKeys = dbo; - } - } - - public static GroupBy keyFunction(String key) { - return new GroupBy(key, true); - } - - public static GroupBy key(String... 
keys) { - return new GroupBy(keys); - } - - public GroupBy initialDocument(String initialDocument) { - initial = initialDocument; - return this; - } - - public GroupBy initialDocument(DBObject initialDocument) { - initialDbObject = initialDocument; - return this; - } - - public GroupBy reduceFunction(String reduceFunction) { - reduce = reduceFunction; - return this; - } - - public GroupBy finalizeFunction(String finalizeFunction) { - finalize = finalizeFunction; - return this; - } - - public DBObject getGroupByObject() { - // return new GroupCommand(dbCollection, dboKeys, condition, initial, reduce, finalize); - BasicDBObject dbo = new BasicDBObject(); - if (dboKeys != null) { - dbo.put("key", dboKeys); - } - if (keyFunction != null) { - dbo.put("$keyf", keyFunction); - } - - dbo.put("$reduce", reduce); - - dbo.put("initial", initialDbObject); - if (initial != null) { - dbo.put("initial", initial); - } - if (finalize != null) { - dbo.put("finalize", finalize); - } - return dbo; - } - -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupByResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupByResults.java deleted file mode 100644 index a3dc65ee80..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupByResults.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2011 - 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.mapreduce; - -import java.util.Iterator; -import java.util.List; - -import org.springframework.util.Assert; - -import com.mongodb.DBObject; - -/** - * Collects the results of executing a group operation. - * - * @author Mark Pollack - * @author Christoph Strobl - * @param The class in which the results are mapped onto, accessible via an {@link Iterator}. 
- */ -public class GroupByResults implements Iterable { - - private final List mappedResults; - private final DBObject rawResults; - - private double count; - private int keys; - private String serverUsed; - - public GroupByResults(List mappedResults, DBObject rawResults) { - - Assert.notNull(mappedResults); - Assert.notNull(rawResults); - this.mappedResults = mappedResults; - this.rawResults = rawResults; - parseKeys(); - parseCount(); - parseServerUsed(); - } - - public double getCount() { - return count; - } - - public int getKeys() { - return keys; - } - - public String getServerUsed() { - return serverUsed; - } - - public Iterator iterator() { - return mappedResults.iterator(); - } - - public DBObject getRawResults() { - return rawResults; - } - - private void parseCount() { - - Object object = rawResults.get("count"); - if (object instanceof Number) { - count = ((Number) object).doubleValue(); - } - - } - - private void parseKeys() { - - Object object = rawResults.get("keys"); - if (object instanceof Number) { - keys = ((Number) object).intValue(); - } - } - - private void parseServerUsed() { - - // "serverUsed" : "127.0.0.1:27017" - Object object = rawResults.get("serverUsed"); - if (object instanceof String) { - serverUsed = (String) object; - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java index d47d9e2d84..32a9ed5118 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2014 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,16 @@ */ package org.springframework.data.mongodb.core.mapreduce; +import org.springframework.lang.Nullable; + /** * Value object to encapsulate results of a map-reduce count. - * + * * @author Mark Pollack * @author Oliver Gierke + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. */ +@Deprecated public class MapReduceCounts { public static final MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1); @@ -31,7 +35,7 @@ public class MapReduceCounts { /** * Creates a new {@link MapReduceCounts} using the given input count, emit count, and output count. 
- * + * * @param inputCount * @param emitCount * @param outputCount @@ -55,20 +59,12 @@ public long getOutputCount() { return outputCount; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return "MapReduceCounts [inputCount=" + inputCount + ", emitCount=" + emitCount + ", outputCount=" + outputCount + "]"; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -82,12 +78,8 @@ public int hashCode() { return Long.valueOf(result).intValue(); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java index c50da11c27..9f34ec44e4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,41 +17,41 @@ import java.util.HashMap; import java.util.Map; +import java.util.Optional; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.MapReduceCommand; +import org.bson.Document; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; + +import com.mongodb.client.model.MapReduceAction; /** * @author Mark Pollack * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. 
 */
+@Deprecated
public class MapReduceOptions {

- private String outputCollection;
-
- private String outputDatabase;
-
- private Boolean outputSharded;
-
- private MapReduceCommand.OutputType outputType = MapReduceCommand.OutputType.REPLACE;
-
- private String finalizeFunction;
-
- private Map<String, Object> scopeVariables = new HashMap<String, Object>();
-
- private Boolean jsMode;
+ private @Nullable String outputCollection;

- private Boolean verbose = true;
+ private Optional<String> outputDatabase = Optional.empty();
+ private @Nullable MapReduceAction mapReduceAction = MapReduceAction.REPLACE;
+ private Map<String, Object> scopeVariables = new HashMap<>();
+ private Map<String, Object> extraOptions = new HashMap<>();
+ private @Nullable Boolean jsMode;
+ private Boolean verbose = Boolean.TRUE;
+ private @Nullable Integer limit;

- private Integer limit;
-
- private Map<String, Object> extraOptions = new HashMap<String, Object>();
+ private Optional<Boolean> outputSharded = Optional.empty();
+ private Optional<String> finalizeFunction = Optional.empty();
+ private Optional<Collation> collation = Optional.empty();

 /**
  * Static factory method to create a MapReduceOptions instance.
-  *
+  *
  * @return a new instance
  */
 public static MapReduceOptions options() {
@@ -61,7 +61,7 @@ public static MapReduceOptions options() {
 /**
  * Limit the number of objects to return from the collection that is fed into the map-reduce operation. Often used in
  * conjunction with a query and sort option so as to reduce the portion of the data that will be processed.
-  *
+  *
  * @param limit Limit the number of objects to process
  * @return MapReduceOptions so that methods can be chained in a fluent API style
  */
@@ -74,11 +74,12 @@ public MapReduceOptions limit(int limit) {
 /**
  * The collection where the results from the map-reduce operation will be stored. Note, you can set the database name
  * as well with the outputDatabase option.
-  *
+  *
  * @param collectionName The name of the collection where the results of the map-reduce operation will be stored.
  * @return MapReduceOptions so that methods can be chained in a fluent API style
  */
 public MapReduceOptions outputCollection(String collectionName) {
+
  this.outputCollection = collectionName;
  return this;
 }
@@ -86,12 +87,13 @@ public MapReduceOptions outputCollection(String collectionName) {
 /**
  * The database where the results from the map-reduce operation will be stored. Note, you can set the collection name
  * as well with the outputCollection option.
-  *
+  *
  * @param outputDatabase The name of the database where the results of the map-reduce operation will be stored.
  * @return MapReduceOptions so that methods can be chained in a fluent API style
  */
- public MapReduceOptions outputDatabase(String outputDatabase) {
-  this.outputDatabase = outputDatabase;
+ public MapReduceOptions outputDatabase(@Nullable String outputDatabase) {
+
+  this.outputDatabase = Optional.ofNullable(outputDatabase);
  return this;
 }
@@ -99,22 +101,27 @@
 * With this option, no collection will be created, and the whole map-reduce operation will happen in RAM. Also, the
 * results of the map-reduce will be returned within the result object. Note that this option is possible only when
 * the result set fits within the 16MB limit of a single document.
-  *
-  * @return MapReduceOptions so that methods can be chained in a fluent API style
+  *
+  * @return this.
+  * @since 3.0
 */
- public MapReduceOptions outputTypeInline() {
-  this.outputType = MapReduceCommand.OutputType.INLINE;
+ public MapReduceOptions actionInline() {
+
+  this.mapReduceAction = null;
  return this;
 }
+
 /**
  * This option will merge new data into the old output collection. In other words, if the same key exists in both the
  * result set and the old collection, the new key will overwrite the old one.
-  *
-  * @return MapReduceOptions so that methods can be chained in a fluent API style
+  *
+  * @return this.
+  * @since 3.0
  */
- public MapReduceOptions outputTypeMerge() {
-  this.outputType = MapReduceCommand.OutputType.MERGE;
+ public MapReduceOptions actionMerge() {
+
+  this.mapReduceAction = MapReduceAction.MERGE;
  return this;
 }
@@ -122,56 +129,63 @@
 * If documents exist for a given key in the result set and in the old collection, then a reduce operation (using the
 * specified reduce function) will be performed on the two values and the result will be written to the output
 * collection. If a finalize function was provided, this will be run after the reduce as well.
-  *
-  * @return
+  *
+  * @return this.
+  * @since 3.0
 */
- public MapReduceOptions outputTypeReduce() {
-  this.outputType = MapReduceCommand.OutputType.REDUCE;
+ public MapReduceOptions actionReduce() {
+
+  this.mapReduceAction = MapReduceAction.REDUCE;
  return this;
 }

 /**
  * The output will be inserted into a collection which will atomically replace any existing collection with the same
-  * name. Note, the default is MapReduceCommand.OutputType.REPLACE
-  *
+  * name. Note, the default is {@link MapReduceAction#REPLACE}.
+  *
  * @return MapReduceOptions so that methods can be chained in a fluent API style
+  * @since 3.0
  */
- public MapReduceOptions outputTypeReplace() {
-  this.outputType = MapReduceCommand.OutputType.REPLACE;
+ public MapReduceOptions actionReplace() {
+
+  this.mapReduceAction = MapReduceAction.REPLACE;
  return this;
 }

 /**
  * If true and combined with an output mode that writes to a collection, the output collection will be sharded using
  * the _id field. For MongoDB 1.9+
-  *
+  *
  * @param outputShared if true, output will be sharded based on _id key.
  * @return MapReduceOptions so that methods can be chained in a fluent API style
  */
 public MapReduceOptions outputSharded(boolean outputShared) {
-  this.outputSharded = outputShared;
+
+  this.outputSharded = Optional.of(outputShared);
  return this;
 }

 /**
  * Sets the finalize function.
-  *
+  *
  * @param finalizeFunction The finalize function. Can be a JSON string or a Spring Resource URL
  * @return MapReduceOptions so that methods can be chained in a fluent API style
  */
- public MapReduceOptions finalizeFunction(String finalizeFunction) {
-  this.finalizeFunction = finalizeFunction;
+ public MapReduceOptions finalizeFunction(@Nullable String finalizeFunction) {
+
+  this.finalizeFunction = Optional.ofNullable(finalizeFunction);
  return this;
 }

 /**
  * Key-value pairs that are placed into JavaScript global scope and can be accessed from map, reduce, and finalize
  * scripts.
- * + * * @param scopeVariables variables that can be accessed from map, reduce, and finalize scripts * @return MapReduceOptions so that methods can be chained in a fluent API style */ public MapReduceOptions scopeVariables(Map scopeVariables) { + this.scopeVariables = scopeVariables; return this; } @@ -179,89 +193,108 @@ public MapReduceOptions scopeVariables(Map scopeVariables) { /** * Flag that toggles behavior in the map-reduce operation so as to avoid intermediate conversion to BSON between the * map and reduce steps. For MongoDB 1.9+ - * + * * @param javaScriptMode if true, have the execution of map-reduce stay in JavaScript * @return MapReduceOptions so that methods can be chained in a fluent API style */ public MapReduceOptions javaScriptMode(boolean javaScriptMode) { + this.jsMode = javaScriptMode; return this; } /** * Flag to set that will provide statistics on job execution time. - * + * * @return MapReduceOptions so that methods can be chained in a fluent API style */ public MapReduceOptions verbose(boolean verbose) { - this.verbose = verbose; - return this; - } - /** - * Add additional extra options that may not have a method on this class. This method will help if you use a version - * of this client library with a server version that has added additional map-reduce options that do not yet have an - * method for use in setting them. options - * - * @param key The key option - * @param value The value of the option - * @return MapReduceOptions so that methods can be chained in a fluent API style - * @deprecated since 1.7. - */ - @Deprecated - public MapReduceOptions extraOption(String key, Object value) { - extraOptions.put(key, value); + this.verbose = verbose; return this; } /** + * Define the Collation specifying language-specific rules for string comparison. + * + * @param collation can be {@literal null}. * @return - * @deprecated since 1.7 + * @since 2.0 */ - @Deprecated - public Map getExtraOptions() { - return extraOptions; + public MapReduceOptions collation(@Nullable Collation collation) { + + this.collation = Optional.ofNullable(collation); + return this; } - public String getFinalizeFunction() { + public Optional getFinalizeFunction() { return this.finalizeFunction; } + @Nullable public Boolean getJavaScriptMode() { return this.jsMode; } + @Nullable public String getOutputCollection() { return this.outputCollection; } - public String getOutputDatabase() { + public Optional getOutputDatabase() { return this.outputDatabase; } - public Boolean getOutputSharded() { + public Optional getOutputSharded() { return this.outputSharded; } - public MapReduceCommand.OutputType getOutputType() { - return this.outputType; - } - public Map getScopeVariables() { return this.scopeVariables; } /** * Get the maximum number of documents for the input into the map function. - * + * * @return {@literal null} if not set. */ + @Nullable public Integer getLimit() { return limit; } - public DBObject getOptionsObject() { - BasicDBObject cmd = new BasicDBObject(); + /** + * Get the Collation specifying language-specific rules for string comparison. + * + * @return + * @since 2.0 + */ + public Optional getCollation() { + return collation; + } + + /** + * Return the {@link MapReduceAction}. + * + * @return the mapped action or {@literal null} if the action maps to inline output. + * @since 2.0.10 + */ + @Nullable + public MapReduceAction getMapReduceAction() { + return mapReduceAction; + } + + /** + * @return {@literal true} if {@literal inline} output is used. 
+  * @since 2.0.10
+  */
+ public boolean usesInlineOutput() {
+  return null == mapReduceAction;
+ }
+
+ public Document getOptionsObject() {
+
+  Document cmd = new Document();

  if (verbose != null) {
   cmd.put("verbose", verbose);
@@ -269,9 +302,7 @@ public DBObject getOptionsObject() {

  cmd.put("out", createOutObject());

-  if (finalizeFunction != null) {
-   cmd.put("finalize", finalizeFunction);
-  }
+  finalizeFunction.ifPresent(val -> cmd.append("finalize", val));

  if (scopeVariables != null) {
   cmd.put("scope", scopeVariables);
@@ -285,35 +316,28 @@ public DBObject getOptionsObject() {
   cmd.putAll(extraOptions);
  }

+  getCollation().ifPresent(val -> cmd.append("collation", val.toDocument()));
+
  return cmd;
 }

- protected BasicDBObject createOutObject() {
-  BasicDBObject out = new BasicDBObject();
-
-  switch (outputType) {
-   case INLINE:
-    out.put("inline", 1);
-    break;
-   case REPLACE:
-    out.put("replace", outputCollection);
-    break;
-   case MERGE:
-    out.put("merge", outputCollection);
-    break;
-   case REDUCE:
-    out.put("reduce", outputCollection);
-    break;
-  }
+ protected Document createOutObject() {

-  if (outputDatabase != null) {
-   out.put("db", outputDatabase);
-  }
+  Document out = new Document();

-  if (outputSharded != null) {
-   out.put("sharded", outputSharded);
+  if (getMapReduceAction() == null) {
+   out.put("inline", 1);
+  } else {
+   switch (getMapReduceAction()) {
+    case REPLACE -> out.put("replace", outputCollection);
+    case MERGE -> out.put("merge", outputCollection);
+    case REDUCE -> out.put("reduce", outputCollection);
+   }
  }

+  outputDatabase.ifPresent(val -> out.append("db", val));
+  outputSharded.ifPresent(val -> out.append("sharded", val));
+
  return out;
 }
}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java
index 4d7c1407eb..865a4e9438 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java
@@ -1,11 +1,11 @@
/*
- * Copyright 2011-2015 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,39 +18,39 @@
import java.util.Iterator;
import java.util.List;

+import org.bson.Document;
+import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

-import com.mongodb.DBObject;
-import com.mongodb.MapReduceOutput;
-
/**
 * Collects the results of performing a map-reduce operation.
- *
+ *
 * @author Mark Pollack
 * @author Oliver Gierke
 * @author Christoph Strobl
+ * @author Mark Paluch
 * @param <T> The class in which the results are mapped onto, accessible via an iterator.
+ * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
 */
+@Deprecated
public class MapReduceResults<T> implements Iterable<T> {

	private final List<T> mappedResults;
-	private final DBObject rawResults;
-	private final String outputCollection;
+	private final @Nullable Document rawResults;
+	private final @Nullable String outputCollection;
	private final MapReduceTiming mapReduceTiming;
	private final MapReduceCounts mapReduceCounts;

	/**
	 * Creates a new {@link MapReduceResults} from the given mapped results and the raw one.
-	 *
+	 *
	 * @param mappedResults must not be {@literal null}.
	 * @param rawResults must not be {@literal null}.
-	 * @deprecated since 1.7. Please use {@link #MapReduceResults(List, MapReduceOutput)}
	 */
-	@Deprecated
-	public MapReduceResults(List<T> mappedResults, DBObject rawResults) {
+	public MapReduceResults(List<T> mappedResults, Document rawResults) {

-		Assert.notNull(mappedResults);
-		Assert.notNull(rawResults);
+		Assert.notNull(mappedResults, "List of mapped results must not be null");
+		Assert.notNull(rawResults, "Raw results must not be null");

		this.mappedResults = mappedResults;
		this.rawResults = rawResults;
@@ -59,29 +59,6 @@ public MapReduceResults(List<T> mappedResults, Document rawResults) {
		this.outputCollection = parseOutputCollection(rawResults);
	}

-	/**
-	 * Creates a new {@link MapReduceResults} from the given mapped results and the {@link MapReduceOutput}.
-	 *
-	 * @param mappedResults must not be {@literal null}.
-	 * @param mapReduceOutput must not be {@literal null}.
-	 * @since 1.7
-	 */
-	public MapReduceResults(List<T> mappedResults, MapReduceOutput mapReduceOutput) {
-
-		Assert.notNull(mappedResults, "MappedResults must not be null!");
-		Assert.notNull(mapReduceOutput, "MapReduceOutput must not be null!");
-
-		this.mappedResults = mappedResults;
-		this.rawResults = null;
-		this.mapReduceTiming = parseTiming(mapReduceOutput);
-		this.mapReduceCounts = parseCounts(mapReduceOutput);
-		this.outputCollection = parseOutputCollection(mapReduceOutput);
-	}
-
-	/*
-	 * (non-Javadoc)
-	 * @see java.lang.Iterable#iterator()
-	 */
	public Iterator<T> iterator() {
		return mappedResults.iterator();
	}
@@ -94,17 +71,19 @@ public MapReduceCounts getCounts() {
		return mapReduceCounts;
	}

+	@Nullable
	public String getOutputCollection() {
		return outputCollection;
	}

-	public DBObject getRawResults() {
+	@Nullable
+	public Document getRawResults() {
		return rawResults;
	}

-	private static MapReduceTiming parseTiming(DBObject rawResults) {
+	private static MapReduceTiming parseTiming(Document rawResults) {

-		DBObject timing = (DBObject) rawResults.get("timing");
+		Document timing = (Document) rawResults.get("timing");

		if (timing == null) {
			return new MapReduceTiming(-1, -1, -1);
@@ -120,12 +99,12 @@ private static MapReduceTiming parseTiming(Document rawResults) {

	/**
	 * Returns the value of the source's field with the given key as {@link Long}.
	 *
	 * @param source
	 * @param key
	 * @return
	 */
-	private static Long getAsLong(DBObject source, String key) {
+	private static Long getAsLong(Document source, String key) {

		Object raw = source.get(key);

@@ -133,14 +112,14 @@
	}

	/**
-	 * Parses the raw {@link DBObject} result into a {@link MapReduceCounts} value object.
+ * * @param rawResults * @return */ - private static MapReduceCounts parseCounts(DBObject rawResults) { + private static MapReduceCounts parseCounts(Document rawResults) { - DBObject counts = (DBObject) rawResults.get("counts"); + Document counts = (Document) rawResults.get("counts"); if (counts == null) { return MapReduceCounts.NONE; @@ -154,12 +133,13 @@ private static MapReduceCounts parseCounts(DBObject rawResults) { } /** - * Parses the output collection from the raw {@link DBObject} result. - * + * Parses the output collection from the raw {@link Document} result. + * * @param rawResults * @return */ - private static String parseOutputCollection(DBObject rawResults) { + @Nullable + private static String parseOutputCollection(Document rawResults) { Object resultField = rawResults.get("result"); @@ -167,20 +147,7 @@ private static String parseOutputCollection(DBObject rawResults) { return null; } - return resultField instanceof DBObject ? ((DBObject) resultField).get("collection").toString() : resultField - .toString(); - } - - private static MapReduceCounts parseCounts(final MapReduceOutput mapReduceOutput) { - return new MapReduceCounts(mapReduceOutput.getInputCount(), mapReduceOutput.getEmitCount(), - mapReduceOutput.getOutputCount()); - } - - private static String parseOutputCollection(final MapReduceOutput mapReduceOutput) { - return mapReduceOutput.getCollectionName(); - } - - private static MapReduceTiming parseTiming(MapReduceOutput mapReduceOutput) { - return new MapReduceTiming(-1, -1, mapReduceOutput.getDuration()); + return resultField instanceof Document document ? document.get("collection").toString() + : resultField.toString(); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java index 770297b593..28de7fe850 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,12 @@ */ package org.springframework.data.mongodb.core.mapreduce; +import org.springframework.lang.Nullable; + +/** + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. 
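With the whole package deprecated in favor of the aggregation framework, a sketch of the migration path may help: a typical map/reduce pair (map: emit(customerId, amount); reduce: sum the values) collapses into a $group stage. The orders collection and its fields are illustrative assumptions; template is a MongoTemplate.

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
    import static org.springframework.data.mongodb.core.query.Criteria.where;

    AggregationResults<org.bson.Document> results = template.aggregate(
            newAggregation(
                    match(where("status").is("ACTIVE")), // plays the role of MapReduce's query filter
                    group("customerId").sum("amount").as("total")), // replaces the map/reduce functions
            "orders", org.bson.Document.class);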
+ */ +@Deprecated public class MapReduceTiming { private long mapTime, emitLoopTime, totalTime; @@ -38,19 +44,11 @@ public long getTotalTime() { return totalTime; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return "MapReduceTiming [mapTime=" + mapTime + ", emitLoopTime=" + emitLoopTime + ", totalTime=" + totalTime + "]"; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -64,24 +62,17 @@ public int hashCode() { return result; } - /* - * - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof MapReduceTiming)) { + if (!(obj instanceof MapReduceTiming that)) { return false; } - MapReduceTiming that = (MapReduceTiming) obj; - return this.emitLoopTime == that.emitLoopTime && // this.mapTime == that.mapTime && // this.totalTime == that.totalTime; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java index ca447893d8..65522d8613 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java @@ -1,5 +1,8 @@ /** * Support for MongoDB map-reduce operations. + * @deprecated since MongoDB server version 5.0 */ +@Deprecated +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.mapreduce; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java new file mode 100644 index 0000000000..172ecbbe74 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import org.springframework.dao.DataAccessResourceFailureException; + +/** + * Cancelable allows stopping long running tasks and freeing underlying resources. + * + * @author Christoph Strobl + * @since 2.1 + */ +public interface Cancelable { + + /** + * Abort and free resources. + * + * @throws DataAccessResourceFailureException if operation cannot be canceled. 
+ */ + void cancel() throws DataAccessResourceFailureException; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java new file mode 100644 index 0000000000..fec7fa60ef --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java @@ -0,0 +1,470 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import java.time.Duration; +import java.time.Instant; + +import org.bson.BsonValue; +import org.bson.Document; +import org.springframework.data.mongodb.core.ChangeStreamOptions; +import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; + +/** + * {@link SubscriptionRequest} implementation to be used for listening to + * Change Streams via a {@link MessageListenerContainer} + * using the synchronous MongoDB Java driver. + *
+ * The most trivial use case is subscribing to all events of a specific {@link com.mongodb.client.MongoCollection
+ * collection}
+ *
+ * <pre>
+ * <code>
+ *     ChangeStreamRequest&lt;Document&gt; request = new ChangeStreamRequest&lt;&gt;(System.out::println, () -> "collection-name");
+ * </code>
+ * </pre>
+ *
+ * or {@link com.mongodb.client.MongoDatabase} which receives events from all {@link com.mongodb.client.MongoCollection
+ * collections} in that database.
+ *
+ * <pre>
+ * <code>
+ *     ChangeStreamRequest&lt;Document&gt; request = new ChangeStreamRequest&lt;&gt;(System.out::println, RequestOptions.justDatabase("test"));
+ * </code>
+ * </pre>
+ *
+ * For more advanced scenarios {@link ChangeStreamOptions} offers abstractions for options like filtering, resuming,...
+ *
+ * <pre>
+ * <code>
+ *     ChangeStreamOptions options = ChangeStreamOptions.builder()
+ *         .filter(newAggregation(match(where("age").is(7))))
+ *         .returnFullDocumentOnUpdate()
+ *         .build();
+ *
+ *     ChangeStreamRequest&lt;Document&gt; request = new ChangeStreamRequest&lt;&gt;(System.out::println, new ChangeStreamRequestOptions("collection-name", options));
+ * </code>
+ * </pre>
+ *
+ * {@link ChangeStreamRequestBuilder} offers a fluent API for creating {@link ChangeStreamRequest} with
+ * {@link ChangeStreamOptions} in one go.
+ *
+ * <pre>
+ * <code>
+ *     ChangeStreamRequest&lt;Document&gt; request = ChangeStreamRequest.builder()
+ *         .collection("collection-name")
+ *         .publishTo(System.out::println)
+ *         .filter(newAggregation(match(where("age").is(7))))
+ *         .fullDocumentLookup(UPDATE_LOOKUP)
+ *         .build();
+ * </code>
+ * </pre>
                    + * + * {@link Message Messges} passed to the {@link MessageListener} contain the {@link ChangeStreamDocument} within their + * {@link Message#getRaw() raw value} while the {@code fullDocument} is extracted into the {@link Message#getBody() + * messages body}. Unless otherwise specified (via {@link ChangeStreamOptions#getFullDocumentLookup()} the + * {@link Message#getBody() message body} for {@code update events} will be empty for a {@link Document} target type. + * {@link Message#getBody()} Message bodies} that map to a different target type automatically enforce an + * {@link FullDocument#UPDATE_LOOKUP}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi + * @since 2.1 + */ +public class ChangeStreamRequest + implements SubscriptionRequest, T, ChangeStreamRequestOptions> { + + private final MessageListener, ? super T> messageListener; + private final ChangeStreamRequestOptions options; + + /** + * Create a new {@link ChangeStreamRequest} with options, passing {@link Message messages} to the given + * {@link MessageListener}. + * + * @param messageListener must not be {@literal null}. + * @param options must not be {@literal null}. + */ + public ChangeStreamRequest(MessageListener, ? super T> messageListener, + RequestOptions options) { + + Assert.notNull(messageListener, "MessageListener must not be null"); + Assert.notNull(options, "Options must not be null"); + + this.options = options instanceof ChangeStreamRequestOptions changeStreamRequestOptions ? + changeStreamRequestOptions : ChangeStreamRequestOptions.of(options); + + this.messageListener = messageListener; + } + + @Override + public MessageListener, ? super T> getMessageListener() { + return messageListener; + } + + @Override + public ChangeStreamRequestOptions getRequestOptions() { + return options; + } + + /** + * Obtain a shiny new {@link ChangeStreamRequestBuilder} and start defining your {@link ChangeStreamRequest} in this + * fancy fluent way. Just don't forget to call {@link ChangeStreamRequestBuilder#build() build()} when done. + * + * @return new instance of {@link ChangeStreamRequest}. + */ + public static ChangeStreamRequestBuilder builder() { + return new ChangeStreamRequestBuilder(); + } + + /** + * Obtain a shiny new {@link ChangeStreamRequestBuilder} and start defining your {@link ChangeStreamRequest} in this + * fancy fluent way. Just don't forget to call {@link ChangeStreamRequestBuilder#build() build()} when done. + * + * @return new instance of {@link ChangeStreamRequest}. + */ + public static ChangeStreamRequestBuilder builder( + MessageListener, ? super T> listener) { + + ChangeStreamRequestBuilder builder = new ChangeStreamRequestBuilder<>(); + return builder.publishTo(listener); + } + + /** + * {@link SubscriptionRequest.RequestOptions} implementation specific to a {@link ChangeStreamRequest}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class ChangeStreamRequestOptions implements SubscriptionRequest.RequestOptions { + + private final @Nullable String databaseName; + private final @Nullable String collectionName; + private final @Nullable Duration maxAwaitTime; + private final ChangeStreamOptions options; + + /** + * Create new {@link ChangeStreamRequestOptions}. + * + * @param databaseName can be {@literal null}. + * @param collectionName can be {@literal null}. + * @param options must not be {@literal null}. 
+ */ + public ChangeStreamRequestOptions(@Nullable String databaseName, @Nullable String collectionName, + ChangeStreamOptions options) { + this(databaseName, collectionName, null, options); + } + + /** + * Create new {@link ChangeStreamRequestOptions}. + * + * @param databaseName can be {@literal null}. + * @param collectionName can be {@literal null}. + * @param maxAwaitTime can be {@literal null}. + * @param options must not be {@literal null}. + * @since 3.0 + */ + public ChangeStreamRequestOptions(@Nullable String databaseName, @Nullable String collectionName, + @Nullable Duration maxAwaitTime, ChangeStreamOptions options) { + + Assert.notNull(options, "Options must not be null"); + + this.collectionName = collectionName; + this.databaseName = databaseName; + this.maxAwaitTime = maxAwaitTime; + this.options = options; + } + + public static ChangeStreamRequestOptions of(RequestOptions options) { + + Assert.notNull(options, "Options must not be null"); + + return new ChangeStreamRequestOptions(options.getDatabaseName(), options.getCollectionName(), + ChangeStreamOptions.builder().build()); + } + + /** + * Get the {@link ChangeStreamOptions} defined. + * + * @return never {@literal null}. + */ + public ChangeStreamOptions getChangeStreamOptions() { + return options; + } + + @Override + public String getCollectionName() { + return collectionName; + } + + @Override + public String getDatabaseName() { + return databaseName; + } + + @Override + public Duration maxAwaitTime() { + return maxAwaitTime != null ? maxAwaitTime : RequestOptions.super.maxAwaitTime(); + } + } + + /** + * Builder for creating {@link ChangeStreamRequest}. + * + * @author Christoph Strobl + * @since 2.1 + * @see ChangeStreamOptions + */ + public static class ChangeStreamRequestBuilder { + + private @Nullable String databaseName; + private @Nullable String collectionName; + private @Nullable Duration maxAwaitTime; + private @Nullable MessageListener, ? super T> listener; + private final ChangeStreamOptionsBuilder delegate = ChangeStreamOptions.builder(); + + private ChangeStreamRequestBuilder() {} + + /** + * Set the name of the {@link com.mongodb.client.MongoDatabase} to listen to. + * + * @param databaseName must not be {@literal null} nor empty. + * @return this. + */ + public ChangeStreamRequestBuilder database(String databaseName) { + + Assert.hasText(databaseName, "DatabaseName must not be null"); + + this.databaseName = databaseName; + return this; + } + + /** + * Set the name of the {@link com.mongodb.client.MongoCollection} to listen to. + * + * @param collectionName must not be {@literal null} nor empty. + * @return this. + */ + public ChangeStreamRequestBuilder collection(String collectionName) { + + Assert.hasText(collectionName, "CollectionName must not be null"); + + this.collectionName = collectionName; + return this; + } + + /** + * Set the {@link MessageListener} event {@link Message messages} will be published to. + * + * @param messageListener must not be {@literal null}. + * @return this. + */ + public ChangeStreamRequestBuilder publishTo( + MessageListener, ? super T> messageListener) { + + Assert.notNull(messageListener, "MessageListener must not be null"); + + this.listener = messageListener; + return this; + } + + /** + * Set the filter to apply. + *
+ * Fields on aggregation expression root level are prefixed to map to fields contained in
+ * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns},
+ * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken
+ * as given, during the mapping procedure. You may want to have a look at the
+ * <a href="https://docs.mongodb.com/manual/reference/change-events/">structure of Change Events</a>.
+ *
                    + * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are + * mapped to domain type fields. + * + * @param aggregation the {@link Aggregation Aggregation pipeline} to apply for filtering events. Must not be + * {@literal null}. + * @return this. + * @see ChangeStreamOptions#getFilter() + * @see ChangeStreamOptionsBuilder#filter(Aggregation) + */ + public ChangeStreamRequestBuilder filter(Aggregation aggregation) { + + Assert.notNull(aggregation, "Aggregation must not be null"); + + this.delegate.filter(aggregation); + return this; + } + + /** + * Set the plain filter chain to apply. + * + * @param pipeline must not be {@literal null} nor contain {@literal null} values. + * @return this. + * @see ChangeStreamOptions#getFilter() + */ + public ChangeStreamRequestBuilder filter(Document... pipeline) { + + Assert.notNull(pipeline, "Aggregation pipeline must not be null"); + Assert.noNullElements(pipeline, "Aggregation pipeline must not contain null elements"); + + this.delegate.filter(pipeline); + return this; + } + + /** + * Set the collation to use. + * + * @param collation must not be {@literal null} nor {@literal empty}. + * @return this. + * @see ChangeStreamOptions#getCollation() + * @see ChangeStreamOptionsBuilder#collation(Collation) + */ + public ChangeStreamRequestBuilder collation(Collation collation) { + + Assert.notNull(collation, "Collation must not be null"); + + this.delegate.collation(collation); + return this; + } + + /** + * Set the resume token (typically a {@link org.bson.BsonDocument} containing a {@link org.bson.BsonBinary binary + * token}) after which to start with listening. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @see ChangeStreamOptions#getResumeToken() + * @see ChangeStreamOptionsBuilder#resumeToken(org.bson.BsonValue) + */ + public ChangeStreamRequestBuilder resumeToken(BsonValue resumeToken) { + + Assert.notNull(resumeToken, "Resume token not be null"); + + this.delegate.resumeToken(resumeToken); + return this; + } + + /** + * Set the cluster time at which to resume listening. + * + * @param clusterTime must not be {@literal null}. + * @return this. + * @see ChangeStreamOptions#getResumeTimestamp() + * @see ChangeStreamOptionsBuilder#resumeAt(java.time.Instant) + */ + public ChangeStreamRequestBuilder resumeAt(Instant clusterTime) { + + Assert.notNull(clusterTime, "ClusterTime must not be null"); + + this.delegate.resumeAt(clusterTime); + return this; + } + + /** + * Set the resume token after which to continue emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamRequestBuilder resumeAfter(BsonValue resumeToken) { + + Assert.notNull(resumeToken, "ResumeToken must not be null"); + this.delegate.resumeAfter(resumeToken); + + return this; + } + + /** + * Set the resume token after which to start emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamRequestBuilder startAfter(BsonValue resumeToken) { + + Assert.notNull(resumeToken, "ResumeToken must not be null"); + this.delegate.startAfter(resumeToken); + + return this; + } + + /** + * Set the {@link FullDocument} lookup to {@link FullDocument#UPDATE_LOOKUP}. + * + * @return this. 
+ * @see ChangeStreamOptions#getFullDocumentLookup() + * @see ChangeStreamOptionsBuilder#fullDocumentLookup(FullDocument) + */ + public ChangeStreamRequestBuilder fullDocumentLookup(FullDocument lookup) { + + Assert.notNull(lookup, "FullDocument not be null"); + + this.delegate.fullDocumentLookup(lookup); + return this; + } + + /** + * Set the {@link FullDocumentBeforeChange} lookup to the given value. + * + * @return this. + * @since 4.0 + * @see ChangeStreamOptions#getFullDocumentBeforeChangeLookup() + * @see ChangeStreamOptionsBuilder#fullDocumentBeforeChangeLookup(FullDocumentBeforeChange) + */ + public ChangeStreamRequestBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) { + + Assert.notNull(lookup, "FullDocumentBeforeChange not be null"); + + this.delegate.fullDocumentBeforeChangeLookup(lookup); + return this; + } + + /** + * Set the cursors maximum wait time on the server (for a new Document to be emitted). + * + * @param timeout must not be {@literal null}. + * @since 3.0 + */ + public ChangeStreamRequestBuilder maxAwaitTime(Duration timeout) { + + Assert.notNull(timeout, "timeout not be null"); + + this.maxAwaitTime = timeout; + return this; + } + + /** + * @return the build {@link ChangeStreamRequest}. + */ + public ChangeStreamRequest build() { + + Assert.notNull(listener, "MessageListener must not be null"); + + return new ChangeStreamRequest<>(listener, + new ChangeStreamRequestOptions(databaseName, collectionName, maxAwaitTime, delegate.build())); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java new file mode 100644 index 0000000000..fc8372613b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java @@ -0,0 +1,277 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
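As a usage note for the builder completed above — a sketch that resumes an interrupted stream. Person, the people collection, and lastSeenToken (a BsonValue captured from a previously received event) are assumptions; Aggregation/Criteria statics are imported as in the class Javadoc.

    ChangeStreamRequest<Person> request = ChangeStreamRequest.<Person> builder()
            .collection("people")
            .publishTo(message -> System.out.println(message.getBody()))
            .filter(newAggregation(match(where("age").is(7)))) // mapped against fullDocument fields
            .resumeAfter(lastSeenToken)
            .maxAwaitTime(Duration.ofSeconds(2))
            .build();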
+ */ +package org.springframework.data.mongodb.core.messaging; + +import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import org.bson.BsonDocument; +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.bson.Document; +import org.springframework.data.mongodb.core.ChangeStreamEvent; +import org.springframework.data.mongodb.core.ChangeStreamOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.ErrorHandler; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoNamespace; +import com.mongodb.client.ChangeStreamIterable; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.Collation; +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; + +/** + * {@link Task} implementation for obtaining {@link ChangeStreamDocument ChangeStreamDocuments} from MongoDB. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi + * @since 2.1 + */ +class ChangeStreamTask extends CursorReadingTask, Object> { + + private final Set denylist = new HashSet<>( + Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns")); + + private final QueryMapper queryMapper; + private final MongoConverter mongoConverter; + + @SuppressWarnings({ "unchecked", "rawtypes" }) + ChangeStreamTask(MongoTemplate template, ChangeStreamRequest request, Class targetType, + ErrorHandler errorHandler) { + super(template, (ChangeStreamRequest) request, (Class) targetType, errorHandler); + + queryMapper = new QueryMapper(template.getConverter()); + mongoConverter = template.getConverter(); + } + + @Override + protected MongoCursor> initCursor(MongoTemplate template, RequestOptions options, + Class targetType) { + + List filter = Collections.emptyList(); + BsonDocument resumeToken = new BsonDocument(); + Collation collation = null; + FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? 
FullDocument.DEFAULT + : FullDocument.UPDATE_LOOKUP; + FullDocumentBeforeChange fullDocumentBeforeChange = null; + BsonTimestamp startAt = null; + boolean resumeAfter = true; + + if (options instanceof ChangeStreamRequest.ChangeStreamRequestOptions requestOptions) { + + ChangeStreamOptions changeStreamOptions = requestOptions.getChangeStreamOptions(); + filter = prepareFilter(template, changeStreamOptions); + + if (changeStreamOptions.getFilter().isPresent()) { + + Object val = changeStreamOptions.getFilter().get(); + if (val instanceof Aggregation aggregation) { + collation = aggregation.getOptions().getCollation() + .map(org.springframework.data.mongodb.core.query.Collation::toMongoCollation).orElse(null); + } + } + + if (changeStreamOptions.getResumeToken().isPresent()) { + + resumeToken = changeStreamOptions.getResumeToken().get().asDocument(); + resumeAfter = changeStreamOptions.isResumeAfter(); + } + + fullDocument = changeStreamOptions.getFullDocumentLookup() + .orElseGet(() -> ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT + : FullDocument.UPDATE_LOOKUP); + + fullDocumentBeforeChange = changeStreamOptions.getFullDocumentBeforeChangeLookup().orElse(null); + + startAt = changeStreamOptions.getResumeBsonTimestamp().orElse(null); + } + + MongoDatabase db = StringUtils.hasText(options.getDatabaseName()) + ? template.getMongoDatabaseFactory().getMongoDatabase(options.getDatabaseName()) + : template.getDb(); + + ChangeStreamIterable iterable; + + if (StringUtils.hasText(options.getCollectionName())) { + iterable = filter.isEmpty() ? db.getCollection(options.getCollectionName()).watch(Document.class) + : db.getCollection(options.getCollectionName()).watch(filter, Document.class); + + } else { + iterable = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class); + } + + if (!options.maxAwaitTime().isZero()) { + iterable = iterable.maxAwaitTime(options.maxAwaitTime().toMillis(), TimeUnit.MILLISECONDS); + } + + if (!resumeToken.isEmpty()) { + + if (resumeAfter) { + iterable = iterable.resumeAfter(resumeToken); + } else { + iterable = iterable.startAfter(resumeToken); + } + } + + if (startAt != null) { + iterable = iterable.startAtOperationTime(startAt); + } + + if (collation != null) { + iterable = iterable.collation(collation); + } + + iterable = iterable.fullDocument(fullDocument); + if(fullDocumentBeforeChange != null) { + iterable = iterable.fullDocumentBeforeChange(fullDocumentBeforeChange); + } + + return iterable.iterator(); + } + + @SuppressWarnings("unchecked") + List prepareFilter(MongoTemplate template, ChangeStreamOptions options) { + + if (!options.getFilter().isPresent()) { + return Collections.emptyList(); + } + + Object filter = options.getFilter().orElse(null); + + if (filter instanceof Aggregation aggregation) { + AggregationOperationContext context = aggregation instanceof TypedAggregation typedAggregation + ? 
new TypeBasedAggregationOperationContext(typedAggregation.getInputType(), + template.getConverter().getMappingContext(), queryMapper) + : Aggregation.DEFAULT_CONTEXT; + + return aggregation.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", denylist)); + } + + if (filter instanceof List) { + return (List) filter; + } + + throw new IllegalArgumentException( + "ChangeStreamRequestOptions.filter mut be either an Aggregation or a plain list of Documents"); + } + + @Override + protected Message, Object> createMessage(ChangeStreamDocument source, + Class targetType, RequestOptions options) { + + MongoNamespace namespace = source.getNamespace() != null ? source.getNamespace() + : createNamespaceFromOptions(options); + + return new ChangeStreamEventMessage<>(new ChangeStreamEvent<>(source, targetType, mongoConverter), MessageProperties + .builder().databaseName(namespace.getDatabaseName()).collectionName(namespace.getCollectionName()).build()); + } + + MongoNamespace createNamespaceFromOptions(RequestOptions options) { + + String collectionName = StringUtils.hasText(options.getCollectionName()) ? options.getCollectionName() : "unknown"; + String databaseName = StringUtils.hasText(options.getDatabaseName()) ? options.getDatabaseName() : "unknown"; + + return new MongoNamespace(databaseName, collectionName); + } + + /** + * {@link Message} implementation for ChangeStreams + * + * @since 2.1 + */ + static class ChangeStreamEventMessage implements Message, T> { + + private final ChangeStreamEvent delegate; + private final MessageProperties messageProperties; + + ChangeStreamEventMessage(ChangeStreamEvent delegate, MessageProperties messageProperties) { + + this.delegate = delegate; + this.messageProperties = messageProperties; + } + + @Nullable + @Override + public ChangeStreamDocument getRaw() { + return delegate.getRaw(); + } + + @Nullable + @Override + public T getBody() { + return delegate.getBody(); + } + + @Nullable + @Override + public T getBodyBeforeChange() { + return delegate.getBodyBeforeChange(); + } + + @Override + public MessageProperties getProperties() { + return this.messageProperties; + } + + /** + * @return the resume token or {@literal null} if not set. + * @see ChangeStreamEvent#getResumeToken() + */ + @Nullable + BsonValue getResumeToken() { + return delegate.getResumeToken(); + } + + /** + * @return the cluster time of the event or {@literal null}. + * @see ChangeStreamEvent#getTimestamp() + */ + @Nullable + Instant getTimestamp() { + return delegate.getTimestamp(); + } + + /** + * Get the {@link ChangeStreamEvent} from the message. + * + * @return never {@literal null}. + */ + ChangeStreamEvent getChangeStreamEvent() { + return delegate; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java new file mode 100644 index 0000000000..41b5fed4f5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java @@ -0,0 +1,254 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import java.time.Duration; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Supplier; + +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.util.Lock; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ErrorHandler; + +import com.mongodb.client.MongoCursor; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @param type of objects returned by the cursor. + * @param conversion target type. + * @since 2.1 + */ +abstract class CursorReadingTask implements Task { + + private final Lock lock = Lock.of(new ReentrantLock()); + + private final MongoTemplate template; + private final SubscriptionRequest request; + private final Class targetType; + private final ErrorHandler errorHandler; + private final CountDownLatch awaitStart = new CountDownLatch(1); + + private State state = State.CREATED; + + private MongoCursor cursor; + + /** + * @param template must not be {@literal null}. + * @param request must not be {@literal null}. + * @param targetType must not be {@literal null}. + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + CursorReadingTask(MongoTemplate template, SubscriptionRequest request, + Class targetType, ErrorHandler errorHandler) { + + this.template = template; + this.request = (SubscriptionRequest) request; + this.targetType = targetType; + this.errorHandler = errorHandler; + } + + @Override + public void run() { + + try { + + start(); + + while (isRunning()) { + + try { + + T next = execute(this::getNext); + + if (next != null) { + emitMessage(createMessage(next, targetType, request.getRequestOptions())); + } else { + Thread.sleep(10); + } + } catch (InterruptedException e) { + + lock.executeWithoutResult(() -> state = State.CANCELLED); + Thread.currentThread().interrupt(); + break; + } + } + } catch (RuntimeException e) { + + lock.executeWithoutResult(() -> state = State.CANCELLED); + errorHandler.handleError(e); + } + } + + /** + * Initialize the Task by 1st setting the current state to {@link State#STARTING starting} indicating the + * initialization procedure.
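Because the task creates its cursor asynchronously (see the start() description here), callers usually block on the Subscription until the task is actually live. A sketch, with container and request as assembled elsewhere in this patch and Person assumed:

    Subscription subscription = container.register(request, Person.class);

    // start() keeps requesting a cursor until the server hands out a valid one;
    // await(...) parks the caller until then and may throw InterruptedException.
    if (!subscription.await(Duration.ofSeconds(5))) {
        subscription.cancel(); // assumption: treat a slow start as fatal here
    }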
                    + * Moving on the underlying {@link MongoCursor} gets {@link #initCursor(MongoTemplate, RequestOptions, Class) created} + * and is {@link #isValidCursor(MongoCursor) health checked}. Once a valid {@link MongoCursor} is created the + * {@link #state} is set to {@link State#RUNNING running}. If the health check is not passed the {@link MongoCursor} + * is immediately {@link MongoCursor#close() closed} and a new {@link MongoCursor} is requested until a valid one is + * retrieved or the {@link #state} changes. + */ + private void start() { + + lock.executeWithoutResult(() -> { + if (!State.RUNNING.equals(state)) { + state = State.STARTING; + } + }); + + do { + + boolean valid = lock.execute(() -> { + + if (!State.STARTING.equals(state)) { + return false; + } + + MongoCursor cursor = execute(() -> initCursor(template, request.getRequestOptions(), targetType)); + boolean isValid = isValidCursor(cursor); + if (isValid) { + this.cursor = cursor; + state = State.RUNNING; + } else if (cursor != null) { + cursor.close(); + } + return isValid; + }); + + if (!valid) { + + try { + Thread.sleep(100); + } catch (InterruptedException e) { + + lock.executeWithoutResult(() -> state = State.CANCELLED); + Thread.currentThread().interrupt(); + } + } + } while (State.STARTING.equals(getState())); + + if (awaitStart.getCount() == 1) { + awaitStart.countDown(); + } + } + + protected abstract MongoCursor initCursor(MongoTemplate template, RequestOptions options, Class targetType); + + @Override + public void cancel() throws DataAccessResourceFailureException { + + lock.executeWithoutResult(() -> { + + if (State.RUNNING.equals(state) || State.STARTING.equals(state)) { + this.state = State.CANCELLED; + if (cursor != null) { + cursor.close(); + } + } + }); + } + + @Override + public boolean isLongLived() { + return true; + } + + @Override + public State getState() { + return lock.execute(() -> state); + } + + @Override + public boolean awaitStart(Duration timeout) throws InterruptedException { + + Assert.notNull(timeout, "Timeout must not be null"); + Assert.isTrue(!timeout.isNegative(), "Timeout must not be negative"); + + return awaitStart.await(timeout.toNanos(), TimeUnit.NANOSECONDS); + } + + protected Message createMessage(T source, Class targetType, RequestOptions options) { + + SimpleMessage message = new SimpleMessage<>(source, source, MessageProperties.builder() + .databaseName(template.getDb().getName()).collectionName(options.getCollectionName()).build()); + + return new LazyMappingDelegatingMessage<>(message, targetType, template.getConverter()); + } + + private boolean isRunning() { + return State.RUNNING.equals(getState()); + } + + @SuppressWarnings("unchecked") + private void emitMessage(Message message) { + try { + request.getMessageListener().onMessage((Message) message); + } catch (Exception e) { + errorHandler.handleError(e); + } + } + + @Nullable + private T getNext() { + + return lock.execute(() -> { + if (State.RUNNING.equals(state)) { + return cursor.tryNext(); + } + throw new IllegalStateException(String.format("Cursor %s is not longer open", cursor)); + }); + } + + private static boolean isValidCursor(@Nullable MongoCursor cursor) { + + if (cursor == null) { + return false; + } + + return cursor.getServerCursor() != null && cursor.getServerCursor().getId() != 0; + } + + /** + * Execute an operation and take care of translating exceptions using the {@link MongoTemplate templates} + * {@link org.springframework.data.mongodb.core.MongoExceptionTranslator} rethrowing the 
potentially translated + * exception. + * + * @param callback must not be {@literal null}. + * @param + * @return can be {@literal null}. + * @throws RuntimeException The potentially translated exception. + */ + @Nullable + private V execute(Supplier callback) { + + try { + return callback.get(); + } catch (RuntimeException e) { + + RuntimeException translated = template.getExceptionTranslator().translateExceptionIfPossible(e); + throw translated != null ? translated : e; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java new file mode 100644 index 0000000000..546f3fdd33 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java @@ -0,0 +1,281 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import java.time.Duration; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.Executor; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.util.Lock; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ErrorHandler; +import org.springframework.util.ObjectUtils; + +/** + * Simple {@link Executor} based {@link MessageListenerContainer} implementation for running {@link Task tasks} like + * listening to MongoDB Change Streams and tailable + * cursors.
                    + * This message container creates long-running tasks that are executed on {@link Executor}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class DefaultMessageListenerContainer implements MessageListenerContainer { + + private final Executor taskExecutor; + private final TaskFactory taskFactory; + private final Optional errorHandler; + + private final Map subscriptions = new LinkedHashMap<>(); + + private final ReadWriteLock lifecycleMonitor = new ReentrantReadWriteLock(); + private final Lock lifecycleRead = Lock.of(lifecycleMonitor.readLock()); + private final Lock lifecycleWrite = Lock.of(lifecycleMonitor.writeLock()); + + private final ReadWriteLock subscriptionMonitor = new ReentrantReadWriteLock(); + private final Lock subscriptionRead = Lock.of(subscriptionMonitor.readLock()); + private final Lock subscriptionWrite = Lock.of(subscriptionMonitor.writeLock()); + + private boolean running = false; + + /** + * Create a new {@link DefaultMessageListenerContainer}. + * + * @param template must not be {@literal null}. + */ + public DefaultMessageListenerContainer(MongoTemplate template) { + this(template, new SimpleAsyncTaskExecutor()); + } + + /** + * Create a new {@link DefaultMessageListenerContainer} running {@link Task tasks} via the given + * {@literal taskExecutor}. + * + * @param template must not be {@literal null}. + * @param taskExecutor must not be {@literal null}. + */ + public DefaultMessageListenerContainer(MongoTemplate template, Executor taskExecutor) { + this(template, taskExecutor, null); + } + + /** + * Create a new {@link DefaultMessageListenerContainer} running {@link Task tasks} via the given + * {@literal taskExecutor} delegating {@link Exception errors} to the given {@link ErrorHandler}. + * + * @param template must not be {@literal null}. Used by the {@link TaskFactory}. + * @param taskExecutor must not be {@literal null}. + * @param errorHandler the default {@link ErrorHandler} to be used by tasks inside the container. Can be + * {@literal null}. 
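The three-argument constructor documented here wires all collaborators explicitly; a sketch with a named thread pool and a logging ErrorHandler (LOGGER and the thread-name prefix are illustrative):

    DefaultMessageListenerContainer container = new DefaultMessageListenerContainer(
            template,
            new SimpleAsyncTaskExecutor("change-stream-"),
            error -> LOGGER.error("Listener task failed", error));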
+ */ + public DefaultMessageListenerContainer(MongoTemplate template, Executor taskExecutor, + @Nullable ErrorHandler errorHandler) { + + Assert.notNull(template, "Template must not be null"); + Assert.notNull(taskExecutor, "TaskExecutor must not be null"); + + this.taskExecutor = taskExecutor; + this.taskFactory = new TaskFactory(template); + this.errorHandler = Optional.ofNullable(errorHandler); + } + + @Override + public boolean isAutoStartup() { + return false; + } + + @Override + public void stop(Runnable callback) { + + stop(); + callback.run(); + } + + @Override + public void start() { + + lifecycleWrite.executeWithoutResult(() -> { + if (!this.running) { + subscriptions.values().stream() // + .filter(it -> !it.isActive()) // + .filter(TaskSubscription.class::isInstance) // + .map(TaskSubscription.class::cast) // + .map(TaskSubscription::getTask) // + .forEach(taskExecutor::execute); + + running = true; + } + }); + } + + @Override + public void stop() { + lifecycleWrite.executeWithoutResult(() -> { + if (this.running) { + subscriptions.values().forEach(Cancelable::cancel); + running = false; + } + }); + } + + @Override + public boolean isRunning() { + return lifecycleRead.execute(() -> running); + } + + @Override + public int getPhase() { + return Integer.MAX_VALUE; + } + + @Override + public Subscription register(SubscriptionRequest request, + Class bodyType) { + + return register(request, bodyType, errorHandler.orElseGet( + () -> new DecoratingLoggingErrorHandler((exception) -> lookup(request).ifPresent(Subscription::cancel)))); + } + + @Override + public Subscription register(SubscriptionRequest request, + Class bodyType, ErrorHandler errorHandler) { + + return register(request, taskFactory.forRequest(request, bodyType, errorHandler)); + } + + @Override + public Optional lookup(SubscriptionRequest request) { + return subscriptionRead.execute(() -> Optional.ofNullable(subscriptions.get(request))); + } + + public Subscription register(SubscriptionRequest request, Task task) { + + return subscriptionWrite.execute(() -> { + if (subscriptions.containsKey(request)) { + return subscriptions.get(request); + } + + Subscription subscription = new TaskSubscription(task); + this.subscriptions.put(request, subscription); + + if (this.isRunning()) { + taskExecutor.execute(task); + } + return subscription; + }); + + } + + @Override + public void remove(Subscription subscription) { + subscriptionWrite.executeWithoutResult(() -> { + + if (subscriptions.containsValue(subscription)) { + + if (subscription.isActive()) { + subscription.cancel(); + } + + subscriptions.values().remove(subscription); + } + }); + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + static class TaskSubscription implements Subscription { + + private final Task task; + + TaskSubscription(Task task) { + this.task = task; + } + + Task getTask() { + return task; + } + + @Override + public boolean isActive() { + return task.isActive(); + } + + @Override + public boolean await(Duration timeout) throws InterruptedException { + return task.awaitStart(timeout); + } + + @Override + public void cancel() throws DataAccessResourceFailureException { + task.cancel(); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + TaskSubscription that = (TaskSubscription) o; + + return ObjectUtils.nullSafeEquals(this.task, that.task); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(task); + } + } + 
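Pulling the pieces together — a sketch of the full container lifecycle, using the MessageListenerContainer.create(...) factory introduced further down in this patch; Person and listener are assumptions:

    MessageListenerContainer container = MessageListenerContainer.create(template);

    ChangeStreamRequest<Person> request = ChangeStreamRequest.builder(listener)
            .collection("people")
            .build();

    container.register(request, Person.class); // queued: the container is not running yet
    container.start();                         // executes all registered tasks
    // ... receive messages ...
    container.stop();                          // cancels the active subscriptions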
+ /** + * @author Christoph Strobl + * @since 2.1 + */ + private static class DecoratingLoggingErrorHandler implements ErrorHandler { + + private final Log logger = LogFactory.getLog(DecoratingLoggingErrorHandler.class); + + private final ErrorHandler delegate; + + DecoratingLoggingErrorHandler(ErrorHandler delegate) { + this.delegate = delegate; + } + + @Override + public void handleError(Throwable t) { + + if (logger.isErrorEnabled()) { + logger.error("Unexpected error occurred while listening to MongoDB", t); + } + + delegate.handleError(t); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java new file mode 100644 index 0000000000..1c934e8302 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java @@ -0,0 +1,74 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import org.bson.Document; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class LazyMappingDelegatingMessage implements Message { + + private final Message delegate; + private final Class targetType; + private final MongoConverter converter; + + LazyMappingDelegatingMessage(Message delegate, Class targetType, MongoConverter converter) { + + this.delegate = delegate; + this.targetType = targetType; + this.converter = converter; + } + + @Override + public S getRaw() { + return delegate.getRaw(); + } + + @Override + public T getBody() { + + if (delegate.getBody() == null || targetType.equals(delegate.getBody().getClass())) { + return targetType.cast(delegate.getBody()); + } + + Object messageBody = delegate.getBody(); + + if (ClassUtils.isAssignable(Document.class, messageBody.getClass())) { + return converter.read(targetType, (Document) messageBody); + } + + if (converter.getConversionService().canConvert(messageBody.getClass(), targetType)) { + return converter.getConversionService().convert(messageBody, targetType); + } + + throw new IllegalArgumentException( + String.format("No converter found capable of converting %s to %s", messageBody.getClass(), targetType)); + } + + @Override + public MessageProperties getProperties() { + return delegate.getProperties(); + } + + public String toString() { + return "LazyMappingDelegatingMessage(delegate=" + this.delegate + ", targetType=" + this.targetType + ")"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java new file mode 100644 index 0000000000..46db068096 --- /dev/null 
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java @@ -0,0 +1,199 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * General message abstraction for any type of Event / Message published by MongoDB server to the client. This might be + * Change Stream Events, or + * {@link org.bson.Document Documents} published by a + * tailable cursor. The original message received + * is preserved in the raw parameter. Additional information about the origin of the {@link Message} is contained in + * {@link MessageProperties}.
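A listener against this abstraction (the MessageListener interface defined below) typically touches all three accessors. A sketch for the change-stream case, with Person as an assumed target type:

    MessageListener<ChangeStreamDocument<Document>, Person> listener = message -> {

        ChangeStreamDocument<Document> raw = message.getRaw(); // original driver event, may be null
        Person body = message.getBody(); // lazily mapped domain object, may be null for delete events
        String collection = message.getProperties().getCollectionName();

        System.out.println("Received " + body + " from " + collection);
    };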
                    + * For convenience the {@link #getBody()} of the message gets lazily converted into the target domain type if necessary + * using the mapping infrastructure. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi + * @see MessageProperties + * @since 2.1 + */ +public interface Message { + + /** + * The raw message source as emitted by the origin. + * + * @return can be {@literal null}. + */ + @Nullable + S getRaw(); + + /** + * The converted message body if available. + * + * @return can be {@literal null}. + */ + @Nullable + T getBody(); + + /** + * The converted message body before change if available. + * + * @return can be {@literal null}. + * @since 4.0 + */ + @Nullable + default T getBodyBeforeChange() { + return null; + } + + /** + * {@link MessageProperties} containing information about the {@link Message} origin and other metadata. + * + * @return never {@literal null}. + */ + MessageProperties getProperties(); + + /** + * @author Christoph Strobl + * @since 2.1 + */ + class MessageProperties { + + private static final MessageProperties EMPTY = new MessageProperties(); + + private @Nullable String databaseName; + private @Nullable String collectionName; + + /** + * The database name the message originates from. + * + * @return can be {@literal null}. + */ + @Nullable + public String getDatabaseName() { + return databaseName; + } + + /** + * The collection name the message originates from. + * + * @return can be {@literal null}. + */ + @Nullable + public String getCollectionName() { + return collectionName; + } + + /** + * @return empty {@link MessageProperties}. + */ + public static MessageProperties empty() { + return EMPTY; + } + + /** + * Obtain a shiny new {@link MessagePropertiesBuilder} and start defining options in this fancy fluent way. Just + * don't forget to call {@link MessagePropertiesBuilder#build() build()} when done. + * + * @return new instance of {@link MessagePropertiesBuilder}. + */ + public static MessagePropertiesBuilder builder() { + return new MessagePropertiesBuilder(); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + MessageProperties that = (MessageProperties) o; + + if (!ObjectUtils.nullSafeEquals(this.databaseName, that.databaseName)) { + return false; + } + + return ObjectUtils.nullSafeEquals(this.collectionName, that.collectionName); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(databaseName); + result = 31 * result + ObjectUtils.nullSafeHashCode(collectionName); + return result; + } + + public String toString() { + return "Message.MessageProperties(databaseName=" + this.getDatabaseName() + ", collectionName=" + + this.getCollectionName() + ")"; + } + + /** + * Builder for {@link MessageProperties}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class MessagePropertiesBuilder { + + private @Nullable String databaseName; + private @Nullable String collectionName; + + /** + * @param dbName must not be {@literal null}. + * @return this. + */ + public MessagePropertiesBuilder databaseName(String dbName) { + + Assert.notNull(dbName, "Database name must not be null"); + + this.databaseName = dbName; + return this; + } + + /** + * @param collectionName must not be {@literal null}. 
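For completeness, assembling MessageProperties by hand via the builder defined here — mainly useful when stubbing Messages in tests (database and collection names are illustrative):

    MessageProperties properties = MessageProperties.builder()
            .databaseName("test")
            .collectionName("people")
            .build();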
+			 * @return this.
+			 */
+			public MessagePropertiesBuilder collectionName(String collectionName) {
+
+				Assert.notNull(collectionName, "Collection name must not be null");
+
+				this.collectionName = collectionName;
+				return this;
+			}
+
+			/**
+			 * @return the built {@link MessageProperties}.
+			 */
+			public MessageProperties build() {
+
+				MessageProperties properties = new MessageProperties();
+
+				properties.collectionName = collectionName;
+				properties.databaseName = databaseName;
+
+				return properties;
+			}
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java
new file mode 100644
index 0000000000..e23b6f39dc
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+/**
+ * Listener interface to receive delivery of {@link Message Messages}.
+ *
+ * @author Christoph Strobl
+ * @param <S> source message type.
+ * @param <T> target message type.
+ * @since 2.1
+ */
+@FunctionalInterface
+public interface MessageListener<S, T> {
+
+	/**
+	 * Callback invoked on receiving a {@link Message}.
+	 *
+	 * @param message never {@literal null}.
+	 */
+	void onMessage(Message<S, T> message);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java
new file mode 100644
index 0000000000..5d244fb171
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import java.util.Optional;
+
+import org.springframework.context.SmartLifecycle;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions;
+import org.springframework.util.ErrorHandler;
+
+/**
+ * Internal abstraction used by the framework representing a message listener container. Not meant to be implemented
+ * externally.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+public interface MessageListenerContainer extends SmartLifecycle {
+
+	/**
+	 * Create a new {@link MessageListenerContainer} given {@link MongoTemplate}.
+	 *
+	 * @param template must not be {@literal null}.
+	 * @return a new {@link MessageListenerContainer} using {@link MongoTemplate}.
+	 */
+	static MessageListenerContainer create(MongoTemplate template) {
+		return new DefaultMessageListenerContainer(template);
+	}
+
+	/**
+	 * Register a new {@link SubscriptionRequest} in the container. If the container
+	 * {@link MessageListenerContainer#isRunning() is already running}, the {@link Subscription} will be added and run
+	 * immediately; otherwise it will be scheduled and started once the container is actually
+	 * {@link MessageListenerContainer#start() started}.
+	 *
+	 * <pre>
                    +	 *     MessageListenerContainer container = ...
                    +	 *
                    +	 *     MessageListener<ChangeStreamDocument<Document>, Object> messageListener = (message) -> message....
                    +	 *     ChangeStreamRequest<Object> request = new ChangeStreamRequest<>(messageListener, () -> "collection-name");
                    +	 *
                    +	 *     Subscription subscription = container.register(request);
+	 * </pre>
                    +	 * 
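+	 * A sketch of the surrounding lifecycle (assuming the container is not configured to start automatically):
+	 * <pre>
+	 *     container.start();
+	 *     // ... messages are delivered to registered listeners
+	 *     container.stop();
+	 * </pre>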
+	 *
+	 * Errors during {@link Message} retrieval lead to {@link Subscription#cancel() cancellation} of the underlying task.
+	 *
+	 * @param request must not be {@literal null}.
+	 * @return never {@literal null}.
+	 */
+	default Subscription register(SubscriptionRequest request) {
+		return register(request, Object.class);
+	}
+
+	/**
+	 * Register a new {@link SubscriptionRequest} in the container. If the container
+	 * {@link MessageListenerContainer#isRunning() is already running}, the {@link Subscription} will be added and run
+	 * immediately; otherwise it will be scheduled and started once the container is actually
+	 * {@link MessageListenerContainer#start() started}.
+	 *
+	 * <pre>
                    +	 *     MessageListenerContainer container = ...
                    +	 *
                    +	 *     MessageListener<ChangeStreamDocument<Document>, Document> messageListener = (message) -> message.getBody().toJson();
                    +	 *     ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(messageListener, () -> "collection-name");
                    +	 *
                    +	 *     Subscription subscription = container.register(request, Document.class);
+	 * </pre>
                    +	 * 
                    + * + * On {@link MessageListenerContainer#stop()} all {@link Subscription subscriptions} are cancelled prior to shutting + * down the container itself. + *
                    + * Registering the very same {@link SubscriptionRequest} more than once simply returns the already existing + * {@link Subscription}. + *
+	 * Unless a {@link Subscription} is {@link #remove(Subscription) removed} from the container, the {@link Subscription}
+	 * is restarted once the container itself is restarted.
+	 * Errors during {@link Message} retrieval lead to {@link Subscription#cancel() cancellation} of the underlying task.
+	 *
+	 * @param request must not be {@literal null}.
+	 * @param bodyType the exact target or a more concrete type of the {@link Message#getBody()}.
+	 * @return never {@literal null}.
+	 */
+	Subscription register(SubscriptionRequest request, Class bodyType);
+
+	/**
+	 * Register a new {@link SubscriptionRequest} in the container. If the container
+	 * {@link MessageListenerContainer#isRunning() is already running}, the {@link Subscription} will be added and run
+	 * immediately; otherwise it will be scheduled and started once the container is actually
+	 * {@link MessageListenerContainer#start() started}.
+	 *
+	 * <pre>
                    +	 *     MessageListenerContainer container = ...
                    +	 *
                    +	 *     MessageListener<ChangeStreamDocument<Document>, Document> messageListener = (message) -> message.getBody().toJson();
                    +	 *     ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(messageListener, () -> "collection-name");
                    +	 *
                    +	 *     Subscription subscription = container.register(request, Document.class);
+	 * </pre>
                    +	 * 
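+	 * A variant delegating errors to a custom {@link ErrorHandler} (the handler shown is a minimal sketch):
+	 * <pre>
+	 *     Subscription subscription = container.register(request, Document.class, Throwable::printStackTrace);
+	 * </pre>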
                    + * + * On {@link MessageListenerContainer#stop()} all {@link Subscription subscriptions} are cancelled prior to shutting + * down the container itself. + *
                    + * Registering the very same {@link SubscriptionRequest} more than once simply returns the already existing + * {@link Subscription}. + *
+	 * Unless a {@link Subscription} is {@link #remove(Subscription) removed} from the container, the {@link Subscription}
+	 * is restarted once the container itself is restarted.
+	 * Errors during {@link Message} retrieval are delegated to the given {@link ErrorHandler}.
+	 *
+	 * @param request must not be {@literal null}.
+	 * @param bodyType the exact target or a more concrete type of the {@link Message#getBody()}. Must not be {@literal null}.
+	 * @param errorHandler the callback to invoke when retrieving the {@link Message} from the data source fails for some
+	 *          reason.
+	 * @return never {@literal null}.
+	 */
+	Subscription register(SubscriptionRequest request, Class bodyType, ErrorHandler errorHandler);
+
+	/**
+	 * Unregister a given {@link Subscription} from the container. This prevents the {@link Subscription} from being
+	 * restarted in a potential {@link SmartLifecycle#stop() stop}/{@link SmartLifecycle#start() start} scenario.
+	 * An {@link Subscription#isActive() active} {@link Subscription subscription} is {@link Subscription#cancel()
+	 * cancelled} prior to removal.
+	 *
+	 * @param subscription must not be {@literal null}.
+	 */
+	void remove(Subscription subscription);
+
+	/**
+	 * Lookup the given {@link SubscriptionRequest} within the container and return the associated {@link Subscription}
+	 * if present.
+	 *
+	 * @param request must not be {@literal null}.
+	 * @return {@link Optional#empty()} if the request is not registered.
+	 */
+	Optional<Subscription> lookup(SubscriptionRequest request);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java
new file mode 100644
index 0000000000..be5308e3cf
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Trivial {@link Message} implementation.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+class SimpleMessage<S, T> implements Message<S, T> {
+
+	private @Nullable final S raw;
+	private @Nullable final T body;
+	private final MessageProperties properties;
+
+	/**
+	 * @param raw the raw message source; may be {@literal null}.
+	 * @param body the converted message body; may be {@literal null}.
+	 * @param properties must not be {@literal null}. Use {@link MessageProperties#empty()} instead.
+	 */
+	SimpleMessage(@Nullable S raw, @Nullable T body, MessageProperties properties) {
+
+		Assert.notNull(properties, "Properties must not be null; use MessageProperties.empty() instead");
+
+		this.raw = raw;
+		this.body = body;
+		this.properties = properties;
+	}
+
+	@Override
+	public S getRaw() {
+		return raw;
+	}
+
+	@Override
+	public T getBody() {
+		return body;
+	}
+
+	@Override
+	public MessageProperties getProperties() {
+		return properties;
+	}
+
+	@Override
+	public boolean equals(@Nullable Object o) {
+		if (this == o)
+			return true;
+		if (o == null || getClass() != o.getClass())
+			return false;
+
+		SimpleMessage<?, ?> that = (SimpleMessage<?, ?>) o;
+
+		if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) {
+			return false;
+		}
+		if (!ObjectUtils.nullSafeEquals(this.body, that.body)) {
+			return false;
+		}
+		return ObjectUtils.nullSafeEquals(this.properties, that.properties);
+	}
+
+	@Override
+	public int hashCode() {
+		int result = ObjectUtils.nullSafeHashCode(raw);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(body);
+		result = 31 * result + ObjectUtils.nullSafeHashCode(properties);
+		return result;
+	}
+
+	@Override
+	public String toString() {
+		return "SimpleMessage(raw=" + this.getRaw() + ", body=" + this.getBody() + ", properties=" + this.getProperties()
+				+ ")";
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java
new file mode 100644
index 0000000000..5e928fe49d
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import java.time.Duration;
+
+/**
+ * The {@link Subscription} is the link between the {@link SubscriptionRequest} and the actual running {@link Task}.
+ *
+ * Due to the asynchronous nature of the {@link Task} execution, a {@link Subscription} might not immediately become
+ * active. {@link #isActive()} indicates whether the underlying {@link Task} is already running.
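+ * A minimal sketch of registering a request and awaiting activation (the container and request are assumed to be
+ * configured elsewhere):
+ * <pre>
+ *     Subscription subscription = container.register(request);
+ *     subscription.await(Duration.ofSeconds(2));
+ * </pre>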
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 2.1
+ */
+public interface Subscription extends Cancelable {
+
+	/**
+	 * @return {@literal true} if the subscription is currently being executed.
+	 */
+	boolean isActive();
+
+	/**
+	 * Synchronous, blocking call that returns once the {@link Subscription} becomes {@link #isActive() active} or the
+	 * given {@link Duration timeout} is exceeded.
+	 *
+	 * @param timeout must not be {@literal null}.
+	 * @return {@code true} if the subscription was activated. {@code false} if the waiting time elapsed before the task
+	 *         was activated.
+	 * @throws InterruptedException if the current thread is interrupted while waiting.
+	 */
+	boolean await(Duration timeout) throws InterruptedException;
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java
new file mode 100644
index 0000000000..287ba293b6
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import java.time.Duration;
+
+import org.springframework.data.mongodb.MongoDatabaseFactory;
+import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+
+/**
+ * The actual {@link SubscriptionRequest} sent to the {@link MessageListenerContainer}. This wrapper type allows
+ * passing in {@link RequestOptions additional information} to the container which can be used for creating the actual
+ * {@link Task} to run.
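+ * {@link RequestOptions} can be obtained via the static factory methods shown further below, e.g. (names are
+ * illustrative):
+ * <pre>
+ *     RequestOptions options = RequestOptions.of("database-name", "collection-name");
+ * </pre>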
                    + * The {@link MessageListener} provides the callback interface when pushing {@link Message messages}. + * + * @author Christoph Strobl + * @since 2.1 + */ +public interface SubscriptionRequest { + + /** + * Obtain the {@link MessageListener} to publish {@link Message messages} to. + * + * @return never {@literal null}. + */ + MessageListener getMessageListener(); + + /** + * Get the {@link RequestOptions} specifying the requests behaviour. + * + * @return never {@literal null}. + */ + O getRequestOptions(); + + /** + * Options for specifying the behaviour of the {@link SubscriptionRequest}. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface RequestOptions { + + /** + * Get the database name of the db. + * + * @return the name of the database to subscribe to. Can be {@literal null} in which case the default + * {@link MongoDatabaseFactory#getMongoDatabase() database} is used. + */ + @Nullable + default String getDatabaseName() { + return null; + } + + /** + * Get the collection name. + * + * @return the name of the collection to subscribe to. Can be {@literal null}. + */ + @Nullable + String getCollectionName(); + + /** + * Get the maximum wait time (the time till the next Document is emitted) to apply when reading from the collection. + * + * @return never {@literal null}. {@link Duration#ZERO} by default. + * @since 3.0 + */ + default Duration maxAwaitTime() { + return Duration.ZERO; + } + + /** + * Create empty options. + * + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions none() { + return () -> null; + } + + /** + * Create options with the provided database. + * + * @param database must not be {@literal null}. + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions justDatabase(String database) { + + Assert.notNull(database, "Database must not be null"); + + return new RequestOptions() { + + @Override + public String getCollectionName() { + return null; + } + + @Override + public String getDatabaseName() { + return database; + } + }; + } + + /** + * Create options with the provided collection. + * + * @param collection must not be {@literal null}. + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions justCollection(String collection) { + + Assert.notNull(collection, "Collection must not be null"); + return () -> collection; + } + + /** + * Create options with the provided database and collection. + * + * @param database must not be {@literal null}. + * @param collection must not be {@literal null}. + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions of(String database, String collection) { + + Assert.notNull(database, "Database must not be null"); + Assert.notNull(collection, "Collection must not be null"); + + return new RequestOptions() { + + @Override + public String getCollectionName() { + return collection; + } + + @Override + public String getDatabaseName() { + return database; + } + }; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java new file mode 100644 index 0000000000..c6caef12fb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java @@ -0,0 +1,269 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import java.util.Optional; + +import org.bson.Document; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.core.messaging.TailableCursorRequest.TailableCursorRequestOptions.TailableCursorRequestOptionsBuilder; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * {@link SubscriptionRequest} implementation to be used to listen to query results in a + * Capped Collection using a + * Tailable Cursor. + *
                    + * The most trivial use case is subscribing to all events of a specific {@link com.mongodb.client.MongoCollection + * collection}. + * + *
+ * <pre>
                    + *     TailableCursorRequest<Document> request = new TailableCursorRequest<>(System.out::println, () -> "collection-name");
+ * </pre>
                    + * 
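+ * The request is then registered with a {@link MessageListenerContainer} (a sketch; the container is assumed to be
+ * running):
+ * <pre>
+ *     Subscription subscription = container.register(request, Document.class);
+ * </pre>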
                    + * + * {@link TailableCursorRequestBuilder} offers a fluent API for creating {@link TailableCursorRequest} with + * {@link TailableCursorRequestOptions} in one go. + * + *
+ * <pre>
                    + *       TailableCursorRequest<Document> request = TailableCursorRequest.builder()
                    + *           .collection("collection-name")
                    + *           .publishTo(System.out::println)
                    + *           .build();
+ * </pre>
                    + * 
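+ * A filtered variant (a sketch, assuming static imports of {@code Query.query} and {@code Criteria.where}):
+ * <pre>
+ *     TailableCursorRequest<Document> filtered = TailableCursorRequest.builder(System.out::println)
+ *         .collection("collection-name")
+ *         .filter(query(where("state").is("active")))
+ *         .build();
+ * </pre>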
                    + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class TailableCursorRequest implements SubscriptionRequest { + + private final MessageListener messageListener; + private final TailableCursorRequestOptions options; + + /** + * Create a new {@link TailableCursorRequest} with options, passing {@link Message messages} to the given + * {@link MessageListener}. + * + * @param messageListener must not be {@literal null}. + * @param options must not be {@literal null}. + */ + public TailableCursorRequest(MessageListener messageListener, RequestOptions options) { + + Assert.notNull(messageListener, "MessageListener must not be null"); + Assert.notNull(options, "Options must not be null"); + + this.messageListener = messageListener; + this.options = options instanceof TailableCursorRequestOptions tailableCursorRequestOptions ? + tailableCursorRequestOptions : TailableCursorRequestOptions.of(options); + } + + @Override + public MessageListener getMessageListener() { + return messageListener; + } + + @Override + public TailableCursorRequestOptions getRequestOptions() { + return options; + } + + /** + * Obtain a shiny new {@link TailableCursorRequestBuilder} and start defining options in this fancy fluent way. Just + * don't forget to call {@link TailableCursorRequestBuilder#build() build()} when done. + * + * @return new instance of {@link TailableCursorRequestBuilder}. + */ + public static TailableCursorRequestBuilder builder() { + return new TailableCursorRequestBuilder(); + } + + /** + * Obtain a shiny new {@link TailableCursorRequestBuilder} and start defining options in this fancy fluent way. Just + * don't forget to call {@link TailableCursorRequestBuilder#build() build()} when done. + * + * @return new instance of {@link TailableCursorRequestBuilder}. + */ + public static TailableCursorRequestBuilder builder(MessageListener listener) { + + TailableCursorRequestBuilder builder = new TailableCursorRequestBuilder<>(); + return builder.publishTo(listener); + } + + /** + * {@link SubscriptionRequest.RequestOptions} implementation specific to a {@link TailableCursorRequest}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class TailableCursorRequestOptions implements SubscriptionRequest.RequestOptions { + + private @Nullable String collectionName; + private @Nullable Query query; + + TailableCursorRequestOptions() {} + + public static TailableCursorRequestOptions of(RequestOptions options) { + return builder().collection(options.getCollectionName()).build(); + } + + /** + * Obtain a shiny new {@link TailableCursorRequestOptionsBuilder} and start defining options in this fancy fluent + * way. Just don't forget to call {@link TailableCursorRequestOptionsBuilder#build() build()} when done. + * + * @return new instance of {@link TailableCursorRequestOptionsBuilder}. + */ + public static TailableCursorRequestOptionsBuilder builder() { + return new TailableCursorRequestOptionsBuilder(); + } + + @Override + public String getCollectionName() { + return collectionName; + } + + public Optional getQuery() { + return Optional.ofNullable(query); + } + + /** + * Builder for creating {@link TailableCursorRequestOptions}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class TailableCursorRequestOptionsBuilder { + + private @Nullable String collectionName; + private @Nullable Query query; + + private TailableCursorRequestOptionsBuilder() {} + + /** + * Set the collection name to tail. 
+			 *
+			 * @param collection must not be {@literal null} nor {@literal empty}.
+			 * @return this.
+			 */
+			public TailableCursorRequestOptionsBuilder collection(String collection) {
+
+				Assert.hasText(collection, "Collection must not be null nor empty");
+
+				this.collectionName = collection;
+				return this;
+			}
+
+			/**
+			 * Set the filter to apply.
+			 *
+			 * @param filter the {@link Query} to apply for filtering events. Must not be {@literal null}.
+			 * @return this.
+			 */
+			public TailableCursorRequestOptionsBuilder filter(Query filter) {
+
+				Assert.notNull(filter, "Filter must not be null");
+
+				this.query = filter;
+				return this;
+			}
+
+			/**
+			 * @return the built {@link TailableCursorRequestOptions}.
+			 */
+			public TailableCursorRequestOptions build() {
+
+				TailableCursorRequestOptions options = new TailableCursorRequestOptions();
+
+				options.collectionName = collectionName;
+				options.query = query;
+
+				return options;
+			}
+		}
+	}
+
+	/**
+	 * Builder for creating {@link TailableCursorRequest}.
+	 *
+	 * @author Mark Paluch
+	 * @since 2.1
+	 * @see TailableCursorRequestOptions
+	 */
+	public static class TailableCursorRequestBuilder {
+
+		private @Nullable MessageListener listener;
+		private TailableCursorRequestOptionsBuilder delegate = TailableCursorRequestOptions.builder();
+
+		private TailableCursorRequestBuilder() {}
+
+		/**
+		 * Set the name of the {@link com.mongodb.client.MongoCollection} to listen to.
+		 *
+		 * @param collectionName must not be {@literal null} nor empty.
+		 * @return this.
+		 */
+		public TailableCursorRequestBuilder collection(String collectionName) {
+
+			Assert.hasText(collectionName, "Collection name must not be null nor empty");
+
+			delegate.collection(collectionName);
+			return this;
+		}
+
+		/**
+		 * Set the {@link MessageListener} that event {@link Message messages} will be published to.
+		 *
+		 * @param messageListener must not be {@literal null}.
+		 * @return this.
+		 */
+		public TailableCursorRequestBuilder publishTo(MessageListener messageListener) {
+
+			Assert.notNull(messageListener, "MessageListener must not be null");
+
+			this.listener = messageListener;
+			return this;
+		}
+
+		/**
+		 * Set the filter to apply.
+		 *
+		 * @param filter the {@link Query} to apply for filtering events. Must not be {@literal null}.
+		 * @return this.
+		 */
+		public TailableCursorRequestBuilder filter(Query filter) {
+
+			Assert.notNull(filter, "Filter must not be null");
+
+			delegate.filter(filter);
+			return this;
+		}
+
+		/**
+		 * @return the built {@link TailableCursorRequest}.
+		 */
+		public TailableCursorRequest build() {
+
+			Assert.notNull(listener, "MessageListener must not be null");
+
+			return new TailableCursorRequest<>(listener, delegate.build());
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java
new file mode 100644
index 0000000000..43bd7bd55d
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.util.ErrorHandler; + +import com.mongodb.CursorType; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.Collation; + +/** + * @author Christoph Strobl + * @since 2.1 + */ +class TailableCursorTask extends CursorReadingTask { + + private QueryMapper queryMapper; + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public TailableCursorTask(MongoTemplate template, TailableCursorRequest request, Class targetType, + ErrorHandler errorHandler) { + super(template, (TailableCursorRequest) request, (Class) targetType, errorHandler); + queryMapper = new QueryMapper(template.getConverter()); + } + + @Override + protected MongoCursor initCursor(MongoTemplate template, RequestOptions options, Class targetType) { + + Document filter = new Document(); + Collation collation = null; + + if (options instanceof TailableCursorRequest.TailableCursorRequestOptions requestOptions) { + + if (requestOptions.getQuery().isPresent()) { + + Query query = requestOptions.getQuery().get(); + + filter.putAll(queryMapper.getMappedObject(query.getQueryObject(), template.getConverter().getMappingContext() + .getPersistentEntity(targetType.equals(Document.class) ? Object.class : targetType))); + + collation = query.getCollation().map(org.springframework.data.mongodb.core.query.Collation::toMongoCollation) + .orElse(null); + } + } + + FindIterable iterable = template.getCollection(options.getCollectionName()).find(filter) + .cursorType(CursorType.TailableAwait).noCursorTimeout(true); + + if (collation != null) { + iterable = iterable.collation(collation); + } + + if (!options.maxAwaitTime().isZero()) { + iterable = iterable.maxAwaitTime(options.maxAwaitTime().toMillis(), TimeUnit.MILLISECONDS); + } + + return iterable.iterator(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java new file mode 100644 index 0000000000..e8b9c018b1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import java.time.Duration; + +import org.springframework.scheduling.SchedulingAwareRunnable; + +/** + * The actual {@link Task} to run within the {@link MessageListenerContainer}. + * + * @author Christoph Strobl + * @since 2.1 + */ +public interface Task extends SchedulingAwareRunnable, Cancelable { + + /** + * @return {@literal true} if the task is currently {@link State#RUNNING running}. + */ + default boolean isActive() { + return State.RUNNING.equals(getState()); + } + + /** + * Get the current lifecycle phase. + * + * @return never {@literal null}. + */ + State getState(); + + /** + * Synchronous, blocking call that awaits until this {@link Task} becomes active. + * + * @param timeout must not be {@literal null}. + * @return {@code true} if the task was started. {@code false} if the waiting time elapsed before task was started. + * @throws InterruptedException if the current thread is interrupted while waiting. + */ + boolean awaitStart(Duration timeout) throws InterruptedException; + + /** + * The {@link Task.State} defining the lifecycle phase the actual {@link Task}. + * + * @author Christoph Strobl + * @since 2.1 + */ + enum State { + CREATED, STARTING, RUNNING, CANCELLED + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java new file mode 100644 index 0000000000..0bfaa1c574 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java @@ -0,0 +1,67 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.util.Assert; +import org.springframework.util.ErrorHandler; + +/** + * A simple factory for creating {@link Task} for a given {@link SubscriptionRequest}. + * + * @author Christoph Strobl + * @since 2.1 + */ +class TaskFactory { + + private final MongoTemplate template; + + /** + * @param template must not be {@literal null}. + */ + TaskFactory(MongoTemplate template) { + + Assert.notNull(template, "Template must not be null"); + + this.template = template; + } + + /** + * Create a {@link Task} for the given {@link SubscriptionRequest}. 
+	 *
+	 * @param request must not be {@literal null}.
+	 * @param targetType must not be {@literal null}. Consider {@code Object.class}.
+	 * @param errorHandler must not be {@literal null}.
+	 * @return the {@link Task} matching the given {@link SubscriptionRequest}; never {@literal null}.
+	 * @throws IllegalArgumentException in case the {@link SubscriptionRequest} is unknown.
+	 */
+	Task forRequest(SubscriptionRequest request, Class targetType, ErrorHandler errorHandler) {
+
+		Assert.notNull(request, "Request must not be null");
+		Assert.notNull(targetType, "TargetType must not be null");
+
+		if (request instanceof ChangeStreamRequest changeStreamRequest) {
+			return new ChangeStreamTask(template, changeStreamRequest, targetType, errorHandler);
+		} else if (request instanceof TailableCursorRequest tailableCursorRequest) {
+			return new TailableCursorTask(template, tailableCursorRequest, targetType, errorHandler);
+		}
+
+		throw new IllegalArgumentException(
+				"oh wow - seems you're using some fancy new feature we do not support; please be so kind and leave us a note in the issue tracker so we can get this fixed\nThank you");
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/package-info.java
new file mode 100644
index 0000000000..35be8f2ef8
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/package-info.java
@@ -0,0 +1,6 @@
+/**
+ * MongoDB specific messaging support for listening to e.g.
+ * Change Streams.
+ */
+@org.springframework.lang.NonNullApi
+package org.springframework.data.mongodb.core.messaging;
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/package-info.java
index a4c52a66de..e2f9169d0d 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/package-info.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/package-info.java
@@ -1,5 +1,6 @@
 /**
  * MongoDB core support.
  */
+@org.springframework.lang.NonNullApi
 package org.springframework.data.mongodb.core;
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java
index a871162fc2..8b1620b320 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2010-2016 the original author or authors.
+ * Copyright 2010-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,131 +17,172 @@ import static org.springframework.util.ObjectUtils.*; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.util.JSON; +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** * Custom {@link Query} implementation to setup a basic query from some arbitrary JSON query string. - * + * * @author Thomas Risberg * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont * @author John Willemin + * @author Mark Paluch */ public class BasicQuery extends Query { - private final DBObject queryObject; - private DBObject fieldsObject; - private DBObject sortObject; + private final Document queryObject; + + private Document fieldsObject; + private Document sortObject; - public BasicQuery(String query) { - this((DBObject) JSON.parse(query)); + /** + * Create a new {@link BasicQuery} given a JSON {@code query}. + * + * @param query may be {@literal null}. + */ + public BasicQuery(@Nullable String query) { + this(query, null); } - public BasicQuery(DBObject queryObject) { - this(queryObject, null); + /** + * Create a new {@link BasicQuery} given a query {@link Document}. + * + * @param queryObject must not be {@literal null}. + */ + public BasicQuery(Document queryObject) { + this(queryObject, new Document()); } - public BasicQuery(String query, String fields) { - this.queryObject = (DBObject) JSON.parse(query); - this.fieldsObject = (DBObject) JSON.parse(fields); + /** + * Create a new {@link BasicQuery} given a JSON {@code query} and {@code fields}. + * + * @param query may be {@literal null}. + * @param fields may be {@literal null}. + */ + public BasicQuery(@Nullable String query, @Nullable String fields) { + + this(query != null ? Document.parse(query) : new Document(), + fields != null ? Document.parse(fields) : new Document()); } - public BasicQuery(DBObject queryObject, DBObject fieldsObject) { + /** + * Create a new {@link BasicQuery} given a query {@link Document} and field specification {@link Document}. + * + * @param queryObject must not be {@literal null}. + * @param fieldsObject must not be {@literal null}. + * @throws IllegalArgumentException when {@code queryObject} or {@code fieldsObject} is {@literal null}. + */ + public BasicQuery(Document queryObject, Document fieldsObject) { + + Assert.notNull(queryObject, "Query document must not be null"); + Assert.notNull(fieldsObject, "Field document must not be null"); + this.queryObject = queryObject; this.fieldsObject = fieldsObject; + this.sortObject = new Document(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#addCriteria(org.springframework.data.mongodb.core.query.CriteriaDefinition) + /** + * Create a BasicQuery given a {@link Query}. The resulting query is a copy of {@link Query}. + * + * @param query the query to copy. 
+ * @since 4.4 */ + public BasicQuery(Query query) { + + super(query); + this.queryObject = query.getQueryObject(); + this.setFieldsObject(query.getFieldsObject()); + this.setSortObject(query.getSortObject()); + this.setMeta(query.getMeta()); + } + @Override public Query addCriteria(CriteriaDefinition criteria) { + this.queryObject.putAll(criteria.getCriteriaObject()); + return this; } @Override - public DBObject getQueryObject() { + public Document getQueryObject() { return this.queryObject; } @Override - public DBObject getFieldsObject() { + public Document getFieldsObject() { - if (fieldsObject == null) { - return super.getFieldsObject(); - } - - if (super.getFieldsObject() != null) { - - DBObject combinedFieldsObject = new BasicDBObject(); - combinedFieldsObject.putAll(fieldsObject); - combinedFieldsObject.putAll(super.getFieldsObject()); - return combinedFieldsObject; - } - - return fieldsObject; + Document combinedFieldsObject = new Document(); + combinedFieldsObject.putAll(fieldsObject); + combinedFieldsObject.putAll(super.getFieldsObject()); + return combinedFieldsObject; } @Override - public DBObject getSortObject() { + public Document getSortObject() { - BasicDBObject result = new BasicDBObject(); - if (sortObject != null) { - result.putAll(sortObject); - } + Document result = new Document(); + result.putAll(sortObject); - DBObject overrides = super.getSortObject(); - if (overrides != null) { - result.putAll(overrides); - } + Document overrides = super.getSortObject(); + result.putAll(overrides); return result; } - public void setSortObject(DBObject sortObject) { + /** + * Set the sort {@link Document}. + * + * @param sortObject must not be {@literal null}. + * @throws IllegalArgumentException when {@code sortObject} is {@literal null}. + */ + public void setSortObject(Document sortObject) { + + Assert.notNull(sortObject, "Sort document must not be null"); + this.sortObject = sortObject; } + @Override + public boolean isSorted() { + return super.isSorted() || !sortObject.isEmpty(); + } + /** + * Set the fields (projection) {@link Document}. + * + * @param fieldsObject must not be {@literal null}. + * @throws IllegalArgumentException when {@code fieldsObject} is {@literal null}. 
* @since 1.6 - * @param fieldsObject */ - protected void setFieldsObject(DBObject fieldsObject) { + public void setFieldsObject(Document fieldsObject) { + + Assert.notNull(fieldsObject, "Field document must not be null"); + this.fieldsObject = fieldsObject; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#equals(java.lang.Object) - */ @Override - public boolean equals(Object o) { + public boolean equals(@Nullable Object o) { if (this == o) { return true; } - if (!(o instanceof BasicQuery)) { + if (!(o instanceof BasicQuery that)) { return false; } - BasicQuery that = (BasicQuery) o; - return querySettingsEquals(that) && // nullSafeEquals(fieldsObject, that.fieldsObject) && // nullSafeEquals(queryObject, that.queryObject) && // nullSafeEquals(sortObject, that.sortObject); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#hashCode() - */ @Override public int hashCode() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java index 883eccc5c8..12843ce622 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,94 +15,140 @@ */ package org.springframework.data.mongodb.core.query; -import java.util.Arrays; +import java.util.ArrayList; import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.util.JSON; +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * {@link Document}-based {@link Update} variant. 
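+ * Consecutive calls for the same update operator are merged into a single operator document, e.g. (keys and values
+ * are illustrative):
+ * <pre>
+ *     BasicUpdate update = new BasicUpdate("{ '$set' : { 'name' : 'spring' } }");
+ *     update.set("version", 4); // renders { '$set' : { 'name' : 'spring', 'version' : 4 } }
+ * </pre>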
+ * + * @author Thomas Risberg + * @author John Brisbin + * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + */ public class BasicUpdate extends Update { - private DBObject updateObject = null; + private final Document updateObject; public BasicUpdate(String updateString) { - super(); - this.updateObject = (DBObject) JSON.parse(updateString); + this(Document.parse(updateString)); } - public BasicUpdate(DBObject updateObject) { - super(); + public BasicUpdate(Document updateObject) { this.updateObject = updateObject; } @Override - public Update set(String key, Object value) { - updateObject.put("$set", Collections.singletonMap(key, value)); + public Update set(String key, @Nullable Object value) { + setOperationValue("$set", key, value); return this; } @Override public Update unset(String key) { - updateObject.put("$unset", Collections.singletonMap(key, 1)); + setOperationValue("$unset", key, 1); return this; } @Override public Update inc(String key, Number inc) { - updateObject.put("$inc", Collections.singletonMap(key, inc)); - return this; - } - - @Override - public Update push(String key, Object value) { - updateObject.put("$push", Collections.singletonMap(key, value)); + setOperationValue("$inc", key, inc); return this; } @Override - public Update pushAll(String key, Object[] values) { - DBObject keyValue = new BasicDBObject(); - keyValue.put(key, values); - updateObject.put("$pushAll", keyValue); + public Update push(String key, @Nullable Object value) { + setOperationValue("$push", key, value); return this; } @Override - public Update addToSet(String key, Object value) { - updateObject.put("$addToSet", Collections.singletonMap(key, value)); + public Update addToSet(String key, @Nullable Object value) { + setOperationValue("$addToSet", key, value); return this; } @Override public Update pop(String key, Position pos) { - updateObject.put("$pop", Collections.singletonMap(key, (pos == Position.FIRST ? -1 : 1))); + setOperationValue("$pop", key, (pos == Position.FIRST ? 
-1 : 1)); return this; } @Override - public Update pull(String key, Object value) { - updateObject.put("$pull", Collections.singletonMap(key, value)); + public Update pull(String key, @Nullable Object value) { + setOperationValue("$pull", key, value); return this; } @Override public Update pullAll(String key, Object[] values) { - DBObject keyValue = new BasicDBObject(); - keyValue.put(key, Arrays.copyOf(values, values.length)); - updateObject.put("$pullAll", keyValue); + setOperationValue("$pullAll", key, List.of(values), (o, o2) -> { + + if (o instanceof List prev && o2 instanceof List currentValue) { + List merged = new ArrayList<>(prev.size() + currentValue.size()); + merged.addAll(prev); + merged.addAll(currentValue); + return merged; + } + + return o2; + }); return this; } @Override public Update rename(String oldName, String newName) { - updateObject.put("$rename", Collections.singletonMap(oldName, newName)); + setOperationValue("$rename", oldName, newName); return this; } @Override - public DBObject getUpdateObject() { + public boolean modifies(String key) { + return super.modifies(key) || Update.fromDocument(getUpdateObject()).modifies(key); + } + + @Override + public Document getUpdateObject() { return updateObject; } + void setOperationValue(String operator, String key, @Nullable Object value) { + setOperationValue(operator, key, value, (o, o2) -> o2); + } + + void setOperationValue(String operator, String key, @Nullable Object value, + BiFunction mergeFunction) { + + if (!updateObject.containsKey(operator)) { + updateObject.put(operator, Collections.singletonMap(key, value)); + } else { + Object o = updateObject.get(operator); + if (o instanceof Map existing) { + Map target = new LinkedHashMap<>(existing); + + if (target.containsKey(key)) { + target.put(key, mergeFunction.apply(target.get(key), value)); + } else { + target.put(key, value); + } + updateObject.put(operator, target); + } else { + throw new IllegalStateException( + "Cannot add ['%s' : { '%s' : ... }]. Operator already exists with value of type [%s] which is not suitable for appending" + .formatted(operator, key, + o != null ? ClassUtils.getShortName(o.getClass()) : "null")); + } + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java new file mode 100644 index 0000000000..de24c0511d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java @@ -0,0 +1,897 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.query; + +import java.util.Locale; +import java.util.Optional; + +import org.bson.Document; +import org.springframework.core.convert.converter.Converter; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.model.Collation.Builder; +import com.mongodb.client.model.CollationAlternate; +import com.mongodb.client.model.CollationCaseFirst; +import com.mongodb.client.model.CollationMaxVariable; +import com.mongodb.client.model.CollationStrength; + +/** + * Central abstraction for MongoDB collation support.
+ * Allows fluent creation of a collation {@link Document} that can be used for creating collections and indexes as
+ * well as for querying data.
+ *
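+ * For example (locale and settings are illustrative):
+ * <pre>
+ *     Collation collation = Collation.of("en_US").strength(2).numericOrderingEnabled();
+ * </pre>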
                    + * NOTE: Please keep in mind that queries will only make use of an index with collation settings if the + * query itself specifies the same collation. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Jens Schauder + * @see MongoDB Reference - Collation + * @since 2.0 + */ +public class Collation { + + private static final Collation SIMPLE = of("simple"); + + private final CollationLocale locale; + + private Optional strength = Optional.empty(); + private Optional numericOrdering = Optional.empty(); + private Optional alternate = Optional.empty(); + private Optional backwards = Optional.empty(); + private Optional normalization = Optional.empty(); + private Optional version = Optional.empty(); + + private Collation(CollationLocale locale) { + + Assert.notNull(locale, "ICULocale must not be null"); + this.locale = locale; + } + + /** + * Create a {@link Collation} using {@literal simple} binary comparison. + * + * @return a {@link Collation} for {@literal simple} binary comparison. + */ + public static Collation simple() { + return SIMPLE; + } + + /** + * Create new {@link Collation} with locale set to {{@link java.util.Locale#getLanguage()}} and + * {@link java.util.Locale#getVariant()}. + * + * @param locale must not be {@literal null}. + * @return new instance of {@link Collation}. + */ + public static Collation of(Locale locale) { + + Assert.notNull(locale, "Locale must not be null"); + + String format; + + if (StringUtils.hasText(locale.getCountry())) { + format = String.format("%s_%s", locale.getLanguage(), locale.getCountry()); + } else { + format = locale.getLanguage(); + } + + return of(CollationLocale.of(format).variant(locale.getVariant())); + } + + /** + * Create new {@link Collation} with locale set to the given ICU language. + * + * @param language must not be {@literal null}. + * @return new instance of {@link Collation}. + */ + public static Collation of(String language) { + return of(CollationLocale.of(language)); + } + + /** + * Create new {@link Collation} with locale set to the given {@link CollationLocale}. + * + * @param locale must not be {@literal null}. + * @return new instance of {@link Collation}. + */ + public static Collation of(CollationLocale locale) { + return new Collation(locale); + } + + /** + * Parse the given {@code collation} string into a {@link Collation}. + * + * @param collation the collation to parse. Can be a simple string like {@code en_US} or a + * {@link Document#parse(String) parsable} document like { 'locale' : '?0' } . + * @return never {@literal null}. + * @throws IllegalArgumentException if {@literal collation} is null. + * @since 2.2 + */ + public static Collation parse(String collation) { + + Assert.notNull(collation, "Collation must not be null"); + + return collation.stripLeading().startsWith("{") ? from(Document.parse(collation)) + : of(collation); + } + + /** + * Create new {@link Collation} from values in {@link Document}. + * + * @param source must not be {@literal null}. + * @return new instance of {@link Collation}. 
+ * @see MongoDB Reference - + * Collation Document + */ + public static Collation from(Document source) { + + Assert.notNull(source, "Source must not be null"); + + Collation collation = Collation.of(source.getString("locale")); + if (source.containsKey("strength")) { + collation = collation.strength(source.getInteger("strength")); + } + if (source.containsKey("caseLevel")) { + collation = collation.caseLevel(source.getBoolean("caseLevel")); + } + if (source.containsKey("caseFirst")) { + collation = collation.caseFirst(source.getString("caseFirst")); + } + if (source.containsKey("numericOrdering")) { + collation = collation.numericOrdering(source.getBoolean("numericOrdering")); + } + if (source.containsKey("alternate")) { + collation = collation.alternate(source.getString("alternate")); + } + if (source.containsKey("maxVariable")) { + collation = collation.maxVariable(source.getString("maxVariable")); + } + if (source.containsKey("backwards")) { + collation = collation.backwards(source.getBoolean("backwards")); + } + if (source.containsKey("normalization")) { + collation = collation.normalization(source.getBoolean("normalization")); + } + if (source.containsKey("version")) { + collation.version = Optional.of(source.get("version").toString()); + } + return collation; + } + + /** + * Set the level of comparison to perform. + * + * @param strength comparison level. + * @return new {@link Collation}. + */ + public Collation strength(int strength) { + + ComparisonLevel current = this.strength.orElseGet(() -> new ICUComparisonLevel(strength)); + return strength(new ICUComparisonLevel(strength, current.getCaseFirst(), current.getCaseLevel())); + } + + /** + * Set the level of comparison to perform. + * + * @param comparisonLevel must not be {@literal null}. + * @return new {@link Collation} + */ + public Collation strength(ComparisonLevel comparisonLevel) { + + Collation newInstance = copy(); + newInstance.strength = Optional.of(comparisonLevel); + return newInstance; + } + + /** + * Set whether to include {@code caseLevel} comparison.
+	 *
+	 * @param caseLevel use {@literal true} to enable {@code caseLevel} comparison.
+	 * @return new {@link Collation}.
+	 */
+	public Collation caseLevel(boolean caseLevel) {
+
+		ComparisonLevel strengthValue = strength.orElseGet(ComparisonLevel::primary);
+		return strength(
+				new ICUComparisonLevel(strengthValue.getLevel(), strengthValue.getCaseFirst(), Optional.of(caseLevel)));
+	}
+
+	/**
+	 * Set the flag that determines sort order of case differences during tertiary level comparisons.
+	 *
+	 * @param caseFirst must not be {@literal null}.
+	 * @return new instance of {@link Collation}.
+	 */
+	public Collation caseFirst(String caseFirst) {
+		return caseFirst(new CaseFirst(caseFirst));
+	}
+
+	/**
+	 * Set the flag that determines sort order of case differences during tertiary level comparisons.
+	 *
+	 * @param sort must not be {@literal null}.
+	 * @return new instance of {@link Collation}.
+	 */
+	public Collation caseFirst(CaseFirst sort) {
+
+		ComparisonLevel strengthValue = strength.orElseGet(ComparisonLevel::tertiary);
+		return strength(new ICUComparisonLevel(strengthValue.getLevel(), Optional.of(sort), strengthValue.getCaseLevel()));
+	}
+
+	/**
+	 * Treat numeric strings as numbers for comparison.
+	 *
+	 * @return new {@link Collation}.
+	 */
+	public Collation numericOrderingEnabled() {
+		return numericOrdering(true);
+	}
+
+	/**
+	 * Treat numeric strings as strings for comparison.
+	 *
+	 * @return new {@link Collation}.
+	 */
+	public Collation numericOrderingDisabled() {
+		return numericOrdering(false);
+	}
+
+	/**
+	 * Set the flag that determines whether to compare numeric strings as numbers or as strings.
+	 *
+	 * @param flag use {@literal true} to compare numeric strings as numbers.
+	 * @return new {@link Collation}.
+	 */
+	public Collation numericOrdering(boolean flag) {
+
+		Collation newInstance = copy();
+		newInstance.numericOrdering = Optional.of(flag);
+		return newInstance;
+	}
+
+	/**
+	 * Set the field that determines whether collation should consider whitespace and punctuation as base characters for
+	 * purposes of comparison.
+	 *
+	 * @param alternate must not be {@literal null}.
+	 * @return new {@link Collation}.
+	 */
+	public Collation alternate(String alternate) {
+
+		Alternate instance = this.alternate.orElseGet(() -> new Alternate(alternate, Optional.empty()));
+		return alternate(new Alternate(alternate, instance.maxVariable));
+	}
+
+	/**
+	 * Set the field that determines whether collation should consider whitespace and punctuation as base characters for
+	 * purposes of comparison.
+	 *
+	 * @param alternate must not be {@literal null}.
+	 * @return new {@link Collation}.
+	 */
+	public Collation alternate(Alternate alternate) {
+
+		Collation newInstance = copy();
+		newInstance.alternate = Optional.of(alternate);
+		return newInstance;
+	}
+
+	/**
+	 * Sort strings with diacritics from the back of the string.
+	 *
+	 * @return new {@link Collation}.
+	 */
+	public Collation backwardDiacriticSort() {
+		return backwards(true);
+	}
+
+	/**
+	 * Sort strings with diacritics from the front of the string.
+	 *
+	 * @return new {@link Collation}.
+	 */
+	public Collation forwardDiacriticSort() {
+		return backwards(false);
+	}
+
+	/**
+	 * Set the flag that determines whether strings with diacritics sort from the back of the string.
+	 *
+	 * @param backwards use {@literal true} to sort from the back of the string.
+	 * @return new {@link Collation}.
+	 */
+	public Collation backwards(boolean backwards) {
+
+		Collation newInstance = copy();
+		newInstance.backwards = Optional.of(backwards);
+		return newInstance;
+	}
+
+	/**
+	 * Enable text normalization.
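Since every mutator above funnels through copy(), instances behave immutably; a quick sketch:

	Collation base = Collation.of("en");
	Collation tuned = base.numericOrderingEnabled().forwardDiacriticSort();
	// 'base' is unchanged; 'tuned' carries the additional settings.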
+	 *
+	 * @return new {@link Collation}.
+	 */
+	public Collation normalizationEnabled() {
+		return normalization(true);
+	}
+
+	/**
+	 * Disable text normalization.
+	 *
+	 * @return new {@link Collation}.
+	 */
+	public Collation normalizationDisabled() {
+		return normalization(false);
+	}
+
+	/**
+	 * Set the flag that determines whether to check if text requires normalization and whether to perform it.
+	 *
+	 * @param normalization use {@literal true} to enable normalization.
+	 * @return new {@link Collation}.
+	 */
+	public Collation normalization(boolean normalization) {
+
+		Collation newInstance = copy();
+		newInstance.normalization = Optional.of(normalization);
+		return newInstance;
+	}
+
+	/**
+	 * Set the field that determines up to which characters are considered ignorable when alternate is {@code shifted}.
+	 *
+	 * @param maxVariable must not be {@literal null}.
+	 * @return new {@link Collation}.
+	 */
+	public Collation maxVariable(String maxVariable) {
+
+		Alternate alternateValue = alternate.orElseGet(Alternate::shifted);
+		return alternate(new AlternateWithMaxVariable(alternateValue.alternate, maxVariable));
+	}
+
+	/**
+	 * Get the {@link Document} representation of the {@link Collation}.
+	 *
+	 * @return the native MongoDB {@link Document} representation of the {@link Collation}.
+	 */
+	public Document toDocument() {
+		return map(toMongoDocumentConverter());
+	}
+
+	/**
+	 * Get the {@link com.mongodb.client.model.Collation} representation of the {@link Collation}.
+	 *
+	 * @return the native MongoDB representation of the {@link Collation}.
+	 */
+	public com.mongodb.client.model.Collation toMongoCollation() {
+		return map(toMongoCollationConverter());
+	}
+
+	/**
+	 * Transform {@code this} {@link Collation} by applying a {@link Converter}.
+	 *
+	 * @param mapper must not be {@literal null}.
+	 * @param <R> the type the {@link Converter} maps to.
+	 * @return the converted result.
+	 */
+	public <R> R map(Converter<? super Collation, ? extends R> mapper) {
+		return mapper.convert(this);
+	}
+
+	@Override
+	public String toString() {
+		return toDocument().toJson();
+	}
+
+	@Override
+	public boolean equals(@Nullable Object o) {
+
+		if (this == o) {
+			return true;
+		}
+
+		if (o == null || getClass() != o.getClass()) {
+			return false;
+		}
+
+		Collation that = (Collation) o;
+		return this.toDocument().equals(that.toDocument());
+	}
+
+	@Override
+	public int hashCode() {
+		return toDocument().hashCode();
+	}
+
+	private Collation copy() {
+
+		Collation collation = new Collation(locale);
+		collation.strength = this.strength;
+		collation.normalization = this.normalization;
+		collation.numericOrdering = this.numericOrdering;
+		collation.alternate = this.alternate;
+		collation.backwards = this.backwards;
+		return collation;
+	}
+
+	/**
+	 * Abstraction for the ICU Comparison Levels.
+	 *
+	 * @since 2.0
+	 */
+	public interface ComparisonLevel {
+
+		/**
+		 * Primary level of comparison. Collation performs comparisons of the base characters only, ignoring other
+		 * differences such as diacritics and case.
+		 * The {@code caseLevel} can be set via {@link PrimaryICUComparisonLevel#includeCase()} and
+		 * {@link PrimaryICUComparisonLevel#excludeCase()}.
+		 *
+		 * @return new {@link PrimaryICUComparisonLevel}.
+		 */
+		static PrimaryICUComparisonLevel primary() {
+			return PrimaryICUComparisonLevel.DEFAULT;
+		}
+
+		/**
+		 * Secondary level of comparison. Collation performs comparisons up to secondary differences, such as
+		 * diacritics.
                    + * The {@code caseLevel} can be set via {@link SecondaryICUComparisonLevel#includeCase()} and + * {@link SecondaryICUComparisonLevel#excludeCase()}. + * + * @return new {@link SecondaryICUComparisonLevel}. + */ + static SecondaryICUComparisonLevel secondary() { + return SecondaryICUComparisonLevel.DEFAULT; + } + + /** + * Tertiary level of comparison. Collation performs comparisons up to tertiary differences, such as case and letter + * variants.
+		 * The {@code caseLevel} cannot be set for an {@link ICUComparisonLevel} above {@code secondary}.
+		 *
+		 * @return new {@link TertiaryICUComparisonLevel}.
+		 */
+		static TertiaryICUComparisonLevel tertiary() {
+			return TertiaryICUComparisonLevel.DEFAULT;
+		}
+
+		/**
+		 * Quaternary level, limited to the specific use case of considering punctuation.
+		 * The {@code caseLevel} cannot be set for an {@link ICUComparisonLevel} above {@code secondary}.
+		 *
+		 * @return new {@link ComparisonLevel}.
+		 */
+		static ComparisonLevel quaternary() {
+			return ComparisonLevels.QUATERNARY;
+		}
+
+		/**
+		 * Identical level, limited to the specific use case of tie breaking.
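A sketch of picking a comparison level via the static factories above (locale illustrative):

	// Primary strength ignores case and diacritics; caseLevel is only settable on primary/secondary.
	Collation relaxed = Collation.of("en").strength(ComparisonLevel.primary().includeCase());
	Collation strict = Collation.of("en").strength(ComparisonLevel.identical());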
+		 * The {@code caseLevel} cannot be set for an {@link ICUComparisonLevel} above {@code secondary}.
+		 *
+		 * @return new {@link ComparisonLevel}.
+		 */
+		static ComparisonLevel identical() {
+			return ComparisonLevels.IDENTICAL;
+		}
+
+		/**
+		 * @return collation strength, {@literal 1} for primary, {@literal 2} for secondary and so on.
+		 */
+		int getLevel();
+
+		default Optional<CaseFirst> getCaseFirst() {
+			return Optional.empty();
+		}
+
+		default Optional<Boolean> getCaseLevel() {
+			return Optional.empty();
+		}
+	}
+
+	/**
+	 * Default implementation of the ICU comparison levels.
+	 *
+	 * @since 2.0
+	 */
+	static class ICUComparisonLevel implements ComparisonLevel {
+
+		private final int level;
+		private final Optional<CaseFirst> caseFirst;
+		private final Optional<Boolean> caseLevel;
+
+		ICUComparisonLevel(int level) {
+			this(level, Optional.empty(), Optional.empty());
+		}
+
+		ICUComparisonLevel(int level, Optional<CaseFirst> caseFirst, Optional<Boolean> caseLevel) {
+			this.level = level;
+			this.caseFirst = caseFirst;
+			this.caseLevel = caseLevel;
+		}
+
+		public int getLevel() {
+			return this.level;
+		}
+
+		public Optional<CaseFirst> getCaseFirst() {
+			return this.caseFirst;
+		}
+
+		public Optional<Boolean> getCaseLevel() {
+			return this.caseLevel;
+		}
+	}
+
+	/**
+	 * Simple comparison levels.
+	 */
+	enum ComparisonLevels implements ComparisonLevel {
+
+		QUATERNARY(4), IDENTICAL(5);
+
+		private final int level;
+
+		ComparisonLevels(int level) {
+			this.level = level;
+		}
+
+		@Override
+		public int getLevel() {
+			return level;
+		}
+	}
+
+	/**
+	 * Primary-strength {@link ICUComparisonLevel}.
+	 */
+	public static class PrimaryICUComparisonLevel extends ICUComparisonLevel {
+
+		static final PrimaryICUComparisonLevel DEFAULT = new PrimaryICUComparisonLevel();
+		static final PrimaryICUComparisonLevel WITH_CASE_LEVEL = new PrimaryICUComparisonLevel(true);
+		static final PrimaryICUComparisonLevel WITHOUT_CASE_LEVEL = new PrimaryICUComparisonLevel(false);
+
+		private PrimaryICUComparisonLevel() {
+			super(1);
+		}
+
+		private PrimaryICUComparisonLevel(boolean caseLevel) {
+			super(1, Optional.empty(), Optional.of(caseLevel));
+		}
+
+		/**
+		 * Include case comparison.
+		 *
+		 * @return new {@link ICUComparisonLevel}
+		 */
+		public ComparisonLevel includeCase() {
+			return WITH_CASE_LEVEL;
+		}
+
+		/**
+		 * Exclude case comparison.
+		 *
+		 * @return new {@link ICUComparisonLevel}
+		 */
+		public ComparisonLevel excludeCase() {
+			return WITHOUT_CASE_LEVEL;
+		}
+	}
+
+	/**
+	 * Secondary-strength {@link ICUComparisonLevel}.
+	 */
+	public static class SecondaryICUComparisonLevel extends ICUComparisonLevel {
+
+		static final SecondaryICUComparisonLevel DEFAULT = new SecondaryICUComparisonLevel();
+		static final SecondaryICUComparisonLevel WITH_CASE_LEVEL = new SecondaryICUComparisonLevel(true);
+		static final SecondaryICUComparisonLevel WITHOUT_CASE_LEVEL = new SecondaryICUComparisonLevel(false);
+
+		private SecondaryICUComparisonLevel() {
+			super(2);
+		}
+
+		private SecondaryICUComparisonLevel(boolean caseLevel) {
+			super(2, Optional.empty(), Optional.of(caseLevel));
+		}
+
+		/**
+		 * Include case comparison.
+		 *
+		 * @return new {@link SecondaryICUComparisonLevel}
+		 */
+		public ComparisonLevel includeCase() {
+			return WITH_CASE_LEVEL;
+		}
+
+		/**
+		 * Exclude case comparison.
+		 *
+		 * @return new {@link SecondaryICUComparisonLevel}
+		 */
+		public ComparisonLevel excludeCase() {
+			return WITHOUT_CASE_LEVEL;
+		}
+	}
+
+	/**
+	 * Tertiary-strength {@link ICUComparisonLevel}.
+ */ + public static class TertiaryICUComparisonLevel extends ICUComparisonLevel { + + static final TertiaryICUComparisonLevel DEFAULT = new TertiaryICUComparisonLevel(); + + private TertiaryICUComparisonLevel() { + super(3); + } + + private TertiaryICUComparisonLevel(CaseFirst caseFirst) { + super(3, Optional.of(caseFirst), Optional.empty()); + } + + /** + * Set the flag that determines sort order of case differences. + * + * @param caseFirst must not be {@literal null}. + * @return new {@link ICUComparisonLevel} + */ + public ComparisonLevel caseFirst(CaseFirst caseFirst) { + + Assert.notNull(caseFirst, "CaseFirst must not be null"); + return new TertiaryICUComparisonLevel(caseFirst); + } + } + + /** + * @since 2.0 + */ + public static class CaseFirst { + + private static final CaseFirst UPPER = new CaseFirst("upper"); + private static final CaseFirst LOWER = new CaseFirst("lower"); + private static final CaseFirst OFF = new CaseFirst("off"); + + private final String state; + + private CaseFirst(String state) { + this.state = state; + } + + /** + * Sort uppercase before lowercase. + * + * @return new {@link CaseFirst}. + */ + public static CaseFirst upper() { + return UPPER; + } + + /** + * Sort lowercase before uppercase. + * + * @return new {@link CaseFirst}. + */ + public static CaseFirst lower() { + return LOWER; + } + + /** + * Use the default. + * + * @return new {@link CaseFirst}. + */ + public static CaseFirst off() { + return OFF; + } + } + + /** + * @since 2.0 + */ + public static class Alternate { + + private static final Alternate NON_IGNORABLE = new Alternate("non-ignorable", Optional.empty()); + + final String alternate; + final Optional maxVariable; + + Alternate(String alternate, Optional maxVariable) { + this.alternate = alternate; + this.maxVariable = maxVariable; + } + + /** + * Consider Whitespace and punctuation as base characters. + * + * @return new {@link Alternate}. + */ + public static Alternate nonIgnorable() { + return NON_IGNORABLE; + } + + /** + * Whitespace and punctuation are not considered base characters and are only distinguished at + * strength.
                    + * NOTE: Only works for {@link ICUComparisonLevel} above {@link ComparisonLevel#tertiary()}. + * + * @return new {@link AlternateWithMaxVariable}. + */ + public static AlternateWithMaxVariable shifted() { + return AlternateWithMaxVariable.DEFAULT; + } + } + + /** + * @since 2.0 + */ + public static class AlternateWithMaxVariable extends Alternate { + + static final AlternateWithMaxVariable DEFAULT = new AlternateWithMaxVariable("shifted"); + static final Alternate SHIFTED_PUNCT = new AlternateWithMaxVariable("shifted", "punct"); + static final Alternate SHIFTED_SPACE = new AlternateWithMaxVariable("shifted", "space"); + + private AlternateWithMaxVariable(String alternate) { + super(alternate, Optional.empty()); + } + + private AlternateWithMaxVariable(String alternate, String maxVariable) { + super(alternate, Optional.of(maxVariable)); + } + + /** + * Consider both whitespaces and punctuation as ignorable. + * + * @return new {@link AlternateWithMaxVariable}. + */ + public Alternate punct() { + return SHIFTED_PUNCT; + } + + /** + * Only consider whitespaces as ignorable. + * + * @return new {@link AlternateWithMaxVariable}. + */ + public Alternate space() { + return SHIFTED_SPACE; + } + } + + /** + * ICU locale abstraction for usage with MongoDB {@link Collation}. + * + * @see ICU - International Components for Unicode + * @since 2.0 + */ + public static class CollationLocale { + + private final String language; + private final Optional variant; + + private CollationLocale(String language, Optional variant) { + + this.language = language; + this.variant = variant; + } + + /** + * Create new {@link CollationLocale} for given language. + * + * @param language must not be {@literal null}. + * @return new instance of {@link CollationLocale}. + */ + public static CollationLocale of(String language) { + + Assert.notNull(language, "Code must not be null"); + return new CollationLocale(language, Optional.empty()); + } + + /** + * Define language variant. + * + * @param variant must not be {@literal null}. + * @return new {@link CollationLocale}. + */ + public CollationLocale variant(String variant) { + + Assert.notNull(variant, "Variant must not be null"); + return new CollationLocale(language, Optional.of(variant)); + } + + /** + * Get the string representation. + * + * @return the collation {@link String} in Mongo ICU format. 
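Pulling the nested types together, a sketch of case ordering plus the shifted alternate (locale illustrative):

	// Uppercase sorts first at tertiary strength; punctuation is treated as ignorable.
	Collation collation = Collation.of("en")
			.strength(ComparisonLevel.tertiary().caseFirst(CaseFirst.upper()))
			.alternate(Alternate.shifted().punct());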
+ */ + public String asString() { + + StringBuilder sb = new StringBuilder(language); + + variant.filter(it -> !it.isEmpty()).ifPresent(val -> { + + // Mongo requires variant rendered as ICU keyword (@key=value;key=value…) + sb.append("@collation=").append(val); + }); + + return sb.toString(); + } + } + + private static Converter toMongoDocumentConverter() { + + return source -> { + + Document document = new Document(); + document.append("locale", source.locale.asString()); + + source.strength.ifPresent(strength -> { + + document.append("strength", strength.getLevel()); + + strength.getCaseLevel().ifPresent(it -> document.append("caseLevel", it)); + strength.getCaseFirst().ifPresent(it -> document.append("caseFirst", it.state)); + }); + + source.numericOrdering.ifPresent(val -> document.append("numericOrdering", val)); + source.alternate.ifPresent(it -> { + + document.append("alternate", it.alternate); + it.maxVariable.ifPresent(maxVariable -> document.append("maxVariable", maxVariable)); + }); + + source.backwards.ifPresent(it -> document.append("backwards", it)); + source.normalization.ifPresent(it -> document.append("normalization", it)); + source.version.ifPresent(it -> document.append("version", it)); + + return document; + }; + } + + private static Converter toMongoCollationConverter() { + + return source -> { + + Builder builder = com.mongodb.client.model.Collation.builder(); + + builder.locale(source.locale.asString()); + + source.strength.ifPresent(strength -> { + + builder.collationStrength(CollationStrength.fromInt(strength.getLevel())); + + strength.getCaseLevel().ifPresent(builder::caseLevel); + strength.getCaseFirst().ifPresent(it -> builder.collationCaseFirst(CollationCaseFirst.fromString(it.state))); + }); + + source.numericOrdering.ifPresent(builder::numericOrdering); + source.alternate.ifPresent(it -> { + + builder.collationAlternate(CollationAlternate.fromString(it.alternate)); + it.maxVariable + .ifPresent(maxVariable -> builder.collationMaxVariable(CollationMaxVariable.fromString(maxVariable))); + }); + + source.backwards.ifPresent(builder::backwards); + source.normalization.ifPresent(builder::normalization); + + return builder.build(); + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index 0836cdae9d..8d4cb703bb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,37 +19,53 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collection; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; +import java.util.stream.Collectors; -import org.bson.BSON; +import org.bson.BsonRegularExpression; +import org.bson.BsonType; +import org.bson.Document; +import org.bson.types.Binary; import org.springframework.data.domain.Example; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Point; import org.springframework.data.geo.Shape; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.MongoExpression; import org.springframework.data.mongodb.core.geo.GeoJson; import org.springframework.data.mongodb.core.geo.Sphere; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.util.RegexFlags; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; /** * Central class for creating queries. It follows a fluent API style so that you can easily chain together multiple - * criteria. Static import of the 'Criteria.where' method will improve readability. - * + * criteria. Static import of the {@link Criteria#where Criteria.where} method improves readability. + * * @author Thomas Risberg * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author Andreas Zink + * @author Ziemowit Stolarczyk + * @author Clément Petit + * @author James McNee */ public class Criteria implements CriteriaDefinition { @@ -58,10 +74,10 @@ public class Criteria implements CriteriaDefinition { */ private static final Object NOT_SET = new Object(); - private String key; + private @Nullable String key; private List criteriaChain; private LinkedHashMap criteria = new LinkedHashMap(); - private Object isValue = NOT_SET; + private @Nullable Object isValue = NOT_SET; public Criteria() { this.criteriaChain = new ArrayList(); @@ -81,9 +97,9 @@ protected Criteria(List criteriaChain, String key) { /** * Static factory method to create a Criteria using the provided key - * - * @param key - * @return + * + * @param key the property or field name. + * @return new instance of {@link Criteria}. */ public static Criteria where(String key) { return new Criteria(key); @@ -91,9 +107,9 @@ public static Criteria where(String key) { /** * Static factory method to create a {@link Criteria} matching an example object. - * + * * @param example must not be {@literal null}. - * @return + * @return new instance of {@link Criteria}. 
* @see Criteria#alike(Example) * @since 1.8 */ @@ -102,10 +118,15 @@ public static Criteria byExample(Object example) { } /** - * Static factory method to create a {@link Criteria} matching an example object. - * + * Static factory method to create a {@link Criteria} matching an example object.
+	 * By default, the {@link Example} uses typed matching, restricting the query to types assignable to the probe's
+	 * type. For example, when sticking with the default type key ({@code _class}), the query has restrictions such as
+	 * {@code _class : { $in : [com.acme.Person] } }.
+	 * To avoid the above-mentioned type restriction use an {@link UntypedExampleMatcher} with
+	 * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
+	 *
 	 * @param example must not be {@literal null}.
-	 * @return
+	 * @return new instance of {@link Criteria}.
 	 * @see Criteria#alike(Example)
 	 * @since 1.8
 	 */
@@ -113,10 +134,55 @@ public static Criteria byExample(Example example) {
 		return new Criteria().alike(example);
 	}
 
+	/**
+	 * Static factory method to create a {@link Criteria} matching documents against a given structure defined by the
+	 * {@link MongoJsonSchema} using the {@code $jsonSchema} operator.
+	 *
+	 * @param schema must not be {@literal null}.
+	 * @return this
+	 * @since 2.1
+	 * @see MongoDB Query operator: $jsonSchema
+	 */
+	public static Criteria matchingDocumentStructure(MongoJsonSchema schema) {
+		return new Criteria().andDocumentStructureMatches(schema);
+	}
+
+	/**
+	 * Static factory method to create a {@link Criteria} matching documents against the given {@link MongoExpression
+	 * expression}.
+	 *

+	 * The {@link MongoExpression expression} can be either something that directly renders to the store native
+	 * representation like
+	 *
+	 * <pre class="code">
+	 * expr(() -> Document.parse("{ $gt : [ '$spent', '$budget'] }"))
+	 * </pre>
+	 *
+	 * or an {@link org.springframework.data.mongodb.core.aggregation.AggregationExpression} which will be subject to
+	 * context (domain type) specific field mapping.
+	 *
+	 * <pre class="code">
+	 * expr(valueOf("amountSpent").greaterThan("budget"))
+	 * </pre>
                    + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Criteria}. + * @since 4.1 + */ + public static Criteria expr(MongoExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + Criteria criteria = new Criteria(); + criteria.criteria.put("$expr", expression); + return criteria; + } + /** * Static factory method to create a Criteria using the provided key - * - * @return + * + * @return new instance of {@link Criteria}. */ public Criteria and(String key) { return new Criteria(this.criteriaChain, key); @@ -124,22 +190,56 @@ public Criteria and(String key) { /** * Creates a criterion using equality - * - * @param o - * @return + * + * @param value can be {@literal null}. + * @return this. */ - public Criteria is(Object o) { + public Criteria is(@Nullable Object value) { - if (!isValue.equals(NOT_SET)) { + if (!NOT_SET.equals(isValue)) { throw new InvalidMongoDbApiUsageException( - "Multiple 'is' values declared. You need to use 'and' with multiple criteria"); + "Multiple 'is' values declared; You need to use 'and' with multiple criteria"); } if (lastOperatorWasNot()) { - throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead."); + throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead"); } - this.isValue = o; + this.isValue = value; + return this; + } + + /** + * Creates a criterion using {@literal null} equality comparison which matches documents that either contain the item + * field whose value is {@literal null} or that do not contain the item field.
                    + * Use {@link #isNullValue()} to only query for documents that contain the field whose value is equal to + * {@link org.bson.BsonType#NULL}.
                    + * Use {@link #exists(boolean)} to query for documents that do (not) contain the field. + * + * @return this. + * @see Query for Null or + * Missing Fields: Equality Filter + * @since 3.3 + */ + public Criteria isNull() { + return is(null); + } + + /** + * Creates a criterion using a {@link org.bson.BsonType} comparison which matches only documents that contain the item + * field whose value is equal to {@link org.bson.BsonType#NULL}.
                    + * Use {@link #isNull()} to query for documents that contain the field with a {@literal null} value or do not contain + * the field at all.
                    + * Use {@link #exists(boolean)} to query for documents that do (not) contain the field. + * + * @return this. + * @see Query for Null or Missing + * Fields: Type Check + * @since 3.3 + */ + public Criteria isNullValue() { + + criteria.put("$type", BsonType.NULL.getValue()); return this; } @@ -149,125 +249,125 @@ private boolean lastOperatorWasNot() { /** * Creates a criterion using the {@literal $ne} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/ne/ - * @param o - * @return + * + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Query operator: $ne */ - public Criteria ne(Object o) { - criteria.put("$ne", o); + public Criteria ne(@Nullable Object value) { + criteria.put("$ne", value); return this; } /** * Creates a criterion using the {@literal $lt} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/lt/ - * @param o - * @return + * + * @param value must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $lt */ - public Criteria lt(Object o) { - criteria.put("$lt", o); + public Criteria lt(Object value) { + criteria.put("$lt", value); return this; } /** * Creates a criterion using the {@literal $lte} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/lte/ - * @param o - * @return + * + * @param value must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $lte */ - public Criteria lte(Object o) { - criteria.put("$lte", o); + public Criteria lte(Object value) { + criteria.put("$lte", value); return this; } /** * Creates a criterion using the {@literal $gt} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/gt/ - * @param o - * @return + * + * @param value must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $gt */ - public Criteria gt(Object o) { - criteria.put("$gt", o); + public Criteria gt(Object value) { + criteria.put("$gt", value); return this; } /** * Creates a criterion using the {@literal $gte} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/gte/ - * @param o - * @return + * + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Query operator: $gte */ - public Criteria gte(Object o) { - criteria.put("$gte", o); + public Criteria gte(Object value) { + criteria.put("$gte", value); return this; } /** * Creates a criterion using the {@literal $in} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/in/ - * @param o the values to match against - * @return + * + * @param values the values to match against + * @return this. + * @see MongoDB Query operator: $in */ - public Criteria in(Object... o) { - if (o.length > 1 && o[1] instanceof Collection) { - throw new InvalidMongoDbApiUsageException("You can only pass in one argument of type " - + o[1].getClass().getName()); + public Criteria in(Object... values) { + if (values.length > 1 && values[1] instanceof Collection) { + throw new InvalidMongoDbApiUsageException( + "You can only pass in one argument of type " + values[1].getClass().getName()); } - criteria.put("$in", Arrays.asList(o)); + criteria.put("$in", Arrays.asList(values)); return this; } /** * Creates a criterion using the {@literal $in} operator. 
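A quick sketch of the three null-handling flavors introduced above (field name hypothetical):

	Criteria missingOrNull = Criteria.where("middleName").isNull();      // { middleName : null }
	Criteria explicitNull = Criteria.where("middleName").isNullValue();  // { middleName : { $type : 10 } }
	Criteria present = Criteria.where("middleName").exists(true);        // { middleName : { $exists : true } }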
- * - * @see http://docs.mongodb.org/manual/reference/operator/query/in/ - * @param c the collection containing the values to match against - * @return + * + * @param values the collection containing the values to match against + * @return this. + * @see MongoDB Query operator: $in */ - public Criteria in(Collection c) { - criteria.put("$in", c); + public Criteria in(Collection values) { + criteria.put("$in", values); return this; } /** * Creates a criterion using the {@literal $nin} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/nin/ - * @param o - * @return + * + * @param values + * @return this. + * @see MongoDB Query operator: $nin */ - public Criteria nin(Object... o) { - return nin(Arrays.asList(o)); + public Criteria nin(Object... values) { + return nin(Arrays.asList(values)); } /** * Creates a criterion using the {@literal $nin} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/nin/ - * @param o - * @return + * + * @param values must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $nin */ - public Criteria nin(Collection o) { - criteria.put("$nin", o); + public Criteria nin(Collection values) { + criteria.put("$nin", values); return this; } /** * Creates a criterion using the {@literal $mod} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/mod/ - * @param value - * @param remainder - * @return + * + * @param value must not be {@literal null}. + * @param remainder must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $mod */ public Criteria mod(Number value, Number remainder) { - List l = new ArrayList(); + List l = new ArrayList<>(2); l.add(value); l.add(remainder); criteria.put("$mod", l); @@ -276,68 +376,119 @@ public Criteria mod(Number value, Number remainder) { /** * Creates a criterion using the {@literal $all} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/all/ - * @param o - * @return + * + * @param values must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $all */ - public Criteria all(Object... o) { - return all(Arrays.asList(o)); + public Criteria all(Object... values) { + return all(Arrays.asList(values)); } /** * Creates a criterion using the {@literal $all} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/all/ - * @param o - * @return + * + * @param values must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $all */ - public Criteria all(Collection o) { - criteria.put("$all", o); + public Criteria all(Collection values) { + criteria.put("$all", values); return this; } /** * Creates a criterion using the {@literal $size} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/size/ - * @param s - * @return + * + * @param size + * @return this. + * @see MongoDB Query operator: $size */ - public Criteria size(int s) { - criteria.put("$size", s); + public Criteria size(int size) { + criteria.put("$size", size); return this; } /** * Creates a criterion using the {@literal $exists} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/exists/ - * @param b - * @return + * + * @param value + * @return this. + * @see MongoDB Query operator: $exists + */ + public Criteria exists(boolean value) { + criteria.put("$exists", value); + return this; + } + + /** + * Creates a criterion using the {@literal $sampleRate} operator. 
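A sketch chaining the comparison and collection operators above (field names hypothetical):

	// Renders to { age : { $gte : 18 }, status : { $in : [ "A", "B" ] }, roles : { $all : [ "user" ] } }
	Criteria c = Criteria.where("age").gte(18).and("status").in("A", "B").and("roles").all("user");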
+ * + * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. Must be + * between {@code 0} and {@code 1}. + * @return this. + * @see MongoDB Query operator: + * $sampleRate + * @since 3.3 */ - public Criteria exists(boolean b) { - criteria.put("$exists", b); + public Criteria sampleRate(double sampleRate) { + + Assert.isTrue(sampleRate >= 0, "The sample rate must be greater than zero"); + Assert.isTrue(sampleRate <= 1, "The sample rate must not be greater than one"); + + criteria.put("$sampleRate", sampleRate); return this; } /** * Creates a criterion using the {@literal $type} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/type/ - * @param t - * @return + * + * @param typeNumber + * @return this. + * @see MongoDB Query operator: $type + */ + public Criteria type(int typeNumber) { + criteria.put("$type", typeNumber); + return this; + } + + /** + * Creates a criterion using the {@literal $type} operator. + * + * @param types must not be {@literal null}. + * @return this. + * @since 2.1 + * @see MongoDB Query operator: $type */ - public Criteria type(int t) { - criteria.put("$type", t); + public Criteria type(Type... types) { + + Assert.notNull(types, "Types must not be null"); + Assert.noNullElements(types, "Types must not contain null"); + + return type(Arrays.asList(types)); + } + + /** + * Creates a criterion using the {@literal $type} operator. + * + * @param types must not be {@literal null}. + * @return this. + * @since 3.2 + * @see MongoDB Query operator: $type + */ + public Criteria type(Collection types) { + + Assert.notNull(types, "Types must not be null"); + + criteria.put("$type", types.stream().map(Type::toBsonType).map(Type::value).collect(Collectors.toList())); return this; } /** * Creates a criterion using the {@literal $not} meta operator which affects the clause directly following - * - * @see http://docs.mongodb.org/manual/reference/operator/query/not/ - * @return + * + * @return this. + * @see MongoDB Query operator: $not */ public Criteria not() { return not(null); @@ -345,49 +496,48 @@ public Criteria not() { /** * Creates a criterion using the {@literal $not} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/not/ - * @param value - * @return + * + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Query operator: $not */ - private Criteria not(Object value) { + private Criteria not(@Nullable Object value) { criteria.put("$not", value); return this; } /** * Creates a criterion using a {@literal $regex} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/regex/ - * @param re - * @return + * + * @param regex must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: $regex */ - public Criteria regex(String re) { - return regex(re, null); + public Criteria regex(String regex) { + return regex(regex, null); } /** * Creates a criterion using a {@literal $regex} and {@literal $options} operator. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/regex/ - * @see http://docs.mongodb.org/manual/reference/operator/query/regex/#op._S_options - * @param re - * @param options - * @return + * + * @param regex must not be {@literal null}. + * @param options can be {@literal null}. + * @return this. 
+ * @see MongoDB Query operator: $regex */ - public Criteria regex(String re, String options) { - return regex(toPattern(re, options)); + public Criteria regex(String regex, @Nullable String options) { + return regex(toPattern(regex, options)); } /** * Syntactical sugar for {@link #is(Object)} making obvious that we create a regex predicate. - * - * @param pattern - * @return + * + * @param pattern must not be {@literal null}. + * @return this. */ public Criteria regex(Pattern pattern) { - Assert.notNull(pattern); + Assert.notNull(pattern, "Pattern must not be null"); if (lastOperatorWasNot()) { return not(pattern); @@ -397,49 +547,75 @@ public Criteria regex(Pattern pattern) { return this; } - private Pattern toPattern(String regex, String options) { - Assert.notNull(regex); - return Pattern.compile(regex, options == null ? 0 : BSON.regexFlags(options)); + /** + * Use a MongoDB native {@link BsonRegularExpression}. + * + * @param regex must not be {@literal null}. + * @return this. + */ + public Criteria regex(BsonRegularExpression regex) { + + if (lastOperatorWasNot()) { + return not(regex); + } + + this.isValue = regex; + return this; + } + + private Pattern toPattern(String regex, @Nullable String options) { + + Assert.notNull(regex, "Regex string must not be null"); + + return Pattern.compile(regex, RegexFlags.toRegexFlags(options)); } /** * Creates a geospatial criterion using a {@literal $geoWithin $centerSphere} operation. This is only available for * Mongo 2.4 and higher. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/geoWithin/ - * @see http://docs.mongodb.org/manual/reference/operator/query/centerSphere/ + * * @param circle must not be {@literal null} - * @return + * @return this. + * @see MongoDB Query operator: + * $geoWithin + * @see MongoDB Query operator: + * $centerSphere */ public Criteria withinSphere(Circle circle) { - Assert.notNull(circle); + + Assert.notNull(circle, "Circle must not be null"); + criteria.put("$geoWithin", new GeoCommand(new Sphere(circle))); return this; } /** * Creates a geospatial criterion using a {@literal $geoWithin} operation. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/geoWithin/ - * @param shape - * @return + * + * @param shape must not be {@literal null}. + * @return this. + * @see MongoDB Query operator: + * $geoWithin */ public Criteria within(Shape shape) { - Assert.notNull(shape); + Assert.notNull(shape, "Shape must not be null"); + criteria.put("$geoWithin", new GeoCommand(shape)); return this; } /** * Creates a geospatial criterion using a {@literal $near} operation. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/near/ + * * @param point must not be {@literal null} - * @return + * @return this. + * @see MongoDB Query operator: $near */ public Criteria near(Point point) { - Assert.notNull(point); + + Assert.notNull(point, "Point must not be null"); + criteria.put("$near", point); return this; } @@ -447,13 +623,16 @@ public Criteria near(Point point) { /** * Creates a geospatial criterion using a {@literal $nearSphere} operation. This is only available for Mongo 1.7 and * higher. - * - * @see http://docs.mongodb.org/manual/reference/operator/query/nearSphere/ + * * @param point must not be {@literal null} - * @return + * @return this. 
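A sketch of the regex variants above (pattern hypothetical):

	// Case-insensitive prefix match; renders to { name : { $regex : "^spring", $options : "i" } }
	Criteria byOptions = Criteria.where("name").regex("^spring", "i");
	Criteria byPattern = Criteria.where("name").regex(Pattern.compile("^spring", Pattern.CASE_INSENSITIVE));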
+ * @see MongoDB Query operator: + * $nearSphere */ public Criteria nearSphere(Point point) { - Assert.notNull(point); + + Assert.notNull(point, "Point must not be null"); + criteria.put("$nearSphere", point); return this; } @@ -461,25 +640,30 @@ public Criteria nearSphere(Point point) { /** * Creates criterion using {@code $geoIntersects} operator which matches intersections of the given {@code geoJson} * structure and the documents one. Requires MongoDB 2.4 or better. - * + * * @param geoJson must not be {@literal null}. - * @return + * @return this. * @since 1.8 */ @SuppressWarnings("rawtypes") public Criteria intersects(GeoJson geoJson) { - Assert.notNull(geoJson, "GeoJson must not be null!"); + Assert.notNull(geoJson, "GeoJson must not be null"); criteria.put("$geoIntersects", geoJson); return this; } /** - * Creates a geo-spatial criterion using a {@literal $maxDistance} operation, for use with $near - * - * @see http://docs.mongodb.org/manual/reference/operator/query/maxDistance/ - * @param maxDistance - * @return + * Creates a geo-spatial criterion using a {@literal $maxDistance} operation, for use with {@literal $near} or + * {@literal $nearSphere}. + *

+	 * NOTE: The unit of measure for distance may depend on the coordinate representation used (legacy
+	 * vs. GeoJSON) as well as the target operation.
+	 *
+	 * @param maxDistance radians or meters
+	 * @return this.
+	 * @see MongoDB Query operator: $maxDistance
+	 */
 	public Criteria maxDistance(double maxDistance) {
 
@@ -495,9 +679,12 @@
 	/**
 	 * Creates a geospatial criterion using a {@literal $minDistance} operation, for use with {@literal $near} or
 	 * {@literal $nearSphere}.
-	 *
-	 * @param minDistance
-	 * @return
+	 *
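A sketch of the $nearSphere/$maxDistance combination described above (coordinates hypothetical); with a GeoJSON point the distance is interpreted in meters, with legacy coordinates in radians:

	Criteria geo = Criteria.where("location")
			.nearSphere(new GeoJsonPoint(8.68, 50.11))
			.maxDistance(500); // meters, because the point is GeoJSON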

+	 * NOTE: The unit of measure for distance may depend on the coordinate representation used (legacy
+	 * vs. GeoJSON) as well as the target operation.
+	 *
+	 * @param minDistance radians or meters
+	 * @return this.
 	 * @since 1.7
 	 */
 	public Criteria minDistance(double minDistance) {
 
@@ -513,68 +700,171 @@
 	/**
 	 * Creates a criterion using the {@literal $elemMatch} operator
-	 *
-	 * @see http://docs.mongodb.org/manual/reference/operator/query/elemMatch/
-	 * @param c
-	 * @return
+	 *
+	 * @param criteria must not be {@literal null}.
+	 * @return this.
+	 * @see MongoDB Query operator: $elemMatch
 	 */
-	public Criteria elemMatch(Criteria c) {
-		criteria.put("$elemMatch", c.getCriteriaObject());
+	public Criteria elemMatch(Criteria criteria) {
+		this.criteria.put("$elemMatch", criteria.getCriteriaObject());
 		return this;
 	}
 
 	/**
 	 * Creates a criterion using the given object as a pattern.
-	 *
-	 * @param sample
-	 * @return
+	 *
+	 * @param sample must not be {@literal null}.
+	 * @return this.
 	 * @since 1.8
 	 */
 	public Criteria alike(Example sample) {
 
-		criteria.put("$sample", sample);
-		this.criteriaChain.add(this);
-		return this;
+		if (StringUtils.hasText(this.getKey())) {
+
+			criteria.put("$example", sample);
+			return this;
+		}
+
+		Criteria exampleCriteria = new Criteria();
+		exampleCriteria.criteria.put("$example", sample);
+		return registerCriteriaChainElement(exampleCriteria);
 	}
 
 	/**
-	 * Creates an 'or' criteria using the $or operator for all of the provided criteria
-	 *
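Before the logical operators, a sketch of $elemMatch as reworked above (field names hypothetical):

	// Matches documents with at least one grades entry scoring in [80, 90).
	Criteria c = Criteria.where("grades").elemMatch(Criteria.where("score").gte(80).lt(90));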

                    - * Note that mongodb doesn't support an $or operator to be wrapped in a $not operator. + * Creates a criterion ({@code $jsonSchema}) matching documents against a given structure defined by the + * {@link MongoJsonSchema}.
                    + * NOTE: {@code $jsonSchema} cannot be used on field/property level but defines the whole document + * structure. Please use + * {@link org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder#properties(JsonSchemaProperty...)} + * to specify nested fields or query them using the {@link #type(Type...) $type} operator. + * + * @param schema must not be {@literal null}. + * @return this + * @since 2.1 + * @see MongoDB Query operator: + * $jsonSchema + */ + public Criteria andDocumentStructureMatches(MongoJsonSchema schema) { + + Assert.notNull(schema, "Schema must not be null"); + + Criteria schemaCriteria = new Criteria(); + schemaCriteria.criteria.putAll(schema.toDocument()); + + return registerCriteriaChainElement(schemaCriteria); + } + + /** + * Use {@link BitwiseCriteriaOperators} as gateway to create a criterion using one of the + * bitwise operators like + * {@code $bitsAllClear}. + * + * @return new instance of {@link BitwiseCriteriaOperators}. Never {@literal null}. + * @since 2.1 + */ + public BitwiseCriteriaOperators bits() { + return new BitwiseCriteriaOperatorsImpl(this); + } + + /** + * Creates a criteria using the {@code $or} operator for all of the provided criteria. *

-	 *
-	 * @throws IllegalArgumentException if {@link #orOperator(Criteria...)} follows a not() call directly.
-	 * @param criteria
+	 * Note that MongoDB doesn't support an {@code $or} operator to be wrapped in a {@code $not} operator.
+	 *
+	 * @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
+	 * @param criteria must not be {@literal null}.
+	 * @return this.
 	 */
 	public Criteria orOperator(Criteria... criteria) {
+
+		Assert.notNull(criteria, "Criteria must not be null");
+
+		return orOperator(Arrays.asList(criteria));
+	}
+
+	/**
+	 * Creates a criteria using the {@code $or} operator for all of the provided criteria.
+	 *

+	 * Note that MongoDB doesn't support an {@code $or} operator to be wrapped in a {@code $not} operator.
+	 *
+	 * @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
+	 * @param criteria must not be {@literal null}.
+	 * @return this.
+	 * @since 3.2
+	 */
+	public Criteria orOperator(Collection<Criteria> criteria) {
+
+		Assert.notNull(criteria, "Criteria must not be null");
+
+		BasicDBList bsonList = createCriteriaList(criteria);
 		return registerCriteriaChainElement(new Criteria("$or").is(bsonList));
 	}
 
 	/**
-	 * Creates a 'nor' criteria using the $nor operator for all of the provided criteria.
+	 * Creates a criteria using the {@code $nor} operator for all of the provided criteria.
 	 *
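A sketch of the logical operators above (field names hypothetical):

	// Renders to { $or : [ { status : "A" }, { qty : { $lt : 30 } } ] }
	Criteria c = new Criteria().orOperator(
			Criteria.where("status").is("A"),
			Criteria.where("qty").lt(30));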

                    - * Note that mongodb doesn't support an $nor operator to be wrapped in a $not operator. - *

                    - * - * @throws IllegalArgumentException if {@link #norOperator(Criteria...)} follows a not() call directly. - * @param criteria + * Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator. + * + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. */ public Criteria norOperator(Criteria... criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return norOperator(Arrays.asList(criteria)); + } + + /** + * Creates a criteria using the {@code $nor} operator for all of the provided criteria. + *

+	 * Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator.
+	 *
+	 * @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
+	 * @param criteria must not be {@literal null}.
+	 * @return this.
+	 * @since 3.2
+	 */
+	public Criteria norOperator(Collection<Criteria> criteria) {
+
+		Assert.notNull(criteria, "Criteria must not be null");
+
+		BasicDBList bsonList = createCriteriaList(criteria);
 		return registerCriteriaChainElement(new Criteria("$nor").is(bsonList));
 	}
 
 	/**
-	 * Creates an 'and' criteria using the $and operator for all of the provided criteria.
-	 *

                    - * Note that mongodb doesn't support an $and operator to be wrapped in a $not operator. + * Creates a criteria using the {@code $and} operator for all of the provided criteria. *

                    - * - * @throws IllegalArgumentException if {@link #andOperator(Criteria...)} follows a not() call directly. - * @param criteria + * Note that MongoDB doesn't support an {@code $and} operator to be wrapped in a {@code $not} operator. + * + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. */ public Criteria andOperator(Criteria... criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return andOperator(Arrays.asList(criteria)); + } + + /** + * Creates a criteria using the {@code $and} operator for all of the provided criteria. + *

                    + * Note that MongoDB doesn't support an {@code $and} operator to be wrapped in a {@code $not} operator. + * + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. + * @since 3.2 + */ + public Criteria andOperator(Collection criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + BasicDBList bsonList = createCriteriaList(criteria); return registerCriteriaChainElement(new Criteria("$and").is(bsonList)); } @@ -582,43 +872,44 @@ public Criteria andOperator(Criteria... criteria) { private Criteria registerCriteriaChainElement(Criteria criteria) { if (lastOperatorWasNot()) { - throw new IllegalArgumentException("operator $not is not allowed around criteria chain element: " - + criteria.getCriteriaObject()); + throw new IllegalArgumentException( + "operator $not is not allowed around criteria chain element: " + criteria.getCriteriaObject()); } else { criteriaChain.add(criteria); } return this; } + /* + * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getKey() + */ + @Override + @Nullable public String getKey() { return this.key; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject() - */ - public DBObject getCriteriaObject() { + public Document getCriteriaObject() { if (this.criteriaChain.size() == 1) { return criteriaChain.get(0).getSingleCriteriaObject(); } else if (CollectionUtils.isEmpty(this.criteriaChain) && !CollectionUtils.isEmpty(this.criteria)) { return getSingleCriteriaObject(); } else { - DBObject criteriaObject = new BasicDBObject(); + Document criteriaObject = new Document(); for (Criteria c : this.criteriaChain) { - DBObject dbo = c.getSingleCriteriaObject(); - for (String k : dbo.keySet()) { - setValue(criteriaObject, k, dbo.get(k)); + Document document = c.getSingleCriteriaObject(); + for (String k : document.keySet()) { + setValue(criteriaObject, k, document.get(k)); } } return criteriaObject; } } - protected DBObject getSingleCriteriaObject() { + protected Document getSingleCriteriaObject() { - DBObject dbo = new BasicDBObject(); + Document document = new Document(); boolean not = false; for (Entry entry : criteria.entrySet()) { @@ -627,43 +918,51 @@ protected DBObject getSingleCriteriaObject() { Object value = entry.getValue(); if (requiresGeoJsonFormat(value)) { - value = new BasicDBObject("$geometry", value); + value = new Document("$geometry", value); } if (not) { - DBObject notDbo = new BasicDBObject(); - notDbo.put(key, value); - dbo.put("$not", notDbo); + Document notDocument = new Document(); + notDocument.put(key, value); + document.put("$not", notDocument); not = false; } else { if ("$not".equals(key) && value == null) { not = true; } else { - dbo.put(key, value); + document.put(key, value); } } } if (!StringUtils.hasText(this.key)) { if (not) { - return new BasicDBObject("$not", dbo); + return new Document("$not", document); } - return dbo; + return document; } - DBObject queryCriteria = new BasicDBObject(); + Document queryCriteria = new Document(); if (!NOT_SET.equals(isValue)) { - queryCriteria.put(this.key, this.isValue); - queryCriteria.putAll(dbo); + if (document.isEmpty()) { + queryCriteria.put(this.key, this.isValue); + } else { + if (isValue instanceof Pattern || isValue instanceof BsonRegularExpression) { + document.put("$regex", isValue); + } else { + document.put("$eq", isValue); + } + queryCriteria.put(this.key, document); + } } else 
{ - queryCriteria.put(this.key, dbo); + queryCriteria.put(this.key, document); } return queryCriteria; } - private BasicDBList createCriteriaList(Criteria[] criteria) { + private BasicDBList createCriteriaList(Collection criteria) { BasicDBList bsonList = new BasicDBList(); for (Criteria c : criteria) { bsonList.add(c.getCriteriaObject()); @@ -671,14 +970,16 @@ private BasicDBList createCriteriaList(Criteria[] criteria) { return bsonList; } - private void setValue(DBObject dbo, String key, Object value) { - Object existing = dbo.get(key); + private void setValue(Document document, String key, Object value) { + + Object existing = document.get(key); + if (existing == null) { - dbo.put(key, value); + document.put(key, value); } else { - throw new InvalidMongoDbApiUsageException("Due to limitations of the com.mongodb.BasicDBObject, " - + "you can't add a second '" + key + "' expression specified as '" + key + " : " + value + "'. " - + "Criteria already contains '" + key + " : " + existing + "'."); + throw new InvalidMongoDbApiUsageException("Due to limitations of the org.bson.Document, " + + "you can't add a second '" + key + "' expression specified as '" + key + " : " + value + "';" + + " Criteria already contains '" + key + " : " + existing + "'"); } } @@ -690,15 +991,15 @@ private boolean createNearCriteriaForCommand(String command, String operation, d Object existingNearOperationValue = criteria.get(command); - if (existingNearOperationValue instanceof DBObject) { + if (existingNearOperationValue instanceof Document document) { - ((DBObject) existingNearOperationValue).put(operation, maxDistance); + document.put(operation, maxDistance); return true; } else if (existingNearOperationValue instanceof GeoJson) { - BasicDBObject dbo = new BasicDBObject("$geometry", existingNearOperationValue).append(operation, maxDistance); + Document dbo = new Document("$geometry", existingNearOperationValue).append(operation, maxDistance); criteria.put(command, dbo); return true; @@ -707,12 +1008,8 @@ private boolean createNearCriteriaForCommand(String command, String operation, d return false; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -752,28 +1049,74 @@ private boolean simpleCriteriaEquals(Criteria left, Criteria right) { /** * Checks the given objects for equality. Handles {@link Pattern} and arrays correctly. - * + * * @param left * @param right * @return */ - private boolean isEqual(Object left, Object right) { + private boolean isEqual(@Nullable Object left, @Nullable Object right) { if (left == null) { return right == null; } - if (left instanceof Pattern) { - return right instanceof Pattern ? 
((Pattern) left).pattern().equals(((Pattern) right).pattern()) : false;
+		if (left instanceof Pattern leftPattern) {
+
+			if (!(right instanceof Pattern rightPattern)) {
+				return false;
+			}
+
+			return leftPattern.pattern().equals(rightPattern.pattern()) //
+					&& leftPattern.flags() == rightPattern.flags();
+		}
+
+		if (left instanceof Document leftDocument) {
+
+			if (!(right instanceof Document rightDocument)) {
+				return false;
+			}
+
+			Iterator<Map.Entry<String, Object>> leftIterator = leftDocument.entrySet().iterator();
+			Iterator<Map.Entry<String, Object>> rightIterator = rightDocument.entrySet().iterator();
+
+			while (leftIterator.hasNext() && rightIterator.hasNext()) {
+
+				Map.Entry<String, Object> leftEntry = leftIterator.next();
+				Map.Entry<String, Object> rightEntry = rightIterator.next();
+
+				if (!isEqual(leftEntry.getKey(), rightEntry.getKey())
+						|| !isEqual(leftEntry.getValue(), rightEntry.getValue())) {
+					return false;
+				}
+			}
+
+			return !leftIterator.hasNext() && !rightIterator.hasNext();
+		}
+
+		if (Collection.class.isAssignableFrom(left.getClass())) {
+
+			if (!Collection.class.isAssignableFrom(right.getClass())) {
+				return false;
+			}
+
+			Collection<?> leftCollection = (Collection<?>) left;
+			Collection<?> rightCollection = (Collection<?>) right;
+			Iterator<?> leftIterator = leftCollection.iterator();
+			Iterator<?> rightIterator = rightCollection.iterator();
+
+			while (leftIterator.hasNext() && rightIterator.hasNext()) {
+
+				if (!isEqual(leftIterator.next(), rightIterator.next())) {
+					return false;
+				}
+			}
+
+			return !leftIterator.hasNext() && !rightIterator.hasNext();
+		}
 
 		return ObjectUtils.nullSafeEquals(left, right);
 	}
 
-	/*
-	 * (non-Javadoc)
-	 * @see java.lang.Object#hashCode()
-	 */
 	@Override
 	public int hashCode() {
 
@@ -788,6 +1131,278 @@ public int hashCode() {
 
 	private static boolean requiresGeoJsonFormat(Object value) {
 		return value instanceof GeoJson
-				|| (value instanceof GeoCommand && ((GeoCommand) value).getShape() instanceof GeoJson);
+				|| (value instanceof GeoCommand geoCommand && geoCommand.getShape() instanceof GeoJson);
+	}
+
+	/**
+	 * MongoDB specific bitwise query operators like {@code $bitsAllClear, $bitsAllSet, ...} for usage with
+	 * {@link Criteria#bits()} and {@link Query}.
+	 *
+	 * @author Christoph Strobl
+	 * @since 2.1
+	 * @see https://docs.mongodb.com/manual/reference/operator/query-bitwise/
+	 * @currentRead Beyond the Shadows - Brent Weeks
+	 */
+	public interface BitwiseCriteriaOperators {
+
+		/**
+		 * Creates a criterion using {@literal $bitsAllClear} matching documents where all given bit positions are clear
+		 * (i.e. 0).
+		 *
+		 * @param numericBitmask non-negative numeric bitmask.
+		 * @return target {@link Criteria}.
+		 * @see MongoDB Query operator: $bitsAllClear
+		 * @since 2.1
+		 */
+		Criteria allClear(int numericBitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAllClear} matching documents where all given bit positions are clear
+		 * (i.e. 0).
+		 *
+		 * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary}
+		 *          representation. Must not be {@literal null} nor empty.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when bitmask is {@literal null} or empty.
+		 * @see MongoDB Query operator: $bitsAllClear
+		 * @since 2.1
+		 */
+		Criteria allClear(String bitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAllClear} matching documents where all given bit positions are clear
+		 * (i.e. 0).
+		 *
+		 * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit.
+		 *          Must not be {@literal null} nor contain {@literal null} elements.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements.
+		 * @see MongoDB Query operator:
+		 *      $bitsAllClear
+		 * @since 2.1
+		 */
+		Criteria allClear(List<Integer> positions);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAllSet} matching documents where all given bit positions are set (i.e.
+		 * 1).
+		 *
+		 * @param numericBitmask non-negative numeric bitmask.
+		 * @return target {@link Criteria}.
+		 * @see MongoDB Query operator:
+		 *      $bitsAllSet
+		 * @since 2.1
+		 */
+		Criteria allSet(int numericBitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAllSet} matching documents where all given bit positions are set (i.e.
+		 * 1).
+		 *
+		 * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary}
+		 *          representation. Must not be {@literal null} nor empty.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when bitmask is {@literal null} or empty.
+		 * @see MongoDB Query operator:
+		 *      $bitsAllSet
+		 * @since 2.1
+		 */
+		Criteria allSet(String bitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAllSet} matching documents where all given bit positions are set (i.e.
+		 * 1).
+		 *
+		 * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit.
+		 *          Must not be {@literal null} nor contain {@literal null} elements.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements.
+		 * @see MongoDB Query operator:
+		 *      $bitsAllSet
+		 * @since 2.1
+		 */
+		Criteria allSet(List<Integer> positions);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAnyClear} matching documents where any given bit positions are clear
+		 * (i.e. 0).
+		 *
+		 * @param numericBitmask non-negative numeric bitmask.
+		 * @return target {@link Criteria}.
+		 * @see MongoDB Query operator:
+		 *      $bitsAnyClear
+		 * @since 2.1
+		 */
+		Criteria anyClear(int numericBitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAnyClear} matching documents where any given bit positions are clear
+		 * (i.e. 0).
+		 *
+		 * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary}
+		 *          representation. Must not be {@literal null} nor empty.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when bitmask is {@literal null} or empty.
+		 * @see MongoDB Query operator:
+		 *      $bitsAnyClear
+		 * @since 2.1
+		 */
+		Criteria anyClear(String bitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAnyClear} matching documents where any given bit positions are clear
+		 * (i.e. 0).
+		 *
+		 * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit.
+		 *          Must not be {@literal null} nor contain {@literal null} elements.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements.
+		 * @see MongoDB Query operator:
+		 *      $bitsAnyClear
+		 * @since 2.1
+		 */
+		Criteria anyClear(List<Integer> positions);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAnySet} matching documents where any given bit positions are set (i.e.
+		 * 1).
+		 *
+		 * @param numericBitmask non-negative numeric bitmask.
+		 * @return target {@link Criteria}.
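To make the operators above concrete, here is a minimal usage sketch; the field name "flags", the sample bitmasks, and the rendered query documents in the comments are illustrative assumptions, not part of this change:

	import java.util.List;

	import org.springframework.data.mongodb.core.query.Criteria;
	import org.springframework.data.mongodb.core.query.Query;

	class BitwiseQuerySamples {

		// { "flags" : { "$bitsAllSet" : 50 } } - bits 1, 4 and 5 (0b110010 = 50) must all be set
		Query allSet() {
			return Query.query(Criteria.where("flags").bits().allSet(0b110010));
		}

		// { "flags" : { "$bitsAnyClear" : [ 0, 1 ] } } - bit 0 or bit 1 must be clear
		Query anyClear() {
			return Query.query(Criteria.where("flags").bits().anyClear(List.of(0, 1)));
		}
	}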
+		 * @see MongoDB Query operator:
+		 *      $bitsAnySet
+		 * @since 2.1
+		 */
+		Criteria anySet(int numericBitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAnySet} matching documents where any given bit positions are set (i.e.
+		 * 1).
+		 *
+		 * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary}
+		 *          representation. Must not be {@literal null} nor empty.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when bitmask is {@literal null} or empty.
+		 * @see MongoDB Query operator:
+		 *      $bitsAnySet
+		 * @since 2.1
+		 */
+		Criteria anySet(String bitmask);
+
+		/**
+		 * Creates a criterion using {@literal $bitsAnySet} matching documents where any given bit positions are set (i.e.
+		 * 1).
+		 *
+		 * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit.
+		 *          Must not be {@literal null} nor contain {@literal null} elements.
+		 * @return target {@link Criteria}.
+		 * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements.
+		 * @see MongoDB Query operator:
+		 *      $bitsAnySet
+		 * @since 2.1
+		 */
+		Criteria anySet(List<Integer> positions);
+
+	}
+
+	/**
+	 * Default implementation of {@link BitwiseCriteriaOperators}.
+	 *
+	 * @author Christoph Strobl
+	 * @currentRead Beyond the Shadows - Brent Weeks
+	 */
+	private static class BitwiseCriteriaOperatorsImpl implements BitwiseCriteriaOperators {
+
+		private final Criteria target;
+
+		BitwiseCriteriaOperatorsImpl(Criteria target) {
+			this.target = target;
+		}
+
+		@Override
+		public Criteria allClear(int numericBitmask) {
+			return numericBitmask("$bitsAllClear", numericBitmask);
+		}
+
+		@Override
+		public Criteria allClear(String bitmask) {
+			return stringBitmask("$bitsAllClear", bitmask);
+		}
+
+		@Override
+		public Criteria allClear(List<Integer> positions) {
+			return positions("$bitsAllClear", positions);
+		}
+
+		@Override
+		public Criteria allSet(int numericBitmask) {
+			return numericBitmask("$bitsAllSet", numericBitmask);
+		}
+
+		@Override
+		public Criteria allSet(String bitmask) {
+			return stringBitmask("$bitsAllSet", bitmask);
+		}
+
+		@Override
+		public Criteria allSet(List<Integer> positions) {
+			return positions("$bitsAllSet", positions);
+		}
+
+		@Override
+		public Criteria anyClear(int numericBitmask) {
+			return numericBitmask("$bitsAnyClear", numericBitmask);
+		}
+
+		@Override
+		public Criteria anyClear(String bitmask) {
+			return stringBitmask("$bitsAnyClear", bitmask);
+		}
+
+		@Override
+		public Criteria anyClear(List<Integer> positions) {
+			return positions("$bitsAnyClear", positions);
+		}
+
+		@Override
+		public Criteria anySet(int numericBitmask) {
+			return numericBitmask("$bitsAnySet", numericBitmask);
+		}
+
+		@Override
+		public Criteria anySet(String bitmask) {
+			return stringBitmask("$bitsAnySet", bitmask);
+		}
+
+		@Override
+		public Criteria anySet(List<Integer> positions) {
+			return positions("$bitsAnySet", positions);
+		}
+
+		private Criteria positions(String operator, List<Integer> positions) {
+
+			Assert.notNull(positions, "Positions must not be null");
+			Assert.noNullElements(positions.toArray(), "Positions must not contain null values");
+
+			target.criteria.put(operator, positions);
+			return target;
+		}
+
+		private Criteria stringBitmask(String operator, String bitmask) {
+
+			Assert.hasText(bitmask, "Bitmask must not be null");
+
+			target.criteria.put(operator, new Binary(Base64.getDecoder().decode(bitmask)));
+			return target;
+		}
+
+		private Criteria numericBitmask(String operator, int bitmask) {
+
target.criteria.put(operator, bitmask); + return target; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java index 76c3fae291..c00b1d4b82 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2014 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,8 @@ */ package org.springframework.data.mongodb.core.query; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.lang.Nullable; /** * @author Oliver Gierke @@ -24,18 +25,19 @@ public interface CriteriaDefinition { /** - * Get {@link DBObject} representation. - * - * @return + * Get {@link Document} representation. + * + * @return never {@literal null}. */ - DBObject getCriteriaObject(); + Document getCriteriaObject(); /** * Get the identifying {@literal key}. - * - * @return + * + * @return can be {@literal null}. * @since 1.6 */ + @Nullable String getKey(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java index 0676b52fa7..3540a5a836 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,143 +15,299 @@ */ package org.springframework.data.mongodb.core.query; +import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; +import org.bson.Document; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** + * Field projection. 
+ * * @author Thomas Risberg * @author Oliver Gierke * @author Patryk Wasik + * @author Christoph Strobl + * @author Mark Paluch + * @author Owen Q + * @author Kirill Egorov */ public class Field { - private final Map criteria = new HashMap(); - private final Map slices = new HashMap(); - private final Map elemMatchs = new HashMap(); - private String postionKey; + private final Map criteria = new HashMap<>(); + private final Map slices = new HashMap<>(); + private final Map elemMatches = new HashMap<>(); + private @Nullable String positionKey; private int positionValue; - public Field include(String key) { - criteria.put(key, Integer.valueOf(1)); + /** + * Include a single {@code field} to be returned by the query operation. + * + * @param field the document field name to be included. + * @return {@code this} field projection instance. + */ + public Field include(String field) { + + Assert.notNull(field, "Key must not be null"); + + criteria.put(field, 1); + + return this; + } + + /** + * Project a given {@link MongoExpression} to a {@link FieldProjectionExpression#as(String) field} included in the + * result. + * + *

                    +	 *
                    +	 * // { 'name' : { '$toUpper' : '$name' } }
                    +	 *
                    +	 * // native MongoDB expression
                    +	 * .project(MongoExpression.expressionFromString("'$toUpper' : '$name'")).as("name");
                    +	 *
                    +	 * // Aggregation Framework expression
                    +	 * .project(StringOperators.valueOf("name").toUpper()).as("name");
                    +	 *
                    +	 * // Aggregation Framework SpEL expression
                    +	 * .project(AggregationSpELExpression.expressionOf("toUpper(name)")).as("name");
                    +	 * 
                    + * + * @param expression must not be {@literal null}. + * @return new instance of {@link FieldProjectionExpression}. Define the target field name through + * {@link FieldProjectionExpression#as(String) as(String)}. + * @since 3.2 + */ + public FieldProjectionExpression project(MongoExpression expression) { + return field -> Field.this.projectAs(expression, field); + } + + /** + * Project a given {@link MongoExpression} to a {@link FieldProjectionExpression#as(String) field} included in the + * result. + * + *
                    +	 *
                    +	 * // { 'name' : { '$toUpper' : '$name' } }
                    +	 *
                    +	 * // native MongoDB expression
                    +	 * .projectAs(MongoExpression.expressionFromString("'$toUpper' : '$name'"), "name");
                    +	 *
                    +	 * // Aggregation Framework expression
                    +	 * .projectAs(StringOperators.valueOf("name").toUpper(), "name");
                    +	 *
                    +	 * // Aggregation Framework SpEL expression
                    +	 * .projectAs(AggregationSpELExpression.expressionOf("toUpper(name)"), "name");
                    +	 * 
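As a rough end-to-end sketch of the projection API above, combining inclusion, exclusion and an expression-backed field; the domain field names and the rendered documents in the comments are assumptions for illustration:

	import org.springframework.data.mongodb.core.aggregation.StringOperators;
	import org.springframework.data.mongodb.core.query.Criteria;
	import org.springframework.data.mongodb.core.query.Query;

	class ProjectionSamples {

		Query projectingQuery() {

			Query query = Query.query(Criteria.where("lastname").is("stark"));

			query.fields() //
					.include("firstname", "lastname") // { firstname : 1, lastname : 1 }
					.exclude("_id") // { _id : 0 }
					.project(StringOperators.valueOf("lastname").toUpper()).as("last_upper"); // { last_upper : { $toUpper : "$lastname" } }

			return query;
		}
	}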
                    + * + * @param expression must not be {@literal null}. + * @param field the field name used in the result. + * @return new instance of {@link FieldProjectionExpression}. + * @since 3.2 + */ + public Field projectAs(MongoExpression expression, String field) { + + criteria.put(field, expression); + return this; + } + + /** + * Include one or more {@code fields} to be returned by the query operation. + * + * @param fields the document field names to be included. + * @return {@code this} field projection instance. + * @since 3.1 + */ + public Field include(String... fields) { + return include(Arrays.asList(fields)); + } + + /** + * Include one or more {@code fields} to be returned by the query operation. + * + * @param fields the document field names to be included. + * @return {@code this} field projection instance. + * @since 4.4 + */ + public Field include(Collection fields) { + + Assert.notNull(fields, "Keys must not be null"); + + fields.forEach(this::include); + return this; + } + + /** + * Exclude a single {@code field} from being returned by the query operation. + * + * @param field the document field name to be excluded. + * @return {@code this} field projection instance. + */ + public Field exclude(String field) { + + Assert.notNull(field, "Key must not be null"); + + criteria.put(field, 0); + return this; } - public Field exclude(String key) { - criteria.put(key, Integer.valueOf(0)); + /** + * Exclude one or more {@code fields} from being returned by the query operation. + * + * @param fields the document field names to be excluded. + * @return {@code this} field projection instance. + * @since 3.1 + */ + public Field exclude(String... fields) { + return exclude(Arrays.asList(fields)); + } + + /** + * Exclude one or more {@code fields} from being returned by the query operation. + * + * @param fields the document field names to be excluded. + * @return {@code this} field projection instance. + * @since 4.4 + */ + public Field exclude(Collection fields) { + + Assert.notNull(fields, "Keys must not be null"); + + fields.forEach(this::exclude); return this; } - public Field slice(String key, int size) { - slices.put(key, Integer.valueOf(size)); + /** + * Project a {@code $slice} of the array {@code field} using the first {@code size} elements. + * + * @param field the document field name to project, must be an array field. + * @param size the number of elements to include. + * @return {@code this} field projection instance. + */ + public Field slice(String field, int size) { + + Assert.notNull(field, "Key must not be null"); + + slices.put(field, size); + return this; } - public Field slice(String key, int offset, int size) { - slices.put(key, new Integer[] { Integer.valueOf(offset), Integer.valueOf(size) }); + /** + * Project a {@code $slice} of the array {@code field} using the first {@code size} elements starting at + * {@code offset}. + * + * @param field the document field name to project, must be an array field. + * @param offset the offset to start at. + * @param size the number of elements to include. + * @return {@code this} field projection instance. 
+ */ + public Field slice(String field, int offset, int size) { + + slices.put(field, Arrays.asList(offset, size)); return this; } - public Field elemMatch(String key, Criteria elemMatchCriteria) { - elemMatchs.put(key, elemMatchCriteria); + public Field elemMatch(String field, Criteria elemMatchCriteria) { + + elemMatches.put(field, elemMatchCriteria); return this; } /** * The array field must appear in the query. Only one positional {@code $} operator can appear in the projection and * only one array field can appear in the query. - * + * * @param field query array field, must not be {@literal null} or empty. * @param value - * @return + * @return {@code this} field projection instance. */ public Field position(String field, int value) { - Assert.hasText(field, "DocumentField must not be null or empty!"); + Assert.hasText(field, "DocumentField must not be null or empty"); - postionKey = field; + positionKey = field; positionValue = value; return this; } - public DBObject getFieldsObject() { + public Document getFieldsObject() { - DBObject dbo = new BasicDBObject(criteria); + Document document = new Document(criteria); for (Entry entry : slices.entrySet()) { - dbo.put(entry.getKey(), new BasicDBObject("$slice", entry.getValue())); + document.put(entry.getKey(), new Document("$slice", entry.getValue())); } - for (Entry entry : elemMatchs.entrySet()) { - DBObject dbObject = new BasicDBObject("$elemMatch", entry.getValue().getCriteriaObject()); - dbo.put(entry.getKey(), dbObject); + for (Entry entry : elemMatches.entrySet()) { + document.put(entry.getKey(), new Document("$elemMatch", entry.getValue().getCriteriaObject())); } - if (postionKey != null) { - dbo.put(postionKey + ".$", positionValue); + if (positionKey != null) { + document.put(positionKey + ".$", positionValue); } - return dbo; + return document; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object object) { + public boolean equals(@Nullable Object o) { - if (this == object) { + if (this == o) return true; - } - - if (!(object instanceof Field)) { + if (o == null || getClass() != o.getClass()) return false; - } - Field that = (Field) object; + Field field = (Field) o; - if (!this.criteria.equals(that.criteria)) { + if (positionValue != field.positionValue) { return false; } - - if (!this.slices.equals(that.slices)) { + if (!ObjectUtils.nullSafeEquals(criteria, field.criteria)) { return false; } - - if (!this.elemMatchs.equals(that.elemMatchs)) { + if (!ObjectUtils.nullSafeEquals(slices, field.slices)) { return false; } - - boolean samePositionKey = this.postionKey == null ? 
that.postionKey == null - : this.postionKey.equals(that.postionKey); - boolean samePositionValue = this.positionValue == that.positionValue; - - return samePositionKey && samePositionValue; + if (!ObjectUtils.nullSafeEquals(elemMatches, field.elemMatches)) { + return false; + } + return ObjectUtils.nullSafeEquals(positionKey, field.positionKey); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { - int result = 17; - - result += 31 * ObjectUtils.nullSafeHashCode(this.criteria); - result += 31 * ObjectUtils.nullSafeHashCode(this.elemMatchs); - result += 31 * ObjectUtils.nullSafeHashCode(this.slices); - result += 31 * ObjectUtils.nullSafeHashCode(this.postionKey); - result += 31 * ObjectUtils.nullSafeHashCode(this.positionValue); - + int result = ObjectUtils.nullSafeHashCode(criteria); + result = 31 * result + ObjectUtils.nullSafeHashCode(slices); + result = 31 * result + ObjectUtils.nullSafeHashCode(elemMatches); + result = 31 * result + ObjectUtils.nullSafeHashCode(positionKey); + result = 31 * result + positionValue; return result; } + + /** + * Intermediate builder part for projecting a {@link MongoExpression} to a result field. + * + * @since 3.2 + * @author Christoph Strobl + */ + public interface FieldProjectionExpression { + + /** + * Set the name to be used in the result and return a {@link Field}. + * + * @param name must not be {@literal null}. + * @return the calling instance {@link Field}. + */ + Field as(String name); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java index 66e3b89a33..83417c7200 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,11 +22,12 @@ import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; import org.springframework.data.mongodb.core.geo.Sphere; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** * Wrapper around a {@link Shape} to allow appropriate query rendering. - * + * * @author Thomas Darimont * @author Christoph Strobl * @since 1.5 @@ -38,12 +39,12 @@ public final class GeoCommand { /** * Creates a new {@link GeoCommand}. - * + * * @param shape must not be {@literal null}. */ public GeoCommand(Shape shape) { - Assert.notNull(shape, "Shape must not be null!"); + Assert.notNull(shape, "Shape must not be null"); this.shape = shape; this.command = getCommand(shape); @@ -65,13 +66,14 @@ public String getCommand() { /** * Returns the MongoDB command for the given {@link Shape}. - * + * * @param shape must not be {@literal null}. - * @return + * @return never {@literal null}. + * @throws IllegalArgumentException for unknown {@link Shape}. 
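For orientation, a small sketch of the shape-to-operator mapping performed by getCommand(Shape) below; only the $box case is visible in this hunk, so the $center mapping for a legacy Circle should be read as an assumption:

	import org.springframework.data.geo.Box;
	import org.springframework.data.geo.Circle;
	import org.springframework.data.geo.Point;
	import org.springframework.data.mongodb.core.query.GeoCommand;

	class GeoCommandSamples {

		void commands() {

			// a Box renders as the legacy $box operator
			GeoCommand box = new GeoCommand(new Box(new Point(0, 0), new Point(10, 10)));
			String boxCommand = box.getCommand(); // "$box"

			// a Circle renders as $center (assumption based on the legacy shape operators)
			GeoCommand circle = new GeoCommand(new Circle(new Point(0, 0), 5));
			String circleCommand = circle.getCommand(); // "$center"
		}
	}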
*/ private String getCommand(Shape shape) { - Assert.notNull(shape, "Shape must not be null!"); + Assert.notNull(shape, "Shape must not be null"); if (shape instanceof Box) { return "$box"; @@ -86,10 +88,6 @@ private String getCommand(Shape shape) { throw new IllegalArgumentException("Unknown shape: " + shape); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -101,23 +99,17 @@ public int hashCode() { return result; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoCommand)) { + if (!(obj instanceof GeoCommand other)) { return false; } - GeoCommand that = (GeoCommand) obj; - - return nullSafeEquals(this.command, that.command) && nullSafeEquals(this.shape, that.shape); + return nullSafeEquals(this.command, other.command) && nullSafeEquals(this.shape, other.shape); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java index 4b364bc596..5757aa94a2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,25 @@ */ package org.springframework.data.mongodb.core.query; +import java.time.Duration; import java.util.Collections; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.Map; import java.util.Map.Entry; -import java.util.concurrent.TimeUnit; +import java.util.Set; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** * Meta-data for {@link Query} instances. - * + * * @author Christoph Strobl * @author Oliver Gierke + * @author Mark Paluch * @since 1.6 */ public class Meta { @@ -37,60 +41,136 @@ public class Meta { private enum MetaKey { MAX_TIME_MS("$maxTimeMS"), MAX_SCAN("$maxScan"), COMMENT("$comment"), SNAPSHOT("$snapshot"); - private String key; + private final String key; - private MetaKey(String key) { + MetaKey(String key) { this.key = key; } } - private final Map values = new LinkedHashMap(2); + private Map values = Collections.emptyMap(); + private Set flags = Collections.emptySet(); + private Integer cursorBatchSize; + private Boolean allowDiskUse; + + public Meta() {} + + /** + * Copy a {@link Meta} object. + * + * @since 2.2 + * @param source + */ + Meta(Meta source) { + + this.values = new LinkedHashMap<>(source.values); + this.flags = new LinkedHashSet<>(source.flags); + this.cursorBatchSize = source.cursorBatchSize; + this.allowDiskUse = source.allowDiskUse; + } + + /** + * Return whether the maximum time limit for processing operations is set. 
+ * + * @return {@code true} if set; {@code false} otherwise. + * @since 4.0.6 + */ + public boolean hasMaxTime() { + + Long maxTimeMsec = getMaxTimeMsec(); + + return maxTimeMsec != null && maxTimeMsec > 0; + } /** * @return {@literal null} if not set. */ + @Nullable public Long getMaxTimeMsec() { return getValue(MetaKey.MAX_TIME_MS.key); } + /** + * Returns the required maximum time limit in milliseconds or throws {@link IllegalStateException} if the maximum time + * limit is not set. + * + * @return the maximum time limit in milliseconds for processing operations. + * @throws IllegalStateException if the maximum time limit is not set + * @see #hasMaxTime() + * @since 4.0.6 + */ + public Long getRequiredMaxTimeMsec() { + + Long maxTimeMsec = getMaxTimeMsec(); + + if (maxTimeMsec == null) { + throw new IllegalStateException("Maximum time limit in milliseconds not set"); + } + + return maxTimeMsec; + } + /** * Set the maximum time limit in milliseconds for processing operations. - * + * * @param maxTimeMsec */ public void setMaxTimeMsec(long maxTimeMsec) { - setMaxTime(maxTimeMsec, TimeUnit.MILLISECONDS); + setMaxTime(Duration.ofMillis(maxTimeMsec)); } /** * Set the maximum time limit for processing operations. - * - * @param timeout - * @param timeUnit + * + * @param timeout must not be {@literal null}. + * @since 2.1 + */ + public void setMaxTime(Duration timeout) { + + Assert.notNull(timeout, "Timeout must not be null"); + setValue(MetaKey.MAX_TIME_MS.key, timeout.toMillis()); + } + + /** + * Return whether the comment is set. + * + * @return {@code true} if set; {@code false} otherwise. + * @since 4.0.6 */ - public void setMaxTime(long timeout, TimeUnit timeUnit) { - setValue(MetaKey.MAX_TIME_MS.key, (timeUnit != null ? timeUnit : TimeUnit.MILLISECONDS).toMillis(timeout)); + public boolean hasComment() { + return StringUtils.hasText(getComment()); } /** * @return {@literal null} if not set. */ - public Long getMaxScan() { - return getValue(MetaKey.MAX_SCAN.key); + @Nullable + public String getComment() { + return getValue(MetaKey.COMMENT.key); } /** - * Only scan the specified number of documents. - * - * @param maxScan + * Returns the required comment or throws {@link IllegalStateException} if the comment is not set. + * + * @return the comment. + * @throws IllegalStateException if the comment is not set + * @see #hasComment() + * @since 4.0.6 */ - public void setMaxScan(long maxScan) { - setValue(MetaKey.MAX_SCAN.key, maxScan); + public String getRequiredComment() { + + String comment = getComment(); + + if (comment == null) { + throw new IllegalStateException("Comment not set"); + } + + return comment; } /** - * Add a comment to the query. - * + * Add a comment to the query that is propagated to the profile log. + * * @param comment */ public void setComment(String comment) { @@ -99,37 +179,86 @@ public void setComment(String comment) { /** * @return {@literal null} if not set. + * @since 2.1 */ - public String getComment() { - return getValue(MetaKey.COMMENT.key); + @Nullable + public Integer getCursorBatchSize() { + return cursorBatchSize; } /** - * Using snapshot prevents the cursor from returning a document more than once. - * - * @param useSnapshot + * Apply the batch size (number of documents to return in each response) for a query.
                    + * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. + * + * @param cursorBatchSize The number of documents to return per batch. + * @since 2.1 */ - public void setSnapshot(boolean useSnapshot) { - setValue(MetaKey.SNAPSHOT.key, useSnapshot); + public void setCursorBatchSize(int cursorBatchSize) { + this.cursorBatchSize = cursorBatchSize; } /** + * Add {@link CursorOption} influencing behavior of the {@link com.mongodb.client.FindIterable}. + * + * @param option must not be {@literal null}. + * @return + * @since 1.10 + */ + public boolean addFlag(CursorOption option) { + + Assert.notNull(option, "CursorOption must not be null"); + + if (this.flags == Collections.EMPTY_SET) { + this.flags = new LinkedHashSet<>(2); + } + + return this.flags.add(option); + } + + /** + * @return never {@literal null}. + * @since 1.10 + */ + public Set getFlags() { + return flags; + } + + /** + * When set to {@literal true}, aggregation stages can write data to disk. + * * @return {@literal null} if not set. + * @since 3.0 */ - public boolean getSnapshot() { - return getValue(MetaKey.SNAPSHOT.key, false); + @Nullable + public Boolean getAllowDiskUse() { + return allowDiskUse; + } + + /** + * Enables writing to temporary files for aggregation stages and queries. When set to {@literal true}, aggregation + * stages can write data to the {@code _tmp} subdirectory in the {@code dbPath} directory. + *

+	 * Starting in MongoDB 4.2, the profiler log messages and diagnostic log messages include a {@code usedDisk}
+	 * indicator if any aggregation stage wrote data to temporary files due to memory restrictions.
+	 *
+	 * @param allowDiskUse use {@literal null} for server defaults.
+	 * @since 3.0
+	 */
+	public void setAllowDiskUse(@Nullable Boolean allowDiskUse) {
+		this.allowDiskUse = allowDiskUse;
 	}
 
 	/**
 	 * @return
 	 */
 	public boolean hasValues() {
-		return !this.values.isEmpty();
+		return !this.values.isEmpty() || !this.flags.isEmpty() || this.cursorBatchSize != null || this.allowDiskUse != null;
 	}
 
 	/**
 	 * Get {@link Iterable} of set meta values.
-	 *
+	 *
 	 * @return
 	 */
 	public Iterable<Entry<String, Object>> values() {
@@ -138,20 +267,25 @@ public Iterable<Entry<String, Object>> values() {
 
 	/**
 	 * Sets or removes the value in case of {@literal null} or empty {@link String}.
-	 *
+	 *
 	 * @param key must not be {@literal null} or empty.
 	 * @param value
 	 */
-	private void setValue(String key, Object value) {
+	void setValue(String key, @Nullable Object value) {
+
+		Assert.hasText(key, "Meta key must not be 'null' or blank");
 
-		Assert.hasText(key, "Meta key must not be 'null' or blank.");
+		if (values == Collections.EMPTY_MAP) {
+			values = new LinkedHashMap<>(2);
+		}
 
-		if (value == null || (value instanceof String && !StringUtils.hasText((String) value))) {
+		if (value == null || (value instanceof String stringValue && !StringUtils.hasText(stringValue))) {
 			this.values.remove(key);
 		}
 		this.values.put(key, value);
 	}
 
+	@Nullable
 	@SuppressWarnings("unchecked")
 	private <T> T getValue(String key) {
 		return (T) this.values.get(key);
@@ -163,31 +297,58 @@ private <T> T getValue(String key, T defaultValue) {
 		return value != null ? value : defaultValue;
 	}
 
-	/*
-	 * (non-Javadoc)
-	 * @see java.lang.Object#hashCode()
-	 */
 	@Override
 	public int hashCode() {
-		return ObjectUtils.nullSafeHashCode(this.values);
+
+		int hash = ObjectUtils.nullSafeHashCode(this.values);
+		hash += ObjectUtils.nullSafeHashCode(this.flags);
+		return hash;
 	}
 
-	/*
-	 * (non-Javadoc)
-	 * @see java.lang.Object#equals(java.lang.Object)
-	 */
 	@Override
-	public boolean equals(Object obj) {
+	public boolean equals(@Nullable Object obj) {
 
 		if (this == obj) {
 			return true;
 		}
 
-		if (!(obj instanceof Meta)) {
+		if (!(obj instanceof Meta other)) {
+			return false;
+		}
+
+		if (!ObjectUtils.nullSafeEquals(this.values, other.values)) {
 			return false;
 		}
 
+		return ObjectUtils.nullSafeEquals(this.flags, other.flags);
+	}
+
+	/**
+	 * {@link CursorOption} represents {@code OP_QUERY} wire protocol flags to change the behavior of queries.
+	 *
+	 * @author Christoph Strobl
+	 * @since 1.10
+	 */
+	public enum CursorOption {
+
+		/** Prevents the server from timing out idle cursors. */
+		NO_TIMEOUT,
+
+		/**
+		 * Sets the cursor to return all data returned by the query at once rather than splitting the results into batches.
+		 */
+		EXHAUST,
+
+		/**
+		 * Allows querying of a replica.
+		 *
+		 * @since 3.0.2
		 */
+		SECONDARY_READS,
 
-		Meta other = (Meta) obj;
-		return ObjectUtils.nullSafeEquals(this.values, other.values);
+		/**
+		 * Sets the cursor to return partial data from a query against a sharded cluster in which some shards do not respond
+		 * rather than throwing an error.
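A hedged sketch of how the Meta options above are typically assembled; the option values are arbitrary, and attaching the object through Query#setMeta is an assumption about the surrounding API rather than something this hunk shows:

	import java.time.Duration;

	import org.springframework.data.mongodb.core.query.Criteria;
	import org.springframework.data.mongodb.core.query.Meta;
	import org.springframework.data.mongodb.core.query.Query;

	class MetaSamples {

		Query metaQuery() {

			Meta meta = new Meta();
			meta.setMaxTime(Duration.ofSeconds(2)); // -> { $maxTimeMS : 2000 }
			meta.setComment("status-dashboard"); // -> { $comment : "status-dashboard" }
			meta.setCursorBatchSize(128); // batch size hint for the cursor
			meta.addFlag(Meta.CursorOption.NO_TIMEOUT); // keep idle cursors alive
			meta.setAllowDiskUse(true); // aggregation stages may spill to disk

			Query query = Query.query(Criteria.where("status").is("ACTIVE"));
			query.setMeta(meta); // assumed attachment point
			return query;
		}
	}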
+ */ + PARTIAL } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MetricConversion.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MetricConversion.java new file mode 100644 index 0000000000..571bbd275c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MetricConversion.java @@ -0,0 +1,183 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.query; + +import java.math.BigDecimal; +import java.math.MathContext; +import java.math.RoundingMode; + +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.Metric; +import org.springframework.data.geo.Metrics; + +/** + * {@link Metric} and {@link Distance} conversions using the metric system. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public class MetricConversion { + + private static final BigDecimal METERS_MULTIPLIER = new BigDecimal(Metrics.KILOMETERS.getMultiplier()) + .multiply(new BigDecimal(1000)); + + // to achieve a calculation that is accurate to 0.3 meters + private static final int PRECISION = 8; + + /** + * Return meters to {@code metric} multiplier. + * + * @param metric + * @return + */ + public static double getMetersToMetricMultiplier(Metric metric) { + + ConversionMultiplier conversionMultiplier = ConversionMultiplier.builder().from(METERS_MULTIPLIER).to(metric) + .build(); + return conversionMultiplier.multiplier().doubleValue(); + } + + /** + * Return {@code distance} in meters. + * + * @param distance + * @return + */ + public static double getDistanceInMeters(Distance distance) { + return new BigDecimal(distance.getValue()).multiply(getMetricToMetersMultiplier(distance.getMetric())) + .doubleValue(); + } + + /** + * Return {@code distance} in radians (on an earth like sphere). + * + * @param distance must not be {@literal null}. + * @return distance in radians. + * @since 3.4.4 + */ + public static double toRadians(Distance distance) { + return metersToRadians(getDistanceInMeters(distance)); + } + + /** + * Return {@code distance} in radians (on an earth like sphere). + * + * @param meters + * @return distance in radians. + * @since 3.4.4 + */ + public static double metersToRadians(double meters) { + return BigDecimal.valueOf(meters).divide(METERS_MULTIPLIER, MathContext.DECIMAL64).doubleValue(); + } + + /** + * Return {@code metric} to meters multiplier. + * + * @param metric + * @return + */ + private static BigDecimal getMetricToMetersMultiplier(Metric metric) { + + ConversionMultiplier conversionMultiplier = ConversionMultiplier.builder().from(metric).to(METERS_MULTIPLIER) + .build(); + return conversionMultiplier.multiplier(); + } + + /** + * Provides a multiplier to convert between various metrics. 
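A small sketch of the conversions MetricConversion provides; the values in the comments follow from METERS_MULTIPLIER above being the earth radius in meters (6378137):

	import org.springframework.data.geo.Distance;
	import org.springframework.data.geo.Metrics;
	import org.springframework.data.mongodb.core.query.MetricConversion;

	class MetricConversionSamples {

		void convert() {

			Distance distance = new Distance(0.4, Metrics.KILOMETERS);

			double meters = MetricConversion.getDistanceInMeters(distance); // 400.0
			double radians = MetricConversion.toRadians(distance); // ~0.0000627, i.e. meters divided by the earth radius
			double multiplier = MetricConversion.getMetersToMetricMultiplier(Metrics.KILOMETERS); // 0.001
		}
	}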
Metrics must share the same base scale and provide a + * multiplier to convert between the base scale and its own metric. + * + * @author Mark Paluch + */ + private static class ConversionMultiplier { + + private final BigDecimal source; + private final BigDecimal target; + + ConversionMultiplier(Number source, Number target) { + + if (source instanceof BigDecimal bigDecimal) { + this.source = bigDecimal; + } else { + this.source = BigDecimal.valueOf(source.doubleValue()); + } + + if (target instanceof BigDecimal bigDecimal) { + this.target = bigDecimal; + } else { + this.target = BigDecimal.valueOf(target.doubleValue()); + } + } + + /** + * Returns the multiplier to convert a number from the {@code source} metric to the {@code target} metric. + * + * @return + */ + BigDecimal multiplier() { + return target.divide(source, PRECISION, RoundingMode.HALF_UP); + } + + /** + * Creates a new {@link ConversionMultiplierBuilder}. + * + * @return + */ + static ConversionMultiplierBuilder builder() { + return new ConversionMultiplierBuilder(); + } + + } + + /** + * Builder for {@link ConversionMultiplier}. + * + * @author Mark Paluch + */ + private static class ConversionMultiplierBuilder { + + private Number from; + private Number to; + + ConversionMultiplierBuilder() {} + + ConversionMultiplierBuilder from(Number from) { + this.from = from; + return this; + } + + ConversionMultiplierBuilder from(Metric from) { + this.from = from.getMultiplier(); + return this; + } + + ConversionMultiplierBuilder to(Number to) { + this.to = to; + return this; + } + + ConversionMultiplierBuilder to(Metric to) { + this.to = to.getMultiplier(); + return this; + } + + ConversionMultiplier build() { + return new ConversionMultiplier(this.from, this.to); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java index a1d85dff99..e26a61c61e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,62 +17,105 @@ import java.util.regex.Pattern; -import org.springframework.data.repository.query.parser.Part.Type; -import org.springframework.util.ObjectUtils; +import org.bson.BsonRegularExpression; +import org.springframework.lang.Nullable; /** * @author Christoph Strobl * @author Mark Paluch + * @author Jens Schauder * @since 1.8 */ public enum MongoRegexCreator { INSTANCE; + /** + * Match modes for treatment of {@link String} values. + * + * @author Christoph Strobl + * @author Jens Schauder + */ + public enum MatchMode { + + /** + * Store specific default. 
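To make the match modes concrete, a sketch of the regular expressions produced by toRegularExpression, which follows further below; the sample inputs are arbitrary:

	import org.springframework.data.mongodb.core.query.MongoRegexCreator;
	import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode;

	class RegexCreatorSamples {

		void regexes() {

			MongoRegexCreator creator = MongoRegexCreator.INSTANCE;

			creator.toRegularExpression("fire", MatchMode.STARTING_WITH); // "^fire"
			creator.toRegularExpression("fire", MatchMode.ENDING_WITH); // "fire$"
			creator.toRegularExpression("fire", MatchMode.CONTAINING); // ".*fire.*"
			creator.toRegularExpression("fire", MatchMode.EXACT); // "^fire$"
			creator.toRegularExpression("f.re", MatchMode.REGEX); // "f.re" - used as-is
		}
	}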
+ */ + DEFAULT, + + /** + * Matches the exact string + */ + EXACT, + + /** + * Matches string starting with pattern + */ + STARTING_WITH, + + /** + * Matches string ending with pattern + */ + ENDING_WITH, + + /** + * Matches string containing pattern + */ + CONTAINING, + + /** + * Treats strings as regular expression patterns + */ + REGEX, + + LIKE + } + private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}"); /** * Creates a regular expression String to be used with {@code $regex}. - * + * * @param source the plain String - * @param type - * @return {@literal source} when {@literal source} or {@literal type} is {@literal null}. + * @param matcherType the type of matching to perform + * @return {@literal source} when {@literal source} or {@literal matcherType} is {@literal null}. */ - public String toRegularExpression(String source, Type type) { + @Nullable + public String toRegularExpression(@Nullable String source, @Nullable MatchMode matcherType) { - if (type == null || source == null) { + if (matcherType == null || source == null) { return source; } - String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, type); - - switch (type) { - case STARTING_WITH: - regex = "^" + regex; - break; - case ENDING_WITH: - regex = regex + "$"; - break; - case CONTAINING: - case NOT_CONTAINING: - regex = ".*" + regex + ".*"; - break; - case SIMPLE_PROPERTY: - case NEGATING_SIMPLE_PROPERTY: - regex = "^" + regex + "$"; - default: - } + String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, matcherType); + + return switch (matcherType) { + case STARTING_WITH -> String.format("^%s", regex); + case ENDING_WITH -> String.format("%s$", regex); + case CONTAINING -> String.format(".*%s.*", regex); + case EXACT -> String.format("^%s$", regex); + default -> regex; + }; + } - return regex; + /** + * @param source + * @return + * @since 2.2.14 + * @deprecated since 4.1.1 + */ + @Deprecated(since = "4.1.1", forRemoval = true) + public Object toCaseInsensitiveMatch(Object source) { + return source instanceof String stringValue ? new BsonRegularExpression(Pattern.quote(stringValue), "i") : source; } - private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Type type) { + private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, MatchMode matcherType) { - if (ObjectUtils.nullSafeEquals(Type.REGEX, type)) { + if (MatchMode.REGEX == matcherType) { return source; } - if (!ObjectUtils.nullSafeEquals(Type.LIKE, type)) { + if (MatchMode.LIKE != matcherType) { return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source; } @@ -95,12 +138,13 @@ private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Type if (leadingWildcard) { sb.append(".*"); } + sb.append(valueToUse); + if (trailingWildcard) { sb.append(".*"); } return sb.toString(); } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java index b847d192e6..f0f3b0a4dc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,71 +17,213 @@ import java.util.Arrays; +import org.bson.Document; import org.springframework.data.domain.Pageable; import org.springframework.data.geo.CustomMetric; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** - * Builder class to build near-queries. - * + * Builder class to build near-queries.
+ * The MongoDB {@code $geoNear} operator allows usage of a {@literal GeoJSON Point} or a legacy coordinate pair. Though
+ * syntactically different, there is no difference between {@code near: [-73.99171, 40.738868]} and {@code near: { type:
+ * "Point", coordinates: [-73.99171, 40.738868] } } for the MongoDB server.
                    + *
+ * Please note that there is a huge difference in the distance calculation: the legacy format (for {@code near}) operates
+ * upon {@literal Radians} on an Earth-like sphere, whereas the {@literal GeoJSON} format uses {@literal Meters}. The
+ * actual type within the document is of no concern at this point.
+ * To avoid a serious headache, make sure to set the {@link Metric} to the desired unit of measure, which ensures the
+ * distance is calculated correctly.
                    + *
                    + * In other words:
                    + * Assume you've got 5 Documents like the ones below
                    + * + *

                    + *     
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a5"),
                    + *     "name" : "Penn Station",
                    + *     "location" : { "type" : "Point", "coordinates" : [  -73.99408, 40.75057 ] }
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a6"),
                    + *     "name" : "10gen Office",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a9"),
                    + *     "name" : "City Bakery ",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796aa"),
                    + *     "name" : "Splash Bar",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796ab"),
                    + *     "name" : "Momofuku Milk Bar",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.985839, 40.731698 ] }
                    + * }
                    + *      
                    + * 
                    + * + * Fetching all Documents within a 400 Meter radius from {@code [-73.99171, 40.738868] } would look like this using + * {@literal GeoJSON}: + * + *
                    + *     
                    + * {
                    + *     $geoNear: {
                    + *         maxDistance: 400,
                    + *         num: 10,
                    + *         near: { type: "Point", coordinates: [-73.99171, 40.738868] },
                    + *         spherical:true,
                    + *         key: "location",
                    + *         distanceField: "distance"
                    + *     }
                    + * }
                    + *
                    + *     
                    + * 
                    + * + * resulting in the following 3 Documents. + * + *
                    + *     
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a6"),
                    + *     "name" : "10gen Office",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
                    + *     "distance" : 0.0 // Meters
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a9"),
                    + *     "name" : "City Bakery ",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
                    + *     "distance" : 69.3582262492474 // Meters
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796aa"),
                    + *     "name" : "Splash Bar",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
                    + *     "distance" : 69.3582262492474 // Meters
                    + * }
                    + *     
                    + * 
                    + * + * Using legacy coordinate pairs one operates upon radians as discussed before. Assume we use {@link Metrics#KILOMETERS} + * when constructing the geoNear command. The {@link Metric} will make sure the distance multiplier is set correctly, so + * the command is rendered like + * + *
                    + *     
                    + * {
                    + *     $geoNear: {
                    + *         maxDistance: 0.0000627142377, // 400 Meters
                    + *         distanceMultiplier: 6378.137,
                    + *         num: 10,
                    + *         near: [-73.99171, 40.738868],
                    + *         spherical:true,
                    + *         key: "location",
                    + *         distanceField: "distance"
                    + *     }
                    + * }
                    + *     
                    + * 
                    + * + * Please note the calculated distance now uses {@literal Kilometers} instead of {@literal Meters} as unit of measure, + * so we need to take it times 1000 to match up to {@literal Meters} as in the {@literal GeoJSON} variant.
                    + * Still as we've been requesting the {@link Distance} in {@link Metrics#KILOMETERS} the {@link Distance#getValue()} + * reflects exactly this. + * + *
                    + *     
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a6"),
                    + *     "name" : "10gen Office",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
                    + *     "distance" : 0.0 // Kilometers
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796a9"),
                    + *     "name" : "City Bakery ",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
                    + *     "distance" : 0.0693586286032982 // Kilometers
                    + * }
                    + * {
                    + *     "_id" : ObjectId("5c10f3735d38908db52796aa"),
                    + *     "name" : "Splash Bar",
                    + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
                    + *     "distance" : 0.0693586286032982 // Kilometers
                    + * }
                    + *     
                    + * 
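Putting the above together, a minimal sketch issuing both variants; the MongoTemplate call and the Venue domain type are illustrative assumptions:

	import org.springframework.data.geo.Distance;
	import org.springframework.data.geo.GeoResults;
	import org.springframework.data.geo.Metrics;
	import org.springframework.data.geo.Point;
	import org.springframework.data.mongodb.core.MongoTemplate;
	import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
	import org.springframework.data.mongodb.core.query.NearQuery;

	class GeoNearSamples {

		void nearQueries(MongoTemplate template) {

			// GeoJSON variant: distances are calculated in meters
			NearQuery geoJson = NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868))
					.maxDistance(new Distance(0.4, Metrics.KILOMETERS))
					.limit(10);

			// legacy variant: radians under the hood, the Metric takes care of the conversion
			NearQuery legacy = NearQuery.near(new Point(-73.99171, 40.738868), Metrics.KILOMETERS)
					.maxDistance(0.4) // 400 meters, interpreted in KILOMETERS
					.limit(10);

			GeoResults<Venue> results = template.geoNear(legacy, Venue.class);
		}

		record Venue(String id, String name) {}
	}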
                    + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ -public final class NearQuery { +public final class NearQuery implements ReadConcernAware, ReadPreferenceAware { private final Point point; - private Query query; - private Distance maxDistance; - private Distance minDistance; + private @Nullable Query query; + private @Nullable Distance maxDistance; + private @Nullable Distance minDistance; private Metric metric; private boolean spherical; - private Integer num; - private Integer skip; + private @Nullable Long limit; + private @Nullable Long skip; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; /** * Creates a new {@link NearQuery}. - * - * @param point + * + * @param point must not be {@literal null}. + * @param metric must not be {@literal null}. */ private NearQuery(Point point, Metric metric) { - Assert.notNull(point); + Assert.notNull(point, "Point must not be null"); + Assert.notNull(metric, "Metric must not be null"); this.point = point; this.spherical = false; - - if (metric != null) { - in(metric); - } + this.metric = metric; } /** * Creates a new {@link NearQuery} starting near the given coordinates. - * - * @param i - * @param j + * + * @param x + * @param y * @return */ public static NearQuery near(double x, double y) { - return near(x, y, null); + return near(x, y, Metrics.NEUTRAL); } /** * Creates a new {@link NearQuery} starting at the given coordinates using the given {@link Metric} to adapt given * values to further configuration. E.g. setting a {@link #maxDistance(double)} will be interpreted as a value of the * initially set {@link Metric}. - * + * * @param x * @param y - * @param metric + * @param metric must not be {@literal null}. * @return */ public static NearQuery near(double x, double y, Metric metric) { @@ -89,85 +231,95 @@ public static NearQuery near(double x, double y, Metric metric) { } /** - * Creates a new {@link NearQuery} starting at the given {@link Point}. - * + * Creates a new {@link NearQuery} starting at the given {@link Point}.
                    + * NOTE: There is a difference in using {@link Point} versus {@link GeoJsonPoint}. {@link Point} + * values are rendered as coordinate pairs in the legacy format and operate upon radians, whereas the + * {@link GeoJsonPoint} uses according to its specification {@literal meters} as unit of measure. This may lead to + * different results when using a {@link Metrics#NEUTRAL neutral Metric}. + * * @param point must not be {@literal null}. - * @return + * @return new instance of {@link NearQuery}. */ public static NearQuery near(Point point) { - return near(point, null); + return near(point, Metrics.NEUTRAL); } /** * Creates a {@link NearQuery} starting near the given {@link Point} using the given {@link Metric} to adapt given * values to further configuration. E.g. setting a {@link #maxDistance(double)} will be interpreted as a value of the - * initially set {@link Metric}. - * + * initially set {@link Metric}.
                    + * NOTE: There is a difference in using {@link Point} versus {@link GeoJsonPoint}. {@link Point} + * values are rendered as coordinate pairs in the legacy format and operate upon radians, whereas the + * {@link GeoJsonPoint} uses according to its specification {@literal meters} as unit of measure. This may lead to + * different results when using a {@link Metrics#NEUTRAL neutral Metric}. + * * @param point must not be {@literal null}. - * @param metric - * @return + * @param metric must not be {@literal null}. + * @return new instance of {@link NearQuery}. */ public static NearQuery near(Point point, Metric metric) { - Assert.notNull(point); return new NearQuery(point, metric); } /** * Returns the {@link Metric} underlying the actual query. If no metric was set explicitly {@link Metrics#NEUTRAL} * will be returned. - * + * * @return will never be {@literal null}. */ public Metric getMetric() { - return metric == null ? Metrics.NEUTRAL : metric; + return metric; } /** * Configures the maximum number of results to return. - * - * @param num + * + * @param limit * @return + * @since 2.2 */ - public NearQuery num(int num) { - this.num = num; + public NearQuery limit(long limit) { + this.limit = limit; return this; } /** * Configures the number of results to skip. - * + * * @param skip * @return */ - public NearQuery skip(int skip) { + public NearQuery skip(long skip) { this.skip = skip; return this; } /** * Configures the {@link Pageable} to use. - * + * * @param pageable must not be {@literal null} * @return */ public NearQuery with(Pageable pageable) { - Assert.notNull(pageable, "Pageable must not be 'null'."); - this.num = pageable.getOffset() + pageable.getPageSize(); - this.skip = pageable.getOffset(); + Assert.notNull(pageable, "Pageable must not be 'null'"); + if (pageable.isPaged()) { + this.skip = pageable.getOffset(); + this.limit = (long) pageable.getPageSize(); + } return this; } /** * Sets the max distance results shall have from the configured origin. If a {@link Metric} was set before the given * value will be interpreted as being a value in that metric. E.g. - * + * *
                     	 * NearQuery query = near(10.0, 20.0, Metrics.KILOMETERS).maxDistance(150);
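	 * // A sketch extending the example above: the Metric given to near(..) scales a lower bound as
	 * // well, so a ring-shaped search can combine minDistance (defined below) with maxDistance:
	 * NearQuery ring = near(10.0, 20.0, Metrics.KILOMETERS).minDistance(10).maxDistance(150);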
                     	 * 
                    - * + * * Will set the maximum distance to 150 kilometers. - * + * * @param maxDistance * @return */ @@ -178,33 +330,34 @@ public NearQuery maxDistance(double maxDistance) { /** * Sets the maximum distance supplied in a given metric. Will normalize the distance but not reconfigure the query's * result {@link Metric} if one was configured before. - * + * * @param maxDistance * @param metric must not be {@literal null}. * @return */ public NearQuery maxDistance(double maxDistance, Metric metric) { - Assert.notNull(metric); + Assert.notNull(metric, "Metric must not be null"); + return maxDistance(new Distance(maxDistance, metric)); } /** * Sets the maximum distance to the given {@link Distance}. Will set the returned {@link Metric} to be the one of the - * given {@link Distance} if no {@link Metric} was set before. - * + * given {@link Distance} if {@link Metric} was {@link Metrics#NEUTRAL} before. + * * @param distance must not be {@literal null}. * @return */ public NearQuery maxDistance(Distance distance) { - Assert.notNull(distance); + Assert.notNull(distance, "Distance must not be null"); if (distance.getMetric() != Metrics.NEUTRAL) { this.spherical(true); } - if (this.metric == null) { + if (ObjectUtils.nullSafeEquals(Metrics.NEUTRAL, this.metric)) { in(distance.getMetric()); } @@ -215,13 +368,13 @@ public NearQuery maxDistance(Distance distance) { /** * Sets the minimum distance results shall have from the configured origin. If a {@link Metric} was set before the * given value will be interpreted as being a value in that metric. E.g. - * + * *
                     	 * NearQuery query = near(10.0, 20.0, Metrics.KILOMETERS).minDistance(150);
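	 * // Equivalently, a sketch using the Distance-based overload below; the Distance carries its own Metric:
	 * NearQuery byDistance = near(10.0, 20.0).minDistance(new Distance(150, Metrics.KILOMETERS));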
                     	 * 
- * + * Will set the minimum distance to 150 kilometers. - * + * @param minDistance * @return * @since 1.7 @@ -233,7 +386,7 @@ public NearQuery minDistance(double minDistance) { /** * Sets the minimum distance supplied in a given metric. Will normalize the distance but not reconfigure the query's * result {@link Metric} if one was configured before. - * + * * @param minDistance * @param metric must not be {@literal null}. * @return @@ -241,21 +394,22 @@ public NearQuery minDistance(double minDistance) { */ public NearQuery minDistance(double minDistance, Metric metric) { - Assert.notNull(metric); + Assert.notNull(metric, "Metric must not be null"); + return minDistance(new Distance(minDistance, metric)); } /** * Sets the minimum distance to the given {@link Distance}. Will set the returned {@link Metric} to be the one of the * given {@link Distance} if no {@link Metric} was set before. - * + * * @param distance must not be {@literal null}. * @return * @since 1.7 */ public NearQuery minDistance(Distance distance) { - Assert.notNull(distance); + Assert.notNull(distance, "Distance must not be null"); if (distance.getMetric() != Metrics.NEUTRAL) { this.spherical(true); @@ -271,26 +425,28 @@ public NearQuery minDistance(Distance distance) { /** * Returns the maximum {@link Distance}. - * + * * @return */ + @Nullable public Distance getMaxDistance() { return this.maxDistance; } /** * Returns the minimum {@link Distance}. - * + * * @return * @since 1.7 */ + @Nullable public Distance getMinDistance() { return this.minDistance; } /** * Configures a {@link CustomMetric} with the given multiplier. - * + * * @param distanceMultiplier * @return */ @@ -300,22 +456,9 @@ public NearQuery distanceMultiplier(double distanceMultiplier) { return this; } - /** - * Configures the distance multiplier to the multiplier of the given {@link Metric}. - * - * @deprecated use {@link #in(Metric)} instead. - * @param metric must not be {@literal null}. - * @return - */ - @Deprecated - public NearQuery distanceMultiplier(Metric metric) { - Assert.notNull(metric); - return in(metric); - } - /** * Configures whether to return spherical values for the actual distance. - * + * * @param spherical * @return */ @@ -326,7 +469,7 @@ public NearQuery spherical(boolean spherical) { /** * Returns whether spherical values will be returned. - * + * * @return */ public boolean isSpherical() { @@ -336,7 +479,7 @@ public boolean isSpherical() { /** * Will cause the results' distances to be returned in kilometers. Sets {@link #distanceMultiplier(double)} and * {@link #spherical(boolean)} accordingly. - * + * * @return */ public NearQuery inKilometers() { @@ -346,7 +489,7 @@ public NearQuery inKilometers() { /** * Will cause the results' distances to be returned in miles. Sets {@link #distanceMultiplier(double)} and * {@link #spherical(boolean)} accordingly. - * + * * @return */ public NearQuery inMiles() { @@ -356,19 +499,19 @@ public NearQuery inMiles() { /** * Will cause the results' distances to be returned in the given metric. Sets {@link #distanceMultiplier(double)} * accordingly as well as {@link #spherical(boolean)} if the given {@link Metric} is not {@link Metrics#NEUTRAL}. - * + * * @param metric the metric the results shall be returned in. Uses {@link Metrics#NEUTRAL} if {@literal null} is * passed. * @return */ - public NearQuery in(Metric metric) { + public NearQuery in(@Nullable Metric metric) { return adaptMetric(metric == null ? 
Metrics.NEUTRAL : metric); } /** * Configures the given {@link Metric} to be used as the base for this query and recalculates the maximum distance if no * metric was set before. - * + * * @param metric */ private NearQuery adaptMetric(Metric metric) { @@ -383,18 +526,19 @@ private NearQuery adaptMetric(Metric metric) { /** * Adds an actual query to the {@link NearQuery} to restrict the objects considered for the actual near operation. - * + * * @param query must not be {@literal null}. * @return */ public NearQuery query(Query query) { - Assert.notNull(query, "Cannot apply 'null' query on NearQuery."); + Assert.notNull(query, "Cannot apply 'null' query on NearQuery"); + this.query = query; this.skip = query.getSkip(); if (query.getLimit() != 0) { - this.num = query.getLimit(); + this.limit = (long) query.getLimit(); } return this; } @@ -402,43 +546,146 @@ public NearQuery query(Query query) { /** * @return the number of elements to skip. */ - public Integer getSkip() { + @Nullable + public Long getSkip() { return skip; } /** - * Returns the {@link DBObject} built by the {@link NearQuery}. + * Get the {@link Collation} to use along with the {@link #query(Query)}. + * + * @return the {@link Collation} if set. {@literal null} otherwise. + * @since 2.2 + */ + @Nullable + public Collation getCollation() { + return query != null ? query.getCollation().orElse(null) : null; + } + + /** + * Configures the query to use the given {@link ReadConcern} unless the underlying {@link #query(Query)} + * {@link Query#hasReadConcern() specifies} another one. + * + * @param readConcern must not be {@literal null}. + * @return this. + * @since 4.1 + */ + public NearQuery withReadConcern(ReadConcern readConcern) { + + Assert.notNull(readConcern, "ReadConcern must not be null"); + this.readConcern = readConcern; + return this; + } + + /** + * Configures the query to use the given {@link ReadPreference} unless the underlying {@link #query(Query)} + * {@link Query#hasReadPreference() specifies} another one. + * + * @param readPreference must not be {@literal null}. + * @return this. + * @since 4.1 + */ + public NearQuery withReadPreference(ReadPreference readPreference) { + + Assert.notNull(readPreference, "ReadPreference must not be null"); + this.readPreference = readPreference; + return this; + } + + /** + * Get the {@link ReadConcern} to use. Will return the underlying {@link #query(Query) query's} + * {@link Query#getReadConcern() ReadConcern} if present or the one defined on the {@link NearQuery#readConcern} + * itself. + * + * @return can be {@literal null} if none set. + * @since 4.1 + * @see ReadConcernAware + */ + @Nullable + @Override + public ReadConcern getReadConcern() { + + if (query != null && query.hasReadConcern()) { + return query.getReadConcern(); + } + return readConcern; + } + + /** + * Get the {@link ReadPreference} to use. Will return the underlying {@link #query(Query) query's} + * {@link Query#getReadPreference() ReadPreference} if present or the one defined on the + * {@link NearQuery#readPreference} itself. + * + * @return can be {@literal null} if none set. + * @since 4.1 + * @see ReadPreferenceAware + */ + @Nullable + @Override + public ReadPreference getReadPreference() { + + if (query != null && query.hasReadPreference()) { + return query.getReadPreference(); + } + return readPreference; + } + + /** + * Returns the {@link Document} built by the {@link NearQuery}. 
+ * * @return */ - public DBObject toDBObject() { + public Document toDocument() { - BasicDBObject dbObject = new BasicDBObject(); + Document document = new Document(); if (query != null) { - dbObject.put("query", query.getQueryObject()); + + document.put("query", query.getQueryObject()); + query.getCollation().ifPresent(collation -> document.append("collation", collation.toDocument())); } if (maxDistance != null) { - dbObject.put("maxDistance", maxDistance.getNormalizedValue()); + document.put("maxDistance", getDistanceValueInRadiantsOrMeters(maxDistance)); } if (minDistance != null) { - dbObject.put("minDistance", minDistance.getNormalizedValue()); + document.put("minDistance", getDistanceValueInRadiantsOrMeters(minDistance)); } if (metric != null) { - dbObject.put("distanceMultiplier", metric.getMultiplier()); + document.put("distanceMultiplier", getDistanceMultiplier()); } - if (num != null) { - dbObject.put("num", num); + if (limit != null) { + document.put("num", limit); } - dbObject.put("near", Arrays.asList(point.getX(), point.getY())); + if (usesGeoJson()) { + document.put("near", point); + } else { + document.put("near", Arrays.asList(point.getX(), point.getY())); + } - dbObject.put("spherical", spherical); + document.put("spherical", spherical ? spherical : usesGeoJson()); - return dbObject; + return document; } + + private double getDistanceMultiplier() { + return usesMetricSystem() ? MetricConversion.getMetersToMetricMultiplier(metric) : metric.getMultiplier(); + } + + private double getDistanceValueInRadiantsOrMeters(Distance distance) { + return usesMetricSystem() ? MetricConversion.getDistanceInMeters(distance) : distance.getNormalizedValue(); + } + + private boolean usesMetricSystem() { + return usesGeoJson(); + } + + private boolean usesGeoJson() { + return point instanceof GeoJsonPoint; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Order.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Order.java deleted file mode 100644 index 5936218b7b..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Order.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2010-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.query; - -import org.springframework.data.domain.Sort.Direction; - -/** - * An enum that specifies the ordering for sort or index specifications - * - * @deprecated prefer {@link Direction} - * @author Thomas Risberg - * @author Oliver Gierke - */ -@Deprecated -public enum Order { - - ASCENDING { - @Override - public Direction toDirection() { - return Direction.ASC; - } - }, - - DESCENDING { - @Override - public Direction toDirection() { - return Direction.DESC; - } - }; - - public abstract Direction toDirection(); -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java index b8bb5957bd..31c6b9069f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,49 +18,86 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; import static org.springframework.util.ObjectUtils.*; +import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; -import java.util.concurrent.TimeUnit; +import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.OffsetScrollPosition; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.query.Meta.CursorOption; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** + * MongoDB Query object representing criteria, projection, sorting and query hints. 
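+ *
+ * A short usage sketch (assuming the {@link Criteria} API from this package and {@code Sort} from Spring Data Commons):
+ *
+ * Query query = Query.query(Criteria.where("firstname").is("luke")).with(Sort.by("lastname")).limit(10);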
+ * * @author Thomas Risberg * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author Anton Barkan */ -public class Query { +public class Query implements ReadConcernAware, ReadPreferenceAware { private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES"; - private final Set> restrictedTypes = new HashSet>(); - private final Map criteria = new LinkedHashMap(); - private Field fieldSpec; - private Sort sort; - private int skip; - private int limit; - private String hint; + private Set> restrictedTypes = Collections.emptySet(); + private final Map criteria = new LinkedHashMap<>(); + private @Nullable Field fieldSpec = null; + private Sort sort = Sort.unsorted(); + private long skip; + private Limit limit = Limit.unlimited(); + + private KeysetScrollPosition keysetScrollPosition; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; + + private @Nullable String hint; private Meta meta = new Meta(); + private Optional collation = Optional.empty(); + + Query(Query query) { + this.restrictedTypes = query.restrictedTypes; + this.fieldSpec = query.fieldSpec; + this.sort = query.sort; + this.limit = query.limit; + this.skip = query.skip; + this.keysetScrollPosition = query.keysetScrollPosition; + this.readConcern = query.readConcern; + this.readPreference = query.readPreference; + this.hint = query.hint; + this.meta = query.meta; + this.collation = query.collation; + } + /** * Static factory method to create a {@link Query} using the provided {@link CriteriaDefinition}. - * + * * @param criteriaDefinition must not be {@literal null}. - * @return + * @return new instance of {@link Query}. * @since 1.6 */ public static Query query(CriteriaDefinition criteriaDefinition) { @@ -71,7 +108,7 @@ public Query() {} /** * Creates a new {@link Query} using the given {@link CriteriaDefinition}. - * + * * @param criteriaDefinition must not be {@literal null}. * @since 1.6 */ @@ -81,13 +118,15 @@ public Query(CriteriaDefinition criteriaDefinition) { /** * Adds the given {@link CriteriaDefinition} to the current {@link Query}. - * + * * @param criteriaDefinition must not be {@literal null}. - * @return + * @return this. * @since 1.6 */ public Query addCriteria(CriteriaDefinition criteriaDefinition) { + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); + CriteriaDefinition existing = this.criteria.get(criteriaDefinition.getKey()); String key = criteriaDefinition.getKey(); @@ -95,97 +134,247 @@ public Query addCriteria(CriteriaDefinition criteriaDefinition) { this.criteria.put(key, criteriaDefinition); } else { throw new InvalidMongoDbApiUsageException( - "Due to limitations of the com.mongodb.BasicDBObject, " + "you can't add a second '" + key + "' criteria. " - + "Query already contains '" + existing.getCriteriaObject() + "'."); + String.format("Due to limitations of the com.mongodb.BasicDocument, you can't add a second '%s' criteria;" + + " Query already contains '%s'", key, serializeToJsonSafely(existing.getCriteriaObject()))); } return this; } public Field fields() { - if (fieldSpec == null) { + + if (this.fieldSpec == null) { this.fieldSpec = new Field(); } + return this.fieldSpec; } /** - * Set number of documents to skip before returning results. - * - * @param skip - * @return + * Set number of documents to skip before returning results. Use {@literal zero} or a {@literal negative} value to + * avoid skipping. + * + * @param skip number of documents to skip. 
Use {@literal zero} or a {@literal negative} value to avoid skipping. + * @return this. */ - public Query skip(int skip) { + public Query skip(long skip) { this.skip = skip; return this; } /** - * Limit the number of returned documents to {@code limit}. - * - * @param limit - * @return + * Limit the number of returned documents to {@code limit}. A {@literal zero} or {@literal negative} value is + * considered as unlimited. + * + * @param limit number of documents to return. Use {@literal zero} or {@literal negative} for unlimited. + * @return this. */ public Query limit(int limit) { - this.limit = limit; + this.limit = limit > 0 ? Limit.of(limit) : Limit.unlimited(); return this; } /** - * Configures the query to use the given hint when being executed. - * - * @param name must not be {@literal null} or empty. - * @return + * Limit the number of returned documents to {@link Limit}. + * + * @param limit number of documents to return. + * @return this. + * @since 4.2 */ - public Query withHint(String name) { - Assert.hasText(name, "Hint must not be empty or null!"); - this.hint = name; + public Query limit(Limit limit) { + + Assert.notNull(limit, "Limit must not be null"); + + if (limit.isUnlimited()) { + this.limit = limit; + return this; + } + + // retain zero/negative semantics for unlimited. + return limit(limit.max()); + } + + /** + * Configures the query to use the given hint when being executed. The {@code hint} can either be an index name or a + * json {@link Document} representation. + * + * @param hint must not be {@literal null} or empty. + * @return this. + * @see Document#parse(String) + */ + public Query withHint(String hint) { + + Assert.hasText(hint, "Hint must not be empty or null"); + this.hint = hint; + return this; + } + + /** + * Configures the query to use the given {@link ReadConcern} when being executed. + * + * @param readConcern must not be {@literal null}. + * @return this. + * @since 3.1 + */ + public Query withReadConcern(ReadConcern readConcern) { + + Assert.notNull(readConcern, "ReadConcern must not be null"); + this.readConcern = readConcern; + return this; + } + + /** + * Configures the query to use the given {@link ReadPreference} when being executed. + * + * @param readPreference must not be {@literal null}. + * @return this. + * @since 4.1 + */ + public Query withReadPreference(ReadPreference readPreference) { + + Assert.notNull(readPreference, "ReadPreference must not be null"); + this.readPreference = readPreference; + return this; + } + + @Override + public boolean hasReadConcern() { + return this.readConcern != null; + } + + @Override + public ReadConcern getReadConcern() { + return this.readConcern; + } + + @Override + public boolean hasReadPreference() { + return this.readPreference != null || getMeta().getFlags().contains(CursorOption.SECONDARY_READS); + } + + @Override + public ReadPreference getReadPreference() { + + if (readPreference == null) { + return getMeta().getFlags().contains(CursorOption.SECONDARY_READS) ? ReadPreference.primaryPreferred() : null; + } + + return this.readPreference; + } + + /** + * Configures the query to use the given {@link Document hint} when being executed. + * + * @param hint must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public Query withHint(Document hint) { + + Assert.notNull(hint, "Hint must not be null"); + this.hint = hint.toJson(); return this; } /** * Sets the given pagination information on the {@link Query} instance. 
Will transparently set {@code skip} and * {@code limit} as well as applying the {@link Sort} instance defined with the {@link Pageable}. - * - * @param pageable - * @return + * + * @param pageable must not be {@literal null}. + * @return this. */ public Query with(Pageable pageable) { - if (pageable == null) { - return this; + if (pageable.isPaged()) { + this.limit = pageable.toLimit(); + this.skip = pageable.getOffset(); } - this.limit = pageable.getPageSize(); - this.skip = pageable.getOffset(); - return with(pageable.getSort()); } + /** + * Sets the given cursor position on the {@link Query} instance. Will transparently set {@code skip}. + * + * @param position must not be {@literal null}. + * @return this. + */ + public Query with(ScrollPosition position) { + + Assert.notNull(position, "ScrollPosition must not be null"); + + if (position instanceof OffsetScrollPosition offset) { + return with(offset); + } + + if (position instanceof KeysetScrollPosition keyset) { + return with(keyset); + } + + throw new IllegalArgumentException(String.format("ScrollPosition %s not supported", position)); + } + + /** + * Sets the given cursor position on the {@link Query} instance. Will transparently set {@code skip}. + * + * @param position must not be {@literal null}. + * @return this. + */ + public Query with(OffsetScrollPosition position) { + + Assert.notNull(position, "ScrollPosition must not be null"); + + this.skip = position.isInitial() ? 0 : position.getOffset() + 1; + this.keysetScrollPosition = null; + return this; + } + + /** + * Sets the given cursor position on the {@link Query} instance. Will transparently reset {@code skip}. + * + * @param position must not be {@literal null}. + * @return this. + */ + public Query with(KeysetScrollPosition position) { + + Assert.notNull(position, "ScrollPosition must not be null"); + + this.skip = 0; + this.keysetScrollPosition = position; + + return this; + } + + public boolean hasKeyset() { + return keysetScrollPosition != null; + } + + @Nullable + public KeysetScrollPosition getKeyset() { + return keysetScrollPosition; + } + /** * Adds a {@link Sort} to the {@link Query} instance. - * - * @param sort - * @return + * + * @param sort must not be {@literal null}. + * @return this. */ public Query with(Sort sort) { - if (sort == null) { + Assert.notNull(sort, "Sort must not be null"); + + if (sort.isUnsorted()) { return this; } - for (Order order : sort) { - if (order.isIgnoreCase()) { - throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! " - + "MongoDB does not support sorting ignoreing case currently!", order.getProperty())); - } - } + sort.stream().filter(Order::isIgnoreCase).findFirst().ifPresent(it -> { - if (this.sort == null) { - this.sort = sort; - } else { - this.sort = this.sort.and(sort); - } + throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case;" + " MongoDB does not support sorting ignoring case currently", it.getProperty())); + }); + + this.sort = this.sort.and(sort); return this; } @@ -194,91 +383,138 @@ public Query with(Sort sort) { * @return the restrictedTypes */ public Set> getRestrictedTypes() { - return restrictedTypes == null ? Collections.> emptySet() : restrictedTypes; + return restrictedTypes; } /** * Restricts the query to only return document instances that are exactly of the given types. - * + * * @param type may not be {@literal null} * @param additionalTypes may not be {@literal null} - * @return + * @return this. 
*/ public Query restrict(Class type, Class... additionalTypes) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); Assert.notNull(additionalTypes, "AdditionalTypes must not be null"); + if (restrictedTypes == Collections.EMPTY_SET) { + restrictedTypes = new HashSet<>(1 + additionalTypes.length); + } + restrictedTypes.add(type); - for (Class additionalType : additionalTypes) { - restrictedTypes.add(additionalType); + + if (additionalTypes.length > 0) { + restrictedTypes.addAll(Arrays.asList(additionalTypes)); } return this; } - public DBObject getQueryObject() { + /** + * @return the query {@link Document}. + */ + public Document getQueryObject() { - DBObject dbo = new BasicDBObject(); + if (criteria.isEmpty() && restrictedTypes.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + if (criteria.size() == 1 && restrictedTypes.isEmpty()) { + + for (CriteriaDefinition definition : criteria.values()) { + return definition.getCriteriaObject(); + } + } + + Document document = new Document(); for (CriteriaDefinition definition : criteria.values()) { - dbo.putAll(definition.getCriteriaObject()); + document.putAll(definition.getCriteriaObject()); } if (!restrictedTypes.isEmpty()) { - dbo.put(RESTRICTED_TYPES_KEY, getRestrictedTypes()); + document.put(RESTRICTED_TYPES_KEY, getRestrictedTypes()); } - return dbo; + return document; } - public DBObject getFieldsObject() { - return this.fieldSpec == null ? null : fieldSpec.getFieldsObject(); + /** + * @return the field {@link Document}. + */ + public Document getFieldsObject() { + return this.fieldSpec == null ? BsonUtils.EMPTY_DOCUMENT : fieldSpec.getFieldsObject(); } - public DBObject getSortObject() { + /** + * @return the sort {@link Document}. + */ + public Document getSortObject() { - if (this.sort == null) { - return null; + if (this.sort.isUnsorted()) { + return BsonUtils.EMPTY_DOCUMENT; } - DBObject dbo = new BasicDBObject(); + Document document = new Document(); - for (org.springframework.data.domain.Sort.Order order : this.sort) { - dbo.put(order.getProperty(), order.isAscending() ? 1 : -1); - } + this.sort.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1)); - return dbo; + return document; } /** - * Get the number of documents to skip. - * - * @return + * Returns {@literal true} if the {@link Query} has a sort parameter. + * + * @return {@literal true} if sorted. + * @see Sort#isSorted() + * @since 2.2 + */ + public boolean isSorted() { + return sort.isSorted(); + } + + /** + * Get the number of documents to skip. {@literal Zero} or a {@literal negative} value indicates no skip. + * + * @return number of documents to skip */ - public int getSkip() { + public long getSkip() { return this.skip; } /** - * Get the maximum number of documents to be returned. - * - * @return + * Returns whether the query is {@link #limit(int) limited}. + * + * @return {@code true} if the query is limited; {@code false} otherwise. + * @since 4.1 + */ + public boolean isLimited() { + return this.limit.isLimited(); + } + + /** + * Get the maximum number of documents to be returned. {@literal Zero} or a {@literal negative} value indicates no + * limit. + * + * @return number of documents to return. + * @see #isLimited() */ public int getLimit() { - return this.limit; + return limit.isUnlimited() ? 0 : this.limit.max(); } /** - * @return + * @return can be {@literal null}. */ + @Nullable public String getHint() { return hint; } /** * @param maxTimeMsec - * @return + * @return this. 
* @see Meta#setMaxTimeMsec(long) * @since 1.6 */ @@ -289,50 +525,108 @@ public Query maxTimeMsec(long maxTimeMsec) { } /** - * @param timeout - * @param timeUnit - * @return - * @see Meta#setMaxTime(long, TimeUnit) - * @since 1.6 + * @param timeout must not be {@literal null}. + * @return this. + * @see Meta#setMaxTime(Duration) + * @since 2.1 */ - public Query maxTime(long timeout, TimeUnit timeUnit) { + public Query maxTime(Duration timeout) { - meta.setMaxTime(timeout, timeUnit); + meta.setMaxTime(timeout); return this; } /** - * @param maxScan - * @return - * @see Meta#setMaxScan(long) + * Add a comment to the query that is propagated to the profile log. + * + * @param comment must not be {@literal null}. + * @return this. + * @see Meta#setComment(String) * @since 1.6 */ - public Query maxScan(long maxScan) { + public Query comment(String comment) { - meta.setMaxScan(maxScan); + meta.setComment(comment); return this; } /** - * @param comment - * @return - * @see Meta#setComment(String) - * @since 1.6 + * Enables writing to temporary files for aggregation stages and queries. When set to {@literal true}, aggregation + * stages can write data to the {@code _tmp} subdirectory in the {@code dbPath} directory. + *

+ * Starting in MongoDB 4.2, the profiler log messages and diagnostic log messages include a {@code usedDisk} + * indicator if any aggregation stage wrote data to temporary files due to memory restrictions. + * + * @param allowDiskUse + * @return this. + * @see Meta#setAllowDiskUse(Boolean) + * @since 3.2 */ - public Query comment(String comment) { + public Query allowDiskUse(boolean allowDiskUse) { - meta.setComment(comment); + meta.setAllowDiskUse(allowDiskUse); return this; } /** - * @return - * @see Meta#setSnapshot(boolean) - * @since 1.6 + * Set the number of documents to return in each response batch.
                    + * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. + * + * @param batchSize The number of documents to return per batch. + * @return this. + * @see Meta#setCursorBatchSize(int) + * @since 2.1 + */ + public Query cursorBatchSize(int batchSize) { + + meta.setCursorBatchSize(batchSize); + return this; + } + + /** + * @return this. + * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#NO_TIMEOUT + * @since 1.10 */ - public Query useSnapshot() { + public Query noCursorTimeout() { - meta.setSnapshot(true); + meta.addFlag(Meta.CursorOption.NO_TIMEOUT); + return this; + } + + /** + * @return this. + * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#EXHAUST + * @since 1.10 + */ + public Query exhaust() { + + meta.addFlag(Meta.CursorOption.EXHAUST); + return this; + } + + /** + * Allows querying of a replica. + * + * @return this. + * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#SECONDARY_READS + * @since 3.0.2 + */ + public Query allowSecondaryReads() { + + meta.addFlag(Meta.CursorOption.SECONDARY_READS); + return this; + } + + /** + * @return this. + * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#PARTIAL + * @since 1.10 + */ + public Query partialResults() { + + meta.addFlag(Meta.CursorOption.PARTIAL); return this; } @@ -350,30 +644,99 @@ public Meta getMeta() { */ public void setMeta(Meta meta) { - Assert.notNull(meta, "Query meta might be empty but must not be null."); + Assert.notNull(meta, "Query meta might be empty but must not be null"); this.meta = meta; } + /** + * Set the {@link Collation} applying language-specific rules for string comparison. + * + * @param collation can be {@literal null}. + * @return this. + * @since 2.0 + */ + public Query collation(@Nullable Collation collation) { + + this.collation = Optional.ofNullable(collation); + return this; + } + + /** + * Get the {@link Collation} defining language-specific rules for string comparison. + * + * @return never {@literal null}. + * @since 2.0 + */ + public Optional getCollation() { + return collation; + } + protected List getCriteria() { - return new ArrayList(this.criteria.values()); + return new ArrayList<>(this.criteria.values()); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() + /** + * Create an independent copy of the given {@link Query}.
+ * The resulting {@link Query} will not be {@link Object#equals(Object) binary equal} to the given source but + * semantically equal in terms of creating the same result when executed. + * + * @param source The source {@link Query} to use as a reference. Must not be {@literal null}. + * @return new {@link Query}. + * @since 2.2 */ + public static Query of(Query source) { + + Assert.notNull(source, "Source must not be null"); + + Document sourceFields = source.getFieldsObject(); + Document sourceSort = source.getSortObject(); + Document sourceQuery = source.getQueryObject(); + + Query target = new Query() { + + @Override + public Document getFieldsObject() { + return BsonUtils.merge(sourceFields, super.getFieldsObject()); + } + + @Override + public Document getSortObject() { + return BsonUtils.merge(sourceSort, super.getSortObject()); + } + + @Override + public Document getQueryObject() { + return BsonUtils.merge(sourceQuery, super.getQueryObject()); + } + + @Override + public boolean isSorted() { + return source.isSorted() || super.isSorted(); + } + }; + + target.skip = source.getSkip(); + + target.limit = source.isLimited() ? Limit.of(source.getLimit()) : Limit.unlimited(); + target.hint = source.getHint(); + target.collation = source.getCollation(); + target.restrictedTypes = new HashSet<>(source.getRestrictedTypes()); + + if (source.getMeta().hasValues()) { + target.setMeta(new Meta(source.getMeta())); + } + + return target; + } + @Override public String toString() { return String.format("Query: %s, Fields: %s, Sort: %s", serializeToJsonSafely(getQueryObject()), serializeToJsonSafely(getFieldsObject()), serializeToJsonSafely(getSortObject())); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -388,7 +751,7 @@ public boolean equals(Object obj) { /** * Tests whether the settings of the given {@link Query} are equal to this query. - * + * * @param that * @return */ @@ -396,19 +759,17 @@ protected boolean querySettingsEquals(Query that) { boolean criteriaEqual = this.criteria.equals(that.criteria); boolean fieldsEqual = nullSafeEquals(this.fieldSpec, that.fieldSpec); - boolean sortEqual = nullSafeEquals(this.sort, that.sort); + boolean sortEqual = this.sort.equals(that.sort); boolean hintEqual = nullSafeEquals(this.hint, that.hint); boolean skipEqual = this.skip == that.skip; - boolean limitEqual = this.limit == that.limit; + boolean limitEqual = nullSafeEquals(this.limit, that.limit); boolean metaEqual = nullSafeEquals(this.meta, that.meta); + boolean collationEqual = nullSafeEquals(this.collation.orElse(null), that.collation.orElse(null)); - return criteriaEqual && fieldsEqual && sortEqual && hintEqual && skipEqual && limitEqual && metaEqual; + return criteriaEqual && fieldsEqual && sortEqual && hintEqual && skipEqual && limitEqual && metaEqual + && collationEqual; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -419,15 +780,16 @@ public int hashCode() { result += 31 * nullSafeHashCode(sort); result += 31 * nullSafeHashCode(hint); result += 31 * skip; - result += 31 * limit; + result += 31 * limit.hashCode(); result += 31 * nullSafeHashCode(meta); + result += 31 * nullSafeHashCode(collation.orElse(null)); return result; } /** * Returns whether the given key is the one used to hold the type restriction information. 
- * + * * @deprecated don't call this method as the restricted type handling will undergo some significant changes going * forward. * @param key @@ -437,4 +799,5 @@ public int hashCode() { public static boolean isRestrictedTypeKey(String key) { return RESTRICTED_TYPES_KEY.equals(key); } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java index 4064b239b8..11e0f7fb24 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2016 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,25 @@ */ package org.springframework.data.mongodb.core.query; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; -import java.util.Map.Entry; -import org.springframework.core.convert.converter.Converter; +import org.bson.Document; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.util.JSON; +import org.springframework.core.convert.converter.Converter; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; /** * Utility methods for JSON serialization. - * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ public abstract class SerializationUtils { @@ -41,8 +42,8 @@ private SerializationUtils() { } /** - * Flattens out a given {@link DBObject}. - * + * Flattens out a given {@link Document}. + * *

                     	 * 
                     	 * {
                    @@ -50,7 +51,7 @@ private SerializationUtils() {
                     	 *   nested : { value : "conflux"}
                     	 * }
                     	 * 
                    -	 * will result in 
                    +	 * will result in
                     	 * 
                     	 * {
                     	 *   _id : 1
                    @@ -58,39 +59,38 @@ private SerializationUtils() {
                     	 * }
                     	 * 
                     	 * 
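	 * A usage sketch of the method below ({@code flat} is a hypothetical variable name):
	 * 
	 * var flat = flattenMap(Document.parse("{ _id : 1, nested : { value : 'conflux' } }"));
	 * flat.get("nested.value"); // "conflux"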
- * + * @param source can be {@literal null}. * @return {@link Collections#emptyMap()} when source is {@literal null} * @since 1.8 */ - public static Map flattenMap(DBObject source) { + public static Map flattenMap(@Nullable Document source) { if (source == null) { return Collections.emptyMap(); } - Map result = new HashMap(); + Map result = new LinkedHashMap<>(); toFlatMap("", source, result); return result; } private static void toFlatMap(String currentPath, Object source, Map map) { - if (source instanceof BasicDBObject) { + if (source instanceof Document document) { - BasicDBObject dbo = (BasicDBObject) source; - Iterator> iter = dbo.entrySet().iterator(); - String pathPrefix = currentPath.isEmpty() ? "" : currentPath + "."; + Iterator> it = document.entrySet().iterator(); + String pathPrefix = currentPath.isEmpty() ? "" : currentPath + '.'; - while (iter.hasNext()) { + while (it.hasNext()) { - Map.Entry entry = iter.next(); + Map.Entry entry = it.next(); if (entry.getKey().startsWith("$")) { if (map.containsKey(currentPath)) { - ((BasicDBObject) map.get(currentPath)).put(entry.getKey(), entry.getValue()); + ((Document) map.get(currentPath)).put(entry.getKey(), entry.getValue()); } else { - map.put(currentPath, new BasicDBObject(entry.getKey(), entry.getValue())); + map.put(currentPath, new Document(entry.getKey(), entry.getValue())); } } else { @@ -105,53 +105,59 @@ private static void toFlatMap(String currentPath, Object source, Map) value); - } else if (value instanceof Map) { - return toString((Map) value); - } else if (value instanceof DBObject) { - return toString(((DBObject) value).toMap()); + + if (value instanceof Collection collection) { + return toString(collection); + } else if (value instanceof Map map) { + return toString(map); + } else if (ObjectUtils.isArray(value)) { + return toString(Arrays.asList(ObjectUtils.toObjectArray(value))); } else { - return String.format("{ $java : %s }", value.toString()); + return String.format("{ \"$java\" : %s }", value); } } } + public static String serializeValue(@Nullable Object value) { + + if (value == null) { + return "null"; + } + + String documentJson = new Document("toBeEncoded", value).toJson(); + return documentJson.substring(documentJson.indexOf(':') + 1, documentJson.length() - 1).trim(); + } + private static String toString(Map source) { - return iterableToDelimitedString(source.entrySet(), "{ ", " }", new Converter, Object>() { - public Object convert(Entry source) { - return String.format("\"%s\" : %s", source.getKey(), serializeToJsonSafely(source.getValue())); - } - }); + return iterableToDelimitedString(source.entrySet(), "{ ", " }", + entry -> String.format("\"%s\" : %s", entry.getKey(), serializeToJsonSafely(entry.getValue()))); } private static String toString(Collection source) { - return iterableToDelimitedString(source, "[ ", " ]", new Converter() { - public Object convert(Object source) { - return serializeToJsonSafely(source); - } - }); + return iterableToDelimitedString(source, "[ ", " ]", SerializationUtils::serializeToJsonSafely); } /** * Creates a string representation from the given {@link Iterable} prepending the prefix, applying the given * {@link Converter} to each element before adding it to the result {@link String}, concatenating each element with * {@literal ,} and applying the postfix. 
- * + * * @param source * @param prefix * @param postfix @@ -165,6 +171,7 @@ private static String iterableToDelimitedString(Iterable source, String p Iterator iterator = source.iterator(); while (iterator.hasNext()) { + builder.append(transformer.convert(iterator.next())); if (iterator.hasNext()) { builder.append(", "); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java index c7a2edffb5..bd6d8c3469 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,21 @@ */ package org.springframework.data.mongodb.core.query; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + /** * A {@link Term} defines one or multiple words {@link Type#WORD} or phrases {@link Type#PHRASE} to be used in the * context of full text search. - * + * * @author Christoph Strobl + * @author Mark Paluch * @since 1.6 */ public class Term { public enum Type { - WORD, PHRASE; + WORD, PHRASE } private final Type type; @@ -34,7 +38,7 @@ public enum Type { /** * Creates a new {@link Term} of {@link Type#WORD}. - * + * * @param raw */ public Term(String raw) { @@ -43,18 +47,18 @@ public Term(String raw) { /** * Creates a new {@link Term} of given {@link Type}. - * + * * @param raw * @param type defaulted to {@link Type#WORD} if {@literal null}. */ - public Term(String raw, Type type) { + public Term(String raw, @Nullable Type type) { this.raw = raw; this.type = type == null ? Type.WORD : type; } /** * Negates the term. - * + * * @return */ public Term negate() { @@ -78,7 +82,7 @@ public Type getType() { /** * Get formatted representation of term. - * + * * @return */ public String getFormatted() { @@ -87,6 +91,33 @@ public String getFormatted() { return negated ? 
negateRaw(formatted) : formatted; } + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + + if (!(o instanceof Term other)) { + return false; + } + + return ObjectUtils.nullSafeEquals(negated, other.negated) && ObjectUtils.nullSafeEquals(type, other.type) + && ObjectUtils.nullSafeEquals(raw, other.raw); + } + + @Override + public int hashCode() { + + int result = 17; + + result += ObjectUtils.nullSafeHashCode(type); + result += ObjectUtils.nullSafeHashCode(raw); + result += ObjectUtils.nullSafeHashCode(negated); + + return result; + } + @Override public String toString() { return getFormatted(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java index 61bd209584..e1a7d0c4d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,28 +18,30 @@ import java.util.ArrayList; import java.util.List; +import org.bson.Document; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; - /** * Implementation of {@link CriteriaDefinition} to be used for full text search. - * + * * @author Christoph Strobl * @author Oliver Gierke + * @author Mark Paluch * @since 1.6 */ public class TextCriteria implements CriteriaDefinition { private final List terms; - private String language; + private final @Nullable String language; + private @Nullable Boolean caseSensitive; + private @Nullable Boolean diacriticSensitive; /** * Creates a new {@link TextCriteria}. - * + * * @see #forDefaultLanguage() * @see #forLanguage(String) */ @@ -47,15 +49,15 @@ public TextCriteria() { this(null); } - private TextCriteria(String language) { + private TextCriteria(@Nullable String language) { this.language = language; - this.terms = new ArrayList(); + this.terms = new ArrayList<>(); } /** * Returns a new {@link TextCriteria} for the default language. - * + * * @return */ public static TextCriteria forDefaultLanguage() { @@ -63,21 +65,21 @@ public static TextCriteria forDefaultLanguage() { } /** - * For a full list of supported languages see the mongodb reference manual for Text Search Languages. - * + * For a full list of supported languages see the mongodb reference manual for + * Text Search Languages. 
+ * + * @param language * @return */ public static TextCriteria forLanguage(String language) { - Assert.hasText(language, "Language must not be null or empty!"); + Assert.hasText(language, "Language must not be null or empty"); return new TextCriteria(language); } /** * Configures the {@link TextCriteria} to match any of the given words. - * + * * @param words the words to match. * @return */ @@ -92,12 +94,12 @@ public TextCriteria matchingAny(String... words) { /** * Adds given {@link Term} to criteria. - * + * * @param term must not be {@literal null}. */ public TextCriteria matching(Term term) { - Assert.notNull(term, "Term to add must not be null."); + Assert.notNull(term, "Term to add must not be null"); this.terms.add(term); return this; @@ -141,7 +143,7 @@ public TextCriteria notMatchingAny(String... words) { /** * Given value will be treated as a single phrase. - * + * * @param phrase * @return */ @@ -155,7 +157,7 @@ public TextCriteria notMatchingPhrase(String phrase) { /** * Given value will be treated as a single phrase. - * + * * @param phrase * @return */ @@ -167,38 +169,92 @@ public TextCriteria matchingPhrase(String phrase) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getKey() + /** + * Optionally enable or disable case sensitive search. + * + * @param caseSensitive boolean flag to enable/disable. + * @return never {@literal null}. + * @since 1.10 + */ + public TextCriteria caseSensitive(boolean caseSensitive) { + + this.caseSensitive = caseSensitive; + return this; + } + + /** + * Optionally enable or disable diacritic sensitive search against version 3 text indexes. + * + * @param diacriticSensitive boolean flag to enable/disable. + * @return never {@literal null}. + * @since 1.10 */ + public TextCriteria diacriticSensitive(boolean diacriticSensitive) { + + this.diacriticSensitive = diacriticSensitive; + return this; + } + @Override public String getKey() { return "$text"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject() - */ @Override - public DBObject getCriteriaObject() { + public Document getCriteriaObject() { - BasicDBObjectBuilder builder = new BasicDBObjectBuilder(); + Document document = new Document(); if (StringUtils.hasText(language)) { - builder.add("$language", language); + document.put("$language", language); } if (!terms.isEmpty()) { - builder.add("$search", join(terms)); + document.put("$search", join(terms)); + } + + if (caseSensitive != null) { + document.put("$caseSensitive", caseSensitive); } - return new BasicDBObject("$text", builder.get()); + if (diacriticSensitive != null) { + document.put("$diacriticSensitive", diacriticSensitive); + } + + return new Document("$text", document); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + if (!(o instanceof TextCriteria that)) { + return false; + } + + return ObjectUtils.nullSafeEquals(terms, that.terms) && ObjectUtils.nullSafeEquals(language, that.language) + && ObjectUtils.nullSafeEquals(caseSensitive, that.caseSensitive) + && ObjectUtils.nullSafeEquals(diacriticSensitive, that.diacriticSensitive); + } + + @Override + public int hashCode() { + + int result = 17; + + result += ObjectUtils.nullSafeHashCode(terms); + result += ObjectUtils.nullSafeHashCode(language); + result += ObjectUtils.nullSafeHashCode(caseSensitive); + result += ObjectUtils.nullSafeHashCode(diacriticSensitive); + + return result; } private String 
join(Iterable terms) { - List result = new ArrayList(); + List result = new ArrayList<>(); for (Term term : terms) { if (term != null) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java index 8e54122cab..a6583299d6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,28 +16,32 @@ package org.springframework.data.mongodb.core.query; import java.util.Locale; +import java.util.Map.Entry; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; /** * {@link Query} implementation to be used for performing full text searches. - * + * * @author Christoph Strobl + * @author Mark Paluch * @since 1.6 */ public class TextQuery extends Query { private final String DEFAULT_SCORE_FIELD_FIELDNAME = "score"; - private final DBObject META_TEXT_SCORE = new BasicDBObject("$meta", "textScore"); + private final Document META_TEXT_SCORE = new Document("$meta", "textScore"); private String scoreFieldName = DEFAULT_SCORE_FIELD_FIELDNAME; private boolean includeScore = false; private boolean sortByScore = false; + private int sortByScoreIndex = 0; /** * Creates new {@link TextQuery} using the given {@code wordsAndPhrases} with {@link TextCriteria}. - * + * * @param wordsAndPhrases * @see TextCriteria#matching(String) */ @@ -47,34 +51,34 @@ public TextQuery(String wordsAndPhrases) { /** * Creates new {@link TextQuery} in {@code language}.
- * For a full list of supported languages see the mongdodb reference manual for Text Search Languages. - * + * For a full list of supported languages see the MongoDB reference manual for + * Text Search Languages. + * * @param wordsAndPhrases * @param language * @see TextCriteria#forLanguage(String) * @see TextCriteria#matching(String) */ - public TextQuery(String wordsAndPhrases, String language) { + public TextQuery(String wordsAndPhrases, @Nullable String language) { super(TextCriteria.forLanguage(language).matching(wordsAndPhrases)); } /** * Creates new {@link TextQuery} using the {@code locale}'s language.
- * For a full list of supported languages see the mongdodb reference manual for Text Search Languages. - * - * @param wordsAndPhrases - * @param locale + * For a full list of supported languages see the MongoDB reference manual for + * Text Search Languages. + * + * @param wordsAndPhrases must not be {@literal null}. + * @param locale can be {@literal null}. */ - public TextQuery(String wordsAndPhrases, Locale locale) { - this(wordsAndPhrases, locale != null ? locale.getLanguage() : (String) null); + public TextQuery(String wordsAndPhrases, @Nullable Locale locale) { + this(wordsAndPhrases, locale != null ? locale.getLanguage() : null); } /** * Creates new {@link TextQuery} for given {@link TextCriteria}. - * - * @param criteria. + * + * @param criteria must not be {@literal null}. */ public TextQuery(TextCriteria criteria) { super(criteria); @@ -82,9 +86,9 @@ public TextQuery(TextCriteria criteria) { /** * Creates new {@link TextQuery} searching for given {@link TextCriteria}. - * - * @param criteria - * @return + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TextQuery}. */ public static TextQuery queryText(TextCriteria criteria) { return new TextQuery(criteria); @@ -92,12 +96,13 @@ public static TextQuery queryText(TextCriteria criteria) { /** * Add sorting by text score. Will also add text score to returned fields. - * + * * @see TextQuery#includeScore() - * @return + * @return this. */ public TextQuery sortByScore() { + this.sortByScoreIndex = getSortObject().size(); this.includeScore(); this.sortByScore = true; return this; @@ -105,8 +110,8 @@ public TextQuery sortByScore() { /** * Add field {@literal score} holding the document's textScore to the returned fields. - * - * @return + * + * @return this. */ public TextQuery includeScore() { @@ -116,9 +121,9 @@ public TextQuery includeScore() { /** * Include text search document score in returned fields using the given fieldname. - * - * @param fieldname - * @return + * + * @param fieldname must not be {@literal null}. + * @return this. */ public TextQuery includeScore(String fieldname) { @@ -129,8 +134,8 @@ public TextQuery includeScore(String fieldname) { /** * Set the fieldname used for scoring. - * - * @param fieldName + * + * @param fieldName must not be {@literal null}. */ public void setScoreFieldName(String fieldName) { this.scoreFieldName = fieldName; @@ -138,51 +143,73 @@ public void setScoreFieldName(String fieldName) { /** * Get the fieldname used for scoring - * - * @return + * + * @return never {@literal null}. 
*/ public String getScoreFieldName() { return scoreFieldName; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#getFieldsObject() - */ @Override - public DBObject getFieldsObject() { + public Document getFieldsObject() { if (!this.includeScore) { return super.getFieldsObject(); } - DBObject fields = super.getFieldsObject(); - - if (fields == null) { - fields = new BasicDBObject(); - } + Document fields = BsonUtils.asMutableDocument(super.getFieldsObject()); fields.put(getScoreFieldName(), META_TEXT_SCORE); return fields; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#getSortObject() - */ @Override - public DBObject getSortObject() { - - DBObject sort = new BasicDBObject(); + public Document getSortObject() { if (this.sortByScore) { - sort.put(getScoreFieldName(), META_TEXT_SCORE); - } - if (super.getSortObject() != null) { - sort.putAll(super.getSortObject()); + int sortByScoreIndex = this.sortByScoreIndex; + + return sortByScoreIndex != 0 + ? sortByScoreAtPosition(super.getSortObject(), sortByScoreIndex) + : sortByScoreAtPositionZero(); } + return super.getSortObject(); + } + + private Document sortByScoreAtPositionZero() { + + Document sort = new Document(); + + sort.put(getScoreFieldName(), META_TEXT_SCORE); + sort.putAll(super.getSortObject()); + return sort; } + + private Document sortByScoreAtPosition(Document source, int sortByScoreIndex) { + + Document target = new Document(); + int index = 0; + + for (Entry<String, Object> entry : source.entrySet()) { + if (index == sortByScoreIndex) { + target.put(getScoreFieldName(), META_TEXT_SCORE); + } + target.put(entry.getKey(), entry.getValue()); + index++; + } + + if (index == sortByScoreIndex) { + target.put(getScoreFieldName(), META_TEXT_SCORE); + } + + return target; + } + + @Override + public boolean isSorted() { + return super.isSorted() || sortByScore; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java new file mode 100644 index 0000000000..677575c9e4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java @@ -0,0 +1,151 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import java.util.Set; + +import org.springframework.data.domain.ExampleMatcher; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * {@link ExampleMatcher} implementation for query by example (QBE). Unlike plain {@link ExampleMatcher} this untyped + * counterpart does not enforce type matching when executing the query. This allows querying unrelated example documents + * as references for querying collections as long as the used field/property names match.
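A sketch of that behavior (LegacyPerson, Person and template are assumed; the two classes only need matching field names):

    Example<LegacyPerson> example = Example.of(new LegacyPerson("Walter"),
        UntypedExampleMatcher.matching().withIgnoreNullValues());

    List<Person> result = template.find(new Query(Criteria.byExample(example)), Person.class);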
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public class UntypedExampleMatcher implements ExampleMatcher { + + private final ExampleMatcher delegate; + + private UntypedExampleMatcher(ExampleMatcher delegate) { + this.delegate = delegate; + } + + public static UntypedExampleMatcher matching() { + return new UntypedExampleMatcher(ExampleMatcher.matching()); + } + + public static UntypedExampleMatcher matchingAny() { + return new UntypedExampleMatcher(ExampleMatcher.matchingAny()); + } + + public static UntypedExampleMatcher matchingAll() { + return new UntypedExampleMatcher(ExampleMatcher.matchingAll()); + } + + public UntypedExampleMatcher withIgnorePaths(String... ignoredPaths) { + return new UntypedExampleMatcher(delegate.withIgnorePaths(ignoredPaths)); + } + + public UntypedExampleMatcher withStringMatcher(StringMatcher defaultStringMatcher) { + return new UntypedExampleMatcher(delegate.withStringMatcher(defaultStringMatcher)); + } + + public UntypedExampleMatcher withIgnoreCase() { + return new UntypedExampleMatcher(delegate.withIgnoreCase()); + } + + public UntypedExampleMatcher withIgnoreCase(boolean defaultIgnoreCase) { + return new UntypedExampleMatcher(delegate.withIgnoreCase(defaultIgnoreCase)); + } + + public UntypedExampleMatcher withMatcher(String propertyPath, + MatcherConfigurer matcherConfigurer) { + return new UntypedExampleMatcher(delegate.withMatcher(propertyPath, matcherConfigurer)); + } + + public UntypedExampleMatcher withMatcher(String propertyPath, GenericPropertyMatcher genericPropertyMatcher) { + return new UntypedExampleMatcher(delegate.withMatcher(propertyPath, genericPropertyMatcher)); + } + + public UntypedExampleMatcher withTransformer(String propertyPath, PropertyValueTransformer propertyValueTransformer) { + return new UntypedExampleMatcher(delegate.withTransformer(propertyPath, propertyValueTransformer)); + } + + public UntypedExampleMatcher withIgnoreCase(String... 
propertyPaths) { + return new UntypedExampleMatcher(delegate.withIgnoreCase(propertyPaths)); + } + + public UntypedExampleMatcher withIncludeNullValues() { + return new UntypedExampleMatcher(delegate.withIncludeNullValues()); + } + + public UntypedExampleMatcher withIgnoreNullValues() { + return new UntypedExampleMatcher(delegate.withIgnoreNullValues()); + } + + public UntypedExampleMatcher withNullHandler(NullHandler nullHandler) { + return new UntypedExampleMatcher(delegate.withNullHandler(nullHandler)); + } + + public NullHandler getNullHandler() { + return delegate.getNullHandler(); + } + + public StringMatcher getDefaultStringMatcher() { + return delegate.getDefaultStringMatcher(); + } + + public boolean isIgnoreCaseEnabled() { + return delegate.isIgnoreCaseEnabled(); + } + + public boolean isIgnoredPath(String path) { + return delegate.isIgnoredPath(path); + } + + public Set getIgnoredPaths() { + return delegate.getIgnoredPaths(); + } + + public PropertySpecifiers getPropertySpecifiers() { + return delegate.getPropertySpecifiers(); + } + + public boolean isAllMatching() { + return delegate.isAllMatching(); + } + + public boolean isAnyMatching() { + return delegate.isAnyMatching(); + } + + public MatchMode getMatchMode() { + return delegate.getMatchMode(); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + UntypedExampleMatcher that = (UntypedExampleMatcher) o; + + return ObjectUtils.nullSafeEquals(delegate, that.delegate); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(delegate); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java index 00e35280b8..32d98f5804 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.query; -import static org.springframework.util.ObjectUtils.*; - +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -24,57 +23,66 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; +import org.bson.Document; import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Class to easily construct MongoDB update clauses. - * + * * @author Thomas Risberg * @author Mark Pollack * @author Oliver Gierke * @author Becca Gaspard * @author Christoph Strobl * @author Thomas Darimont + * @author Alexey Plotnik + * @author Mark Paluch + * @author Pavel Vodrazka */ -public class Update { +public class Update implements UpdateDefinition { public enum Position { LAST, FIRST } - private Set keysToUpdate = new HashSet(); - private Map modifierOps = new LinkedHashMap(); - private Map pushCommandBuilders = new LinkedHashMap(1); + private boolean isolated = false; + private final Set keysToUpdate = new HashSet<>(); + private final Map modifierOps = new LinkedHashMap<>(); + private Map pushCommandBuilders = Collections.emptyMap(); + private List arrayFilters = Collections.emptyList(); /** * Static factory method to create an Update using the provided key - * - * @param key - * @return + * + * @param key the field to update. + * @return new instance of {@link Update}. */ - public static Update update(String key, Object value) { + public static Update update(String key, @Nullable Object value) { return new Update().set(key, value); } /** - * Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exclude fields from making + * Creates an {@link Update} instance from the given {@link Document}. Allows to explicitly exclude fields from making * it into the created {@link Update} object. Note, that this will set attributes directly and not use - * {@literal $set}. This means fields not given in the {@link DBObject} will be nulled when executing the update. To - * create an only-updating {@link Update} instance of a {@link DBObject}, call {@link #set(String, Object)} for each + * {@literal $set}. This means fields not given in the {@link Document} will be nulled when executing the update. To + * create an only-updating {@link Update} instance of a {@link Document}, call {@link #set(String, Object)} for each * value in it. - * - * @param object the source {@link DBObject} to create the update from. + * + * @param object the source {@link Document} to create the update from. * @param exclude the fields to exclude. - * @return + * @return new instance of {@link Update}. */ - public static Update fromDBObject(DBObject object, String... exclude) { + public static Update fromDocument(Document object, String... 
exclude) { Update update = new Update(); List excludeList = Arrays.asList(exclude); @@ -87,8 +95,8 @@ public static Update fromDBObject(DBObject object, String... exclude) { Object value = object.get(key); update.modifierOps.put(key, value); - if (isKeyword(key) && value instanceof DBObject) { - update.keysToUpdate.addAll(((DBObject) value).keySet()); + if (isKeyword(key) && value instanceof Document document) { + update.keysToUpdate.addAll(document.keySet()); } else { update.keysToUpdate.add(key); } @@ -99,36 +107,38 @@ public static Update fromDBObject(DBObject object, String... exclude) { /** * Update using the {@literal $set} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/set/ - * @param key - * @param value - * @return + * + * @param key the field name. + * @param value can be {@literal null}. In this case the property remains in the db with a {@literal null} value. To + * remove it use {@link #unset(String)}. + * @return this. + * @see MongoDB Update operator: $set */ - public Update set(String key, Object value) { + public Update set(String key, @Nullable Object value) { addMultiFieldOperation("$set", key, value); return this; } /** * Update using the {@literal $setOnInsert} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/setOnInsert/ - * @param key - * @param value - * @return + * + * @param key the field name. + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Update operator: + * $setOnInsert */ - public Update setOnInsert(String key, Object value) { + public Update setOnInsert(String key, @Nullable Object value) { addMultiFieldOperation("$setOnInsert", key, value); return this; } /** * Update using the {@literal $unset} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/unset/ - * @param key - * @return + * + * @param key the field name. + * @return this. + * @see MongoDB Update operator: $unset */ public Update unset(String key) { addMultiFieldOperation("$unset", key, 1); @@ -137,26 +147,31 @@ public Update unset(String key) { /** * Update using the {@literal $inc} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/inc/ - * @param key - * @param inc - * @return + * + * @param key the field name. + * @param inc must not be {@literal null}. + * @return this. + * @see MongoDB Update operator: $inc */ public Update inc(String key, Number inc) { addMultiFieldOperation("$inc", key, inc); return this; } + @Override + public void inc(String key) { + inc(key, 1L); + } + /** * Update using the {@literal $push} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/push/ - * @param key - * @param value - * @return + * + * @param key the field name. + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Update operator: $push */ - public Update push(String key, Object value) { + public Update push(String key, @Nullable Object value) { addMultiFieldOperation("$push", key, value); return this; } @@ -165,41 +180,31 @@ public Update push(String key, Object value) { * Update using {@code $push} modifier.
                    * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values as well as using * {@code $position}. - * - * @see http://docs.mongodb.org/manual/reference/operator/update/push/ - * @see http://docs.mongodb.org/manual/reference/operator/update/each/ - * @param key + * + * @param key the field name. * @return {@link PushOperatorBuilder} for given key + * @see MongoDB Update operator: $push + * @see MongoDB Update operator: $each */ public PushOperatorBuilder push(String key) { if (!pushCommandBuilders.containsKey(key)) { + + if (pushCommandBuilders == Collections.EMPTY_MAP) { + pushCommandBuilders = new LinkedHashMap<>(1); + } + pushCommandBuilders.put(key, new PushOperatorBuilder(key)); } return pushCommandBuilders.get(key); } - /** - * Update using the {@code $pushAll} update modifier.
                    - * Note: In mongodb 2.4 the usage of {@code $pushAll} has been deprecated in favor of {@code $push $each}. - * {@link #push(String)}) returns a builder that can be used to populate the {@code $each} object. - * - * @see http://docs.mongodb.org/manual/reference/operator/update/pushAll/ - * @param key - * @param values - * @return - */ - public Update pushAll(String key, Object[] values) { - addMultiFieldOperation("$pushAll", key, Arrays.copyOf(values, values.length)); - return this; - } - /** * Update using {@code $addToSet} modifier.
                    * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values - * - * @param key - * @return + * + * @param key the field name. + * @return new instance of {@link AddToSetBuilder}. * @since 1.5 */ public AddToSetBuilder addToSet(String key) { @@ -208,24 +213,25 @@ public AddToSetBuilder addToSet(String key) { /** * Update using the {@literal $addToSet} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/addToSet/ - * @param key - * @param value - * @return + * + * @param key the field name. + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Update operator: + * $addToSet */ - public Update addToSet(String key, Object value) { + public Update addToSet(String key, @Nullable Object value) { addMultiFieldOperation("$addToSet", key, value); return this; } /** * Update using the {@literal $pop} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/pop/ - * @param key - * @param pos - * @return + * + * @param key the field name. + * @param pos must not be {@literal null}. + * @return this. + * @see MongoDB Update operator: $pop */ public Update pop(String key, Position pos) { addMultiFieldOperation("$pop", key, pos == Position.FIRST ? -1 : 1); @@ -234,37 +240,39 @@ public Update pop(String key, Position pos) { /** * Update using the {@literal $pull} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/pull/ - * @param key - * @param value - * @return + * + * @param key the field name. + * @param value can be {@literal null}. + * @return this. + * @see MongoDB Update operator: $pull */ - public Update pull(String key, Object value) { + public Update pull(String key, @Nullable Object value) { addMultiFieldOperation("$pull", key, value); return this; } /** * Update using the {@literal $pullAll} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/pullAll/ - * @param key - * @param values - * @return + * + * @param key the field name. + * @param values must not be {@literal null}. + * @return this. + * @see MongoDB Update operator: + * $pullAll */ public Update pullAll(String key, Object[] values) { - addMultiFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length)); + addMultiFieldOperation("$pullAll", key, Arrays.asList(values)); return this; } /** * Update using the {@literal $rename} update modifier - * - * @see http://docs.mongodb.org/manual/reference/operator/update/rename/ - * @param oldName - * @param newName - * @return + * + * @param oldName must not be {@literal null}. + * @param newName must not be {@literal null}. + * @return this. + * @see MongoDB Update operator: + * $rename */ public Update rename(String oldName, String newName) { addMultiFieldOperation("$rename", oldName, newName); @@ -273,11 +281,12 @@ public Update rename(String oldName, String newName) { /** * Update given key to current date using {@literal $currentDate} modifier. - * - * @see http://docs.mongodb.org/manual/reference/operator/update/currentDate/ - * @param key - * @return + * + * @param key the field name. + * @return this. * @since 1.6 + * @see MongoDB Update operator: + * $currentDate */ public Update currentDate(String key) { @@ -287,82 +296,162 @@ public Update currentDate(String key) { /** * Update given key to current date using {@literal $currentDate : { $type : "timestamp" }} modifier. 
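The operators above chain into a single modifier document; a minimal sketch with assumed field names:

    Update update = new Update()
        .set("status", "ACTIVE")      // $set
        .inc("visits", 1)             // $inc
        .addToSet("tags", "mongodb")  // $addToSet
        .currentDate("lastModified"); // $currentDate
    // getUpdateObject() then holds one sub-document per operator used.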
- * - * @see http://docs.mongodb.org/manual/reference/operator/update/currentDate/ - * @param key - * @return + * + * @param key the field name. + * @return this. * @since 1.6 + * @see MongoDB Update operator: + * $currentDate */ public Update currentTimestamp(String key) { - addMultiFieldOperation("$currentDate", key, new BasicDBObject("$type", "timestamp")); + addMultiFieldOperation("$currentDate", key, new Document("$type", "timestamp")); return this; } /** * Multiply the value of given key by the given number. - * - * @see http://docs.mongodb.org/manual/reference/operator/update/mul/ + * * @param key must not be {@literal null}. * @param multiplier must not be {@literal null}. - * @return + * @return this. * @since 1.7 + * @see MongoDB Update operator: $mul */ public Update multiply(String key, Number multiplier) { - Assert.notNull(multiplier, "Multiplier must not be 'null'."); + Assert.notNull(multiplier, "Multiplier must not be null"); addMultiFieldOperation("$mul", key, multiplier.doubleValue()); return this; } + /** + * Update given key to the {@code value} if the {@code value} is greater than the current value of the field. + * + * @param key must not be {@literal null}. + * @param value must not be {@literal null}. + * @return this. + * @since 1.10 + * @see Comparison/Sort Order + * @see MongoDB Update operator: $max + */ + public Update max(String key, Object value) { + + Assert.notNull(value, "Value for max operation must not be null"); + addMultiFieldOperation("$max", key, value); + return this; + } + + /** + * Update given key to the {@code value} if the {@code value} is less than the current value of the field. + * + * @param key must not be {@literal null}. + * @param value must not be {@literal null}. + * @return this. + * @since 1.10 + * @see Comparison/Sort Order + * @see MongoDB Update operator: $min + */ + public Update min(String key, Object value) { + + Assert.notNull(value, "Value for min operation must not be null"); + addMultiFieldOperation("$min", key, value); + return this; + } + /** * The operator supports bitwise {@code and}, bitwise {@code or}, and bitwise {@code xor} operations. - * - * @param key - * @return + * + * @param key the field name. + * @return this. * @since 1.7 */ public BitwiseOperatorBuilder bitwise(String key) { return new BitwiseOperatorBuilder(this, key); } - public DBObject getUpdateObject() { - return new BasicDBObject(modifierOps); + /** + * Prevents a write operation that affects multiple documents from yielding to other reads or writes + * once the first document is written.
                    + * Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, UpdateDefinition, Class)}. + * + * @return this. + * @since 2.0 + */ + public Update isolated() { + + isolated = true; + return this; } /** - * This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}. - * - * @param operator - * @param key - * @param value - * @deprectaed Use {@link #addMultiFieldOperation(String, String, Object)} instead. + * Filter elements in an array that match the given criteria for update. {@link CriteriaDefinition} is passed directly + * to the driver without further type or field mapping. + * + * @param criteria must not be {@literal null}. + * @return this. + * @since 2.2 */ - @Deprecated - protected void addFieldOperation(String operator, String key, Object value) { + public Update filterArray(CriteriaDefinition criteria) { - Assert.hasText(key, "Key/Path for update must not be null or blank."); + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } - modifierOps.put(operator, new BasicDBObject(key, value)); - this.keysToUpdate.add(key); + this.arrayFilters.add(criteria::getCriteriaObject); + return this; + } + + /** + * Filter elements in an array that match the given criteria for update. {@code expression} is used directly with the + * driver without further type or field mapping. + * + * @param identifier the positional operator identifier filter criteria name. + * @param expression the positional operator filter expression. + * @return this. + * @since 2.2 + */ + public Update filterArray(String identifier, Object expression) { + + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } + + this.arrayFilters.add(() -> new Document(identifier, expression)); + return this; + } + + public Boolean isIsolated() { + return isolated; + } + + public Document getUpdateObject() { + return new Document(modifierOps); + } + + public List getArrayFilters() { + return Collections.unmodifiableList(this.arrayFilters); } - protected void addMultiFieldOperation(String operator, String key, Object value) { + @Override + public boolean hasArrayFilters() { + return !this.arrayFilters.isEmpty(); + } - Assert.hasText(key, "Key/Path for update must not be null or blank."); + protected void addMultiFieldOperation(String operator, String key, @Nullable Object value) { + + Assert.hasText(key, "Key/Path for update must not be null or blank"); Object existingValue = this.modifierOps.get(operator); - DBObject keyValueMap; + Document keyValueMap; if (existingValue == null) { - keyValueMap = new BasicDBObject(); + keyValueMap = new Document(); this.modifierOps.put(operator, keyValueMap); + } else if (existingValue instanceof Document document) { + keyValueMap = document; } else { - if (existingValue instanceof BasicDBObject) { - keyValueMap = (BasicDBObject) existingValue; - } else { - throw new InvalidDataAccessApiUsageException( - "Modifier Operations should be a LinkedHashMap but was " + existingValue.getClass()); - } + throw new InvalidDataAccessApiUsageException( + "Modifier Operations should be a LinkedHashMap but was " + existingValue.getClass()); } keyValueMap.put(key, value); @@ -371,9 +460,9 @@ protected void addMultiFieldOperation(String operator, String key, Object value) /** * Determine if a given {@code key} will be touched on execution. - * - * @param key - * @return + * + * @param key the field name. 
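A sketch of the array filter support above (Course and template are assumed; the identifier "elem" must match the one used in the update path):

    Update update = new Update()
        .set("grades.$[elem]", 100)
        .filterArray(Criteria.where("elem").gte(85)); // arrayFilters: [ { "elem": { "$gte": 85 } } ]

    template.updateMulti(new Query(), update, Course.class);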
+ * @return {@literal true} if given field is updated. */ public boolean modifies(String key) { return this.keysToUpdate.contains(key); @@ -381,29 +470,21 @@ public boolean modifies(String key) { /** * Inspects given {@code key} for '$'. - * - * @param key - * @return + * + * @param key the field name. + * @return {@literal true} if given key is prefixed. */ private static boolean isKeyword(String key) { return StringUtils.startsWithIgnoreCase(key, "$"); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { - return getUpdateObject().hashCode(); + return Objects.hash(getUpdateObject(), isolated); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -414,21 +495,28 @@ public boolean equals(Object obj) { } Update that = (Update) obj; - return this.getUpdateObject().equals(that.getUpdateObject()); + if (this.isolated != that.isolated) { + return false; + } + + return Objects.equals(this.getUpdateObject(), that.getUpdateObject()); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { - return SerializationUtils.serializeToJsonSafely(getUpdateObject()); + + Document doc = getUpdateObject(); + + if (isIsolated()) { + doc.append("$isolated", 1); + } + + return SerializationUtils.serializeToJsonSafely(doc); } /** * Modifiers holds a distinct collection of {@link Modifier} - * + * * @author Christoph Strobl * @author Thomas Darimont */ @@ -437,7 +525,7 @@ public static class Modifiers { private Map modifiers; public Modifiers() { - this.modifiers = new LinkedHashMap(1); + this.modifiers = new LinkedHashMap<>(1); } public Collection getModifiers() { @@ -448,19 +536,21 @@ public void addModifier(Modifier modifier) { this.modifiers.put(modifier.getKey(), modifier); } - /* (non-Javadoc) - * @see java.lang.Object#hashCode() + /** + * @return true if no modifiers present. + * @since 2.0 */ + public boolean isEmpty() { + return modifiers.isEmpty(); + } + @Override public int hashCode() { - return nullSafeHashCode(modifiers); + return Objects.hashCode(modifiers); } - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -471,17 +561,21 @@ public boolean equals(Object obj) { } Modifiers that = (Modifiers) obj; + return Objects.equals(this.modifiers, that.modifiers); + } - return this.modifiers.equals(that.modifiers); + @Override + public String toString() { + return SerializationUtils.serializeToJsonSafely(this.modifiers); } } /** * Marker interface of nested commands. - * + * * @author Christoph Strobl */ - public static interface Modifier { + public interface Modifier { /** * @return the command to send eg. {@code $push} @@ -492,19 +586,65 @@ public static interface Modifier { * @return value to be sent with command */ Object getValue(); + + /** + * @return a safely serialized JSON representation. + * @since 2.0 + */ + default String toJsonString() { + return SerializationUtils.serializeToJsonSafely(Collections.singletonMap(getKey(), getValue())); + } + } + + /** + * Abstract {@link Modifier} implementation with defaults for {@link Object#equals(Object)}, {@link Object#hashCode()} + * and {@link Object#toString()}. 
+ * + * @author Christoph Strobl + * @since 2.0 + */ + private static abstract class AbstractModifier implements Modifier { + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(getKey()) + ObjectUtils.nullSafeHashCode(getValue()); + } + + @Override + public boolean equals(@Nullable Object that) { + + if (this == that) { + return true; + } + + if (that == null || getClass() != that.getClass()) { + return false; + } + + if (!Objects.equals(getKey(), ((Modifier) that).getKey())) { + return false; + } + + return Objects.deepEquals(getValue(), ((Modifier) that).getValue()); + } + + @Override + public String toString() { + return toJsonString(); + } } /** * Implementation of {@link Modifier} representing {@code $each}. - * + * * @author Christoph Strobl * @author Thomas Darimont */ - private static class Each implements Modifier { + private static class Each extends AbstractModifier { private Object[] values; - public Each(Object... values) { + Each(Object... values) { this.values = extractValues(values); } @@ -514,87 +654,130 @@ private Object[] extractValues(Object[] values) { return values; } - if (values.length == 1 && values[0] instanceof Collection) { - return ((Collection) values[0]).toArray(); + if (values.length == 1 && values[0] instanceof Collection collection) { + return collection.toArray(); } return Arrays.copyOf(values, values.length); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getKey() - */ @Override public String getKey() { return "$each"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getValue() - */ @Override public Object getValue() { return this.values; } + } + + /** + * {@link Modifier} implementation used to propagate {@code $position}. + * + * @author Christoph Strobl + * @since 1.7 + */ + private static class PositionModifier extends AbstractModifier { + + private final int position; + + PositionModifier(int position) { + this.position = position; + } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override - public int hashCode() { - return nullSafeHashCode(values); + public String getKey() { + return "$position"; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object that) { + public Object getValue() { + return position; + } + } - if (this == that) { - return true; - } + /** + * Implementation of {@link Modifier} representing {@code $slice}. + * + * @author Mark Paluch + * @since 1.10 + */ + private static class Slice extends AbstractModifier { - if (that == null || getClass() != that.getClass()) { - return false; - } + private int count; + + Slice(int count) { + this.count = count; + } + + @Override + public String getKey() { + return "$slice"; + } - return nullSafeEquals(values, ((Each) that).values); + @Override + public Object getValue() { + return this.count; } } /** - * {@link Modifier} implementation used to propagate {@code $position}. - * - * @author Christoph Strobl - * @since 1.7 + * Implementation of {@link Modifier} representing {@code $sort}. + * + * @author Pavel Vodrazka + * @author Mark Paluch + * @since 1.10 */ - private static class PositionModifier implements Modifier { + private static class SortModifier extends AbstractModifier { - private final int position; + private final Object sort; - public PositionModifier(int position) { - this.position = position; + /** + * Creates a new {@link SortModifier} instance given {@link Direction}. 
+ * + * @param direction must not be {@literal null}. + */ + SortModifier(Direction direction) { + + Assert.notNull(direction, "Direction must not be null"); + this.sort = direction.isAscending() ? 1 : -1; + } + + /** + * Creates a new {@link SortModifier} instance given {@link Sort}. + * + * @param sort must not be {@literal null}. + */ + SortModifier(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); + + for (Order order : sort) { + + if (order.isIgnoreCase()) { + throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case;" + + " MongoDB does not support sorting ignoring case currently", order.getProperty())); + } + } + + this.sort = sort; } @Override public String getKey() { - return "$position"; + return "$sort"; } @Override public Object getValue() { - return position; + return this.sort; } } /** * Builder for creating {@code $push} modifiers - * + * * @author Christoph Strobl * @author Thomas Darimont */ @@ -610,9 +793,9 @@ public class PushOperatorBuilder { /** * Propagates {@code $each} to {@code $push} - * + * * @param values - * @return + * @return never {@literal null}. */ public Update each(Object... values) { @@ -620,32 +803,76 @@ public Update each(Object... values) { return Update.this.push(key, this.modifiers); } + /** + * Propagates {@code $slice} to {@code $push}. {@code $slice} requires the {@code $each} operator.
                    + * If {@literal count} is zero, {@code $slice} updates the array to an empty array.
                    + * If {@literal count} is negative, {@code $slice} updates the array to contain only the last {@code count} + * elements.
                    + * If {@literal count} is positive, {@code $slice} updates the array to contain only the first {@code count} + * elements.
                    + * + * @param count + * @return never {@literal null}. + * @since 1.10 + */ + public PushOperatorBuilder slice(int count) { + + this.modifiers.addModifier(new Slice(count)); + return this; + } + + /** + * Propagates {@code $sort} to {@code $push}. {@code $sort} requires the {@code $each} operator. Forces elements to + * be sorted by values in given {@literal direction}. + * + * @param direction must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public PushOperatorBuilder sort(Direction direction) { + + Assert.notNull(direction, "Direction must not be null"); + this.modifiers.addModifier(new SortModifier(direction)); + return this; + } + + /** + * Propagates {@code $sort} to {@code $push}. {@code $sort} requires the {@code $each} operator. Forces document + * elements to be sorted in given {@literal order}. + * + * @param sort must not be {@literal null}. + * @return never {@literal null}. + * @since 1.10 + */ + public PushOperatorBuilder sort(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); + this.modifiers.addModifier(new SortModifier(sort)); + return this; + } + /** * Forces values to be added at the given {@literal position}. - * - * @param position needs to be greater than or equal to zero. - * @return + * + * @param position the position offset. As of MongoDB 3.6 use a negative value to indicate starting from the end, + * counting (but not including) the last element of the array. + * @return never {@literal null}. * @since 1.7 */ public PushOperatorBuilder atPosition(int position) { - if (position < 0) { - throw new IllegalArgumentException("Position must be greater than or equal to zero."); - } - this.modifiers.addModifier(new PositionModifier(position)); - return this; } /** * Forces values to be added at given {@literal position}. - * + * * @param position can be {@literal null} which will be appended at the last position. - * @return + * @return never {@literal null}. * @since 1.7 */ - public PushOperatorBuilder atPosition(Position position) { + public PushOperatorBuilder atPosition(@Nullable Position position) { if (position == null || Position.LAST.equals(position)) { return this; @@ -658,36 +885,27 @@ public PushOperatorBuilder atPosition(Position position) { /** * Propagates {@link #value(Object)} to {@code $push} - * - * @param values - * @return + * + * @param value + * @return never {@literal null}. 
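Combining the builder methods above, a sketch that keeps only the ten highest values of an assumed scores array:

    Update update = new Update().push("scores")
        .sort(Sort.Direction.DESC) // $sort the array descending
        .slice(10)                 // $slice caps the array at 10 elements
        .each(77, 89, 95);         // $each supplies the values to push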
*/ public Update value(Object value) { - return Update.this.push(key, value); + + if (this.modifiers.isEmpty()) { + return Update.this.push(key, value); + } + + this.modifiers.addModifier(new Each(Collections.singletonList(value))); + return Update.this.push(key, this.modifiers); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { - - int result = 17; - - result += 31 * result + getOuterType().hashCode(); - result += 31 * result + nullSafeHashCode(key); - result += 31 * result + nullSafeHashCode(modifiers); - - return result; + return Objects.hash(getOuterType(), key, modifiers); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -699,11 +917,11 @@ public boolean equals(Object obj) { PushOperatorBuilder that = (PushOperatorBuilder) obj; - if (!getOuterType().equals(that.getOuterType())) { + if (!Objects.equals(getOuterType(), that.getOuterType())) { return false; } - return nullSafeEquals(this.key, that.key) && nullSafeEquals(this.modifiers, that.modifiers); + return Objects.equals(this.key, that.key) && Objects.equals(this.modifiers, that.modifiers); } private Update getOuterType() { @@ -713,7 +931,7 @@ private Update getOuterType() { /** * Builder for creating {@code $addToSet} modifier. - * + * * @author Christoph Strobl * @since 1.5 */ @@ -727,9 +945,9 @@ public AddToSetBuilder(String key) { /** * Propagates {@code $each} to {@code $addToSet} - * - * @param values - * @return + * + * @param values must not be {@literal null}. + * @return never {@literal null}. */ public Update each(Object... values) { return Update.this.addToSet(this.key, new Each(values)); @@ -737,9 +955,9 @@ public Update each(Object... values) { /** * Propagates {@link #value(Object)} to {@code $addToSet} - * - * @param values - * @return + * + * @param value + * @return never {@literal null}. */ public Update value(Object value) { return Update.this.addToSet(this.key, value); @@ -762,19 +980,19 @@ private enum BitwiseOperator { @Override public String toString() { return super.toString().toLowerCase(); - }; + } } /** * Creates a new {@link BitwiseOperatorBuilder}. - * + * * @param reference must not be {@literal null} * @param key must not be {@literal null} */ protected BitwiseOperatorBuilder(Update reference, String key) { - Assert.notNull(reference, "Reference must not be null!"); - Assert.notNull(key, "Key must not be null!"); + Assert.notNull(reference, "Reference must not be null"); + Assert.notNull(key, "Key must not be null"); this.reference = reference; this.key = key; @@ -782,9 +1000,9 @@ protected BitwiseOperatorBuilder(Update reference, String key) { /** * Updates to the result of a bitwise and operation between the current value and the given one. - * + * * @param value - * @return + * @return never {@literal null}. */ public Update and(long value) { @@ -794,9 +1012,9 @@ public Update and(long value) { /** * Updates to the result of a bitwise or operation between the current value and the given one. - * + * * @param value - * @return + * @return never {@literal null}. */ public Update or(long value) { @@ -806,9 +1024,9 @@ public Update or(long value) { /** * Updates to the result of a bitwise xor operation between the current value and the given one. - * + * * @param value - * @return + * @return never {@literal null}. 
*/ public Update xor(long value) { @@ -817,7 +1035,7 @@ private void addFieldOperation(BitwiseOperator operator, Number value) { - reference.addMultiFieldOperation(BIT_OPERATOR, key, new BasicDBObject(operator.toString(), value)); + reference.addMultiFieldOperation(BIT_OPERATOR, key, new Document(operator.toString(), value)); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UpdateDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UpdateDefinition.java new file mode 100644 index 0000000000..5aafffeb82 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UpdateDefinition.java @@ -0,0 +1,91 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import java.util.List; + +import org.bson.Document; + +/** + * Interface defining the must-have operations for {@literal updates} as implemented via {@link Update}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +public interface UpdateDefinition { + + /** + * If {@literal true} prevents a write operation that affects multiple documents from yielding to + * other reads or writes once the first document is written.
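Since Update implements this interface, callers can consume any update source uniformly through the methods declared below; a sketch (logUpdate is an assumed helper):

    void logUpdate(UpdateDefinition definition) {
        System.out.println(definition.getUpdateObject().toJson()); // native Document form
        if (definition.hasArrayFilters()) {
            definition.getArrayFilters().forEach(filter -> System.out.println(filter.asDocument()));
        }
    }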
                    + * + * @return {@literal true} if update isolated is set. + */ + Boolean isIsolated(); + + /** + * @return the actual update in its native {@link Document} format. Never {@literal null}. + */ + Document getUpdateObject(); + + /** + * Check if a given {@literal key} is modified by applying the update. + * + * @param key must not be {@literal null}. + * @return {@literal true} if the actual {@link UpdateDefinition} attempts to modify the given {@literal key}. + */ + boolean modifies(String key); + + /** + * Increment the value of a given {@literal key} by {@code 1}. + * + * @param key must not be {@literal null}. + */ + void inc(String key); + + /** + * Get the specification which elements to modify in an array field. {@link ArrayFilter} are passed directly to the + * driver without further type or field mapping. + * + * @return never {@literal null}. + * @since 2.2 + */ + List getArrayFilters(); + + /** + * @return {@literal true} if {@link UpdateDefinition} contains {@link #getArrayFilters() array filters}. + * @since 2.2 + */ + default boolean hasArrayFilters() { + return !getArrayFilters().isEmpty(); + } + + /** + * A filter to specify which elements to modify in an array field. + * + * @since 2.2 + */ + interface ArrayFilter { + + /** + * Get the {@link Document} representation of the filter to apply. The returned {@link Document} is used directly + * with the driver without further type or field mapping. + * + * @return never {@literal null}. + */ + Document asDocument(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/package-info.java index 911f2fea63..d3f67790a1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/package-info.java @@ -1,5 +1,6 @@ /** * MongoDB specific query and update support. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.query; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java new file mode 100644 index 0000000000..b59c20c6b6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java @@ -0,0 +1,70 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * Value object representing a MongoDB-specific JSON schema which is the default {@link MongoJsonSchema} implementation. 
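For illustration, a schema assembled through the fluent MongoJsonSchema.builder() API renders via schemaDocument(); property names here are assumed:

    MongoJsonSchema schema = MongoJsonSchema.builder()
        .required("lastname")
        .properties(JsonSchemaProperty.string("lastname").minLength(2))
        .build();

    Document schemaDocument = schema.schemaDocument(); // { "type": "object", "required": [...], "properties": {...} }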
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class DefaultMongoJsonSchema implements MongoJsonSchema { + + private final JsonSchemaObject root; + + @Nullable // + private final Document encryptionMetadata; + + DefaultMongoJsonSchema(JsonSchemaObject root) { + this(root, null); + } + + /** + * Create new instance of {@link DefaultMongoJsonSchema}. + * + * @param root the schema root element. + * @param encryptionMetadata can be {@literal null}. + * @since 3.3 + */ + DefaultMongoJsonSchema(JsonSchemaObject root, @Nullable Document encryptionMetadata) { + + Assert.notNull(root, "Root schema object must not be null"); + + this.root = root; + this.encryptionMetadata = encryptionMetadata; + } + + @Override + public Document schemaDocument() { + + Document schemaDocument = new Document(); + + // we want this to be the first element rendered, so it reads nice when printed to json + if (!CollectionUtils.isEmpty(encryptionMetadata)) { + schemaDocument.append("encryptMetadata", encryptionMetadata); + } + + schemaDocument.putAll(root.toDocument()); + + return schemaDocument; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java new file mode 100644 index 0000000000..0407bac272 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * JSON schema backed by a {@link org.bson.Document} object. + * + * @author Mark Paluch + * @since 2.1 + */ +class DocumentJsonSchema implements MongoJsonSchema { + + private final Document document; + + DocumentJsonSchema(Document document) { + + Assert.notNull(document, "Document must not be null"); + this.document = document; + } + + @Override + public Document schemaDocument() { + return new Document(document); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java new file mode 100644 index 0000000000..26dbd7dffb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java @@ -0,0 +1,1262 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import org.bson.Document; +import org.springframework.data.domain.Range; +import org.springframework.data.mongodb.core.EncryptionAlgorithms; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ArrayJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.BooleanJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.DateJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NullJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NumericJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.StringJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.TimestampJsonSchemaObject; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * {@link JsonSchemaProperty} implementation. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class IdentifiableJsonSchemaProperty<T extends JsonSchemaObject> implements JsonSchemaProperty { + + protected final String identifier; + protected final T jsonSchemaObjectDelegate; + + /** + * Creates a new {@link IdentifiableJsonSchemaProperty} for {@code identifier} and {@code jsonSchemaObject}. + * + * @param identifier must not be {@literal null}. + * @param jsonSchemaObject must not be {@literal null}. + */ + IdentifiableJsonSchemaProperty(String identifier, T jsonSchemaObject) { + + Assert.notNull(identifier, "Identifier must not be null"); + Assert.notNull(jsonSchemaObject, "JsonSchemaObject must not be null"); + + this.identifier = identifier; + this.jsonSchemaObjectDelegate = jsonSchemaObject; + } + + @Override + public String getIdentifier() { + return identifier; + } + + @Override + public Document toDocument() { + return new Document(identifier, jsonSchemaObjectDelegate.toDocument()); + } + + @Override + public Set<Type> getTypes() { + return jsonSchemaObjectDelegate.getTypes(); + } + + /** + * Convenience {@link JsonSchemaProperty} implementation without a {@code type} property. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class UntypedJsonSchemaProperty extends IdentifiableJsonSchemaProperty<UntypedJsonSchemaObject> { + + UntypedJsonSchemaProperty(String identifier, UntypedJsonSchemaObject jsonSchemaObject) { + super(identifier, jsonSchemaObject); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#possibleValues(Collection) + */ + public UntypedJsonSchemaProperty possibleValues(Object...
possibleValues) { + return possibleValues(Arrays.asList(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#allOf(Collection) + */ + public UntypedJsonSchemaProperty allOf(JsonSchemaObject... allOf) { + return allOf(new LinkedHashSet<>(Arrays.asList(allOf))); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#anyOf(Collection) + */ + public UntypedJsonSchemaProperty anyOf(JsonSchemaObject... anyOf) { + return anyOf(new LinkedHashSet<>(Arrays.asList(anyOf))); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#oneOf(Collection) + */ + public UntypedJsonSchemaProperty oneOf(JsonSchemaObject... oneOf) { + return oneOf(new LinkedHashSet<>(Arrays.asList(oneOf))); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#possibleValues(Collection) + */ + public UntypedJsonSchemaProperty possibleValues(Collection<Object> possibleValues) { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.possibleValues(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#allOf(Collection) + */ + public UntypedJsonSchemaProperty allOf(Collection<JsonSchemaObject> allOf) { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.allOf(allOf)); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#anyOf(Collection) + */ + public UntypedJsonSchemaProperty anyOf(Collection<JsonSchemaObject> anyOf) { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.anyOf(anyOf)); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#oneOf(Collection) + */ + public UntypedJsonSchemaProperty oneOf(Collection<JsonSchemaObject> oneOf) { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.oneOf(oneOf)); + } + + /** + * @param notMatch must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#notMatch(JsonSchemaObject) + */ + public UntypedJsonSchemaProperty notMatch(JsonSchemaObject notMatch) { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.notMatch(notMatch)); + } + + /** + * @param description must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#description(String) + */ + public UntypedJsonSchemaProperty description(String description) { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description)); + } + + /** + * @return new instance of {@link UntypedJsonSchemaProperty}. + * @see UntypedJsonSchemaObject#generateDescription() + */ + public UntypedJsonSchemaProperty generatedDescription() { + return new UntypedJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); + } + } + + /** + * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'string'} property.
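A sketch combining the string constraints this type offers (shown below; the field name is assumed):

    StringJsonSchemaProperty name = JsonSchemaProperty.string("name")
        .minLength(3)
        .maxLength(64)
        .matching("^[A-Z].*"); // renders { "name": { "type": "string", "minLength": 3, ... } }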
+ * + * @author Christoph Strobl + * @since 2.1 + */ + public static class StringJsonSchemaProperty extends IdentifiableJsonSchemaProperty { + + /** + * @param identifier identifier the {@literal property} name or {@literal patternProperty} regex. Must not be + * {@literal null} nor {@literal empty}. + * @param schemaObject must not be {@literal null}. + */ + StringJsonSchemaProperty(String identifier, StringJsonSchemaObject schemaObject) { + super(identifier, schemaObject); + } + + /** + * @param length + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#minLength(int) + */ + public StringJsonSchemaProperty minLength(int length) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.minLength(length)); + } + + /** + * @param length + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#maxLength(int) + */ + public StringJsonSchemaProperty maxLength(int length) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.maxLength(length)); + } + + /** + * @param pattern must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#matching(String) + */ + public StringJsonSchemaProperty matching(String pattern) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.matching(pattern)); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#possibleValues(Collection) + */ + public StringJsonSchemaProperty possibleValues(String... possibleValues) { + return possibleValues(Arrays.asList(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#allOf(Collection) + */ + public StringJsonSchemaProperty allOf(JsonSchemaObject... allOf) { + return allOf(new LinkedHashSet<>(Arrays.asList(allOf))); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#anyOf(Collection) + */ + public StringJsonSchemaProperty anyOf(JsonSchemaObject... anyOf) { + return anyOf(new LinkedHashSet<>(Arrays.asList(anyOf))); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#oneOf(Collection) + */ + public StringJsonSchemaProperty oneOf(JsonSchemaObject... oneOf) { + return oneOf(new LinkedHashSet<>(Arrays.asList(oneOf))); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#possibleValues(Collection) + */ + public StringJsonSchemaProperty possibleValues(Collection possibleValues) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.possibleValues(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#allOf(Collection) + */ + public StringJsonSchemaProperty allOf(Collection allOf) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.allOf(allOf)); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. 
+ * @see StringJsonSchemaObject#anyOf(Collection) + */ + public StringJsonSchemaProperty anyOf(Collection anyOf) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.anyOf(anyOf)); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#oneOf(Collection) + */ + public StringJsonSchemaProperty oneOf(Collection oneOf) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.oneOf(oneOf)); + } + + /** + * @param notMatch must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#notMatch(JsonSchemaObject) + */ + public StringJsonSchemaProperty notMatch(JsonSchemaObject notMatch) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.notMatch(notMatch)); + } + + /** + * @param description must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#description(String) + */ + public StringJsonSchemaProperty description(String description) { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description)); + } + + /** + * @return new instance of {@link StringJsonSchemaProperty}. + * @see StringJsonSchemaObject#generateDescription() + */ + public StringJsonSchemaProperty generatedDescription() { + return new StringJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); + } + } + + /** + * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'object'} property. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class ObjectJsonSchemaProperty extends IdentifiableJsonSchemaProperty { + + /** + * @param identifier identifier the {@literal property} name or {@literal patternProperty} regex. Must not be + * {@literal null} nor {@literal empty}. + * @param schemaObject must not be {@literal null}. + */ + ObjectJsonSchemaProperty(String identifier, ObjectJsonSchemaObject schemaObject) { + super(identifier, schemaObject); + } + + /** + * @param range must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + */ + public ObjectJsonSchemaProperty propertiesCount(Range range) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.propertiesCount(range)); + } + + /** + * @param count must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#minProperties(int) + */ + public ObjectJsonSchemaProperty minProperties(int count) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.minProperties(count)); + } + + /** + * @param count must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#maxProperties(int) + */ + public ObjectJsonSchemaProperty maxProperties(int count) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.maxProperties(count)); + } + + /** + * @param properties must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#required(String...) + */ + public ObjectJsonSchemaProperty required(String... properties) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.required(properties)); + } + + /** + * @param additionalPropertiesAllowed + * @return new instance of {@link ObjectJsonSchemaProperty}. 
+ * @see ObjectJsonSchemaObject#additionalProperties(boolean) + */ + public ObjectJsonSchemaProperty additionalProperties(boolean additionalPropertiesAllowed) { + return new ObjectJsonSchemaProperty(identifier, + jsonSchemaObjectDelegate.additionalProperties(additionalPropertiesAllowed)); + } + + /** + * @param additionalProperties must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#additionalProperties(ObjectJsonSchemaObject) + */ + public ObjectJsonSchemaProperty additionalProperties(ObjectJsonSchemaObject additionalProperties) { + return new ObjectJsonSchemaProperty(identifier, + jsonSchemaObjectDelegate.additionalProperties(additionalProperties)); + } + + /** + * @param properties must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#properties(JsonSchemaProperty...) + */ + public ObjectJsonSchemaProperty properties(JsonSchemaProperty... properties) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.properties(properties)); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#possibleValues(Collection) + */ + public ObjectJsonSchemaProperty possibleValues(Object... possibleValues) { + return possibleValues(Arrays.asList(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#allOf(Collection) + */ + public ObjectJsonSchemaProperty allOf(JsonSchemaObject... allOf) { + return allOf(new LinkedHashSet<>(Arrays.asList(allOf))); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#anyOf(Collection) + */ + public ObjectJsonSchemaProperty anyOf(JsonSchemaObject... anyOf) { + return anyOf(new LinkedHashSet<>(Arrays.asList(anyOf))); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#oneOf(Collection) + */ + public ObjectJsonSchemaProperty oneOf(JsonSchemaObject... oneOf) { + return oneOf(new LinkedHashSet<>(Arrays.asList(oneOf))); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#possibleValues(Collection) + */ + public ObjectJsonSchemaProperty possibleValues(Collection possibleValues) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.possibleValues(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#allOf(Collection) + */ + public ObjectJsonSchemaProperty allOf(Collection allOf) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.allOf(allOf)); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#anyOf(Collection) + */ + public ObjectJsonSchemaProperty anyOf(Collection anyOf) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.anyOf(anyOf)); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. 
+ * @see ObjectJsonSchemaObject#oneOf(Collection) + */ + public ObjectJsonSchemaProperty oneOf(Collection oneOf) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.oneOf(oneOf)); + } + + /** + * @param notMatch must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#notMatch(JsonSchemaObject) + */ + public ObjectJsonSchemaProperty notMatch(JsonSchemaObject notMatch) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.notMatch(notMatch)); + } + + /** + * @param description must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#description(String) + */ + public ObjectJsonSchemaProperty description(String description) { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description)); + } + + /** + * @return new instance of {@link ObjectJsonSchemaProperty}. + * @see ObjectJsonSchemaObject#generateDescription() + */ + public ObjectJsonSchemaProperty generatedDescription() { + return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); + } + + public List getProperties() { + return jsonSchemaObjectDelegate.getProperties(); + } + } + + /** + * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'number'} property. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class NumericJsonSchemaProperty extends IdentifiableJsonSchemaProperty { + + /** + * @param identifier identifier the {@literal property} name or {@literal patternProperty} regex. Must not be + * {@literal null} nor {@literal empty}. + * @param schemaObject must not be {@literal null}. + */ + public NumericJsonSchemaProperty(String identifier, NumericJsonSchemaObject schemaObject) { + super(identifier, schemaObject); + } + + /** + * @param value must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#multipleOf + */ + public NumericJsonSchemaProperty multipleOf(Number value) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.multipleOf(value)); + } + + /** + * @param range must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#within(Range) + */ + public NumericJsonSchemaProperty within(Range range) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.within(range)); + } + + /** + * @param min must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#gt(Number) + */ + public NumericJsonSchemaProperty gt(Number min) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.gt(min)); + } + + /** + * @param min must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#gte(Number) + */ + public NumericJsonSchemaProperty gte(Number min) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.gte(min)); + } + + /** + * @param max must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#lt(Number) + */ + public NumericJsonSchemaProperty lt(Number max) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.lt(max)); + } + + /** + * @param max must not be {@literal null}. 
+ * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#lte(Number) + */ + public NumericJsonSchemaProperty lte(Number max) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.lte(max)); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#possibleValues(Collection) + */ + public NumericJsonSchemaProperty possibleValues(Number... possibleValues) { + return possibleValues(new LinkedHashSet<>(Arrays.asList(possibleValues))); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#allOf(Collection) + */ + public NumericJsonSchemaProperty allOf(JsonSchemaObject... allOf) { + return allOf(Arrays.asList(allOf)); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#anyOf(Collection) + */ + public NumericJsonSchemaProperty anyOf(JsonSchemaObject... anyOf) { + return anyOf(new LinkedHashSet<>(Arrays.asList(anyOf))); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#oneOf(Collection) + */ + public NumericJsonSchemaProperty oneOf(JsonSchemaObject... oneOf) { + return oneOf(new LinkedHashSet<>(Arrays.asList(oneOf))); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#possibleValues(Collection) + */ + public NumericJsonSchemaProperty possibleValues(Collection possibleValues) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.possibleValues(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#allOf(Collection) + */ + public NumericJsonSchemaProperty allOf(Collection allOf) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.allOf(allOf)); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#anyOf(Collection) + */ + public NumericJsonSchemaProperty anyOf(Collection anyOf) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.anyOf(anyOf)); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#oneOf(Collection) + */ + public NumericJsonSchemaProperty oneOf(Collection oneOf) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.oneOf(oneOf)); + } + + /** + * @param notMatch must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#notMatch(JsonSchemaObject) + */ + public NumericJsonSchemaProperty notMatch(JsonSchemaObject notMatch) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.notMatch(notMatch)); + } + + /** + * @param description must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. 
+ * @see NumericJsonSchemaObject#description(String) + */ + public NumericJsonSchemaProperty description(String description) { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description)); + } + + /** + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see NumericJsonSchemaObject#generateDescription() + */ + public NumericJsonSchemaProperty generatedDescription() { + return new NumericJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); + } + } + + /** + * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'array'} property. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class ArrayJsonSchemaProperty extends IdentifiableJsonSchemaProperty { + + /** + * @param identifier identifier the {@literal property} name or {@literal patternProperty} regex. Must not be + * {@literal null} nor {@literal empty}. + * @param schemaObject must not be {@literal null}. + */ + public ArrayJsonSchemaProperty(String identifier, ArrayJsonSchemaObject schemaObject) { + super(identifier, schemaObject); + } + + /** + * @param uniqueItems + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#uniqueItems(boolean) + */ + public ArrayJsonSchemaProperty uniqueItems(boolean uniqueItems) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.uniqueItems(uniqueItems)); + } + + /** + * @param range must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#range(Range) + */ + public ArrayJsonSchemaProperty range(Range range) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.range(range)); + } + + /** + * @param count + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#minItems(int) + */ + public ArrayJsonSchemaProperty minItems(int count) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.minItems(count)); + } + + /** + * @param count + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#maxItems(int) + */ + public ArrayJsonSchemaProperty maxItems(int count) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.maxItems(count)); + } + + /** + * @param items must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#items(Collection) + */ + public ArrayJsonSchemaProperty items(JsonSchemaObject... items) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.items(Arrays.asList(items))); + } + + /** + * @param items must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#items(Collection) + */ + public ArrayJsonSchemaProperty items(Collection items) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.items(items)); + } + + /** + * @param additionalItemsAllowed + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#additionalItems(boolean) + */ + public ArrayJsonSchemaProperty additionalItems(boolean additionalItemsAllowed) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.additionalItems(additionalItemsAllowed)); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. 
+ * @see ArrayJsonSchemaObject#possibleValues(Collection) + */ + public ArrayJsonSchemaProperty possibleValues(Object... possibleValues) { + return possibleValues(new LinkedHashSet<>(Arrays.asList(possibleValues))); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#allOf(Collection) + */ + public ArrayJsonSchemaProperty allOf(JsonSchemaObject... allOf) { + return allOf(new LinkedHashSet<>(Arrays.asList(allOf))); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#anyOf(Collection) + */ + public ArrayJsonSchemaProperty anyOf(JsonSchemaObject... anyOf) { + return anyOf(new LinkedHashSet<>(Arrays.asList(anyOf))); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#oneOf(Collection) + */ + public ArrayJsonSchemaProperty oneOf(JsonSchemaObject... oneOf) { + return oneOf(new LinkedHashSet<>(Arrays.asList(oneOf))); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#possibleValues(Collection) + */ + public ArrayJsonSchemaProperty possibleValues(Collection possibleValues) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.possibleValues(possibleValues)); + } + + /** + * @param allOf must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#allOf(Collection) + */ + public ArrayJsonSchemaProperty allOf(Collection allOf) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.allOf(allOf)); + } + + /** + * @param anyOf must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#anyOf(Collection) + */ + public ArrayJsonSchemaProperty anyOf(Collection anyOf) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.anyOf(anyOf)); + } + + /** + * @param oneOf must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#oneOf(Collection) + */ + public ArrayJsonSchemaProperty oneOf(Collection oneOf) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.oneOf(oneOf)); + } + + /** + * @param notMatch must not be {@literal null}. + * @return new instance of {@link ArrayJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#notMatch(JsonSchemaObject) + */ + public ArrayJsonSchemaProperty notMatch(JsonSchemaObject notMatch) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.notMatch(notMatch)); + } + + /** + * @param description must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaProperty}. + * @see ArrayJsonSchemaObject#description(String) + */ + public ArrayJsonSchemaProperty description(String description) { + return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description)); + } + + /** + * @return new instance of {@link ArrayJsonSchemaProperty}. 
+ * @see ArrayJsonSchemaObject#generateDescription()
+ */
+ public ArrayJsonSchemaProperty generatedDescription() {
+ return new ArrayJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription());
+ }
+ }
+
+ /**
+ * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'boolean'} property.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ public static class BooleanJsonSchemaProperty extends IdentifiableJsonSchemaProperty<BooleanJsonSchemaObject> {
+
+ BooleanJsonSchemaProperty(String identifier, BooleanJsonSchemaObject schemaObject) {
+ super(identifier, schemaObject);
+ }
+
+ /**
+ * @param description must not be {@literal null}.
+ * @return new instance of {@link BooleanJsonSchemaProperty}.
+ * @see BooleanJsonSchemaObject#description(String)
+ */
+ public BooleanJsonSchemaProperty description(String description) {
+ return new BooleanJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description));
+ }
+
+ /**
+ * @return new instance of {@link BooleanJsonSchemaProperty}.
+ * @see BooleanJsonSchemaObject#generateDescription()
+ */
+ public BooleanJsonSchemaProperty generatedDescription() {
+ return new BooleanJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription());
+ }
+ }
+
+ /**
+ * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'null'} property.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ public static class NullJsonSchemaProperty extends IdentifiableJsonSchemaProperty<NullJsonSchemaObject> {
+
+ NullJsonSchemaProperty(String identifier, NullJsonSchemaObject schemaObject) {
+ super(identifier, schemaObject);
+ }
+
+ /**
+ * @param description must not be {@literal null}.
+ * @return new instance of {@link NullJsonSchemaProperty}.
+ * @see NullJsonSchemaObject#description(String)
+ */
+ public NullJsonSchemaProperty description(String description) {
+ return new NullJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description));
+ }
+
+ /**
+ * @return new instance of {@link NullJsonSchemaProperty}.
+ * @see NullJsonSchemaObject#generateDescription()
+ */
+ public NullJsonSchemaProperty generatedDescription() {
+ return new NullJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription());
+ }
+ }
+
+ /**
+ * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'date'} property.
+ *
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ public static class DateJsonSchemaProperty extends IdentifiableJsonSchemaProperty<DateJsonSchemaObject> {
+
+ DateJsonSchemaProperty(String identifier, DateJsonSchemaObject schemaObject) {
+ super(identifier, schemaObject);
+ }
+
+ /**
+ * @param description must not be {@literal null}.
+ * @return new instance of {@link DateJsonSchemaProperty}.
+ * @see DateJsonSchemaObject#description(String)
+ */
+ public DateJsonSchemaProperty description(String description) {
+ return new DateJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description));
+ }
+
+ /**
+ * @return new instance of {@link DateJsonSchemaProperty}.
+ * @see DateJsonSchemaObject#generateDescription()
+ */
+ public DateJsonSchemaProperty generatedDescription() {
+ return new DateJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription());
+ }
+ }
+
+ /**
+ * Convenience {@link JsonSchemaProperty} implementation for a {@code type : 'timestamp'} property.
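+ * <p>
+ * A short usage sketch, assuming a made-up {@code "lastSeen"} identifier:
+ *
+ * <pre class="code">
+ * TimestampJsonSchemaProperty lastSeen = JsonSchemaProperty.timestamp("lastSeen");
+ * </pre>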
+ * + * @author Mark Paluch + * @since 2.1 + */ + public static class TimestampJsonSchemaProperty extends IdentifiableJsonSchemaProperty { + + TimestampJsonSchemaProperty(String identifier, TimestampJsonSchemaObject schemaObject) { + super(identifier, schemaObject); + } + + /** + * @param description must not be {@literal null}. + * @return new instance of {@link TimestampJsonSchemaProperty}. + * @see TimestampJsonSchemaProperty#description(String) + */ + public TimestampJsonSchemaProperty description(String description) { + return new TimestampJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.description(description)); + } + + /** + * @return new instance of {@link TimestampJsonSchemaProperty}. + * @see TimestampJsonSchemaProperty#generatedDescription() + */ + public TimestampJsonSchemaProperty generatedDescription() { + return new TimestampJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); + } + } + + /** + * Delegating {@link JsonSchemaProperty} implementation having a {@literal required} flag for evaluation during schema + * creation process. + * + * @author Christoph Strobl + * @since 2.2 + */ + public static class RequiredJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty delegate; + private final boolean required; + + RequiredJsonSchemaProperty(JsonSchemaProperty delegate, boolean required) { + + this.delegate = delegate; + this.required = required; + } + + @Override + public String getIdentifier() { + return delegate.getIdentifier(); + } + + @Override + public Set getTypes() { + return delegate.getTypes(); + } + + @Override + public Document toDocument() { + return delegate.toDocument(); + } + + @Override + public boolean isRequired() { + return required; + } + } + + /** + * {@link JsonSchemaProperty} implementation for encrypted fields. + * + * @author Christoph Strobl + * @since 2.2 + */ + public static class EncryptedJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty targetProperty; + private final @Nullable String algorithm; + private final @Nullable Object keyId; + private final @Nullable List keyIds; + + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} wrapping the given {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + */ + public EncryptedJsonSchemaProperty(JsonSchemaProperty target) { + this(target, null, null, null); + } + + private EncryptedJsonSchemaProperty(JsonSchemaProperty target, @Nullable String algorithm, @Nullable Object keyId, + @Nullable List keyIds) { + + Assert.notNull(target, "Target must not be null"); + this.targetProperty = target; + this.algorithm = algorithm; + this.keyId = keyId; + this.keyIds = keyIds; + } + + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} wrapping the given {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public static EncryptedJsonSchemaProperty encrypted(JsonSchemaProperty target) { + return new EncryptedJsonSchemaProperty(target); + } + + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} with {@literal Range} encryption, wrapping the given + * {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. 
+ * @since 4.5 + */ + public static EncryptedJsonSchemaProperty rangeEncrypted(JsonSchemaProperty target) { + return new EncryptedJsonSchemaProperty(target).algorithm(EncryptionAlgorithms.RANGE); + } + + /** + * Use {@literal AEAD_AES_256_CBC_HMAC_SHA_512-Random} algorithm. + * + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_random() { + return algorithm(EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Random); + } + + /** + * Use {@literal AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic} algorithm. + * + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_deterministic() { + return algorithm(EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic); + } + + /** + * Use the given algorithm identified via its name. + * + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty algorithm(String algorithm) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, keyIds); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keyId(String keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, null); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + * @since 4.5 + */ + public EncryptedJsonSchemaProperty keyId(Object keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, null); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keys(UUID... keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keys(Object... keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); + } + + @Override + public Document toDocument() { + + Document doc = targetProperty.toDocument(); + Document propertySpecification = doc.get(targetProperty.getIdentifier(), Document.class); + + Document enc = new Document(); + + if (!ObjectUtils.isEmpty(keyId)) { + enc.append("keyId", keyId); + } else if (!ObjectUtils.isEmpty(keyIds)) { + enc.append("keyId", keyIds); + } + + Type type = extractPropertyType(propertySpecification); + if (type != null) { + + propertySpecification.remove(type.representation()); + enc.append("bsonType", type.toBsonType().value()); // TODO: no samples with type -> is it bson type all the way? 
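+ // the type declaration moves off the wrapped property and into the 'encrypt' document, expressed as 'bsonType'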
+ } + + if (StringUtils.hasText(algorithm)) { + enc.append("algorithm", algorithm); + } + + propertySpecification.append("encrypt", enc); + + return doc; + } + + @Override + public String getIdentifier() { + return targetProperty.getIdentifier(); + } + + @Override + public Set getTypes() { + return targetProperty.getTypes(); + } + + @Nullable + private Type extractPropertyType(Document source) { + + if (source.containsKey("type")) { + return Type.of(source.get("type", String.class)); + } + if (source.containsKey("bsonType")) { + return Type.of(source.get("bsonType", String.class)); + } + + return null; + } + + public Object getKeyId() { + if (keyId != null) { + return keyId; + } + if (keyIds != null && keyIds.size() == 1) { + return keyIds.iterator().next(); + } + return null; + } + } + + /** + * {@link JsonSchemaProperty} implementation typically wrapping an {@link EncryptedJsonSchemaProperty encrypted + * property} to mark it as queryable. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class QueryableJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty targetProperty; + private final QueryCharacteristics characteristics; + + public QueryableJsonSchemaProperty(JsonSchemaProperty target, QueryCharacteristics characteristics) { + this.targetProperty = target; + this.characteristics = characteristics; + } + + @Override + public Document toDocument() { + + Document doc = targetProperty.toDocument(); + Document propertySpecification = doc.get(targetProperty.getIdentifier(), Document.class); + + if (propertySpecification.containsKey("encrypt")) { + Document encrypt = propertySpecification.get("encrypt", Document.class); + List queries = characteristics.getCharacteristics().stream().map(QueryCharacteristic::toDocument) + .toList(); + encrypt.append("queries", queries); + } + + return doc; + } + + @Override + public String getIdentifier() { + return targetProperty.getIdentifier(); + } + + @Override + public Set getTypes() { + return targetProperty.getTypes(); + } + + boolean isEncrypted() { + return targetProperty instanceof EncryptedJsonSchemaProperty; + } + + public JsonSchemaProperty getTargetProperty() { + return targetProperty; + } + + public QueryCharacteristics getCharacteristics() { + return characteristics; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java new file mode 100644 index 0000000000..a84f361d37 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java @@ -0,0 +1,578 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.schema; + +import java.math.BigDecimal; +import java.util.Arrays; +import java.util.Collection; +import java.util.Date; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.regex.Pattern; + +import org.bson.BsonTimestamp; +import org.bson.Document; +import org.bson.types.BSONTimestamp; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; +import org.bson.types.ObjectId; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ArrayJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.BooleanJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.DateJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NullJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NumericJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.StringJsonSchemaObject; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.TimestampJsonSchemaObject; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; + +/** + * Interface that can be implemented by objects that know how to serialize themselves to JSON schema using + * {@link #toDocument()}. + *
+ * This class also declares factory methods for type-specific {@link JsonSchemaObject schema objects} such as
+ * {@link #string()} or {@link #object()}. For example:
+ *
+ * <pre class="code">
+ * JsonSchemaProperty.object("address").properties(JsonSchemaProperty.string("city").minLength(3));
+ * </pre>
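+ *
+ * A schema object can also be derived from a Java type, e.g. {@code JsonSchemaObject.of(Long.class)} yields a
+ * {@code bsonType : 'long'} schema object (see {@link #of(Class)} for the mapping rules).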
                    + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface JsonSchemaObject { + + /** + * Get the set of types defined for this schema element.
+ * The {@link Set} is likely to contain only one element in most cases.
+ *
+ * @return never {@literal null}.
+ */
+ Set<Type> getTypes();
+
+ /**
+ * Get the MongoDB specific representation.
+ * The Document may contain fields (e.g. {@literal bsonType}) not contained in the JsonSchema specification. It
+ * may also contain types not directly processable by the MongoDB Java driver. Make sure to run the produced
+ * {@link Document} through the mapping infrastructure.
+ *
+ * @return never {@literal null}.
+ */
+ Document toDocument();
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'object'}.
+ *
+ * @return never {@literal null}.
+ */
+ static ObjectJsonSchemaObject object() {
+ return new ObjectJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'string'}.
+ *
+ * @return never {@literal null}.
+ */
+ static StringJsonSchemaObject string() {
+ return new StringJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'number'}.
+ *
+ * @return never {@literal null}.
+ */
+ static NumericJsonSchemaObject number() {
+ return new NumericJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'array'}.
+ *
+ * @return never {@literal null}.
+ */
+ static ArrayJsonSchemaObject array() {
+ return new ArrayJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'boolean'}.
+ *
+ * @return never {@literal null}.
+ */
+ static BooleanJsonSchemaObject bool() {
+ return new BooleanJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'null'}.
+ *
+ * @return never {@literal null}.
+ */
+ static NullJsonSchemaObject nil() {
+ return new NullJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'date'}.
+ *
+ * @return never {@literal null}.
+ */
+ static DateJsonSchemaObject date() {
+ return new DateJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of {@code type : 'timestamp'}.
+ *
+ * @return never {@literal null}.
+ */
+ static TimestampJsonSchemaObject timestamp() {
+ return new TimestampJsonSchemaObject();
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} of given {@link Type}.
+ *
+ * @return never {@literal null}.
+ */
+ static TypedJsonSchemaObject of(Type type) {
+ return TypedJsonSchemaObject.of(type);
+ }
+
+ /**
+ * Create a new {@link UntypedJsonSchemaObject}.
+ *
+ * @return never {@literal null}.
+ */
+ static UntypedJsonSchemaObject untyped() {
+ return new UntypedJsonSchemaObject(null, null, false);
+ }
+
+ /**
+ * Create a new {@link JsonSchemaObject} matching the given {@code type}.
+ *
+ * @param type Java class to create a {@link JsonSchemaObject} for. May be {@literal null} to create
+ * {@link Type#nullType() null} type.
+ * @return never {@literal null}.
+ * @throws IllegalArgumentException if {@code type} is not supported.
+ */ + static TypedJsonSchemaObject of(@Nullable Class type) { + + if (type == null) { + return of(Type.nullType()); + } + + if (type.isArray() || ClassUtils.isAssignable(Collection.class, type)) { + + if (type.equals(byte[].class)) { + return of(Type.binaryType()); + } + + return of(Type.arrayType()); + } + + if (type.equals(Document.class) || ClassUtils.isAssignable(Map.class, type)) { + return of(Type.objectType()); + } + + if (type.equals(Object.class)) { + return of(Type.objectType()); + } + + if (type.equals(ObjectId.class)) { + return of(Type.objectIdType()); + } + + if (ClassUtils.isAssignable(String.class, type)) { + return of(Type.stringType()); + } + + if (ClassUtils.isAssignable(Date.class, type)) { + return of(Type.dateType()); + } + + if (ClassUtils.isAssignable(Binary.class, type)) { + return of(Type.binaryType()); + } + + if (ClassUtils.isAssignable(Code.class, type)) { + return of(Type.javascriptType()); + } + + if (ClassUtils.isAssignable(Decimal128.class, type)) { + return of(Type.bigDecimalType()); + } + + if (ClassUtils.isAssignable(BsonTimestamp.class, type) || ClassUtils.isAssignable(BSONTimestamp.class, type)) { + return of(Type.timestampType()); + } + + if (ClassUtils.isAssignable(Pattern.class, type)) { + return of(Type.regexType()); + } + + if (ClassUtils.isAssignable(Enum.class, type)) { + return of(Type.stringType()); + } + + Class resolved = ClassUtils.resolvePrimitiveIfNecessary(type); + if (ClassUtils.isAssignable(Boolean.class, resolved)) { + return of(Type.booleanType()); + } + + if (ClassUtils.isAssignable(Number.class, resolved)) { + + if (resolved.equals(Long.class)) { + return of(Type.longType()); + } + + if (resolved.equals(Float.class)) { + return of(Type.doubleType()); + } + + if (resolved.equals(Double.class)) { + return of(Type.doubleType()); + } + + if (resolved.equals(Integer.class)) { + return of(Type.intType()); + } + + if (resolved.equals(BigDecimal.class)) { + return of(Type.bigDecimalType()); + } + + return of(Type.numberType()); + } + + throw new IllegalArgumentException(String.format("No JSON schema type found for %s", type)); + } + + /** + * Type represents either a JSON schema {@literal type} or a MongoDB specific {@literal bsonType}. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface Type { + + // BSON TYPES + Type OBJECT_ID = bsonTypeOf("objectId"); + Type REGULAR_EXPRESSION = bsonTypeOf("regex"); + Type DOUBLE = bsonTypeOf("double"); + Type BINARY_DATA = bsonTypeOf("binData"); + Type DATE = bsonTypeOf("date"); + Type JAVA_SCRIPT = bsonTypeOf("javascript"); + Type INT_32 = bsonTypeOf("int"); + Type INT_64 = bsonTypeOf("long"); + Type DECIMAL_128 = bsonTypeOf("decimal"); + Type TIMESTAMP = bsonTypeOf("timestamp"); + + Set BSON_TYPES = new HashSet<>(Arrays.asList(OBJECT_ID, REGULAR_EXPRESSION, DOUBLE, BINARY_DATA, DATE, + JAVA_SCRIPT, INT_32, INT_64, DECIMAL_128, TIMESTAMP)); + + // JSON SCHEMA TYPES + Type OBJECT = jsonTypeOf("object"); + Type ARRAY = jsonTypeOf("array"); + Type NUMBER = jsonTypeOf("number"); + Type BOOLEAN = jsonTypeOf("boolean"); + Type STRING = jsonTypeOf("string"); + Type NULL = jsonTypeOf("null"); + + Set JSON_TYPES = new HashSet<>(Arrays.asList(OBJECT, ARRAY, NUMBER, BOOLEAN, STRING, NULL)); + + /** + * @return a constant {@link Type} representing {@code bsonType : 'objectId' }. + */ + static Type objectIdType() { + return OBJECT_ID; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'regex' }. 
+ */ + static Type regexType() { + return REGULAR_EXPRESSION; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'double' }. + */ + static Type doubleType() { + return DOUBLE; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'binData' }. + */ + static Type binaryType() { + return BINARY_DATA; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'date' }. + */ + static Type dateType() { + return DATE; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'javascript' }. + */ + static Type javascriptType() { + return JAVA_SCRIPT; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'int' }. + */ + static Type intType() { + return INT_32; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'long' }. + */ + static Type longType() { + return INT_64; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'decimal128' }. + */ + static Type bigDecimalType() { + return DECIMAL_128; + } + + /** + * @return a constant {@link Type} representing {@code bsonType : 'timestamp' }. + */ + static Type timestampType() { + return TIMESTAMP; + } + + /** + * @return a constant {@link Type} representing {@code type : 'object' }. + */ + static Type objectType() { + return OBJECT; + } + + /** + * @return a constant {@link Type} representing {@code type : 'array' }. + */ + static Type arrayType() { + return ARRAY; + } + + /** + * @return a constant {@link Type} representing {@code type : 'number' }. + */ + static Type numberType() { + return NUMBER; + } + + /** + * @return a constant {@link Type} representing {@code type : 'boolean' }. + */ + static Type booleanType() { + return BOOLEAN; + } + + /** + * @return a constant {@link Type} representing {@code type : 'string' }. + */ + static Type stringType() { + return STRING; + } + + /** + * @return a constant {@link Type} representing {@code type : 'null' }. + */ + static Type nullType() { + return NULL; + } + + /** + * @return new {@link Type} representing the given {@code bsonType}. + */ + static Type bsonTypeOf(String name) { + return new BsonType(name); + } + + /** + * @return new {@link Type} representing the given {@code type}. + */ + static Type jsonTypeOf(String name) { + return new JsonType(name); + } + + /** + * Create a {@link Type} with its default {@link Type#representation() representation} via the name. + * + * @param name must not be {@literal null}. + * @return the matching type instance. + * @since 2.2 + */ + static Type of(String name) { + + Type type = jsonTypeOf(name); + if (jsonTypes().contains(type)) { + return type; + } + + return bsonTypeOf(name); + } + + /** + * @return all known JSON types. + */ + static Set jsonTypes() { + return JSON_TYPES; + } + + /** + * @return all known BSON types. + */ + static Set bsonTypes() { + return BSON_TYPES; + } + + /** + * Get the {@link Type} representation. Either {@code type} or {@code bsonType}. + * + * @return never {@literal null}. + */ + String representation(); + + /** + * Get the {@link Type} value. Like {@literal string}, {@literal number},... + * + * @return never {@literal null}. + */ + Object value(); + + /** + * Get the {@literal bsonType} representation of the given type. + * + * @return never {@literal null}. 
+ * @since 2.2
+ */
+ default Type toBsonType() {
+
+ if (representation().equals("bsonType")) {
+ return this;
+ }
+
+ if (value().equals(Type.booleanType().value())) {
+ return bsonTypeOf("bool");
+ }
+ if (value().equals(Type.numberType().value())) {
+ return bsonTypeOf("long");
+ }
+
+ return bsonTypeOf((String) value());
+ }
+
+ /**
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ class JsonType implements Type {
+
+ private final String name;
+
+ public JsonType(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public String representation() {
+ return "type";
+ }
+
+ @Override
+ public String value() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ JsonType jsonType = (JsonType) o;
+
+ return ObjectUtils.nullSafeEquals(name, jsonType.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return ObjectUtils.nullSafeHashCode(name);
+ }
+ }
+
+ /**
+ * @author Christoph Strobl
+ * @since 2.1
+ */
+ class BsonType implements Type {
+
+ private final String name;
+
+ BsonType(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public String representation() {
+ return "bsonType";
+ }
+
+ @Override
+ public String value() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object o) {
+
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ BsonType bsonType = (BsonType) o;
+
+ return ObjectUtils.nullSafeEquals(name, bsonType.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return ObjectUtils.nullSafeHashCode(name);
+ }
+ }
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java
new file mode 100644
index 0000000000..a854c6184a
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java
@@ -0,0 +1,323 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.schema;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.BooleanJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.DateJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NullJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NumericJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.RequiredJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.StringJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.TimestampJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.UntypedJsonSchemaProperty;
+import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NumericJsonSchemaObject;
+import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject;
+import org.springframework.lang.Nullable;
+
+/**
+ * A {@literal property} or {@literal patternProperty} within a {@link JsonSchemaObject} of {@code type : 'object'}.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 2.1
+ */
+public interface JsonSchemaProperty extends JsonSchemaObject {
+
+ /**
+ * The identifier can be either the property name or the regex expression properties have to match when used along
+ * with {@link ObjectJsonSchemaObject#patternProperties(JsonSchemaProperty...)}.
+ *
+ * @return never {@literal null}.
+ */
+ String getIdentifier();
+
+ /**
+ * @return {@literal false} by default.
+ * @since 2.2
+ */
+ default boolean isRequired() {
+ return false;
+ }
+
+ /**
+ * Creates a new {@link UntypedJsonSchemaProperty} with given {@literal identifier} without {@code type}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link UntypedJsonSchemaProperty}.
+ */
+ static UntypedJsonSchemaProperty untyped(String identifier) {
+ return new UntypedJsonSchemaProperty(identifier, JsonSchemaObject.untyped());
+ }
+
+ /**
+ * Turns the given target property into an {@link EncryptedJsonSchemaProperty encrypted} one.
+ *
+ * @param property must not be {@literal null}.
+ * @return new instance of {@link EncryptedJsonSchemaProperty}.
+ * @since 2.2
+ */
+ static EncryptedJsonSchemaProperty encrypted(JsonSchemaProperty property) {
+ return EncryptedJsonSchemaProperty.encrypted(property);
+ }
+
+ /**
+ * Turns the given target property into a {@link QueryableJsonSchemaProperty queryable} one, e.g. for
+ * {@literal range} encrypted properties.
+ *
+ * @param property the queryable property. Must not be {@literal null}.
+ * @param queries predefined query characteristics.
+ * @return new instance of {@link QueryableJsonSchemaProperty}.
+ * @since 4.5 + */ + static QueryableJsonSchemaProperty queryable(JsonSchemaProperty property, List queries) { + return new QueryableJsonSchemaProperty(property, new QueryCharacteristics(queries)); + } + + /** + * Creates a new {@link StringJsonSchemaProperty} with given {@literal identifier} of {@code type : 'string'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link StringJsonSchemaProperty}. + */ + static StringJsonSchemaProperty string(String identifier) { + return new StringJsonSchemaProperty(identifier, JsonSchemaObject.string()); + } + + /** + * Creates a new {@link ObjectJsonSchemaProperty} with given {@literal identifier} of {@code type : 'object'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link ObjectJsonSchemaProperty}. + */ + static ObjectJsonSchemaProperty object(String identifier) { + return new ObjectJsonSchemaProperty(identifier, JsonSchemaObject.object()); + } + + /** + * Creates a new {@link JsonSchemaProperty} with given {@literal identifier} of {@code bsonType : 'objectId'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link JsonSchemaProperty}. + * @since 2.2 + */ + static JsonSchemaProperty objectId(String identifier) { + return JsonSchemaProperty.named(identifier).ofType(Type.objectIdType()); + } + + /** + * Creates a new {@link NumericJsonSchemaProperty} with given {@literal identifier} of {@code type : 'number'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link NumericJsonSchemaProperty}. + */ + static NumericJsonSchemaProperty number(String identifier) { + return new NumericJsonSchemaProperty(identifier, JsonSchemaObject.number()); + } + + /** + * Creates a new {@link NumericJsonSchemaProperty} with given {@literal identifier} of {@code bsonType : 'int'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link NumericJsonSchemaProperty}. + */ + static NumericJsonSchemaProperty int32(String identifier) { + return new NumericJsonSchemaProperty(identifier, new NumericJsonSchemaObject(Type.intType())); + } + + /** + * Creates a new {@link NumericJsonSchemaProperty} with given {@literal identifier} of {@code bsonType : 'long'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link NumericJsonSchemaProperty}. + */ + static NumericJsonSchemaProperty int64(String identifier) { + return new NumericJsonSchemaProperty(identifier, new NumericJsonSchemaObject(Type.longType())); + } + + /** + * Creates a new {@link NumericJsonSchemaProperty} with given {@literal identifier} of {@code bsonType : 'double'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link NumericJsonSchemaProperty}. 
+ */
+ static NumericJsonSchemaProperty float64(String identifier) {
+ return new NumericJsonSchemaProperty(identifier, new NumericJsonSchemaObject(Type.doubleType()));
+ }
+
+ /**
+ * Creates a new {@link NumericJsonSchemaProperty} with given {@literal identifier} of
+ * {@code bsonType : 'decimal128'}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link NumericJsonSchemaProperty}.
+ */
+ static NumericJsonSchemaProperty decimal128(String identifier) {
+ return new NumericJsonSchemaProperty(identifier, new NumericJsonSchemaObject(Type.bigDecimalType()));
+ }
+
+ /**
+ * Creates a new {@link ArrayJsonSchemaProperty} with given {@literal identifier} of {@code type : 'array'}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link ArrayJsonSchemaProperty}.
+ */
+ static ArrayJsonSchemaProperty array(String identifier) {
+ return new ArrayJsonSchemaProperty(identifier, JsonSchemaObject.array());
+ }
+
+ /**
+ * Creates a new {@link BooleanJsonSchemaProperty} with given {@literal identifier} of {@code type : 'boolean'}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link BooleanJsonSchemaProperty}.
+ */
+ static BooleanJsonSchemaProperty bool(String identifier) {
+ return new BooleanJsonSchemaProperty(identifier, JsonSchemaObject.bool());
+ }
+
+ /**
+ * Creates a new {@link NullJsonSchemaProperty} with given {@literal identifier} of {@code type : 'null'}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link NullJsonSchemaProperty}.
+ */
+ static NullJsonSchemaProperty nil(String identifier) {
+ return new NullJsonSchemaProperty(identifier, JsonSchemaObject.nil());
+ }
+
+ /**
+ * Creates a new {@link DateJsonSchemaProperty} with given {@literal identifier} of {@code type : 'date'}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link DateJsonSchemaProperty}.
+ */
+ static DateJsonSchemaProperty date(String identifier) {
+ return new DateJsonSchemaProperty(identifier, JsonSchemaObject.date());
+ }
+
+ /**
+ * Creates a new {@link TimestampJsonSchemaProperty} with given {@literal identifier} of {@code type : 'timestamp'}.
+ *
+ * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor
+ * {@literal empty}.
+ * @return new instance of {@link TimestampJsonSchemaProperty}.
+ */
+ static TimestampJsonSchemaProperty timestamp(String identifier) {
+ return new TimestampJsonSchemaProperty(identifier, JsonSchemaObject.timestamp());
+ }
+
+ /**
+ * Obtain a builder to create a {@link JsonSchemaProperty}.
+ *
+ * @param identifier must not be {@literal null}.
+ * @return new instance of {@link JsonSchemaPropertyBuilder}.
+ */
+ static JsonSchemaPropertyBuilder named(String identifier) {
+ return new JsonSchemaPropertyBuilder(identifier);
+ }
+
+ /**
+ * Turns the given {@link JsonSchemaProperty} into a required one.
+ *
+ * @param property must not be {@literal null}.
+ * @return new instance of {@link JsonSchemaProperty}. + * @since 2.2 + */ + static JsonSchemaProperty required(JsonSchemaProperty property) { + return new RequiredJsonSchemaProperty(property, true); + } + + /** + * Merges multiple {@link JsonSchemaProperty} with potentially different attributes into one. + * + * @param properties must not be {@literal null}. + * @return new instance of {@link JsonSchemaProperty}. + * @since 3.4 + */ + static JsonSchemaProperty merged(Collection properties) { + return new MergedJsonSchemaProperty(properties); + } + + /** + * Builder for {@link IdentifiableJsonSchemaProperty}. + */ + class JsonSchemaPropertyBuilder { + + private final String identifier; + + JsonSchemaPropertyBuilder(String identifier) { + this.identifier = identifier; + } + + /** + * Configure a {@link Type} for the property. + * + * @param type must not be {@literal null}. + * @return new instance of {@link IdentifiableJsonSchemaProperty}. + */ + public IdentifiableJsonSchemaProperty ofType(Type type) { + return new IdentifiableJsonSchemaProperty<>(identifier, TypedJsonSchemaObject.of(type)); + } + + /** + * Configure the {@link Type} for the property by deriving it from the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return new instance of {@link IdentifiableJsonSchemaProperty}. + * @since 2.2 + */ + public IdentifiableJsonSchemaProperty ofType(@Nullable Class type) { + return new IdentifiableJsonSchemaProperty<>(identifier, JsonSchemaObject.of(type)); + } + + /** + * Configure a {@link TypedJsonSchemaObject} for the property. + * + * @param schemaObject must not be {@literal null}. + * @return new instance of {@link IdentifiableJsonSchemaProperty}. + */ + public IdentifiableJsonSchemaProperty with(TypedJsonSchemaObject schemaObject) { + return new IdentifiableJsonSchemaProperty<>(identifier, schemaObject); + } + + /** + * @return an untyped {@link IdentifiableJsonSchemaProperty}. + */ + public IdentifiableJsonSchemaProperty withoutType() { + return new IdentifiableJsonSchemaProperty<>(identifier, UntypedJsonSchemaObject.newInstance()); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java new file mode 100644 index 0000000000..a6fc3ab8bd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java @@ -0,0 +1,68 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.schema; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +import org.bson.Document; + +/** + * {@link MongoJsonSchema} implementation that is capable of merging properties from different schemas into a single + * one. + * + * @author Christoph Strobl + * @since 3.4 + */ +class MergedJsonSchema implements MongoJsonSchema { + + private final List schemaList; + private final BiFunction, Map, Document> mergeFunction; + + MergedJsonSchema(List schemaList, ConflictResolutionFunction conflictResolutionFunction) { + this(schemaList, new TypeUnifyingMergeFunction(conflictResolutionFunction)); + } + + MergedJsonSchema(List schemaList, + BiFunction, Map, Document> mergeFunction) { + + this.schemaList = new ArrayList<>(schemaList); + this.mergeFunction = mergeFunction; + } + + @Override + public MongoJsonSchema mergeWith(Collection sources) { + + schemaList.addAll(sources); + return this; + } + + @Override + public Document schemaDocument() { + + Document targetSchema = new Document(); + for (MongoJsonSchema schema : schemaList) { + targetSchema = mergeFunction.apply(targetSchema, schema.schemaDocument()); + } + + return targetSchema; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchemaProperty.java new file mode 100644 index 0000000000..856ab772ee --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchemaProperty.java @@ -0,0 +1,77 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction; + +/** + * {@link JsonSchemaProperty} implementation that is capable of combining multiple properties with different values into + * a single one. 
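A minimal sketch of the fold performed by MergedJsonSchema above; the two single-property document schemas are illustrative:

import org.bson.Document;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

class SchemaFoldSample {

	Document sample() {

		MongoJsonSchema left = MongoJsonSchema.of(
				new Document("properties", new Document("name", new Document("type", "string"))));
		MongoJsonSchema right = MongoJsonSchema.of(
				new Document("properties", new Document("age", new Document("bsonType", "int"))));

		// non-conflicting properties from both sources end up in one document;
		// a conflicting value would raise an IllegalStateException because no
		// ConflictResolutionFunction is supplied here
		return left.mergeWith(right).schemaDocument();
	}
}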
+ * + * @author Christoph Strobl + * @since 3.4 + */ +class MergedJsonSchemaProperty implements JsonSchemaProperty { + + private final Iterable properties; + private final BiFunction, Map, Document> mergeFunction; + + MergedJsonSchemaProperty(Iterable properties) { + this(properties, (k, a, b) -> { + throw new IllegalStateException( + String.format("Error resolving conflict for '%s'; No conflict resolution function defined", k)); + }); + } + + MergedJsonSchemaProperty(Iterable properties, + ConflictResolutionFunction conflictResolutionFunction) { + this(properties, new TypeUnifyingMergeFunction(conflictResolutionFunction)); + } + + MergedJsonSchemaProperty(Iterable properties, + BiFunction, Map, Document> mergeFunction) { + + this.properties = properties; + this.mergeFunction = mergeFunction; + } + + @Override + public Set getTypes() { + return Collections.emptySet(); + } + + @Override + public Document toDocument() { + + Document document = new Document(); + + for (JsonSchemaProperty property : properties) { + document = mergeFunction.apply(document, property.toDocument()); + } + return document; + } + + @Override + public String getIdentifier() { + return properties.iterator().next().getIdentifier(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java new file mode 100644 index 0000000000..f64218cc56 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java @@ -0,0 +1,494 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.bson.Document; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Interface defining MongoDB-specific JSON schema object. New objects can be built with {@link #builder()}, for + * example: + * + *
                    + * MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname")
                    + * 		.properties(string("firstname").possibleValues("luke", "han"),
                    + * 				object("address").properties(string("postCode").minLength(4).maxLength(5))
                    + *
                    + * 		).build();
+ * </pre>
+ *
+ * resulting in the following schema:
+ *
+ * <pre>
                    + *  {
                    +  "type": "object",
                    +  "required": [ "firstname", "lastname" ],
                    +  "properties": {
                    +    "firstname": {
                    +      "type": "string", "enum": [ "luke", "han" ],
                    +    },
                    +    "address": {
                    +      "type": "object",
                    +      "properties": {
                    +        "postCode": { "type": "string", "minLength": 4, "maxLength": 5 }
                    +      }
                    +    }
                    +  }
                    +}
+ * </pre>
                    + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see UntypedJsonSchemaObject + * @see TypedJsonSchemaObject + */ +public interface MongoJsonSchema { + + /** + * Create the {@code $jsonSchema} {@link Document} containing the specified {@link #schemaDocument()}.
                    + * Property and field names need to be mapped to the domain type ones by running the {@link Document} through a + * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. + * + * @return never {@literal null}. + */ + default Document toDocument() { + return new Document("$jsonSchema", schemaDocument()); + } + + /** + * Create the {@link Document} defining the schema.
                    + * Property and field names need to be mapped to the domain type property by running the {@link Document} through a + * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. + * + * @return never {@literal null}. + * @since 3.3 + */ + Document schemaDocument(); + + /** + * Create a new {@link MongoJsonSchema} for a given root object. + * + * @param root must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + */ + static MongoJsonSchema of(JsonSchemaObject root) { + return new DefaultMongoJsonSchema(root); + } + + /** + * Create a new {@link MongoJsonSchema} for a given root {@link Document} containing the schema definition. + * + * @param document must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + */ + static MongoJsonSchema of(Document document) { + return new DocumentJsonSchema(document); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + static MongoJsonSchema merge(MongoJsonSchema... sources) { + return merge((path, left, right) -> { + throw new IllegalStateException(String.format("Cannot merge schema for path '%s' holding values '%s' and '%s'", + path.dotPath(), left, right)); + }, sources); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + static MongoJsonSchema merge(ConflictResolutionFunction mergeFunction, MongoJsonSchema... sources) { + return new MergedJsonSchema(Arrays.asList(sources), mergeFunction); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergeWith(MongoJsonSchema... sources) { + return mergeWith(Arrays.asList(sources)); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergeWith(Collection sources) { + return mergeWith(sources, (path, left, right) -> { + throw new IllegalStateException(String.format("Cannot merge schema for path '%s' holding values '%s' and '%s'", + path.dotPath(), left, right)); + }); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergeWith(Collection sources, + ConflictResolutionFunction conflictResolutionFunction) { + + List schemaList = new ArrayList<>(sources.size() + 1); + schemaList.add(this); + schemaList.addAll(new ArrayList<>(sources)); + return new MergedJsonSchema(schemaList, conflictResolutionFunction); + } + + /** + * Obtain a new {@link MongoJsonSchemaBuilder} to fluently define the schema. + * + * @return new instance of {@link MongoJsonSchemaBuilder}. 
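The merge(...) variants above accept a custom ConflictResolutionFunction; a minimal sketch that simply drops any path where the sources disagree (skip-on-conflict is just one possible policy, using the Resolution helpers defined below):

import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution;

class LenientMergeSample {

	MongoJsonSchema merge(MongoJsonSchema first, MongoJsonSchema second) {

		// instead of failing on diverging values, remove the affected entry
		return MongoJsonSchema.merge((path, left, right) -> Resolution.skip(), first, second);
	}
}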
+ */ + static MongoJsonSchemaBuilder builder() { + return new MongoJsonSchemaBuilder(); + } + + /** + * A resolution function that is called on conflicting paths when trying to merge properties with different values + * into a single value. + * + * @author Christoph Strobl + * @since 3.4 + */ + @FunctionalInterface + interface ConflictResolutionFunction { + + /** + * Resolve the conflict for two values under the same {@code path}. + * + * @param path the {@link Path} leading to the conflict. + * @param left can be {@literal null}. + * @param right can be {@literal null}. + * @return never {@literal null}. + */ + Resolution resolveConflict(Path path, @Nullable Object left, @Nullable Object right); + + /** + * @author Christoph Strobl + * @since 3.4 + */ + interface Path { + + /** + * @return the name of the currently processed element + */ + String currentElement(); + + /** + * @return the path leading to the currently processed element in dot {@literal '.'} notation. + */ + String dotPath(); + } + + /** + * The result after processing a conflict when merging schemas. May indicate to {@link #SKIP skip} the entry + * entirely. + * + * @author Christoph Strobl + * @since 3.4 + */ + interface Resolution extends Map.Entry { + + @Override + default Object setValue(Object value) { + throw new IllegalStateException("Cannot set value result; Maybe you missed to override the method"); + } + + /** + * Resolution + */ + Resolution SKIP = new Resolution() { + + @Override + public String getKey() { + throw new IllegalStateException("No key for skipped result"); + } + + @Override + public Object getValue() { + throw new IllegalStateException("No value for skipped result"); + } + + @Override + public Object setValue(Object value) { + throw new IllegalStateException("Cannot set value on skipped result"); + } + }; + + /** + * Obtain a {@link Resolution} that will skip the entry and proceed computation. + * + * @return never {@literal null}. + */ + static Resolution skip() { + return SKIP; + } + + /** + * Construct a resolution for a {@link Path} using the given {@code value}. + * + * @param path the conflicting path. + * @param value the value to apply. + * @return + */ + static Resolution ofValue(Path path, Object value) { + + Assert.notNull(path, "Path must not be null"); + + return ofValue(path.currentElement(), value); + } + + /** + * Construct a resolution from a {@code key} and {@code value}. + * + * @param key name of the path segment, typically {@link Path#currentElement()} + * @param value the value to apply. + * @return + */ + static Resolution ofValue(String key, Object value) { + + return new Resolution() { + @Override + public String getKey() { + return key; + } + + @Override + public Object getValue() { + return value; + } + }; + } + } + } + + /** + * {@link MongoJsonSchemaBuilder} provides a fluent API for defining a {@link MongoJsonSchema}. + * + * @author Christoph Strobl + */ + class MongoJsonSchemaBuilder { + + private ObjectJsonSchemaObject root; + + @Nullable // + private Document encryptionMetadata; + + MongoJsonSchemaBuilder() { + root = new ObjectJsonSchemaObject(); + } + + /** + * @param count + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#minProperties(int) + */ + public MongoJsonSchemaBuilder minProperties(int count) { + + root = root.minProperties(count); + return this; + } + + /** + * @param count + * @return {@code this} {@link MongoJsonSchemaBuilder}. 
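Path and Resolution combine into conflict policies such as the following sketch; the dot path and the replacement value are hypothetical:

import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution;

class PathAwareResolutionSample {

	// pin the type of the hypothetical 'age' property, skip all other conflicts
	static final ConflictResolutionFunction RESOLVE_AGE = (path, left, right) ->
			"properties.age.bsonType".equals(path.dotPath())
					? Resolution.ofValue(path.currentElement(), "long")
					: Resolution.skip();
}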
+ * @see ObjectJsonSchemaObject#maxProperties(int) + */ + public MongoJsonSchemaBuilder maxProperties(int count) { + + root = root.maxProperties(count); + return this; + } + + /** + * @param properties must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#required(String...) + */ + public MongoJsonSchemaBuilder required(String... properties) { + + root = root.required(properties); + return this; + } + + /** + * @param additionalPropertiesAllowed + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#additionalProperties(boolean) + */ + public MongoJsonSchemaBuilder additionalProperties(boolean additionalPropertiesAllowed) { + + root = root.additionalProperties(additionalPropertiesAllowed); + return this; + } + + /** + * @param schema must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#additionalProperties(ObjectJsonSchemaObject) + */ + public MongoJsonSchemaBuilder additionalProperties(ObjectJsonSchemaObject schema) { + + root = root.additionalProperties(schema); + return this; + } + + /** + * @param properties must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#properties(JsonSchemaProperty...) + */ + public MongoJsonSchemaBuilder properties(JsonSchemaProperty... properties) { + + root = root.properties(properties); + return this; + } + + /** + * @param properties must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#patternProperties(JsonSchemaProperty...) + */ + public MongoJsonSchemaBuilder patternProperties(JsonSchemaProperty... properties) { + + root = root.patternProperties(properties); + return this; + } + + /** + * @param property must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#property(JsonSchemaProperty) + */ + public MongoJsonSchemaBuilder property(JsonSchemaProperty property) { + + root = root.property(property); + return this; + } + + /** + * @param possibleValues must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see ObjectJsonSchemaObject#possibleValues(Collection) + */ + public MongoJsonSchemaBuilder possibleValues(Set possibleValues) { + + root = root.possibleValues(possibleValues); + return this; + } + + /** + * @param allOf must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see UntypedJsonSchemaObject#allOf(Collection) + */ + public MongoJsonSchemaBuilder allOf(Set allOf) { + + root = root.allOf(allOf); + return this; + } + + /** + * @param anyOf must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see UntypedJsonSchemaObject#anyOf(Collection) + */ + public MongoJsonSchemaBuilder anyOf(Set anyOf) { + + root = root.anyOf(anyOf); + return this; + } + + /** + * @param oneOf must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see UntypedJsonSchemaObject#oneOf(Collection) + */ + public MongoJsonSchemaBuilder oneOf(Set oneOf) { + + root = root.oneOf(oneOf); + return this; + } + + /** + * @param notMatch must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. 
+ * @see UntypedJsonSchemaObject#notMatch(JsonSchemaObject) + */ + public MongoJsonSchemaBuilder notMatch(JsonSchemaObject notMatch) { + + root = root.notMatch(notMatch); + return this; + } + + /** + * @param description must not be {@literal null}. + * @return {@code this} {@link MongoJsonSchemaBuilder}. + * @see UntypedJsonSchemaObject#description(String) + */ + public MongoJsonSchemaBuilder description(String description) { + + root = root.description(description); + return this; + } + + /** + * Define the {@literal encryptMetadata} element of the schema. + * + * @param encryptionMetadata can be {@literal null}. + * @since 3.3 + */ + public void encryptionMetadata(@Nullable Document encryptionMetadata) { + this.encryptionMetadata = encryptionMetadata; + } + + /** + * Obtain the {@link MongoJsonSchema}. + * + * @return new instance of {@link MongoJsonSchema}. + */ + public MongoJsonSchema build() { + return new DefaultMongoJsonSchema(root, encryptionMetadata); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java new file mode 100644 index 0000000000..8604ba9d6c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java @@ -0,0 +1,40 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import org.bson.Document; + +/** + * Defines the specific character of a query that can be executed. Mainly used to define the characteristic of queryable + * encrypted fields. + * + * @author Christoph Strobl + * @since 4.5 + */ +public interface QueryCharacteristic { + + /** + * @return the query type, eg. {@literal range}. + */ + String queryType(); + + /** + * @return the raw {@link Document} representation of the instance. + */ + default Document toDocument() { + return new Document("queryType", queryType()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java new file mode 100644 index 0000000000..4ec775c5e7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java @@ -0,0 +1,263 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.lang.Nullable; + +/** + * Encapsulation of individual {@link QueryCharacteristic query characteristics} used to define queries that can be + * executed when using queryable encryption. + * + * @author Christoph Strobl + * @since 4.5 + */ +public class QueryCharacteristics implements Iterable { + + /** + * instance indicating none + */ + private static final QueryCharacteristics NONE = new QueryCharacteristics(Collections.emptyList()); + + private final List characteristics; + + QueryCharacteristics(List characteristics) { + this.characteristics = characteristics; + } + + /** + * @return marker instance indicating no characteristics have been defined. + */ + public static QueryCharacteristics none() { + return NONE; + } + + /** + * Create new {@link QueryCharacteristics} from given list of {@link QueryCharacteristic characteristics}. + * + * @param characteristics must not be {@literal null}. + * @return new instance of {@link QueryCharacteristics}. + */ + public static QueryCharacteristics of(List characteristics) { + return new QueryCharacteristics(List.copyOf(characteristics)); + } + + /** + * Create new {@link QueryCharacteristics} from given {@link QueryCharacteristic characteristics}. + * + * @param characteristics must not be {@literal null}. + * @return new instance of {@link QueryCharacteristics}. + */ + public static QueryCharacteristics of(QueryCharacteristic... characteristics) { + return new QueryCharacteristics(Arrays.asList(characteristics)); + } + + /** + * @return the list of {@link QueryCharacteristic characteristics}. + */ + public List getCharacteristics() { + return characteristics; + } + + @Override + public Iterator iterator() { + return this.characteristics.iterator(); + } + + /** + * Create a new {@link RangeQuery range query characteristic} used to define range queries against an encrypted field. + * + * @param targeted field type + * @return new instance of {@link RangeQuery}. + */ + public static RangeQuery range() { + return new RangeQuery<>(); + } + + /** + * Create a new {@link EqualityQuery equality query characteristic} used to define equality queries against an + * encrypted field. + * + * @param targeted field type + * @return new instance of {@link EqualityQuery}. + */ + public static EqualityQuery equality() { + return new EqualityQuery<>(null); + } + + /** + * {@link QueryCharacteristic} for equality comparison. + * + * @param + * @since 4.5 + */ + public static class EqualityQuery implements QueryCharacteristic { + + private final @Nullable Long contention; + + /** + * Create new instance of {@link EqualityQuery}. + * + * @param contention can be {@literal null}. + */ + public EqualityQuery(@Nullable Long contention) { + this.contention = contention; + } + + /** + * @param contention concurrent counter partition factor. + * @return new instance of {@link EqualityQuery}. 
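Sketch of an equality characteristic; the contention factor of 8 is arbitrary:

import org.springframework.data.mongodb.core.schema.QueryCharacteristic;
import org.springframework.data.mongodb.core.schema.QueryCharacteristics;

class EqualityCharacteristicSample {

	QueryCharacteristics sample() {

		// renders as { 'queryType' : 'equality', 'contention' : 8 }
		QueryCharacteristic equality = QueryCharacteristics.equality().contention(8);
		return QueryCharacteristics.of(equality);
	}
}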
+ */ + public EqualityQuery contention(long contention) { + return new EqualityQuery<>(contention); + } + + @Override + public String queryType() { + return "equality"; + } + + @Override + public Document toDocument() { + return QueryCharacteristic.super.toDocument().append("contention", contention); + } + } + + /** + * {@link QueryCharacteristic} for range comparison. + * + * @param + * @since 4.5 + */ + public static class RangeQuery implements QueryCharacteristic { + + private final @Nullable Range valueRange; + private final @Nullable Integer trimFactor; + private final @Nullable Long sparsity; + private final @Nullable Long precision; + private final @Nullable Long contention; + + private RangeQuery() { + this(Range.unbounded(), null, null, null, null); + } + + /** + * Create new instance of {@link RangeQuery}. + * + * @param valueRange + * @param trimFactor + * @param sparsity + * @param contention + */ + public RangeQuery(@Nullable Range valueRange, @Nullable Integer trimFactor, @Nullable Long sparsity, + @Nullable Long precision, @Nullable Long contention) { + this.valueRange = valueRange; + this.trimFactor = trimFactor; + this.sparsity = sparsity; + this.precision = precision; + this.contention = contention; + } + + /** + * @param lower the lower value range boundary for the queryable field. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery min(T lower) { + + Range range = Range.of(Bound.inclusive(lower), + valueRange != null ? valueRange.getUpperBound() : Bound.unbounded()); + return new RangeQuery<>(range, trimFactor, sparsity, precision, contention); + } + + /** + * @param upper the upper value range boundary for the queryable field. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery max(T upper) { + + Range range = Range.of(valueRange != null ? valueRange.getLowerBound() : Bound.unbounded(), + Bound.inclusive(upper)); + return new RangeQuery<>(range, trimFactor, sparsity, precision, contention); + } + + /** + * @param trimFactor value to control the throughput of concurrent inserts and updates. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery trimFactor(int trimFactor) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param sparsity value to control the value density within the index. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery sparsity(long sparsity) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param contention concurrent counter partition factor. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery contention(long contention) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param precision digits considered comparing floating point numbers. + * @return new instance of {@link RangeQuery}. 
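RangeQuery plugs into JsonSchemaProperty.queryable(...) from earlier in this patch; a sketch, assuming the erased parameter reads List<QueryCharacteristic> (bounds and factors are arbitrary):

import java.util.List;

import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
import org.springframework.data.mongodb.core.schema.QueryCharacteristics;

class RangeCharacteristicSample {

	JsonSchemaProperty sample() {

		// roughly { 'queryType' : 'range', 'contention' : 4, 'min' : 0, 'max' : 150, 'sparsity' : 1 }
		return JsonSchemaProperty.queryable(JsonSchemaProperty.int32("age"),
				List.of(QueryCharacteristics.<Integer> range().min(0).max(150).sparsity(1).contention(4)));
	}
}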
+ */ + public RangeQuery precision(long precision) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + @Override + public String queryType() { + return "range"; + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument() { + + Document target = QueryCharacteristic.super.toDocument(); + if (contention != null) { + target.append("contention", contention); + } + if (trimFactor != null) { + target.append("trimFactor", trimFactor); + } + if (valueRange != null) { + target.append("min", valueRange.getLowerBound().getValue().orElse((T) BsonNull.VALUE)).append("max", + valueRange.getUpperBound().getValue().orElse((T) BsonNull.VALUE)); + } + if (sparsity != null) { + target.append("sparsity", sparsity); + } + + return target; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunction.java new file mode 100644 index 0000000000..95f116619f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunction.java @@ -0,0 +1,172 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Path; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Merge function considering BSON type hints. Conflicts are resolved through a {@link ConflictResolutionFunction}. 
+ * + * @author Christoph Strobl + * @since 3.4 + */ +class TypeUnifyingMergeFunction implements BiFunction, Map, Document> { + + private final ConflictResolutionFunction conflictResolutionFunction; + + public TypeUnifyingMergeFunction(ConflictResolutionFunction conflictResolutionFunction) { + this.conflictResolutionFunction = conflictResolutionFunction; + } + + @Override + public Document apply(Map left, Map right) { + return merge(SimplePath.root(), left, right); + } + + @SuppressWarnings("unchecked") + Document merge(SimplePath path, Map left, Map right) { + + Document target = new Document(left); + + for (String key : right.keySet()) { + + SimplePath currentPath = path.append(key); + if (isTypeKey(key)) { + + Object unifiedExistingType = getUnifiedExistingType(key, target); + + if (unifiedExistingType != null) { + if (!ObjectUtils.nullSafeEquals(unifiedExistingType, right.get(key))) { + resolveConflict(currentPath, left, right, target); + } + continue; + } + } + + if (!target.containsKey(key)) { + target.put(key, right.get(key)); + continue; + } + + Object existingEntry = target.get(key); + Object newEntry = right.get(key); + if (existingEntry instanceof Map && newEntry instanceof Map) { + target.put(key, merge(currentPath, (Map) existingEntry, (Map) newEntry)); + } else if (!ObjectUtils.nullSafeEquals(existingEntry, newEntry)) { + resolveConflict(currentPath, left, right, target); + } + } + + return target; + } + + private void resolveConflict(Path path, Map left, Map right, Document target) { + applyConflictResolution(path, target, conflictResolutionFunction.resolveConflict(path, left, right)); + } + + private void applyConflictResolution(Path path, Document target, Resolution resolution) { + + if (Resolution.SKIP.equals(resolution) || resolution.getValue() == null) { + target.remove(path.currentElement()); + return; + } + + if (isTypeKey(resolution.getKey())) { + target.put(getTypeKeyToUse(resolution.getKey(), target), resolution.getValue()); + } else { + target.put(resolution.getKey(), resolution.getValue()); + } + } + + private static boolean isTypeKey(String key) { + return "bsonType".equals(key) || "type".equals(key); + } + + private static String getTypeKeyToUse(String key, Document source) { + + if ("bsonType".equals(key) && source.containsKey("type")) { + return "type"; + } + if ("type".equals(key) && source.containsKey("bsonType")) { + return "bsonType"; + } + return key; + } + + @Nullable + private static Object getUnifiedExistingType(String key, Document source) { + return source.get(getTypeKeyToUse(key, source)); + } + + /** + * Trivial {@link List} based {@link Path} implementation. 
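The type-key unification is easiest to see through the public merge API; a sketch (the fixed replacement value is arbitrary):

import org.bson.Document;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution;

class TypeUnificationSample {

	Document sample() {

		MongoJsonSchema left = MongoJsonSchema.of(new Document("type", "number"));
		MongoJsonSchema right = MongoJsonSchema.of(new Document("bsonType", "int"));

		// 'type' and 'bsonType' count as one logical key, so the differing values
		// surface as a conflict rather than ending up side by side; resolving with
		// a fixed value yields { 'type' : 'int' }
		return MongoJsonSchema.merge((path, lhs, rhs) -> Resolution.ofValue(path, "int"), left, right)
				.schemaDocument();
	}
}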
+ * + * @author Christoph Strobl + * @since 3.4 + */ + static class SimplePath implements Path { + + private final List path; + + SimplePath(List path) { + this.path = path; + } + + static SimplePath root() { + return new SimplePath(Collections.emptyList()); + } + + static SimplePath of(List path, String next) { + + List fullPath = new ArrayList<>(path.size() + 1); + fullPath.addAll(path); + fullPath.add(next); + return new SimplePath(fullPath); + } + + public SimplePath append(String next) { + return of(this.path, next); + } + + @Override + public String currentElement() { + return CollectionUtils.lastElement(path); + } + + @Override + public String dotPath() { + return StringUtils.collectionToDelimitedString(path, "."); + } + + @Override + public String toString() { + return dotPath(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java new file mode 100644 index 0000000000..abf8b0b8a2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java @@ -0,0 +1,1371 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * A {@link JsonSchemaObject} of a given {@link org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Michał Kurcius + * @since 2.1 + */ +public class TypedJsonSchemaObject extends UntypedJsonSchemaObject { + + protected final Set types; + + /** + * @param type can be {@literal null}. + * @param description can be {@literal null}. + * @param restrictions can be {@literal null}. + */ + TypedJsonSchemaObject(@Nullable Type type, @Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + + this(type != null ? Collections.singleton(type) : Collections.emptySet(), description, generateDescription, + restrictions); + } + + /** + * @param types must not be {@literal null}. + * @param description can be {@literal null}. + * @param restrictions can be {@literal null}. Defaults to {@link Restrictions#empty()}. 
+ */ + TypedJsonSchemaObject(Set types, @Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + + super(restrictions, description, generateDescription); + + Assert.notNull(types, "Types must not be null Please consider using 'Collections.emptySet()'"); + + this.types = types; + } + + /** + * Creates new {@link TypedJsonSchemaObject} of given types. + * + * @param types must not be {@literal null}. + * @return + */ + public static TypedJsonSchemaObject of(Type... types) { + + Assert.notNull(types, "Types must not be null"); + Assert.noNullElements(types, "Types must not contain null"); + + return new TypedJsonSchemaObject(new LinkedHashSet<>(Arrays.asList(types)), null, false, Restrictions.empty()); + } + + @Override + public Set getTypes() { + return types; + } + + /** + * Set the {@literal description}. + * + * @param description must not be {@literal null}. + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject description(String description) { + return new TypedJsonSchemaObject(types, description, generateDescription, restrictions); + } + + /** + * Auto generate the {@literal description} if not explicitly set. + * + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject generatedDescription() { + return new TypedJsonSchemaObject(types, description, true, restrictions); + } + + /** + * {@literal enum}erates all possible values of the field. + * + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject possibleValues(Collection possibleValues) { + return new TypedJsonSchemaObject(types, description, generateDescription, + restrictions.possibleValues(possibleValues)); + } + + /** + * The field value must match all specified schemas. + * + * @param allOf must not be {@literal null}. + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject allOf(Collection allOf) { + return new TypedJsonSchemaObject(types, description, generateDescription, restrictions.allOf(allOf)); + } + + /** + * The field value must match at least one of the specified schemas. + * + * @param anyOf must not be {@literal null}. + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject anyOf(Collection anyOf) { + return new TypedJsonSchemaObject(types, description, generateDescription, restrictions.anyOf(anyOf)); + } + + /** + * The field value must match exactly one of the specified schemas. + * + * @param oneOf must not be {@literal null}. + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject oneOf(Collection oneOf) { + return new TypedJsonSchemaObject(types, description, generateDescription, restrictions.oneOf(oneOf)); + } + + /** + * The field value must not match the specified schemas. + * + * @param notMatch must not be {@literal null}. + * @return new instance of {@link TypedJsonSchemaObject}. + */ + @Override + public TypedJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return new TypedJsonSchemaObject(types, description, generateDescription, restrictions.notMatch(notMatch)); + } + + /** + * Create the JSON schema complying {@link Document} representation. This includes {@literal type}, + * {@literal description} and the fields of {@link Restrictions#toDocument()} if set. 
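A sketch of the resulting immutable API; the values are illustrative:

import java.util.List;

import org.bson.Document;
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;

class TypedSchemaObjectSample {

	Document sample() {

		// every call returns a fresh instance; the source object stays untouched
		TypedJsonSchemaObject schema = TypedJsonSchemaObject.of(Type.stringType())
				.possibleValues(List.of("luke", "han"))
				.description("One of the well-known first names.");

		// { 'type' : 'string', 'description' : '...', 'enum' : ['luke', 'han'] }
		return schema.toDocument();
	}
}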
+ */ + @Override + public Document toDocument() { + + Document document = new Document(); + + if (!CollectionUtils.isEmpty(types)) { + + Type theType = types.iterator().next(); + if (types.size() == 1) { + document.append(theType.representation(), theType.value()); + } else { + document.append(theType.representation(), types.stream().map(Type::value).collect(Collectors.toList())); + } + } + + getOrCreateDescription().ifPresent(val -> document.append("description", val)); + document.putAll(restrictions.toDocument()); + + return document; + } + + private Optional getOrCreateDescription() { + + if (description != null) { + return description.isEmpty() ? Optional.empty() : Optional.of(description); + } + + return generateDescription ? Optional.ofNullable(generateDescription()) : Optional.empty(); + } + + /** + * Customization hook for creating description out of defined values.
                    + * Called by {@link #toDocument()} when no explicit {@link #description} is set. + * + * @return can be {@literal null}. + */ + @Nullable + protected String generateDescription() { + return null; + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'object'} schema elements.
                    + * Provides programmatic access to schema specifics like {@literal required, properties, patternProperties,...} via a + * fluent API producing immutable {@link JsonSchemaObject schema objects}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class ObjectJsonSchemaObject extends TypedJsonSchemaObject { + + private @Nullable Range propertiesCount; + private @Nullable Object additionalProperties; + private List requiredProperties = Collections.emptyList(); + private List properties = Collections.emptyList(); + private List patternProperties = Collections.emptyList(); + + public ObjectJsonSchemaObject() { + this(null, false, null); + } + + /** + * @param description can be {@literal null}. + * @param restrictions can be {@literal null}; + */ + ObjectJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Type.objectType(), description, generateDescription, restrictions); + } + + /** + * Define the {@literal minProperties} and {@literal maxProperties} via the given {@link Range}.
                    + * In-/Exclusions via {@link Bound#isInclusive() range bounds} are not taken into account. + * + * @param range must not be {@literal null}. Consider {@link Range#unbounded()} instead. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject propertiesCount(Range range) { + + ObjectJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.propertiesCount = range; + return newInstance; + } + + /** + * Define the {@literal minProperties}. + * + * @param count the allowed minimal number of properties. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject minProperties(int count) { + + Bound upper = this.propertiesCount != null ? this.propertiesCount.getUpperBound() : Bound.unbounded(); + return propertiesCount(Range.of(Bound.inclusive(count), upper)); + } + + /** + * Define the {@literal maxProperties}. + * + * @param count the allowed maximum number of properties. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject maxProperties(int count) { + + Bound lower = this.propertiesCount != null ? this.propertiesCount.getLowerBound() : Bound.unbounded(); + return propertiesCount(Range.of(lower, Bound.inclusive(count))); + } + + /** + * Define the Object’s {@literal required} properties. + * + * @param properties the names of required properties. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject required(String... properties) { + + ObjectJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.requiredProperties = new ArrayList<>(this.requiredProperties.size() + properties.length); + newInstance.requiredProperties.addAll(this.requiredProperties); + newInstance.requiredProperties.addAll(Arrays.asList(properties)); + + return newInstance; + } + + /** + * If set to {@literal false}, additional fields besides + * {@link #properties(JsonSchemaProperty...)}/{@link #patternProperties(JsonSchemaProperty...)} are not allowed. + * + * @param additionalPropertiesAllowed + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject additionalProperties(boolean additionalPropertiesAllowed) { + + ObjectJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.additionalProperties = additionalPropertiesAllowed; + + return newInstance; + } + + /** + * If specified, additional fields must validate against the given schema. + * + * @param schema must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject additionalProperties(ObjectJsonSchemaObject schema) { + + ObjectJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.additionalProperties = schema; + return newInstance; + } + + /** + * Append the objects properties along with the {@link JsonSchemaObject} validating against. + * + * @param properties must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject properties(JsonSchemaProperty... 
properties) { + + ObjectJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.properties = new ArrayList<>(this.properties.size() + properties.length); + newInstance.properties.addAll(this.properties); + newInstance.properties.addAll(Arrays.asList(properties)); + + return newInstance; + } + + /** + * Append regular expression patterns along with the {@link JsonSchemaObject} matching properties validating + * against. + * + * @param regularExpressions must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject patternProperties(JsonSchemaProperty... regularExpressions) { + + ObjectJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.patternProperties = new ArrayList<>(this.patternProperties.size() + regularExpressions.length); + newInstance.patternProperties.addAll(this.patternProperties); + newInstance.patternProperties.addAll(Arrays.asList(regularExpressions)); + + return newInstance; + } + + /** + * Append the objects property along with the {@link JsonSchemaObject} validating against. + * + * @param property must not be {@literal null}. + * @return new instance of {@link ObjectJsonSchemaObject}. + */ + public ObjectJsonSchemaObject property(JsonSchemaProperty property) { + return properties(property); + } + + @Override + public ObjectJsonSchemaObject possibleValues(Collection possibleValues) { + return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public ObjectJsonSchemaObject allOf(Collection allOf) { + return newInstance(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public ObjectJsonSchemaObject anyOf(Collection anyOf) { + return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public ObjectJsonSchemaObject oneOf(Collection oneOf) { + return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public ObjectJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public ObjectJsonSchemaObject description(String description) { + return newInstance(description, generateDescription, restrictions); + } + + @Override + public ObjectJsonSchemaObject generatedDescription() { + return newInstance(description, true, restrictions); + } + + public List getProperties() { + return properties; + } + + @Override + public Document toDocument() { + + Document doc = new Document(super.toDocument()); + Collection allRequiredProperties = requiredProperties(); + if (!CollectionUtils.isEmpty(allRequiredProperties)) { + doc.append("required", new ArrayList<>(allRequiredProperties)); + } + + if (propertiesCount != null) { + + propertiesCount.getLowerBound().getValue().ifPresent(it -> doc.append("minProperties", it)); + propertiesCount.getUpperBound().getValue().ifPresent(it -> doc.append("maxProperties", it)); + } + + if (!CollectionUtils.isEmpty(properties)) { + doc.append("properties", reduceToDocument(properties)); + } + + if (!CollectionUtils.isEmpty(patternProperties)) { + doc.append("patternProperties", reduceToDocument(patternProperties)); + } + + if (additionalProperties != null) { + + doc.append("additionalProperties", + additionalProperties instanceof JsonSchemaObject schemaObject ? 
schemaObject.toDocument() + : additionalProperties); + } + return doc; + } + + private Collection requiredProperties() { + + Set target = new LinkedHashSet<>(); + target.addAll(requiredProperties); + properties.stream().filter(JsonSchemaProperty::isRequired).forEach(it -> target.add(it.getIdentifier())); + return target; + } + + private ObjectJsonSchemaObject newInstance(@Nullable String description, boolean generateDescription, + Restrictions restrictions) { + + ObjectJsonSchemaObject newInstance = new ObjectJsonSchemaObject(description, generateDescription, restrictions); + + newInstance.properties = this.properties; + newInstance.requiredProperties = this.requiredProperties; + newInstance.additionalProperties = this.additionalProperties; + newInstance.propertiesCount = this.propertiesCount; + newInstance.patternProperties = this.patternProperties; + + return newInstance; + } + + private Document reduceToDocument(Collection source) { + + return source.stream() // + .map(JsonSchemaProperty::toDocument) // + .collect(Document::new, Document::putAll, (target, propertyDocument) -> {}); + } + + @Override + protected String generateDescription() { + + String description = "Must be an object"; + + if (propertiesCount != null) { + description += String.format(" with %s properties", propertiesCount); + } + + if (!CollectionUtils.isEmpty(requiredProperties)) { + + if (requiredProperties.size() == 1) { + description += String.format(" where %sis mandatory", requiredProperties.iterator().next()); + } else { + description += String.format(" where %s are mandatory", + StringUtils.collectionToDelimitedString(requiredProperties, ", ")); + } + } + if (additionalProperties instanceof Boolean booleanValue) { + description += (booleanValue ? " " : " not ") + "allowing additional properties"; + } + + if (!CollectionUtils.isEmpty(properties)) { + description += String.format(" defining restrictions for %s", StringUtils.collectionToDelimitedString( + properties.stream().map(JsonSchemaProperty::getIdentifier).collect(Collectors.toList()), ", ")); + } + + if (!CollectionUtils.isEmpty(patternProperties)) { + description += String.format(" defining restrictions for patterns %s", StringUtils.collectionToDelimitedString( + patternProperties.stream().map(JsonSchemaProperty::getIdentifier).collect(Collectors.toList()), ", ")); + } + + return description + "."; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'number'}, {@code bsonType : 'int'}, + * {@code bsonType : 'long'}, {@code bsonType : 'double'} and {@code bsonType : 'decimal128'} schema elements.
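Putting the ObjectJsonSchemaObject above together; a sketch (property names are illustrative):

import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*;

import org.bson.Document;
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject;

class ObjectSchemaSample {

	Document sample() {

		ObjectJsonSchemaObject schema = new ObjectJsonSchemaObject()
				.minProperties(1)              // lower bound of 'propertiesCount'
				.required("lastname")          // explicitly required
				.additionalProperties(false)   // reject undeclared fields
				.properties(string("lastname"), required(int32("age"))); // 'age' joins 'required' via the property flag

		return schema.toDocument();
	}
}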
                    + * Provides programmatic access to schema specifics like {@literal multipleOf, minimum, maximum,...} via a fluent API + * producing immutable {@link JsonSchemaObject schema objects}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class NumericJsonSchemaObject extends TypedJsonSchemaObject { + + private static final Set NUMERIC_TYPES = Set.of(Type.doubleType(), Type.intType(), Type.longType(), Type.numberType(), Type.bigDecimalType()); + + @Nullable Number multipleOf; + @Nullable Range range; + + NumericJsonSchemaObject() { + this(Type.numberType()); + } + + NumericJsonSchemaObject(Type type) { + this(type, null, false); + } + + private NumericJsonSchemaObject(Type type, @Nullable String description, boolean generateDescription) { + this(Collections.singleton(type), description, generateDescription, null); + } + + private NumericJsonSchemaObject(Set types, @Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + + super(validateTypes(types), description, generateDescription, restrictions); + } + + /** + * Set the value a valid field value must be the multiple of. + * + * @param value must not be {@literal null}. + * @return must not be {@literal null}. + */ + public NumericJsonSchemaObject multipleOf(Number value) { + + Assert.notNull(value, "Value must not be null"); + NumericJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.multipleOf = value; + + return newInstance; + } + + /** + * Set the {@link Range} of valid field values translating to {@literal minimum}, {@literal exclusiveMinimum}, + * {@literal maximum} and {@literal exclusiveMaximum}. + * + * @param range must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaObject}. + */ + public NumericJsonSchemaObject within(Range range) { + + Assert.notNull(range, "Range must not be null"); + + NumericJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.range = range; + + return newInstance; + } + + /** + * Set {@literal minimum} to given {@code min} value and {@literal exclusiveMinimum} to {@literal true}. + * + * @param min must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaObject}. + */ + @SuppressWarnings("unchecked") + public NumericJsonSchemaObject gt(Number min) { + + Assert.notNull(min, "Min must not be null"); + + Bound upper = this.range != null ? this.range.getUpperBound() : Bound.unbounded(); + return within(Range.of(createBound(min, false), upper)); + } + + /** + * Set {@literal minimum} to given {@code min} value and {@literal exclusiveMinimum} to {@literal false}. + * + * @param min must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaObject}. + */ + @SuppressWarnings("unchecked") + public NumericJsonSchemaObject gte(Number min) { + + Assert.notNull(min, "Min must not be null"); + + Bound upper = this.range != null ? this.range.getUpperBound() : Bound.unbounded(); + return within(Range.of(createBound(min, true), upper)); + } + + /** + * Set {@literal maximum} to given {@code max} value and {@literal exclusiveMaximum} to {@literal true}. + * + * @param max must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaObject}. + */ + @SuppressWarnings("unchecked") + public NumericJsonSchemaObject lt(Number max) { + + Assert.notNull(max, "Max must not be null"); + + Bound lower = this.range != null ? 
this.range.getLowerBound() : Bound.unbounded(); + return within(Range.of(lower, createBound(max, false))); + } + + /** + * Set {@literal maximum} to given {@code max} value and {@literal exclusiveMaximum} to {@literal false}. + * + * @param max must not be {@literal null}. + * @return new instance of {@link NumericJsonSchemaObject}. + */ + @SuppressWarnings("unchecked") + public NumericJsonSchemaObject lte(Number max) { + + Assert.notNull(max, "Max must not be null"); + + Bound lower = this.range != null ? this.range.getLowerBound() : Bound.unbounded(); + return within(Range.of(lower, createBound(max, true))); + } + + @Override + public NumericJsonSchemaObject possibleValues(Collection possibleValues) { + return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public NumericJsonSchemaObject allOf(Collection allOf) { + return newInstance(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public NumericJsonSchemaObject anyOf(Collection anyOf) { + return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public NumericJsonSchemaObject oneOf(Collection oneOf) { + return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public NumericJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public NumericJsonSchemaObject description(String description) { + return newInstance(description, generateDescription, restrictions); + } + + @Override + public NumericJsonSchemaObject generatedDescription() { + return newInstance(description, true, restrictions); + } + + @Override + public Document toDocument() { + + Document doc = new Document(super.toDocument()); + + if (multipleOf != null) { + doc.append("multipleOf", multipleOf); + } + + if (range != null) { + + if (range.getLowerBound().isBounded()) { + + range.getLowerBound().getValue().ifPresent(it -> doc.append("minimum", it)); + if (!range.getLowerBound().isInclusive()) { + doc.append("exclusiveMinimum", true); + } + } + + if (range.getUpperBound().isBounded()) { + + range.getUpperBound().getValue().ifPresent(it -> doc.append("maximum", it)); + if (!range.getUpperBound().isInclusive()) { + doc.append("exclusiveMaximum", true); + } + } + } + + return doc; + } + + private NumericJsonSchemaObject newInstance(@Nullable String description, boolean generateDescription, + Restrictions restrictions) { + + NumericJsonSchemaObject newInstance = new NumericJsonSchemaObject(types, description, generateDescription, + restrictions); + + newInstance.multipleOf = this.multipleOf; + newInstance.range = this.range; + + return newInstance; + + } + + private static Bound createBound(Number number, boolean inclusive) { + + if (number instanceof Long longValue) { + return inclusive ? Bound.inclusive(longValue) : Bound.exclusive(longValue); + } + if (number instanceof Double doubleValue) { + return inclusive ? Bound.inclusive(doubleValue) : Bound.exclusive(doubleValue); + } + if (number instanceof Float floatValue) { + return inclusive ? Bound.inclusive(floatValue) : Bound.exclusive(floatValue); + } + if (number instanceof Integer integerValue) { + return inclusive ? Bound.inclusive(integerValue) : Bound.exclusive(integerValue); + } + if (number instanceof BigDecimal bigDecimalValue) { + return inclusive ? 
Bound.inclusive(bigDecimalValue) : Bound.exclusive(bigDecimalValue); + } + + throw new IllegalArgumentException("Unsupported numeric value"); + } + + private static Set validateTypes(Set types) { + + types.forEach(type -> { + Assert.isTrue(NUMERIC_TYPES.contains(type), + () -> String.format("%s is not a valid numeric type; Expected one of %s", type, NUMERIC_TYPES)); + }); + + return types; + } + + @Override + protected String generateDescription() { + + String description = "Must be a numeric value"; + + if (multipleOf != null) { + description += String.format(" multiple of %s", multipleOf); + } + if (range != null) { + description += String.format(" within range %s", range); + } + + return description + "."; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'string'} schema elements.
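+ * A usage sketch (assuming the static {@code JsonSchemaObject.string()} factory; the rendered {@link Document} is
+ * approximate): <pre class="code">
+ * StringJsonSchemaObject name = JsonSchemaObject.string().minLength(3).maxLength(64).matching("^spring.*");
+ * name.toDocument(); // { 'type' : 'string', 'minLength' : 3, 'maxLength' : 64, 'pattern' : '^spring.*' }
+ * </pre>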
                    + * Provides programmatic access to schema specifics like {@literal minLength, maxLength, pattern,...} via a fluent API + * producing immutable {@link JsonSchemaObject schema objects}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class StringJsonSchemaObject extends TypedJsonSchemaObject { + + @Nullable Range length; + @Nullable String pattern; + + StringJsonSchemaObject() { + this(null, false, null); + } + + private StringJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Type.stringType(), description, generateDescription, restrictions); + } + + /** + * Define the valid length range ({@literal minLength} and {@literal maxLength}) for a valid field. + * + * @param range must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaObject}. + */ + public StringJsonSchemaObject length(Range range) { + + Assert.notNull(range, "Range must not be null"); + + StringJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.length = range; + + return newInstance; + } + + /** + * Define the valid length range ({@literal minLength}) for a valid field. + * + * @param length + * @return new instance of {@link StringJsonSchemaObject}. + */ + public StringJsonSchemaObject minLength(int length) { + + Bound upper = this.length != null ? this.length.getUpperBound() : Bound.unbounded(); + return length(Range.of(Bound.inclusive(length), upper)); + } + + /** + * Define the valid length range ({@literal maxLength}) for a valid field. + * + * @param length + * @return new instance of {@link StringJsonSchemaObject}. + */ + public StringJsonSchemaObject maxLength(int length) { + + Bound lower = this.length != null ? this.length.getLowerBound() : Bound.unbounded(); + return length(Range.of(lower, Bound.inclusive(length))); + } + + /** + * Define the regex pattern to validate field values against. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link StringJsonSchemaObject}. 
+ */ + public StringJsonSchemaObject matching(String pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + StringJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.pattern = pattern; + + return newInstance; + } + + @Override + public StringJsonSchemaObject possibleValues(Collection possibleValues) { + return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public StringJsonSchemaObject allOf(Collection allOf) { + return newInstance(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public StringJsonSchemaObject anyOf(Collection anyOf) { + return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public StringJsonSchemaObject oneOf(Collection oneOf) { + return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public StringJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public StringJsonSchemaObject description(String description) { + return newInstance(description, generateDescription, restrictions); + } + + @Override + public StringJsonSchemaObject generatedDescription() { + return newInstance(description, true, restrictions); + } + + @Override + public Document toDocument() { + + Document doc = new Document(super.toDocument()); + + if (length != null) { + + length.getLowerBound().getValue().ifPresent(it -> doc.append("minLength", it)); + length.getUpperBound().getValue().ifPresent(it -> doc.append("maxLength", it)); + } + + if (StringUtils.hasText(pattern)) { + doc.append("pattern", pattern); + } + + return doc; + } + + private StringJsonSchemaObject newInstance(@Nullable String description, boolean generateDescription, + Restrictions restrictions) { + + StringJsonSchemaObject newInstance = new StringJsonSchemaObject(description, generateDescription, restrictions); + + newInstance.length = this.length; + newInstance.pattern = this.pattern; + + return newInstance; + } + + @Override + protected String generateDescription() { + + String description = "Must be a string"; + + if (length != null) { + description += String.format(" with length %s", length); + } + if (pattern != null) { + description += String.format(" matching %s", pattern); + } + + return description + "."; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'array'} schema elements.
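+ * A usage sketch (assuming the static {@code JsonSchemaObject.array()} and {@code JsonSchemaObject.string()}
+ * factories; the rendered {@link Document} is approximate): <pre class="code">
+ * ArrayJsonSchemaObject tags = JsonSchemaObject.array().uniqueItems(true).maxItems(10)
+ * 		.items(Collections.singleton(JsonSchemaObject.string()));
+ * tags.toDocument(); // { 'type' : 'array', 'items' : { 'type' : 'string' }, 'maxItems' : 10, 'uniqueItems' : true }
+ * </pre>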
+ * Provides programmatic access to schema specifics like {@literal range, minItems, maxItems,...} via a fluent API + * producing immutable {@link JsonSchemaObject schema objects}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ + public static class ArrayJsonSchemaObject extends TypedJsonSchemaObject { + + private @Nullable Boolean uniqueItems; + private @Nullable Boolean additionalItems; + private @Nullable Range range; + private Collection items = Collections.emptyList(); + + ArrayJsonSchemaObject() { + this(null, false, null); + } + + private ArrayJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Collections.singleton(Type.arrayType()), description, generateDescription, restrictions); + } + + /** + * Define whether the array must contain unique items. + * + * @param uniqueItems {@literal true} if the array items must be unique. + * @return new instance of {@link ArrayJsonSchemaObject}. + */ + public ArrayJsonSchemaObject uniqueItems(boolean uniqueItems) { + + ArrayJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.uniqueItems = uniqueItems; + + return newInstance; + } + + /** + * Define the {@literal minItems} and {@literal maxItems} via the given {@link Range}.
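+ * For example, {@code range(Range.of(Bound.inclusive(2), Bound.inclusive(5)))} would render
+ * {@code minItems : 2, maxItems : 5} (a sketch; only the bound values are rendered).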
+ * In-/Exclusions via {@link Bound#isInclusive() range bounds} are not taken into account. + * + * @param range must not be {@literal null}. Use {@link Range#unbounded()} to indicate no limits. + * @return new instance of {@link ArrayJsonSchemaObject}. + */ + public ArrayJsonSchemaObject range(Range range) { + + ArrayJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.range = range; + + return newInstance; + } + + /** + * Define the {@literal minItems}. + * + * @param count the allowed minimal number of array items. + * @return new instance of {@link ArrayJsonSchemaObject}. + */ + public ArrayJsonSchemaObject minItems(int count) { + + Bound upper = this.range != null ? this.range.getUpperBound() : Bound.unbounded(); + return range(Range.of(Bound.inclusive(count), upper)); + } + + /** + * Define the {@literal maxItems}. + * + * @param count the allowed maximal number of array items. + * @return new instance of {@link ArrayJsonSchemaObject}. + */ + public ArrayJsonSchemaObject maxItems(int count) { + + Bound lower = this.range != null ? this.range.getLowerBound() : Bound.unbounded(); + return range(Range.of(lower, Bound.inclusive(count))); + } + + /** + * Define the {@code items} allowed in the array. + * + * @param items the allowed items in the array. + * @return new instance of {@link ArrayJsonSchemaObject}. + */ + public ArrayJsonSchemaObject items(Collection items) { + + ArrayJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.items = new ArrayList<>(items); + + return newInstance; + } + + /** + * If set to {@literal false}, no additional items besides {@link #items(Collection)} are allowed. + * + * @param additionalItemsAllowed {@literal true} to allow additional items in the array, {@literal false} otherwise. + * @return new instance of {@link ArrayJsonSchemaObject}. + */ + public ArrayJsonSchemaObject additionalItems(boolean additionalItemsAllowed) { + + ArrayJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); + newInstance.additionalItems = additionalItemsAllowed; + + return newInstance; + } + + @Override + public ArrayJsonSchemaObject possibleValues(Collection possibleValues) { + return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public ArrayJsonSchemaObject allOf(Collection allOf) { + return newInstance(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public ArrayJsonSchemaObject anyOf(Collection anyOf) { + return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public ArrayJsonSchemaObject oneOf(Collection oneOf) { + return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public ArrayJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public ArrayJsonSchemaObject description(String description) { + return newInstance(description, generateDescription, restrictions); + } + + @Override + public ArrayJsonSchemaObject generatedDescription() { + return newInstance(description, true, restrictions); + } + + @Override + public Document toDocument() { + + Document doc = new Document(super.toDocument()); + + if (!CollectionUtils.isEmpty(items)) { + doc.append("items", items.size() == 1 ? 
items.iterator().next().toDocument() + : items.stream().map(JsonSchemaObject::toDocument).collect(Collectors.toList())); + } + + if (range != null) { + + range.getLowerBound().getValue().ifPresent(it -> doc.append("minItems", it)); + range.getUpperBound().getValue().ifPresent(it -> doc.append("maxItems", it)); + } + + if (ObjectUtils.nullSafeEquals(uniqueItems, Boolean.TRUE)) { + doc.append("uniqueItems", true); + } + + if (additionalItems != null) { + doc.append("additionalItems", additionalItems); + } + + return doc; + } + + private ArrayJsonSchemaObject newInstance(@Nullable String description, boolean generateDescription, + Restrictions restrictions) { + + ArrayJsonSchemaObject newInstance = new ArrayJsonSchemaObject(description, generateDescription, restrictions); + + newInstance.uniqueItems = this.uniqueItems; + newInstance.range = this.range; + newInstance.items = this.items; + newInstance.additionalItems = this.additionalItems; + + return newInstance; + } + + @Override + protected String generateDescription() { + + String description = "Must be an array"; + + if (ObjectUtils.nullSafeEquals(uniqueItems, Boolean.TRUE)) { + description += " of unique values"; + } + + if (ObjectUtils.nullSafeEquals(additionalItems, Boolean.TRUE)) { + description += " with additional items"; + } + + if (ObjectUtils.nullSafeEquals(additionalItems, Boolean.FALSE)) { + description += " with no additional items"; + } + + if (range != null) { + description += String.format(" having size %s", range); + } + + if (!ObjectUtils.isEmpty(items)) { + description += String.format(" with items %s", StringUtils.collectionToDelimitedString( + items.stream().map(JsonSchemaObject::toDocument).collect(Collectors.toList()), ", ")); + } + + return description + "."; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'boolean'} schema elements.
                    + * Provides programmatic access to schema specifics via a fluent API producing immutable {@link JsonSchemaObject + * schema objects}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ + public static class BooleanJsonSchemaObject extends TypedJsonSchemaObject { + + BooleanJsonSchemaObject() { + this(null, false, null); + } + + private BooleanJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Type.booleanType(), description, generateDescription, restrictions); + } + + @Override + public BooleanJsonSchemaObject possibleValues(Collection possibleValues) { + return new BooleanJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public BooleanJsonSchemaObject allOf(Collection allOf) { + return new BooleanJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public BooleanJsonSchemaObject anyOf(Collection anyOf) { + return new BooleanJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public BooleanJsonSchemaObject oneOf(Collection oneOf) { + return new BooleanJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public BooleanJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return new BooleanJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public BooleanJsonSchemaObject description(String description) { + return new BooleanJsonSchemaObject(description, generateDescription, restrictions); + } + + @Override + public BooleanJsonSchemaObject generatedDescription() { + return new BooleanJsonSchemaObject(description, true, restrictions); + } + + @Override + protected String generateDescription() { + return "Must be a boolean"; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'null'} schema elements.
                    + * Provides programmatic access to schema specifics via a fluent API producing immutable {@link JsonSchemaObject + * schema objects}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ + static class NullJsonSchemaObject extends TypedJsonSchemaObject { + + NullJsonSchemaObject() { + this(null, false, null); + } + + private NullJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Type.nullType(), description, generateDescription, restrictions); + } + + @Override + public NullJsonSchemaObject possibleValues(Collection possibleValues) { + return new NullJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public NullJsonSchemaObject allOf(Collection allOf) { + return new NullJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public NullJsonSchemaObject anyOf(Collection anyOf) { + return new NullJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public NullJsonSchemaObject oneOf(Collection oneOf) { + return new NullJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public NullJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return new NullJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public NullJsonSchemaObject description(String description) { + return new NullJsonSchemaObject(description, generateDescription, restrictions); + } + + @Override + public NullJsonSchemaObject generatedDescription() { + return new NullJsonSchemaObject(description, true, restrictions); + } + + @Override + protected String generateDescription() { + return "Must be null"; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'date'} schema elements.
                    + * Provides programmatic access to schema specifics via a fluent API producing immutable {@link JsonSchemaObject + * schema objects}. + * + * @author Christoph Strobl + * @since 2.1 + */ + static class DateJsonSchemaObject extends TypedJsonSchemaObject { + + DateJsonSchemaObject() { + this(null, false, null); + } + + private DateJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Type.dateType(), description, generateDescription, restrictions); + } + + @Override + public DateJsonSchemaObject possibleValues(Collection possibleValues) { + return new DateJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); + } + + @Override + public DateJsonSchemaObject allOf(Collection allOf) { + return new DateJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public DateJsonSchemaObject anyOf(Collection anyOf) { + return new DateJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public DateJsonSchemaObject oneOf(Collection oneOf) { + return new DateJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public DateJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return new DateJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public DateJsonSchemaObject description(String description) { + return new DateJsonSchemaObject(description, generateDescription, restrictions); + } + + @Override + public DateJsonSchemaObject generatedDescription() { + return new DateJsonSchemaObject(description, true, restrictions); + } + + @Override + protected String generateDescription() { + return "Must be a date"; + } + } + + /** + * {@link JsonSchemaObject} implementation of {@code type : 'timestamp'} schema elements.
                    + * Provides programmatic access to schema specifics via a fluent API producing immutable {@link JsonSchemaObject + * schema objects}. + * + * @author Mark Paluch + * @since 2.1 + */ + static class TimestampJsonSchemaObject extends TypedJsonSchemaObject { + + TimestampJsonSchemaObject() { + this(null, false, null); + } + + private TimestampJsonSchemaObject(@Nullable String description, boolean generateDescription, + @Nullable Restrictions restrictions) { + super(Type.timestampType(), description, generateDescription, restrictions); + } + + @Override + public TimestampJsonSchemaObject possibleValues(Collection possibleValues) { + return new TimestampJsonSchemaObject(description, generateDescription, + restrictions.possibleValues(possibleValues)); + } + + @Override + public TimestampJsonSchemaObject allOf(Collection allOf) { + return new TimestampJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); + } + + @Override + public TimestampJsonSchemaObject anyOf(Collection anyOf) { + return new TimestampJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); + } + + @Override + public TimestampJsonSchemaObject oneOf(Collection oneOf) { + return new TimestampJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); + } + + @Override + public TimestampJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return new TimestampJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); + } + + @Override + public TimestampJsonSchemaObject description(String description) { + return new TimestampJsonSchemaObject(description, generateDescription, restrictions); + } + + @Override + public TimestampJsonSchemaObject generatedDescription() { + return new TimestampJsonSchemaObject(description, true, restrictions); + } + + @Override + protected String generateDescription() { + return "Must be a timestamp"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java new file mode 100644 index 0000000000..54ca29e0e3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java @@ -0,0 +1,292 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * Common base for {@link JsonSchemaObject} with shared types and {@link JsonSchemaObject#toDocument()} implementation. 
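+ * A usage sketch (the rendered {@link Document} is approximate): <pre class="code">
+ * UntypedJsonSchemaObject status = UntypedJsonSchemaObject.newInstance().description("status field")
+ * 		.possibleValues(List.of("draft", "published"));
+ * status.toDocument(); // { 'description' : 'status field', 'enum' : ['draft', 'published'] }
+ * </pre>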
+ * Schema objects are immutable. Calling methods to configure properties creates a new object instance. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class UntypedJsonSchemaObject implements JsonSchemaObject { + + protected final Restrictions restrictions; + protected final @Nullable String description; + protected final boolean generateDescription; + + UntypedJsonSchemaObject(@Nullable Restrictions restrictions, @Nullable String description, + boolean generateDescription) { + + this.description = description; + this.restrictions = restrictions != null ? restrictions : Restrictions.empty(); + this.generateDescription = generateDescription; + } + + /** + * Create a new instance of {@link UntypedJsonSchemaObject}. + * + * @return the new {@link UntypedJsonSchemaObject}. + */ + public static UntypedJsonSchemaObject newInstance() { + return new UntypedJsonSchemaObject(null, null, false); + } + + @Override + public Set getTypes() { + return Collections.emptySet(); + } + + /** + * Set the {@literal description}. + * + * @param description must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject description(String description) { + return new UntypedJsonSchemaObject(restrictions, description, generateDescription); + } + + /** + * Auto generate the {@literal description} if not explicitly set. + * + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject generatedDescription() { + return new UntypedJsonSchemaObject(restrictions, description, true); + } + + /** + * {@literal enum}erates all possible values of the field. + * + * @param possibleValues must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject possibleValues(Collection possibleValues) { + return new UntypedJsonSchemaObject(restrictions.possibleValues(possibleValues), description, generateDescription); + } + + /** + * The field value must match all specified schemas. + * + * @param allOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject allOf(Collection allOf) { + return new UntypedJsonSchemaObject(restrictions.allOf(allOf), description, generateDescription); + } + + /** + * The field value must match at least one of the specified schemas. + * + * @param anyOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject anyOf(Collection anyOf) { + return new UntypedJsonSchemaObject(restrictions.anyOf(anyOf), description, generateDescription); + } + + /** + * The field value must match exactly one of the specified schemas. + * + * @param oneOf must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject oneOf(Collection oneOf) { + return new UntypedJsonSchemaObject(restrictions.oneOf(oneOf), description, generateDescription); + } + + /** + * The field value must not match the specified schemas. + * + * @param notMatch must not be {@literal null}. + * @return new instance of {@link UntypedJsonSchemaObject}. + */ + public UntypedJsonSchemaObject notMatch(JsonSchemaObject notMatch) { + return new UntypedJsonSchemaObject(restrictions.notMatch(notMatch), description, generateDescription); + } + + /** + * Create the JSON schema complying {@link Document} representation. 
This includes {@literal type}, + * {@literal description} and the fields of {@link Restrictions#toDocument()} if set. + */ + @Override + public Document toDocument() { + + Document document = new Document(); + + getOrCreateDescription().ifPresent(val -> document.append("description", val)); + + document.putAll(restrictions.toDocument()); + + return document; + } + + private Optional getOrCreateDescription() { + + if (description != null) { + return description.isEmpty() ? Optional.empty() : Optional.of(description); + } + + return generateDescription ? Optional.ofNullable(generateDescription()) : Optional.empty(); + } + + /** + * Customization hook for creating description out of defined values.
                    + * Called by {@link #toDocument()} when no explicit {@link #description} is set. + * + * @return can be {@literal null}. + */ + @Nullable + protected String generateDescription() { + return null; + } + + /** + * {@link Restrictions} encapsulates common JSON schema restrictions like {@literal enum}, {@literal allOf}, … that + * are not tied to a specific type. + * + * @author Christoph Strobl + * @since 2.1 + */ + static class Restrictions { + + private final Collection possibleValues; + private final Collection allOf; + private final Collection anyOf; + private final Collection oneOf; + private final @Nullable JsonSchemaObject notMatch; + + Restrictions(Collection possibleValues, Collection allOf, + Collection anyOf, Collection oneOf, JsonSchemaObject notMatch) { + + this.possibleValues = possibleValues; + this.allOf = allOf; + this.anyOf = anyOf; + this.oneOf = oneOf; + this.notMatch = notMatch; + } + + /** + * @return new empty {@link Restrictions}. + */ + static Restrictions empty() { + + return new Restrictions(Collections.emptySet(), Collections.emptySet(), Collections.emptySet(), + Collections.emptySet(), null); + } + + /** + * @param possibleValues must not be {@literal null}. + * @return + */ + Restrictions possibleValues(Collection possibleValues) { + + Assert.notNull(possibleValues, "PossibleValues must not be null"); + return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); + } + + /** + * @param allOf must not be {@literal null}. + * @return + */ + Restrictions allOf(Collection allOf) { + + Assert.notNull(allOf, "AllOf must not be null"); + return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); + } + + /** + * @param anyOf must not be {@literal null}. + * @return + */ + Restrictions anyOf(Collection anyOf) { + + Assert.notNull(anyOf, "AnyOf must not be null"); + return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); + } + + /** + * @param oneOf must not be {@literal null}. + * @return + */ + Restrictions oneOf(Collection oneOf) { + + Assert.notNull(oneOf, "OneOf must not be null"); + return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); + } + + /** + * @param notMatch must not be {@literal null}. + * @return + */ + Restrictions notMatch(JsonSchemaObject notMatch) { + + Assert.notNull(notMatch, "NotMatch must not be null"); + return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); + } + + /** + * Create the JSON schema complying {@link Document} representation. This includes {@literal enum}, + * {@literal allOf}, {@literal anyOf}, {@literal oneOf}, {@literal notMatch} if set. 
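+ * For example, restrictions carrying {@code possibleValues = ["A", "B"]} and a single {@code oneOf} schema would
+ * render {@code enum : ['A', 'B']} along with a one-element {@code oneOf} array (a sketch).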
+ * + * @return never {@literal null} + */ + Document toDocument() { + + Document document = new Document(); + + if (!CollectionUtils.isEmpty(possibleValues)) { + document.append("enum", possibleValues); + } + + if (!CollectionUtils.isEmpty(allOf)) { + document.append("allOf", render(allOf)); + } + + if (!CollectionUtils.isEmpty(anyOf)) { + document.append("anyOf", render(anyOf)); + } + + if (!CollectionUtils.isEmpty(oneOf)) { + document.append("oneOf", render(oneOf)); + } + + if (notMatch != null) { + document.append("not", notMatch.toDocument()); + } + + return document; + } + + private static List render(Collection objects) { + return objects.stream().map(JsonSchemaObject::toDocument).collect(Collectors.toList()); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/package-info.java new file mode 100644 index 0000000000..380d92af09 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/package-info.java @@ -0,0 +1,6 @@ +/** + * MongoDB-specific JSON schema implementation classes. + */ +@org.springframework.lang.NonNullApi +@org.springframework.lang.NonNullFields +package org.springframework.data.mongodb.core.schema; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java index 2f5d77bf84..d443d4bcea 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,29 +19,31 @@ /** * Value object for MongoDB JavaScript functions implementation that can be saved or directly executed. - * + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public class ExecutableMongoScript { private final String code; /** * Creates new {@link ExecutableMongoScript}. - * + * * @param code must not be {@literal null} or empty. */ public ExecutableMongoScript(String code) { - Assert.hasText(code, "Code must not be null or empty!"); + Assert.hasText(code, "Code must not be null or empty"); this.code = code; } /** * Returns the actual script code. - * + * * @return will never be {@literal null} or empty. 
*/ public String getCode() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java index 3165a8659e..c344a07cae 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,20 +20,22 @@ /** * An {@link ExecutableMongoScript} assigned to a name that allows calling the function by its {@literal name} once it - * has been saved to the {@link com.mongodb.DB} instance. - * + * has been saved to the {@link com.mongodb.client.MongoDatabase} instance. + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public class NamedMongoScript { private final @Id String name; private final ExecutableMongoScript script; /** - * Creates new {@link NamedMongoScript} that can be saved to the {@link com.mongodb.DB} instance. - * + * Creates new {@link NamedMongoScript} that can be saved to the {@link com.mongodb.client.MongoDatabase} instance. + * * @param name must not be {@literal null} or empty. * @param rawScript the {@link String} representation of the {@literal JavaScript} function. Must not be * {@literal null} or empty. @@ -44,14 +46,14 @@ public NamedMongoScript(String name, String rawScript) { /** * Creates new {@link NamedMongoScript}. - * + * * @param name must not be {@literal null} or empty. * @param script must not be {@literal null}. */ public NamedMongoScript(String name, ExecutableMongoScript script) { - Assert.hasText(name, "Name must not be null or empty!"); - Assert.notNull(script, "ExecutableMongoScript must not be null!"); + Assert.hasText(name, "Name must not be null or empty"); + Assert.notNull(script, "ExecutableMongoScript must not be null"); this.name = name; this.script = script; @@ -59,7 +61,7 @@ public NamedMongoScript(String name, ExecutableMongoScript script) { /** * Returns the actual script code. - * + * * @return will never be {@literal null}. */ public String getCode() { @@ -68,7 +70,7 @@ public String getCode() { /** * Returns the underlying {@link ExecutableMongoScript}. - * + * * @return will never be {@literal null}. */ public ExecutableMongoScript getScript() { @@ -77,7 +79,7 @@ public ExecutableMongoScript getScript() { /** * Returns the name of the script. - * + * * @return will never be {@literal null} or empty. 
*/ public String getName() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/package-info.java new file mode 100644 index 0000000000..34eb8ea890 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/package-info.java @@ -0,0 +1,8 @@ +/** + * Abstraction classes for JavaScript function execution within the MongoDB Server. + * + * @since 1.7 + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.script; + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java index b323b9cf3b..b4550ee8de 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,12 +23,16 @@ import org.springframework.expression.spel.ast.Literal; import org.springframework.expression.spel.ast.MethodReference; import org.springframework.expression.spel.ast.Operator; +import org.springframework.expression.spel.ast.OperatorNot; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** * A value object for nodes in an expression. Allows iterating over potentially available child {@link ExpressionNode}s. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ public class ExpressionNode implements Iterable { @@ -39,14 +43,14 @@ public class ExpressionNode implements Iterable { /** * Creates a new {@link ExpressionNode} from the given {@link SpelNode} and {@link ExpressionState}. - * + * * @param node must not be {@literal null}. * @param state must not be {@literal null}. */ protected ExpressionNode(SpelNode node, ExpressionState state) { - Assert.notNull(node, "SpelNode must not be null!"); - Assert.notNull(state, "ExpressionState must not be null!"); + Assert.notNull(node, "SpelNode must not be null"); + Assert.notNull(state, "ExpressionState must not be null"); this.node = node; this.state = state; @@ -55,7 +59,7 @@ protected ExpressionNode(SpelNode node, ExpressionState state) { /** * Factory method to create {@link ExpressionNode}s according to the given {@link SpelNode} and * {@link ExpressionState}. - * + * * @param node * @param state must not be {@literal null}. 
* @return an {@link ExpressionNode} for the given {@link SpelNode} or {@literal null} if {@literal null} was given @@ -63,20 +67,20 @@ protected ExpressionNode(SpelNode node, ExpressionState state) { */ public static ExpressionNode from(SpelNode node, ExpressionState state) { - if (node == null) { - return null; + if (node instanceof Operator operator) { + return new OperatorNode(operator, state); } - if (node instanceof Operator) { - return new OperatorNode((Operator) node, state); + if (node instanceof MethodReference methodReference) { + return new MethodReferenceNode(methodReference, state); } - if (node instanceof MethodReference) { - return new MethodReferenceNode((MethodReference) node, state); + if (node instanceof Literal literal) { + return new LiteralNode(literal, state); } - if (node instanceof Literal) { - return new LiteralNode((Literal) node, state); + if (node instanceof OperatorNot operatorNot) { + return new NotOperatorNode(operatorNot, state); } return new ExpressionNode(node, state); @@ -84,7 +88,7 @@ public static ExpressionNode from(SpelNode node, ExpressionState state) { /** * Returns the name of the {@link ExpressionNode}. - * + * * @return */ public String getName() { @@ -93,38 +97,48 @@ public String getName() { /** * Returns whether the current {@link ExpressionNode} is backed by the given type. - * + * * @param type must not be {@literal null}. * @return */ public boolean isOfType(Class type) { - Assert.notNull(type, "Type must not be empty!"); + Assert.notNull(type, "Type must not be empty"); return type.isAssignableFrom(node.getClass()); } /** * Returns whether the given {@link ExpressionNode} is representing the same backing node type as the current one. - * + * * @param node * @return */ - boolean isOfSameTypeAs(ExpressionNode node) { - return node == null ? false : this.node.getClass().equals(node.node.getClass()); + boolean isOfSameTypeAs(@Nullable ExpressionNode node) { + return node != null && this.node.getClass().equals(node.node.getClass()); } /** * Returns whether the {@link ExpressionNode} is a mathematical operation. - * + * * @return */ public boolean isMathematicalOperation() { return false; } + /** + * Returns whether the {@link ExpressionNode} is a logical conjunction operation like {@code &&, ||}. + * + * @return + * @since 1.10 + */ + public boolean isLogicalOperator() { + return false; + } + /** * Returns whether the {@link ExpressionNode} is a literal. - * + * * @return */ public boolean isLiteral() { @@ -133,16 +147,17 @@ public boolean isLiteral() { /** * Returns the value of the current node. - * + * * @return */ + @Nullable public Object getValue() { return node.getValue(state); } /** * Returns whether the current node has child nodes. - * + * * @return */ public boolean hasChildren() { @@ -151,31 +166,31 @@ public boolean hasChildren() { /** * Returns the child {@link ExpressionNode} with the given index. - * + * * @param index must not be negative. * @return */ public ExpressionNode getChild(int index) { - Assert.isTrue(index >= 0); + Assert.isTrue(index >= 0, "Index must be greater or equal to zero"); return from(node.getChild(index), state); } /** * Returns whether the {@link ExpressionNode} has a first child node that is not of the given type. - * + * * @param type must not be {@literal null}. 
* @return */ public boolean hasfirstChildNotOfType(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); return hasChildren() && !node.getChild(0).getClass().equals(type); } /** * Creates a new {@link ExpressionNode} from the given {@link SpelNode}. - * + * * @param node * @return */ @@ -183,10 +198,6 @@ protected ExpressionNode from(SpelNode node) { return from(node, state); } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ @Override public Iterator iterator() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java index 50a8a2dd97..8869f51e09 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,38 @@ */ package org.springframework.data.mongodb.core.spel; -import org.springframework.util.Assert; +import java.util.List; -import com.mongodb.BasicDBList; -import com.mongodb.DBObject; +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** * The context for an {@link ExpressionNode} transformation. - * + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ public class ExpressionTransformationContextSupport { private final T currentNode; - private final ExpressionNode parentNode; - private final DBObject previousOperationObject; + private final @Nullable ExpressionNode parentNode; + private final @Nullable Document previousOperationObject; /** * Creates a new {@link ExpressionTransformationContextSupport} for the given {@link ExpressionNode}s and an optional * previous operation. - * + * * @param currentNode must not be {@literal null}. - * @param parentNode - * @param previousOperationObject + * @param parentNode may be {@literal null}. + * @param previousOperationObject may be {@literal null}. */ - public ExpressionTransformationContextSupport(T currentNode, ExpressionNode parentNode, - DBObject previousOperationObject) { + public ExpressionTransformationContextSupport(T currentNode, @Nullable ExpressionNode parentNode, + @Nullable Document previousOperationObject) { - Assert.notNull(currentNode, "currentNode must not be null!"); + Assert.notNull(currentNode, "currentNode must not be null"); this.currentNode = currentNode; this.parentNode = parentNode; @@ -52,7 +55,7 @@ public ExpressionTransformationContextSupport(T currentNode, ExpressionNode pare /** * Returns the current {@link ExpressionNode}. 
- * + * @return */ public T getCurrentNode() { @@ -61,29 +64,31 @@ public T getCurrentNode() { /** * Returns the parent {@link ExpressionNode} or {@literal null} if none available. - * + * * @return */ + @Nullable public ExpressionNode getParentNode() { return parentNode; } /** - * Returns the previously accumulated operaton object or {@literal null} if none available. Rather than manually + * Returns the previously accumulated operation object or {@literal null} if none available. Rather than manually * adding stuff to the object prefer using {@link #addToPreviousOrReturn(Object)} to transparently do so if one is * present. - * + * * @see #hasPreviousOperation() * @see #addToPreviousOrReturn(Object) * @return */ - public DBObject getPreviousOperationObject() { + @Nullable + public Document getPreviousOperationObject() { return previousOperationObject; } /** * Returns whether a previous operation is present. - * + * * @return */ public boolean hasPreviousOperation() { @@ -92,27 +97,30 @@ /** * Returns whether the parent node is of the same operation as the current node. - * + * * @return */ public boolean parentIsSameOperation() { - return parentNode == null ? false : currentNode.isOfSameTypeAs(parentNode); + return parentNode != null && currentNode.isOfSameTypeAs(parentNode); } /** * Adds the given value to the previous operation and returns it. - * + * * @param value * @return */ - public DBObject addToPreviousOperation(Object value) { + public Document addToPreviousOperation(Object value) { + + Assert.state(previousOperationObject != null, "No previous operation available"); + + extractArgumentListFrom(previousOperationObject).add(value); return previousOperationObject; } /** * Adds the given value to the previous operation if one is present or returns the value to add as is. - * + * * @param value * @return */ @@ -120,7 +128,7 @@ public Object addToPreviousOrReturn(Object value) { return hasPreviousOperation() ? addToPreviousOperation(value) : value; } - private BasicDBList extractArgumentListFrom(DBObject context) { - return (BasicDBList) context.get(context.keySet().iterator().next()); + private List extractArgumentListFrom(Document context) { + return (List) context.get(context.keySet().iterator().next()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java index a19239095e..512f753042 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,16 +16,16 @@ package org.springframework.data.mongodb.core.spel; /** - * SPI interface to implement components that can transfrom an {@link ExpressionTransformationContextSupport} into an + * SPI interface to implement components that can transform an {@link ExpressionTransformationContextSupport} into an * object. - * + * * @author Oliver Gierke */ public interface ExpressionTransformer> { /** * Transforms the given {@link ExpressionTransformationContextSupport} into an Object. - * + * * @param context will never be {@literal null}. * @return */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java index 68c53860f3..030ef0d055 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,10 @@ */ package org.springframework.data.mongodb.core.spel; +import java.util.Set; + import org.springframework.expression.spel.ExpressionState; +import org.springframework.expression.spel.ast.BooleanLiteral; import org.springframework.expression.spel.ast.FloatLiteral; import org.springframework.expression.spel.ast.IntLiteral; import org.springframework.expression.spel.ast.Literal; @@ -23,19 +26,24 @@ import org.springframework.expression.spel.ast.NullLiteral; import org.springframework.expression.spel.ast.RealLiteral; import org.springframework.expression.spel.ast.StringLiteral; +import org.springframework.lang.Nullable; /** * A node representing a literal in an expression. - * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ public class LiteralNode extends ExpressionNode { + private static final Set> SUPPORTED_LITERAL_TYPES = Set.of(BooleanLiteral.class, FloatLiteral.class, + IntLiteral.class, LongLiteral.class, NullLiteral.class, RealLiteral.class, StringLiteral.class); private final Literal literal; /** * Creates a new {@link LiteralNode} from the given {@link Literal} and {@link ExpressionState}. - * + * * @param node must not be {@literal null}. * @param state must not be {@literal null}. */ @@ -46,27 +54,21 @@ public class LiteralNode extends ExpressionNode { /** * Returns whether the given {@link ExpressionNode} is a unary minus. 
- * + * * @param parent * @return */ - public boolean isUnaryMinus(ExpressionNode parent) { + public boolean isUnaryMinus(@Nullable ExpressionNode parent) { - if (!(parent instanceof OperatorNode)) { + if (!(parent instanceof OperatorNode operatorNode)) { return false; } - OperatorNode operator = (OperatorNode) parent; - return operator.isUnaryMinus() && operator.getRight() == null; + return operatorNode.isUnaryMinus(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionNode#isLiteral() - */ @Override public boolean isLiteral() { - return literal instanceof FloatLiteral || literal instanceof RealLiteral || literal instanceof IntLiteral - || literal instanceof LongLiteral || literal instanceof StringLiteral || literal instanceof NullLiteral; + return SUPPORTED_LITERAL_TYPES.contains(literal.getClass()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a32c49c3c1..5f1b0c4309 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,44 +15,249 @@ */ package org.springframework.data.mongodb.core.spel; +import static org.springframework.data.mongodb.core.spel.MethodReferenceNode.AggregationMethodReference.*; + import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.springframework.expression.spel.ExpressionState; import org.springframework.expression.spel.ast.MethodReference; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * An {@link ExpressionNode} representing a method reference. - * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Sebastien Gerard + * @author Christoph Strobl + * @author Mark Paluch + * @author Julia Lee */ public class MethodReferenceNode extends ExpressionNode { - private static final Map FUNCTIONS; + private static final Map FUNCTIONS; static { - Map map = new HashMap(); - - map.put("concat", "$concat"); // Concatenates two strings. - map.put("strcasecmp", "$strcasecmp"); // Compares two strings and returns an integer that reflects the comparison. - map.put("substr", "$substr"); // Takes a string and returns portion of that string. - map.put("toLower", "$toLower"); // Converts a string to lowercase. - map.put("toUpper", "$toUpper"); // Converts a string to uppercase. - - map.put("dayOfYear", "$dayOfYear"); // Converts a date to a number between 1 and 366. - map.put("dayOfMonth", "$dayOfMonth"); // Converts a date to a number between 1 and 31. - map.put("dayOfWeek", "$dayOfWeek"); // Converts a date to a number between 1 and 7. - map.put("year", "$year"); // Converts a date to the full year. 
- map.put("month", "$month"); // Converts a date into a number between 1 and 12. - map.put("week", "$week"); // Converts a date into a number between 0 and 53 - map.put("hour", "$hour"); // Converts a date into a number between 0 and 23. - map.put("minute", "$minute"); // Converts a date into a number between 0 and 59. - map.put("second", "$second"); // Converts a date into a number between 0 and 59. May be 60 to account for leap - // seconds. - map.put("millisecond", "$millisecond"); // Returns the millisecond portion of a date as an integer between 0 and + Map map = new HashMap(); + + // BOOLEAN OPERATORS + map.put("and", arrayArgRef().forOperator("$and")); + map.put("or", arrayArgRef().forOperator("$or")); + map.put("not", arrayArgRef().forOperator("$not")); + + // SET OPERATORS + map.put("setEquals", arrayArgRef().forOperator("$setEquals")); + map.put("setIntersection", arrayArgRef().forOperator("$setIntersection")); + map.put("setUnion", arrayArgRef().forOperator("$setUnion")); + map.put("setDifference", arrayArgRef().forOperator("$setDifference")); + // 2nd. + map.put("setIsSubset", arrayArgRef().forOperator("$setIsSubset")); + map.put("anyElementTrue", arrayArgRef().forOperator("$anyElementTrue")); + map.put("allElementsTrue", arrayArgRef().forOperator("$allElementsTrue")); + + // COMPARISON OPERATORS + map.put("cmp", arrayArgRef().forOperator("$cmp")); + map.put("eq", arrayArgRef().forOperator("$eq")); + map.put("gt", arrayArgRef().forOperator("$gt")); + map.put("gte", arrayArgRef().forOperator("$gte")); + map.put("lt", arrayArgRef().forOperator("$lt")); + map.put("lte", arrayArgRef().forOperator("$lte")); + map.put("ne", arrayArgRef().forOperator("$ne")); + + // DOCUMENT OPERATORS + map.put("rank", emptyRef().forOperator("$rank")); + map.put("denseRank", emptyRef().forOperator("$denseRank")); + map.put("documentNumber", emptyRef().forOperator("$documentNumber")); + map.put("shift", mapArgRef().forOperator("$shift").mappingParametersTo("output", "by", "default")); + + // ARITHMETIC OPERATORS + map.put("abs", singleArgRef().forOperator("$abs")); + map.put("add", arrayArgRef().forOperator("$add")); + map.put("ceil", singleArgRef().forOperator("$ceil")); + map.put("divide", arrayArgRef().forOperator("$divide")); + map.put("exp", singleArgRef().forOperator("$exp")); + map.put("floor", singleArgRef().forOperator("$floor")); + map.put("ln", singleArgRef().forOperator("$ln")); + map.put("log", arrayArgRef().forOperator("$log")); + map.put("log10", singleArgRef().forOperator("$log10")); + map.put("mod", arrayArgRef().forOperator("$mod")); + map.put("multiply", arrayArgRef().forOperator("$multiply")); + map.put("pow", arrayArgRef().forOperator("$pow")); + map.put("sqrt", singleArgRef().forOperator("$sqrt")); + map.put("subtract", arrayArgRef().forOperator("$subtract")); + map.put("trunc", singleArgRef().forOperator("$trunc")); + map.put("round", arrayArgRef().forOperator("$round")); + map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); + map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); + map.put("sin", singleArgRef().forOperator("$sin")); + map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("asin", singleArgRef().forOperator("$asin")); + map.put("asinh", singleArgRef().forOperator("$asinh")); + map.put("cos", singleArgRef().forOperator("$cos")); + map.put("cosh", singleArgRef().forOperator("$cosh")); + map.put("acos", singleArgRef().forOperator("$acos")); + map.put("acosh", 
singleArgRef().forOperator("$acosh")); + map.put("tan", singleArgRef().forOperator("$tan")); + map.put("tanh", singleArgRef().forOperator("$tanh")); + map.put("rand", emptyRef().forOperator("$rand")); + map.put("atan", singleArgRef().forOperator("$atan")); + map.put("atan2", arrayArgRef().forOperator("$atan2")); + map.put("atanh", singleArgRef().forOperator("$atanh")); + + // STRING OPERATORS + map.put("concat", arrayArgRef().forOperator("$concat")); + map.put("strcasecmp", arrayArgRef().forOperator("$strcasecmp")); + map.put("substr", arrayArgRef().forOperator("$substr")); + map.put("toLower", singleArgRef().forOperator("$toLower")); + map.put("toUpper", singleArgRef().forOperator("$toUpper")); + map.put("indexOfBytes", arrayArgRef().forOperator("$indexOfBytes")); + map.put("indexOfCP", arrayArgRef().forOperator("$indexOfCP")); + map.put("split", arrayArgRef().forOperator("$split")); + map.put("strLenBytes", singleArgRef().forOperator("$strLenBytes")); + map.put("strLenCP", singleArgRef().forOperator("$strLenCP")); + map.put("substrCP", arrayArgRef().forOperator("$substrCP")); + map.put("trim", mapArgRef().forOperator("$trim").mappingParametersTo("input", "chars")); + map.put("ltrim", mapArgRef().forOperator("$ltrim").mappingParametersTo("input", "chars")); + map.put("rtrim", mapArgRef().forOperator("$rtrim").mappingParametersTo("input", "chars")); + map.put("regexFind", mapArgRef().forOperator("$regexFind").mappingParametersTo("input", "regex", "options")); + map.put("regexFindAll", mapArgRef().forOperator("$regexFindAll").mappingParametersTo("input", "regex", "options")); + map.put("regexMatch", mapArgRef().forOperator("$regexMatch").mappingParametersTo("input", "regex", "options")); + map.put("replaceOne", mapArgRef().forOperator("$replaceOne").mappingParametersTo("input", "find", "replacement")); + map.put("replaceAll", mapArgRef().forOperator("$replaceAll").mappingParametersTo("input", "find", "replacement")); + + // TEXT SEARCH OPERATORS + map.put("meta", singleArgRef().forOperator("$meta")); + + // ARRAY OPERATORS + map.put("arrayElemAt", arrayArgRef().forOperator("$arrayElemAt")); + map.put("concatArrays", arrayArgRef().forOperator("$concatArrays")); + map.put("filter", mapArgRef().forOperator("$filter") // + .mappingParametersTo("input", "as", "cond")); + map.put("first", singleArgRef().forOperator("$first")); + map.put("isArray", singleArgRef().forOperator("$isArray")); + map.put("last", singleArgRef().forOperator("$last")); + map.put("size", singleArgRef().forOperator("$size")); + map.put("slice", arrayArgRef().forOperator("$slice")); + map.put("sortArray", mapArgRef().forOperator("$sortArray").mappingParametersTo("input", "sortBy")); + map.put("reverseArray", singleArgRef().forOperator("$reverseArray")); + map.put("reduce", mapArgRef().forOperator("$reduce").mappingParametersTo("input", "initialValue", "in")); + map.put("zip", mapArgRef().forOperator("$zip").mappingParametersTo("inputs", "useLongestLength", "defaults")); + map.put("in", arrayArgRef().forOperator("$in")); + map.put("arrayToObject", singleArgRef().forOperator("$arrayToObject")); + map.put("indexOfArray", arrayArgRef().forOperator("$indexOfArray")); + map.put("range", arrayArgRef().forOperator("$range")); + + // VARIABLE OPERATORS + map.put("map", mapArgRef().forOperator("$map") // + .mappingParametersTo("input", "as", "in")); + map.put("let", mapArgRef().forOperator("$let").mappingParametersTo("vars", "in")); + + // LITERAL OPERATORS + map.put("literal", singleArgRef().forOperator("$literal")); + + // DATE 
OPERATORS + map.put("dateAdd", + mapArgRef().forOperator("$dateAdd").mappingParametersTo("startDate", "unit", "amount", "timezone")); + map.put("dateSubtract", + mapArgRef().forOperator("$dateSubtract").mappingParametersTo("startDate", "unit", "amount", "timezone")); + map.put("dateDiff", mapArgRef().forOperator("$dateDiff").mappingParametersTo("startDate", "endDate", "unit", + "timezone", "startOfWeek")); + map.put("dateTrunc", mapArgRef().forOperator("$dateTrunc").mappingParametersTo("date", "unit", "binSize", + "startOfWeek", "timezone")); + map.put("dayOfYear", singleArgRef().forOperator("$dayOfYear")); + map.put("dayOfMonth", singleArgRef().forOperator("$dayOfMonth")); + map.put("dayOfWeek", singleArgRef().forOperator("$dayOfWeek")); + map.put("year", singleArgRef().forOperator("$year")); + map.put("month", singleArgRef().forOperator("$month")); + map.put("week", singleArgRef().forOperator("$week")); + map.put("hour", singleArgRef().forOperator("$hour")); + map.put("minute", singleArgRef().forOperator("$minute")); + map.put("second", singleArgRef().forOperator("$second")); + map.put("millisecond", singleArgRef().forOperator("$millisecond")); + map.put("dateToString", mapArgRef().forOperator("$dateToString") // + .mappingParametersTo("format", "date")); + map.put("dateFromString", mapArgRef().forOperator("$dateFromString") // + .mappingParametersTo("dateString", "format", "timezone", "onError", "onNull")); + map.put("dateFromParts", mapArgRef().forOperator("$dateFromParts").mappingParametersTo("year", "month", "day", + "hour", "minute", "second", "millisecond", "timezone")); + map.put("isoDateFromParts", mapArgRef().forOperator("$dateFromParts").mappingParametersTo("isoWeekYear", "isoWeek", + "isoDayOfWeek", "hour", "minute", "second", "millisecond", "timezone")); + map.put("dateToParts", mapArgRef().forOperator("$dateToParts") // + .mappingParametersTo("date", "timezone", "iso8601")); + map.put("isoDayOfWeek", singleArgRef().forOperator("$isoDayOfWeek")); + map.put("isoWeek", singleArgRef().forOperator("$isoWeek")); + map.put("isoWeekYear", singleArgRef().forOperator("$isoWeekYear")); + map.put("tsIncrement", singleArgRef().forOperator("$tsIncrement")); + map.put("tsSecond", singleArgRef().forOperator("$tsSecond")); + + // CONDITIONAL OPERATORS + map.put("cond", mapArgRef().forOperator("$cond") // + .mappingParametersTo("if", "then", "else")); + map.put("ifNull", arrayArgRef().forOperator("$ifNull")); + + // GROUP OPERATORS + map.put("sum", arrayArgRef().forOperator("$sum")); + map.put("avg", arrayArgRef().forOperator("$avg")); + map.put("first", singleArgRef().forOperator("$first")); + map.put("last", singleArgRef().forOperator("$last")); + map.put("max", arrayArgRef().forOperator("$max")); + map.put("min", arrayArgRef().forOperator("$min")); + map.put("push", singleArgRef().forOperator("$push")); + map.put("addToSet", singleArgRef().forOperator("$addToSet")); + map.put("stdDevPop", arrayArgRef().forOperator("$stdDevPop")); + map.put("stdDevSamp", arrayArgRef().forOperator("$stdDevSamp")); + map.put("covariancePop", arrayArgRef().forOperator("$covariancePop")); + map.put("covarianceSamp", arrayArgRef().forOperator("$covarianceSamp")); + map.put("bottom", mapArgRef().forOperator("$bottom") // + .mappingParametersTo("output", "sortBy")); + map.put("bottomN", mapArgRef().forOperator("$bottomN") // + .mappingParametersTo("n", "output", "sortBy")); + map.put("firstN", mapArgRef().forOperator("$firstN") // + .mappingParametersTo("n", "input")); + map.put("lastN", 
mapArgRef().forOperator("$lastN") // + .mappingParametersTo("n", "input")); + map.put("top", mapArgRef().forOperator("$top") // + .mappingParametersTo("output", "sortBy")); + map.put("topN", mapArgRef().forOperator("$topN") // + .mappingParametersTo("n", "output", "sortBy")); + map.put("maxN", mapArgRef().forOperator("$maxN") // + .mappingParametersTo("n", "input")); + map.put("minN", mapArgRef().forOperator("$minN") // + .mappingParametersTo("n", "input")); + map.put("percentile", mapArgRef().forOperator("$percentile") // + .mappingParametersTo("input", "p", "method")); + map.put("median", mapArgRef().forOperator("$median") // + .mappingParametersTo("input", "method")); + + // TYPE OPERATORS + map.put("type", singleArgRef().forOperator("$type")); + + // OBJECT OPERATORS + map.put("objectToArray", singleArgRef().forOperator("$objectToArray")); + map.put("mergeObjects", arrayArgRef().forOperator("$mergeObjects")); + map.put("getField", mapArgRef().forOperator("$getField").mappingParametersTo("field", "input")); + map.put("setField", mapArgRef().forOperator("$setField").mappingParametersTo("field", "value", "input")); + + // CONVERT OPERATORS + map.put("convert", mapArgRef().forOperator("$convert") // + .mappingParametersTo("input", "to", "onError", "onNull")); + map.put("toBool", singleArgRef().forOperator("$toBool")); + map.put("toDate", singleArgRef().forOperator("$toDate")); + map.put("toDecimal", singleArgRef().forOperator("$toDecimal")); + map.put("toDouble", singleArgRef().forOperator("$toDouble")); + map.put("toInt", singleArgRef().forOperator("$toInt")); + map.put("toLong", singleArgRef().forOperator("$toLong")); + map.put("toObjectId", singleArgRef().forOperator("$toObjectId")); + map.put("toString", singleArgRef().forOperator("$toString")); + map.put("degreesToRadians", singleArgRef().forOperator("$degreesToRadians")); + + // expression operators + map.put("locf", singleArgRef().forOperator("$locf")); FUNCTIONS = Collections.unmodifiableMap(map); } @@ -62,14 +267,147 @@ public class MethodReferenceNode extends ExpressionNode { } /** - * Returns the name of the method. - * - * @return + * Return the {@link AggregationMethodReference}. + * + * @return can be {@literal null}. + * @since 1.10 */ - public String getMethodName() { + @Nullable + public AggregationMethodReference getMethodReference() { String name = getName(); String methodName = name.substring(0, name.indexOf('(')); return FUNCTIONS.get(methodName); } + + /** + * @author Christoph Strobl + * @since 1.10 + */ + public static final class AggregationMethodReference { + + private final @Nullable String mongoOperator; + private final @Nullable ArgumentType argumentType; + private final @Nullable String[] argumentMap; + + /** + * Creates new {@link AggregationMethodReference}. + * + * @param mongoOperator can be {@literal null}. + * @param argumentType can be {@literal null}. + * @param argumentMap can be {@literal null}. + */ + private AggregationMethodReference(@Nullable String mongoOperator, @Nullable ArgumentType argumentType, + @Nullable String[] argumentMap) { + + this.mongoOperator = mongoOperator; + this.argumentType = argumentType; + this.argumentMap = argumentMap; + } + + /** + * Get the MongoDB specific operator. + * + * @return can be {@literal null}. + */ + @Nullable + public String getMongoOperator() { + return this.mongoOperator; + } + + /** + * Get the {@link ArgumentType} used by the MongoDB. + * + * @return never {@literal null}. 
+ */ + @Nullable + public ArgumentType getArgumentType() { + return this.argumentType; + } + + /** + * Get the property names in order of appearance in the resulting operation. + * + * @return never {@literal null}. + */ + public String[] getArgumentMap() { + return argumentMap != null ? argumentMap : new String[] {}; + } + + /** + * Create a new {@link AggregationMethodReference} for a {@link ArgumentType#SINGLE} argument. + * + * @return never {@literal null}. + */ + static AggregationMethodReference singleArgRef() { + return new AggregationMethodReference(null, ArgumentType.SINGLE, null); + } + + /** + * Create a new {@link AggregationMethodReference} for an {@link ArgumentType#ARRAY} argument. + * + * @return never {@literal null}. + */ + static AggregationMethodReference arrayArgRef() { + return new AggregationMethodReference(null, ArgumentType.ARRAY, null); + } + + /** + * Create a new {@link AggregationMethodReference} for a {@link ArgumentType#MAP} argument. + * + * @return never {@literal null}. + */ + static AggregationMethodReference mapArgRef() { + return new AggregationMethodReference(null, ArgumentType.MAP, null); + } + + /** + * Create a new {@link AggregationMethodReference} for a {@link ArgumentType#EMPTY_DOCUMENT} argument. + * + * @return never {@literal null}. + * @since 3.3 + */ + static AggregationMethodReference emptyRef() { + return new AggregationMethodReference(null, ArgumentType.EMPTY_DOCUMENT, null); + } + + /** + * Create a new {@link AggregationMethodReference} for a given {@literal aggregationExpressionOperator} reusing + * previously set arguments. + * + * @param aggregationExpressionOperator should not be {@literal null}. + * @return never {@literal null}. + */ + AggregationMethodReference forOperator(String aggregationExpressionOperator) { + return new AggregationMethodReference(aggregationExpressionOperator, argumentType, argumentMap); + } + + /** + * Create a new {@link AggregationMethodReference} for mapping actual parameters within the AST to the given + * {@literal aggregationExpressionProperties} reusing previously set arguments.
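To make the argument-type distinction concrete: a map-argument reference renders positional SpEL arguments as named operator fields, while an array-argument reference renders them positionally. A sketch of the intended rendering (the translation itself is performed by the aggregation framework's SpEL transformer; field names are illustrative):

    // mapArgRef().forOperator("$trim").mappingParametersTo("input", "chars")
    // SpEL:    trim(name, '-')
    // MongoDB: { "$trim" : { "input" : "$name", "chars" : "-" } }

    // arrayArgRef().forOperator("$concat")
    // SpEL:    concat(firstname, lastname)
    // MongoDB: { "$concat" : [ "$firstname", "$lastname" ] }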
                    + * NOTE: Can only be applied to {@link AggregationMethodReference} of type + * {@link ArgumentType#MAP}. + * + * @param aggregationExpressionProperties should not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException + */ + AggregationMethodReference mappingParametersTo(String... aggregationExpressionProperties) { + + Assert.isTrue(ObjectUtils.nullSafeEquals(argumentType, ArgumentType.MAP), + "Parameter mapping can only be applied to AggregationMethodReference with MAPPED ArgumentType"); + return new AggregationMethodReference(mongoOperator, argumentType, aggregationExpressionProperties); + } + + /** + * The actual argument type to use when mapping parameters to MongoDB specific format. + * + * @author Christoph Strobl + * @since 1.10 + */ + public enum ArgumentType { + SINGLE, ARRAY, MAP, EMPTY_DOCUMENT + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java new file mode 100644 index 0000000000..ea0608225f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java @@ -0,0 +1,44 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.spel; + +import org.springframework.expression.spel.ExpressionState; +import org.springframework.expression.spel.ast.OperatorNot; + +/** + * @author Christoph Strobl + * @since 1.10 + */ +public class NotOperatorNode extends ExpressionNode { + + private final OperatorNot operatorNode; + + /** + * Creates a new {@link ExpressionNode} from the given {@link OperatorNot} and {@link ExpressionState}. + * + * @param node must not be {@literal null}. + * @param state must not be {@literal null}. + */ + protected NotOperatorNode(OperatorNot node, ExpressionState state) { + + super(node, state); + this.operatorNode = node; + } + + public String getMongoOperator() { + return "$not"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java index 55a11bd7ec..7d242e777e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,37 @@ */ package org.springframework.data.mongodb.core.spel; -import java.util.Collections; -import java.util.HashMap; import java.util.Map; +import java.util.Set; import org.springframework.expression.spel.ExpressionState; -import org.springframework.expression.spel.ast.OpDivide; -import org.springframework.expression.spel.ast.OpMinus; -import org.springframework.expression.spel.ast.OpModulus; -import org.springframework.expression.spel.ast.OpMultiply; -import org.springframework.expression.spel.ast.OpPlus; -import org.springframework.expression.spel.ast.Operator; +import org.springframework.expression.spel.ast.*; /** * An {@link ExpressionNode} representing an operator. - * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch */ public class OperatorNode extends ExpressionNode { - private static final Map<String, String> OPERATORS; - - static { - - Map<String, String> map = new HashMap<String, String>(6); + private static final Map<String, String> OPERATORS = Map.ofEntries(Map.entry("+", "$add"), + Map.entry("-", "$subtract"), Map.entry("*", "$multiply"), Map.entry("/", "$divide"), Map.entry("%", "$mod"), + Map.entry("^", "$pow"), Map.entry("==", "$eq"), Map.entry("!=", "$ne"), Map.entry(">", "$gt"), + Map.entry(">=", "$gte"), Map.entry("<", "$lt"), Map.entry("<=", "$lte"), Map.entry("and", "$and"), + Map.entry("or", "$or")); - map.put("+", "$add"); - map.put("-", "$subtract"); - map.put("*", "$multiply"); - map.put("/", "$divide"); - map.put("%", "$mod"); - - OPERATORS = Collections.unmodifiableMap(map); - } + private static final Set<Class<?>> SUPPORTED_MATH_OPERATORS = Set.of(OpMinus.class, OpPlus.class, OpMultiply.class, + OpDivide.class, OpModulus.class, OperatorPower.class, OpNE.class, OpEQ.class, OpGT.class, OpGE.class, OpLT.class, + OpLE.class); private final Operator operator; /** * Creates a new {@link OperatorNode} from the given {@link Operator} and {@link ExpressionState}. - * + * * @param node must not be {@literal null}. * @param state must not be {@literal null}. */ @@ -63,37 +54,44 @@ public class OperatorNode extends ExpressionNode { this.operator = node; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionNode#isMathematicalOperation() - */ @Override public boolean isMathematicalOperation() { - return operator instanceof OpMinus || operator instanceof OpPlus || operator instanceof OpMultiply - || operator instanceof OpDivide || operator instanceof OpModulus; + return SUPPORTED_MATH_OPERATORS.contains(operator.getClass()); + } + + @Override + public boolean isLogicalOperator() { + return operator instanceof OpOr || operator instanceof OpAnd; } /** * Returns whether the operator is unary. - * + * * @return */ public boolean isUnaryOperator() { - return operator.getRightOperand() == null; + return operator.getChildCount() == 1; } /** * Returns the Mongo expression of the operator.
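With the extended table the node can translate arithmetic, comparison, and logical operators alike, one operator per tree node. For example, as rendered through the aggregation framework's SpEL support:

    // SpEL:    (netPrice + surcharge) * taxrate
    // MongoDB: { "$multiply" : [ { "$add" : [ "$netPrice", "$surcharge" ] }, "$taxrate" ] }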
- * + * * @return */ public String getMongoOperator() { + + if (!OPERATORS.containsKey(operator.getOperatorName())) { + throw new IllegalArgumentException(String.format( + "Unknown operator name; Cannot translate %s into its MongoDB aggregation function representation", + operator.getOperatorName())); + } + return OPERATORS.get(operator.getOperatorName()); } /** * Returns whether the operator is a unary minus, e.g. -1. - * + * * @return */ public boolean isUnaryMinus() { @@ -102,7 +100,7 @@ public boolean isUnaryMinus() { /** * Returns the left operand as {@link ExpressionNode}. - * + * * @return */ public ExpressionNode getLeft() { @@ -111,7 +109,7 @@ public ExpressionNode getLeft() { /** * Returns the right operand as {@link ExpressionNode}. - * + * * @return */ public ExpressionNode getRight() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/package-info.java index f703ec9e48..fbfa2ae78b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/package-info.java @@ -1,5 +1,8 @@ /** * Support classes to transform SpEL expressions into MongoDB expressions. + * * @since 1.4 */ -package org.springframework.data.mongodb.core.spel; \ No newline at end of file +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.spel; + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java new file mode 100644 index 0000000000..c923cbb884 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java @@ -0,0 +1,45 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * {@link GranularityDefinition Granularities} available for Time Series data. + * + * @author Christoph Strobl + * @since 3.3 + */ +public enum Granularity implements GranularityDefinition { + + /** + * Server default value to indicate no explicit value should be sent. + */ + DEFAULT, + + /** + * High frequency ingestion. + */ + SECONDS, + + /** + * Medium frequency ingestion. + */ + MINUTES, + + /** + * Low frequency ingestion. 
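A granularity is typically declared where the time-series collection is defined. A sketch assuming the @TimeSeries mapping annotation that accompanies this enum (collection and field names hypothetical):

    import java.time.Instant;

    import org.springframework.data.mongodb.core.mapping.TimeSeries;

    @TimeSeries(collection = "weather", timeField = "timestamp", granularity = Granularity.MINUTES)
    class Measurement {

        Instant timestamp;
        double temperature;
    }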
+ */ + HOURS +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java new file mode 100644 index 0000000000..0e714470db --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * The Granularity of time series data that is closest to the time span between incoming measurements. + * + * @author Christoph Strobl + * @since 3.3 + */ +public interface GranularityDefinition { + + String name(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java new file mode 100644 index 0000000000..779ed4ec9f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java @@ -0,0 +1,84 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.validation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * {@link Validator} implementation based on {@link CriteriaDefinition query expressions}. + * + * @author Andreas Zink + * @author Christoph Strobl + * @since 2.1 + * @see Criteria + * @see Schema Validation + */ +class CriteriaValidator implements Validator { + + private final CriteriaDefinition criteria; + + private CriteriaValidator(CriteriaDefinition criteria) { + this.criteria = criteria; + } + + /** + * Creates a new {@link Validator} object, which is essentially a set of query operators, based on a + * {@link CriteriaDefinition} instance. + * + * @param criteria the criteria to build the {@code validator} from. Must not be {@literal null}.
+ * @return new instance of {@link CriteriaValidator}. + * @throws IllegalArgumentException when criteria is {@literal null}. + */ + static CriteriaValidator of(CriteriaDefinition criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return new CriteriaValidator(criteria); + } + + @Override + public Document toDocument() { + return criteria.getCriteriaObject(); + } + + @Override + public String toString() { + return SerializationUtils.serializeToJsonSafely(toDocument()); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + CriteriaValidator that = (CriteriaValidator) o; + + return ObjectUtils.nullSafeEquals(criteria, that.criteria); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(criteria); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java new file mode 100644 index 0000000000..5e27b99ad6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java @@ -0,0 +1,81 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.validation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Most trivial {@link Validator} implementation using plain {@link Document} to describe the desired document structure + * which can be either a {@code $jsonSchema} or query expression. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see Schema Validation + */ +class DocumentValidator implements Validator { + + private final Document validatorObject; + + private DocumentValidator(Document validatorObject) { + this.validatorObject = validatorObject; + } + + /** + * Create new {@link DocumentValidator} defining validation rules via a plain {@link Document}. + * + * @param validatorObject must not be {@literal null}. + * @throws IllegalArgumentException if validatorObject is {@literal null}. 
+ */ + static DocumentValidator of(Document validatorObject) { + + Assert.notNull(validatorObject, "ValidatorObject must not be null"); + + return new DocumentValidator(new Document(validatorObject)); + } + + @Override + public Document toDocument() { + return new Document(validatorObject); + } + + @Override + public String toString() { + return SerializationUtils.serializeToJsonSafely(validatorObject); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + DocumentValidator that = (DocumentValidator) o; + + return ObjectUtils.nullSafeEquals(validatorObject, that.validatorObject); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(validatorObject); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java new file mode 100644 index 0000000000..61ef8c5b4f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java @@ -0,0 +1,81 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.validation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * {@link Validator} implementation based on {@link MongoJsonSchema JSON Schema}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see Schema Validation + */ +class JsonSchemaValidator implements Validator { + + private final MongoJsonSchema schema; + + private JsonSchemaValidator(MongoJsonSchema schema) { + this.schema = schema; + } + + /** + * Create new {@link JsonSchemaValidator} defining validation rules via {@link MongoJsonSchema}. + * + * @param schema must not be {@literal null}. + * @throws IllegalArgumentException if schema is {@literal null}. 
+ */ + static JsonSchemaValidator of(MongoJsonSchema schema) { + + Assert.notNull(schema, "Schema must not be null"); + + return new JsonSchemaValidator(schema); + } + + @Override + public Document toDocument() { + return schema.toDocument(); + } + + @Override + public String toString() { + return SerializationUtils.serializeToJsonSafely(toDocument()); + } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + JsonSchemaValidator that = (JsonSchemaValidator) o; + + return ObjectUtils.nullSafeEquals(schema, that.schema); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(schema); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java new file mode 100644 index 0000000000..9261642c70 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java @@ -0,0 +1,81 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.validation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.util.Assert; + +/** + * Provides a {@code validator} object to be used for collection validation via + * {@link org.springframework.data.mongodb.core.CollectionOptions.ValidationOptions}. + * + * @author Andreas Zink + * @author Christoph Strobl + * @since 2.1 + * @see MongoDB Collection Options + */ +public interface Validator { + + /** + * Get the {@link Document} containing the validation specific rules. The document may contain fields that may require + * type and/or field name mapping. + * + * @return a MongoDB {@code validator} {@link Document}. Never {@literal null}. + */ + Document toDocument(); + + /** + * Creates a basic {@link Validator} checking documents against a given set of rules. + * + * @param validationRules must not be {@literal null}. + * @return new instance of {@link Validator}. + * @throws IllegalArgumentException if validationRules is {@literal null}. + */ + static Validator document(Document validationRules) { + + Assert.notNull(validationRules, "ValidationRules must not be null"); + return DocumentValidator.of(validationRules); + } + + /** + * Creates a new {@link Validator} checking documents against the structure defined in {@link MongoJsonSchema}. + * + * @param schema must not be {@literal null}. + * @return new instance of {@link Validator}. + * @throws IllegalArgumentException if schema is {@literal null}. 
+ */ + static Validator schema(MongoJsonSchema schema) { + + Assert.notNull(schema, "Schema must not be null"); + return JsonSchemaValidator.of(schema); + } + + /** + * Creates a new {@link Validator} checking documents against a given query structure expressed by + * {@link CriteriaDefinition}.
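The three factory methods cover raw documents, JSON Schema, and criteria-based rules. A minimal usage sketch (the template call, collection options, and domain type are assumptions for illustration):

    import static org.springframework.data.mongodb.core.query.Criteria.where;

    import org.springframework.data.mongodb.core.CollectionOptions;

    // reject documents whose 'age' field is negative
    Validator validator = Validator.criteria(where("age").gte(0));
    template.createCollection(Person.class, CollectionOptions.empty().validator(validator));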
                    + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link Validator}. + * @throws IllegalArgumentException if criteria is {@literal null}. + */ + static Validator criteria(CriteriaDefinition criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + return CriteriaValidator.of(criteria); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/package-info.java new file mode 100644 index 0000000000..002a4ee1fb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/package-info.java @@ -0,0 +1,6 @@ +/** + * MongoDB schema validation specifics. + */ +@org.springframework.lang.NonNullApi +@org.springframework.lang.NonNullFields +package org.springframework.data.mongodb.core.validation; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java index de20bc35db..8f61be8659 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,9 @@ /** * Value object to abstract Ant paths. - * + * * @author Oliver Gierke + * @author Mark Paluch */ class AntPath { @@ -34,17 +35,19 @@ class AntPath { /** * Creates a new {@link AntPath} from the given path. - * + * * @param path must not be {@literal null}. */ public AntPath(String path) { - Assert.notNull(path); + + Assert.notNull(path, "Path must not be null"); + this.path = path; } /** * Returns whether the path is a pattern. - * + * * @return */ public boolean isPattern() { @@ -59,7 +62,7 @@ private static String stripPrefix(String path) { /** * Returns the regular expression equivalent of this Ant path. - * + * * @return */ public String toRegex() { @@ -95,10 +98,6 @@ private static String quote(String s, int start, int end) { return Pattern.quote(s.substring(start, end)); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return path; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java index 6a92fabf7b..54010a7c65 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,18 +16,20 @@ package org.springframework.data.mongodb.gridfs; import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.lang.Nullable; /** * GridFs-specific helper class to define {@link Criteria}s. - * + * * @author Oliver Gierke + * @author Christoph Strobl */ public class GridFsCriteria extends Criteria { /** * Creates a new {@link GridFsCriteria} for the given key. - * - * @param key + * + * @param key must not be {@literal null}. */ public GridFsCriteria(String key) { super(key); @@ -35,8 +37,8 @@ public GridFsCriteria(String key) { /** * Creates a {@link GridFsCriteria} for restrictions on the file's metadata. - * - * @return + * + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereMetaData() { return new GridFsCriteria("metadata"); @@ -44,19 +46,20 @@ public static GridFsCriteria whereMetaData() { /** * Creates a {@link GridFsCriteria} for restrictions on a single file's metadata item. - * - * @param metadataKey - * @return + * + * @param metadataKey can be {@literal null}. + * @return new instance of {@link GridFsCriteria}. */ - public static GridFsCriteria whereMetaData(String metadataKey) { + public static GridFsCriteria whereMetaData(@Nullable String metadataKey) { + String extension = metadataKey == null ? "" : "." + metadataKey; return new GridFsCriteria(String.format("metadata%s", extension)); } /** * Creates a {@link GridFsCriteria} for restrictions on the file's name. - * - * @return + * + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereFilename() { return new GridFsCriteria("filename"); @@ -64,10 +67,10 @@ public static GridFsCriteria whereFilename() { /** * Creates a {@link GridFsCriteria} for restrictions on the file's content type. - * - * @return + * + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereContentType() { - return new GridFsCriteria("contentType"); + return new GridFsCriteria("metadata.".concat(GridFsResource.CONTENT_TYPE_FIELD)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsObject.java new file mode 100644 index 0000000000..f73c0c943f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsObject.java @@ -0,0 +1,158 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
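The GridFsCriteria factories above compose with Query like any other Criteria. A short usage sketch (file and metadata values hypothetical):

    import static org.springframework.data.mongodb.core.query.Query.query;
    import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*;

    gridFsOperations.find(query(whereFilename().is("report.pdf")));
    gridFsOperations.find(query(whereMetaData("category").is("invoice")));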
+ */ +package org.springframework.data.mongodb.gridfs; + +import org.bson.Document; +import org.springframework.lang.Nullable; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * A common interface when dealing with GridFs items using Spring Data. + * + * @author Christoph Strobl + * @since 3.0 + */ +public interface GridFsObject<ID, CONTENT> { + + /** + * The {@link GridFSFile#getId()} value converted into its simple Java type.
                    + * A {@link org.bson.BsonString} will be converted to plain {@link String}. + * + * @return can be {@literal null} depending on the implementation. + */ + @Nullable + ID getFileId(); + + /** + * The filename. + * + * @return + */ + String getFilename(); + + /** + * The actual file content. + * + * @return + * @throws IllegalStateException if the content cannot be obtained. + */ + CONTENT getContent(); + + /** + * Additional information like file metadata (eg. contentType). + * + * @return never {@literal null}. + */ + Options getOptions(); + + /** + * Additional, context relevant information. + * + * @author Christoph Strobl + */ + class Options { + + private final Document metadata; + private final int chunkSize; + + private Options(Document metadata, int chunkSize) { + + this.metadata = metadata; + this.chunkSize = chunkSize; + } + + /** + * Static factory to create empty options. + * + * @return new instance of {@link Options}. + */ + public static Options none() { + return new Options(new Document(), -1); + } + + /** + * Static factory method to create {@link Options} with given content type. + * + * @param contentType + * @return new instance of {@link Options}. + */ + public static Options typed(String contentType) { + return new Options(new Document("_contentType", contentType), -1); + } + + /** + * Static factory method to create {@link Options} by extracting information from the given {@link GridFSFile}. + * + * @param gridFSFile can be {@literal null}, returns {@link #none()} in that case. + * @return new instance of {@link Options}. + */ + public static Options from(@Nullable GridFSFile gridFSFile) { + return gridFSFile != null ? new Options(gridFSFile.getMetadata(), gridFSFile.getChunkSize()) : none(); + } + + /** + * Set the associated content type. + * + * @param contentType must not be {@literal null}. + * @return new instance of {@link Options}. + */ + public Options contentType(String contentType) { + + Options target = new Options(new Document(metadata), chunkSize); + target.metadata.put("_contentType", contentType); + return target; + } + + /** + * @param metadata + * @return new instance of {@link Options}. + */ + public Options metadata(Document metadata) { + return new Options(metadata, chunkSize); + } + + /** + * @param chunkSize the file chunk size to use. + * @return new instance of {@link Options}. + */ + public Options chunkSize(int chunkSize) { + return new Options(metadata, chunkSize); + } + + /** + * @return never {@literal null}. + */ + public Document getMetadata() { + return metadata; + } + + /** + * @return the chunk size to use. + */ + public int getChunkSize() { + return chunkSize; + } + + /** + * @return {@literal null} if not set. + */ + @Nullable + String getContentType() { + return (String) metadata.get("_contentType"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java index 3c94ef21ee..bf5a1d86e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,146 +16,205 @@ package org.springframework.data.mongodb.gridfs; import java.io.InputStream; -import java.util.List; +import org.bson.Document; +import org.bson.types.ObjectId; import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.gridfs.GridFsUpload.GridFsUploadBuilder; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; -import com.mongodb.DBObject; -import com.mongodb.gridfs.GridFSDBFile; -import com.mongodb.gridfs.GridFSFile; +import com.mongodb.client.gridfs.GridFSFindIterable; /** * Collection of operations to store and read files from MongoDB GridFS. - * + * * @author Oliver Gierke * @author Philipp Schneider * @author Thomas Darimont * @author Martin Baumgartner + * @author Christoph Strobl + * @author Hartmut Lang */ public interface GridFsOperations extends ResourcePatternResolver { /** * Stores the given content into a file with the given name. - * + * * @param content must not be {@literal null}. * @param filename must not be {@literal null} or empty. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - GridFSFile store(InputStream content, String filename); + default ObjectId store(InputStream content, String filename) { + return store(content, filename, null, null); + } /** * Stores the given content into a file with the given name. - * + * * @param content must not be {@literal null}. * @param metadata can be {@literal null}. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - GridFSFile store(InputStream content, Object metadata); + default ObjectId store(InputStream content, @Nullable Object metadata) { + return store(content, null, metadata); + } /** * Stores the given content into a file with the given name. - * + * * @param content must not be {@literal null}. * @param metadata can be {@literal null}. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - GridFSFile store(InputStream content, DBObject metadata); + default ObjectId store(InputStream content, @Nullable Document metadata) { + return store(content, null, metadata); + } /** * Stores the given content into a file with the given name and content type. - * + * * @param content must not be {@literal null}. * @param filename must not be {@literal null} or empty. * @param contentType can be {@literal null}. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. 
*/ - GridFSFile store(InputStream content, String filename, String contentType); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType) { + return store(content, filename, contentType, null); + } /** * Stores the given content into a file with the given name using the given metadata. The metadata object will be * marshalled before writing. - * + * * @param content must not be {@literal null}. - * @param filename must not be {@literal null} or empty. + * @param filename can be {@literal null} or empty. * @param metadata can be {@literal null}. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - GridFSFile store(InputStream content, String filename, Object metadata); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata) { + return store(content, filename, null, metadata); + } /** * Stores the given content into a file with the given name and content type using the given metadata. The metadata * object will be marshalled before writing. - * + * * @param content must not be {@literal null}. * @param filename must not be {@literal null} or empty. * @param contentType can be {@literal null}. * @param metadata can be {@literal null} - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - GridFSFile store(InputStream content, String filename, String contentType, Object metadata); + ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata); /** * Stores the given content into a file with the given name using the given metadata. - * + * * @param content must not be {@literal null}. * @param filename must not be {@literal null} or empty. * @param metadata can be {@literal null}. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - GridFSFile store(InputStream content, String filename, DBObject metadata); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata) { + return store(content, filename, null, metadata); + } /** * Stores the given content into a file with the given name and content type using the given metadata. - * + * * @param content must not be {@literal null}. * @param filename must not be {@literal null} or empty. - * @param contentType can be {@literal null}. + * @param contentType can be {@literal null}. If not empty, may override content type within {@literal metadata}. * @param metadata can be {@literal null}. - * @return the {@link GridFSFile} just created + * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. 
*/ - GridFSFile store(InputStream content, String filename, String contentType, DBObject metadata); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, + @Nullable Document metadata) { + + GridFsUploadBuilder<ObjectId> uploadBuilder = GridFsUpload.fromStream(content); + if (StringUtils.hasText(filename)) { + uploadBuilder.filename(filename); + } + if (!ObjectUtils.isEmpty(metadata)) { + uploadBuilder.metadata(metadata); + } + if (StringUtils.hasText(contentType)) { + uploadBuilder.contentType(contentType); + } + + return store(uploadBuilder.build()); + } + + /** + * Stores the given {@link GridFsObject}, likely a {@link GridFsUpload}, into a file with the given + * {@link GridFsObject#getFilename() name}. If the {@link GridFsObject#getFileId()} is set, the file will be stored + * with that id, otherwise the server auto-creates a new id.
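Compared to the multi-argument store(...) overloads, the upload object carries name, content type, and metadata in one place. A sketch using the GridFsUpload builder referenced above (stream and values are illustrative):

    ObjectId id = gridFsOperations.store(GridFsUpload.fromStream(inputStream)
        .filename("report.pdf")
        .contentType("application/pdf")
        .metadata(new Document("category", "invoice"))
        .build());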
+ * + * @param upload the {@link GridFsObject} (most likely a {@link GridFsUpload}) to be stored. + * @param <T> id type of the underlying {@link com.mongodb.client.gridfs.model.GridFSFile} + * @return the id of the stored file. Either an auto-created value or {@link GridFsObject#getFileId()}, but never + * {@literal null}. + * @since 3.0 + */ + <T> T store(GridFsObject<T, ? extends InputStream> upload); /** * Returns all files matching the given query. Note, that currently {@link Sort} criterias defined at the * {@link Query} will not be regarded as MongoDB does not support ordering for GridFS file access. - * - * @see https://jira.mongodb.org/browse/JAVA-431 - * @param query - * @return + * + * @see <a href="https://jira.mongodb.org/browse/JAVA-431">MongoDB Jira: JAVA-431</a> + * @param query must not be {@literal null}. + * @return {@link GridFSFindIterable} to obtain results from. E.g. by calling + * {@link GridFSFindIterable#into(java.util.Collection)}. */ - List<GridFSDBFile> find(Query query); + GridFSFindIterable find(Query query); /** - * Returns a single file matching the given query or {@literal null} in case no file matches. - * - * @param query - * @return + * Returns a single {@link com.mongodb.client.gridfs.model.GridFSFile} matching the given query or {@literal null} in + * case no file matches. + * + * @param query must not be {@literal null}. + * @return can be {@literal null}. */ - GridFSDBFile findOne(Query query); + @Nullable + com.mongodb.client.gridfs.model.GridFSFile findOne(Query query); /** * Deletes all files matching the given {@link Query}. - * - * @param query + * + * @param query must not be {@literal null}. */ void delete(Query query); /** - * Returns all {@link GridFsResource} with the given file name. - * - * @param filename - * @return the resource if it exists or {@literal null}. + * Returns the {@link GridFsResource} with the given file name. + * + * @param filename must not be {@literal null}. + * @return the resource. Use {@link org.springframework.core.io.Resource#exists()} to check if the returned + * {@link GridFsResource} is actually present. * @see ResourcePatternResolver#getResource(String) */ GridFsResource getResource(String filename); + /** + * Returns the {@link GridFsResource} for a {@link com.mongodb.client.gridfs.model.GridFSFile}. + * + * @param file must not be {@literal null}. + * @return the resource for the file. + * @since 2.1 + */ + GridFsResource getResource(com.mongodb.client.gridfs.model.GridFSFile file); + /** * Returns all {@link GridFsResource}s matching the given file name pattern. - * - * @param filenamePattern - * @return + * + * @param filenamePattern must not be {@literal null}. + * @return an empty array if none found. * @see ResourcePatternResolver#getResources(String) */ GridFsResource[] getResources(String filenamePattern); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperationsSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperationsSupport.java new file mode 100644 index 0000000000..b3d3771f3c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperationsSupport.java @@ -0,0 +1,104 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.gridfs;
+
+import java.util.Optional;
+
+import org.bson.Document;
+import org.springframework.data.mongodb.core.convert.MongoConverter;
+import org.springframework.data.mongodb.core.convert.QueryMapper;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+import com.mongodb.client.gridfs.model.GridFSUploadOptions;
+
+/**
+ * Base class offering common tasks like query mapping and {@link GridFSUploadOptions} computation to be shared across
+ * imperative and reactive implementations.
+ *
+ * @author Christoph Strobl
+ * @since 2.2
+ */
+class GridFsOperationsSupport {
+
+	private final QueryMapper queryMapper;
+	private final MongoConverter converter;
+
+	/**
+	 * @param converter must not be {@literal null}.
+	 */
+	GridFsOperationsSupport(MongoConverter converter) {
+
+		Assert.notNull(converter, "MongoConverter must not be null");
+
+		this.converter = converter;
+		this.queryMapper = new QueryMapper(converter);
+	}
+
+	/**
+	 * @param query pass the given query through a {@link QueryMapper} to apply type conversion.
+	 * @return never {@literal null}.
+	 */
+	protected Document getMappedQuery(Document query) {
+		return queryMapper.getMappedObject(query, Optional.empty());
+	}
+
+	/**
+	 * Compute the {@link GridFSUploadOptions} to be used from the given {@literal contentType} and {@literal metadata}
+	 * {@link Document}.
+	 *
+	 * @param contentType can be {@literal null}.
+	 * @param metadata can be {@literal null}.
+	 * @return never {@literal null}.
+	 */
+	protected GridFSUploadOptions computeUploadOptionsFor(@Nullable String contentType, @Nullable Document metadata) {
+
+		Document targetMetadata = new Document();
+
+		if (StringUtils.hasText(contentType)) {
+			targetMetadata.put(GridFsResource.CONTENT_TYPE_FIELD, contentType);
+		}
+
+		if (metadata != null) {
+			targetMetadata.putAll(metadata);
+		}
+
+		GridFSUploadOptions options = new GridFSUploadOptions();
+		options.metadata(targetMetadata);
+
+		return options;
+	}
+
+	/**
+	 * Convert a given {@literal value} into a {@link Document}.
+	 *
+	 * @param value can be {@literal null}.
+	 * @return an empty {@link Document} if the source value is {@literal null}.
+	 */
+	protected Document toDocument(@Nullable Object value) {
+
+		if (value instanceof Document document) {
+			return document;
+		}
+
+		Document document = new Document();
+		if (value != null) {
+			converter.write(value, document);
+		}
+		return document;
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java
index 8a475367d3..0873432977 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,74 +15,188 @@ */ package org.springframework.data.mongodb.gridfs; +import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; import java.io.IOException; +import java.io.InputStream; +import java.util.Optional; import org.springframework.core.io.InputStreamResource; import org.springframework.core.io.Resource; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; -import com.mongodb.gridfs.GridFSDBFile; +import com.mongodb.MongoGridFSException; +import com.mongodb.client.gridfs.model.GridFSFile; /** - * {@link GridFSDBFile} based {@link Resource} implementation. - * + * {@link GridFSFile} based {@link Resource} implementation. + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Hartmut Lang + * @author Mark Paluch */ -public class GridFsResource extends InputStreamResource { +public class GridFsResource extends InputStreamResource implements GridFsObject { + + static final String CONTENT_TYPE_FIELD = "_contentType"; + private static final ByteArrayInputStream EMPTY_INPUT_STREAM = new ByteArrayInputStream(new byte[0]); + + private final @Nullable GridFSFile file; + private final String filename; + + /** + * Creates a new, absent {@link GridFsResource}. + * + * @param filename filename of the absent resource. + * @since 2.1 + */ + private GridFsResource(String filename) { + + super(EMPTY_INPUT_STREAM, String.format("GridFs resource [%s]", filename)); - private final GridFSDBFile file; + this.file = null; + this.filename = filename; + } /** - * Creates a new {@link GridFsResource} from the given {@link GridFSDBFile}. - * + * Creates a new {@link GridFsResource} from the given {@link GridFSFile}. + * * @param file must not be {@literal null}. */ - public GridFsResource(GridFSDBFile file) { - super(file.getInputStream()); + public GridFsResource(GridFSFile file) { + this(file, new ByteArrayInputStream(new byte[] {})); + } + + /** + * Creates a new {@link GridFsResource} from the given {@link GridFSFile} and {@link InputStream}. + * + * @param file must not be {@literal null}. + * @param inputStream must not be {@literal null}. + */ + public GridFsResource(GridFSFile file, InputStream inputStream) { + + super(inputStream, String.format("GridFs resource [%s]", file.getFilename())); + this.file = file; + this.filename = file.getFilename(); } - /* - * (non-Javadoc) - * @see org.springframework.core.io.AbstractResource#contentLength() + /** + * Obtain an absent {@link GridFsResource}. + * + * @param filename filename of the absent resource, must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 2.1 */ + public static GridFsResource absent(String filename) { + + Assert.notNull(filename, "Filename must not be null"); + + return new GridFsResource(filename); + } + + @Override + public InputStream getInputStream() throws IOException, IllegalStateException { + + verifyExists(); + return super.getInputStream(); + } + @Override public long contentLength() throws IOException { - return file.getLength(); + + verifyExists(); + return getGridFSFile().getLength(); } - /* - * (non-Javadoc) - * @see org.springframework.core.io.AbstractResource#getFilename() - */ @Override public String getFilename() throws IllegalStateException { - return file.getFilename(); + return this.filename; + } + + @Override + public boolean exists() { + return this.file != null; } - /* - * (non-Javadoc) - * @see org.springframework.core.io.AbstractResource#lastModified() - */ @Override public long lastModified() throws IOException { - return file.getUploadDate().getTime(); + + verifyExists(); + return getGridFSFile().getUploadDate().getTime(); + } + + @Override + public String getDescription() { + return String.format("GridFs resource [%s]", this.getFilename()); } /** * Returns the {@link Resource}'s id. - * - * @return + * + * @return never {@literal null}. + * @throws IllegalStateException if the file does not {@link #exists()}. */ public Object getId() { - return file.getId(); + + Assert.state(exists(), () -> String.format("%s does not exist.", getDescription())); + + return getGridFSFile().getId(); + } + + @Override + public Object getFileId() { + + Assert.state(exists(), () -> String.format("%s does not exist.", getDescription())); + return BsonUtils.toJavaType(getGridFSFile().getId()); + } + + /** + * @return the underlying {@link GridFSFile}. Can be {@literal null} if absent. + * @since 2.2 + */ + @Nullable + public GridFSFile getGridFSFile() { + return this.file; } /** * Returns the {@link Resource}'s content type. - * - * @return + * + * @return never {@literal null}. + * @throws com.mongodb.MongoGridFSException in case no content type declared on {@link GridFSFile#getMetadata()} nor + * provided via {@link GridFSFile}. + * @throws IllegalStateException if the file does not {@link #exists()}. 
*/ public String getContentType() { - return file.getContentType(); + + Assert.state(exists(), () -> String.format("%s does not exist.", getDescription())); + + return Optional.ofNullable(getGridFSFile().getMetadata()).map(it -> it.get(CONTENT_TYPE_FIELD, String.class)) + .orElseThrow(() -> new MongoGridFSException("No contentType data for this GridFS file")); + } + + @Override + public InputStream getContent() { + + try { + return getInputStream(); + } catch (IOException e) { + throw new IllegalStateException("Failed to obtain input stream for " + filename, e); + } + } + + @Override + public Options getOptions() { + return Options.from(getGridFSFile()); + } + + private void verifyExists() throws FileNotFoundException { + + if (!exists()) { + throw new FileNotFoundException(String.format("%s does not exist.", getDescription())); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java index 20781134ec..8187c7dbc3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,214 +21,173 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.List; +import java.util.Optional; +import java.util.function.Supplier; +import org.bson.Document; +import org.bson.types.ObjectId; import org.springframework.core.io.support.ResourcePatternResolver; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.MongoConverter; -import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DB; -import com.mongodb.DBObject; -import com.mongodb.gridfs.GridFS; -import com.mongodb.gridfs.GridFSDBFile; -import com.mongodb.gridfs.GridFSFile; -import com.mongodb.gridfs.GridFSInputFile; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.gridfs.GridFSBucket; +import com.mongodb.client.gridfs.GridFSBuckets; +import com.mongodb.client.gridfs.GridFSFindIterable; +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.client.gridfs.model.GridFSUploadOptions; /** * {@link GridFsOperations} implementation to store content into MongoDB GridFS. 
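+ * <p>
+ * A setup and usage sketch ({@code dbFactory}, {@code converter}, {@code inputStream} and the bucket name are
+ * placeholders for existing beans and values; {@code query} and {@code whereFilename} are assumed to be statically
+ * imported from {@code Query} and {@link GridFsCriteria}):
+ *
+ * <pre class="code">
+ * GridFsOperations operations = new GridFsTemplate(dbFactory, converter, "uploads");
+ *
+ * ObjectId id = operations.store(inputStream, "sample.txt", "text/plain", new Document("category", "sample"));
+ * GridFSFile file = operations.findOne(query(whereFilename().is("sample.txt")));
+ * </pre>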
- *
+ *
 * @author Oliver Gierke
 * @author Philipp Schneider
 * @author Thomas Darimont
 * @author Martin Baumgartner
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @author Hartmut Lang
+ * @author Niklas Helge Hanft
+ * @author Denis Zavedeev
 */
-public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {
+public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOperations, ResourcePatternResolver {

-	private final MongoDbFactory dbFactory;
-	private final String bucket;
-	private final MongoConverter converter;
-	private final QueryMapper queryMapper;
+	private final Supplier<GridFSBucket> bucketSupplier;

	/**
-	 * Creates a new {@link GridFsTemplate} using the given {@link MongoDbFactory} and {@link MongoConverter}.
-	 *
+	 * Creates a new {@link GridFsTemplate} using the given {@link MongoDatabaseFactory} and {@link MongoConverter}.
+	 * <p>

                    + * Note that the {@link GridFSBucket} is obtained only once from {@link MongoDatabaseFactory#getMongoDatabase() + * MongoDatabase}. Use {@link #GridFsTemplate(MongoConverter, Supplier)} if you want to use different buckets from the + * same Template instance. + * * @param dbFactory must not be {@literal null}. * @param converter must not be {@literal null}. */ - public GridFsTemplate(MongoDbFactory dbFactory, MongoConverter converter) { + public GridFsTemplate(MongoDatabaseFactory dbFactory, MongoConverter converter) { this(dbFactory, converter, null); } /** - * Creates a new {@link GridFsTemplate} using the given {@link MongoDbFactory} and {@link MongoConverter}. - * + * Creates a new {@link GridFsTemplate} using the given {@link MongoDatabaseFactory} and {@link MongoConverter}. + *

                    + * Note that the {@link GridFSBucket} is obtained only once from {@link MongoDatabaseFactory#getMongoDatabase() + * MongoDatabase}. Use {@link #GridFsTemplate(MongoConverter, Supplier)} if you want to use different buckets from the + * same Template instance. + * * @param dbFactory must not be {@literal null}. * @param converter must not be {@literal null}. - * @param bucket + * @param bucket can be {@literal null}. */ - public GridFsTemplate(MongoDbFactory dbFactory, MongoConverter converter, String bucket) { - - Assert.notNull(dbFactory); - Assert.notNull(converter); - - this.dbFactory = dbFactory; - this.converter = converter; - this.bucket = bucket; - - this.queryMapper = new QueryMapper(converter); + public GridFsTemplate(MongoDatabaseFactory dbFactory, MongoConverter converter, @Nullable String bucket) { + this(converter, Lazy.of(() -> getGridFs(dbFactory, bucket))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String) + /** + * Creates a new {@link GridFsTemplate} using the given {@link MongoConverter} and {@link Supplier} providing the + * required {@link GridFSBucket}. + * + * @param converter must not be {@literal null}. + * @param gridFSBucket must not be {@literal null}. + * @since 4.2 */ - public GridFSFile store(InputStream content, String filename) { - return store(content, filename, (Object) null); - } + public GridFsTemplate(MongoConverter converter, Supplier gridFSBucket) { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.Object) - */ + super(converter); - @Override - public GridFSFile store(InputStream content, Object metadata) { - return store(content, null, metadata); - } + Assert.notNull(gridFSBucket, "GridFSBucket supplier must not be null"); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, com.mongodb.DBObject) - */ - @Override - public GridFSFile store(InputStream content, DBObject metadata) { - return store(content, null, metadata); + this.bucketSupplier = gridFSBucket; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.String) - */ - public GridFSFile store(InputStream content, String filename, String contentType) { - return store(content, filename, contentType, (Object) null); + @Override + public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata) { + return store(content, filename, contentType, toDocument(metadata)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.Object) - */ - public GridFSFile store(InputStream content, String filename, Object metadata) { - return store(content, filename, null, metadata); - } + @Override + @SuppressWarnings("unchecked") + public T store(GridFsObject upload) { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.String, java.lang.Object) - */ - public GridFSFile store(InputStream content, String filename, String contentType, Object metadata) { + GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(), + upload.getOptions().getMetadata()); - DBObject dbObject = null; + if 
(upload.getOptions().getChunkSize() > 0) { + uploadOptions.chunkSizeBytes(upload.getOptions().getChunkSize()); + } - if (metadata != null) { - dbObject = new BasicDBObject(); - converter.write(metadata, dbObject); + if (upload.getFileId() == null) { + return (T) getGridFs().uploadFromStream(upload.getFilename(), upload.getContent(), uploadOptions); } - return store(content, filename, contentType, dbObject); + getGridFs().uploadFromStream(BsonUtils.simpleToBsonValue(upload.getFileId()), upload.getFilename(), + upload.getContent(), uploadOptions); + return upload.getFileId(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.DBObject) - */ - public GridFSFile store(InputStream content, String filename, DBObject metadata) { - return this.store(content, filename, null, metadata); - } + @Override + public GridFSFindIterable find(Query query) { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.DBObject) - */ - public GridFSFile store(InputStream content, String filename, String contentType, DBObject metadata) { + Assert.notNull(query, "Query must not be null"); - Assert.notNull(content); + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); - GridFSInputFile file = getGridFs().createFile(content); + GridFSFindIterable iterable = getGridFs().find(queryObject).sort(sortObject); - if (filename != null) { - file.setFilename(filename); + if (query.getSkip() > 0) { + iterable = iterable.skip(Math.toIntExact(query.getSkip())); } - if (metadata != null) { - file.setMetaData(metadata); + if (query.getLimit() > 0) { + iterable = iterable.limit(query.getLimit()); } - if (contentType != null) { - file.setContentType(contentType); - } + return iterable; + } - file.save(); - return file; + @Override + public GridFSFile findOne(Query query) { + return find(query).first(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#find(com.mongodb.DBObject) - */ - public List find(Query query) { + @Override + public void delete(Query query) { - if (query == null) { - return getGridFs().find((DBObject) null); + for (GridFSFile gridFSFile : find(query)) { + getGridFs().delete(gridFSFile.getId()); } - - DBObject queryObject = getMappedQuery(query.getQueryObject()); - DBObject sortObject = getMappedQuery(query.getSortObject()); - - return getGridFs().find(queryObject, sortObject); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#findOne(com.mongodb.DBObject) - */ - public GridFSDBFile findOne(Query query) { - return getGridFs().findOne(getMappedQuery(query)); + @Override + public ClassLoader getClassLoader() { + return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#delete(org.springframework.data.mongodb.core.query.Query) - */ - public void delete(Query query) { - getGridFs().remove(getMappedQuery(query)); - } + @Override + public GridFsResource getResource(String location) { - /* - * (non-Javadoc) - * @see org.springframework.core.io.ResourceLoader#getClassLoader() - */ - public ClassLoader getClassLoader() { - return dbFactory.getClass().getClassLoader(); + return Optional.ofNullable(findOne(query(whereFilename().is(location)))) // + .map(this::getResource) // + .orElseGet(() -> GridFsResource.absent(location)); } - /* - * 
(non-Javadoc) - * @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String) - */ - public GridFsResource getResource(String location) { + @Override + public GridFsResource getResource(GridFSFile file) { + + Assert.notNull(file, "GridFSFile must not be null"); - GridFSDBFile file = findOne(query(whereFilename().is(location))); - return file != null ? new GridFsResource(file) : null; + return new GridFsResource(file, getGridFs().openDownloadStream(file.getId())); } - /* - * (non-Javadoc) - * @see org.springframework.core.io.support.ResourcePatternResolver#getResources(java.lang.String) - */ + @Override public GridFsResource[] getResources(String locationPattern) { if (!StringUtils.hasText(locationPattern)) { @@ -239,29 +198,28 @@ public GridFsResource[] getResources(String locationPattern) { if (path.isPattern()) { - List files = find(query(whereFilename().regex(path.toRegex()))); - List resources = new ArrayList(files.size()); + GridFSFindIterable files = find(query(whereFilename().regex(path.toRegex()))); + List resources = new ArrayList<>(); - for (GridFSDBFile file : files) { - resources.add(new GridFsResource(file)); + for (GridFSFile file : files) { + resources.add(getResource(file)); } - return resources.toArray(new GridFsResource[resources.size()]); + return resources.toArray(new GridFsResource[0]); } return new GridFsResource[] { getResource(locationPattern) }; } - private DBObject getMappedQuery(Query query) { - return query == null ? new Query().getQueryObject() : getMappedQuery(query.getQueryObject()); + private GridFSBucket getGridFs() { + return this.bucketSupplier.get(); } - private DBObject getMappedQuery(DBObject query) { - return query == null ? null : queryMapper.getMappedObject(query, null); - } + private static GridFSBucket getGridFs(MongoDatabaseFactory dbFactory, @Nullable String bucket) { + + Assert.notNull(dbFactory, "MongoDatabaseFactory must not be null"); - private GridFS getGridFs() { - DB db = dbFactory.getDb(); - return bucket == null ? new GridFS(db) : new GridFS(db, bucket); + MongoDatabase db = dbFactory.getMongoDatabase(); + return bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsUpload.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsUpload.java new file mode 100644 index 0000000000..9f8d9a47d2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsUpload.java @@ -0,0 +1,232 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.gridfs; + +import java.io.InputStream; +import java.util.function.Supplier; + +import org.bson.Document; +import org.bson.types.ObjectId; + +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Upload descriptor for a GridFS file upload. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class GridFsUpload implements GridFsObject { + + private final @Nullable ID id; + private final Lazy dataStream; + private final String filename; + private final Options options; + + private GridFsUpload(@Nullable ID id, Lazy dataStream, String filename, Options options) { + + Assert.notNull(dataStream, "Data Stream must not be null"); + Assert.notNull(filename, "Filename must not be null"); + Assert.notNull(options, "Options must not be null"); + + this.id = id; + this.dataStream = dataStream; + this.filename = filename; + this.options = options; + } + + /** + * The {@link GridFSFile#getId()} value converted into its simple java type.
                    + * A {@link org.bson.BsonString} will be converted to plain {@link String}. + * + * @return can be {@literal null}. + * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId() + */ + @Override + @Nullable + public ID getFileId() { + return id; + } + + @Override + public String getFilename() { + return filename; + } + + @Override + public InputStream getContent() { + return dataStream.orElse(InputStream.nullInputStream()); + } + + @Override + public Options getOptions() { + return options; + } + + /** + * Create a new instance of {@link GridFsUpload} for the given {@link InputStream}. + * + * @param stream must not be {@literal null}. + * @return new instance of {@link GridFsUpload}. + */ + public static GridFsUploadBuilder fromStream(InputStream stream) { + return new GridFsUploadBuilder().content(stream); + } + + /** + * Builder to create {@link GridFsUpload} in a fluent way. + * + * @param the target id type. + */ + public static class GridFsUploadBuilder { + + private Object id; + private Lazy dataStream; + private String filename; + private Options options = Options.none(); + + private GridFsUploadBuilder() {} + + /** + * Define the content of the file to upload. + * + * @param stream the upload content. + * @return this. + */ + public GridFsUploadBuilder content(InputStream stream) { + + Assert.notNull(stream, "InputStream must not be null"); + + return content(() -> stream); + } + + /** + * Define the content of the file to upload. + * + * @param stream the upload content. + * @return this. + */ + public GridFsUploadBuilder content(Supplier stream) { + + Assert.notNull(stream, "InputStream Supplier must not be null"); + + this.dataStream = Lazy.of(stream); + return this; + } + + /** + * Set the id to use. + * + * @param id the id to save the content to. + * @param + * @return this. + */ + public GridFsUploadBuilder id(T1 id) { + + this.id = id; + return (GridFsUploadBuilder) this; + } + + /** + * Set the filename. + * + * @param filename the filename to use. + * @return this. + */ + public GridFsUploadBuilder filename(String filename) { + + this.filename = filename; + return this; + } + + /** + * Set additional file information. + * + * @param options must not be {@literal null}. + * @return this. + */ + public GridFsUploadBuilder options(Options options) { + + Assert.notNull(options, "Options must not be null"); + + this.options = options; + return this; + } + + /** + * Set the file metadata. + * + * @param metadata must not be {@literal null}. + * @return this. + */ + public GridFsUploadBuilder metadata(Document metadata) { + + this.options = this.options.metadata(metadata); + return this; + } + + /** + * Set the upload chunk size in bytes. + * + * @param chunkSize use negative number for default. + * @return this. + */ + public GridFsUploadBuilder chunkSize(int chunkSize) { + + this.options = this.options.chunkSize(chunkSize); + return this; + } + + /** + * Set id, filename, metadata and chunk size from given file. + * + * @param gridFSFile must not be {@literal null}. + * @return this. + */ + public GridFsUploadBuilder gridFsFile(GridFSFile gridFSFile) { + + Assert.notNull(gridFSFile, "GridFSFile must not be null"); + + this.id = gridFSFile.getId(); + this.filename = gridFSFile.getFilename(); + this.options = this.options.metadata(gridFSFile.getMetadata()); + this.options = this.options.chunkSize(gridFSFile.getChunkSize()); + + return this; + } + + /** + * Set the content type. + * + * @param contentType must not be {@literal null}. 
+ * @return this. + */ + public GridFsUploadBuilder contentType(String contentType) { + + this.options = this.options.contentType(contentType); + return this; + } + + public GridFsUpload build() { + return new GridFsUpload(id, dataStream, filename, options); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsOperations.java new file mode 100644 index 0000000000..9ee47e0bb9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsOperations.java @@ -0,0 +1,236 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.gridfs.ReactiveGridFsUpload.ReactiveGridFsUploadBuilder; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Collection of operations to store and read files from MongoDB GridFS using reactive infrastructure. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public interface ReactiveGridFsOperations { + + /** + * Stores the given content into a file with the given name. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, String filename) { + return store(content, filename, (Object) null); + } + + /** + * Stores the given content into a file applying the given metadata. + * + * @param content must not be {@literal null}. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable Object metadata) { + return store(content, null, metadata); + } + + /** + * Stores the given content into a file applying the given metadata. + * + * @param content must not be {@literal null}. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. 
+ */ + default Mono store(Publisher content, @Nullable Document metadata) { + return store(content, null, metadata); + } + + /** + * Stores the given content into a file with the given name and content type. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param contentType can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable String contentType) { + return store(content, filename, contentType, (Object) null); + } + + /** + * Stores the given content into a file with the given name using the given metadata. The metadata object will be + * marshalled before writing. + * + * @param content must not be {@literal null}. + * @param filename can be {@literal null} or empty. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable Object metadata) { + return store(content, filename, null, metadata); + } + + /** + * Stores the given content into a file with the given name and content type using the given metadata. The metadata + * object will be marshalled before writing. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param contentType can be {@literal null}. + * @param metadata can be {@literal null} + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + Mono store(Publisher content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata); + + /** + * Stores the given content into a file with the given name using the given metadata. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable Document metadata) { + return store(content, filename, null, metadata); + } + + /** + * Stores the given content into a file with the given name and content type using the given metadata. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param contentType can be {@literal null}. If not empty, may override content type within {@literal metadata}. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. 
+	 */
+	default Mono<ObjectId> store(Publisher<DataBuffer> content, @Nullable String filename, @Nullable String contentType,
+			@Nullable Document metadata) {
+
+		ReactiveGridFsUploadBuilder<ObjectId> uploadBuilder = ReactiveGridFsUpload.fromPublisher(content);
+
+		if (StringUtils.hasText(filename)) {
+			uploadBuilder.filename(filename);
+		}
+		if (!ObjectUtils.isEmpty(metadata)) {
+			uploadBuilder.metadata(metadata);
+		}
+		if (StringUtils.hasText(contentType)) {
+			uploadBuilder.contentType(contentType);
+		}
+
+		return store(uploadBuilder.build());
+	}
+
+	/**
+	 * Stores the given {@link GridFsObject}, likely a {@link ReactiveGridFsUpload}, into a file with the given
+	 * {@link GridFsObject#getFilename() name}. If the {@link GridFsObject#getFileId()} is set, the file will be stored
+	 * with that id, otherwise the server auto creates a new id.
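+	 * <p>
+	 * A usage sketch ({@code operations} and {@code dataBufferPublisher} are placeholder names for an available
+	 * {@link ReactiveGridFsOperations} instance and a {@code Publisher<DataBuffer>} holding the content; they are not
+	 * part of this API):
+	 *
+	 * <pre class="code">
+	 * Mono<ObjectId> id = operations.store(ReactiveGridFsUpload.fromPublisher(dataBufferPublisher) //
+	 * 		.filename("sample.txt") //
+	 * 		.contentType("text/plain") //
+	 * 		.build());
+	 * </pre>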
+	 *
+	 * @param upload the {@link GridFsObject} (most likely a {@link ReactiveGridFsUpload}) to be stored.
+	 * @param <T> id type of the underlying {@link com.mongodb.client.gridfs.model.GridFSFile}
+	 * @return {@link Mono} emitting the id of the stored file which is either an auto created value or
+	 *         {@link GridFsObject#getFileId()}.
+	 * @since 3.0
+	 */
+	<T> Mono<T> store(GridFsObject<T, Publisher<DataBuffer>> upload);
+
+	/**
+	 * Returns a {@link Flux} emitting all files matching the given query.
+	 * NOTE: Currently {@link Sort} criteria defined at the {@link Query} will not be regarded as MongoDB
+	 * does not support ordering for GridFS file access.
+	 *
+	 * @see <a href="https://jira.mongodb.org/browse/JAVA-431">MongoDB Jira: JAVA-431</a>
+	 * @param query must not be {@literal null}.
+	 * @return {@link Flux#empty()} if no match found.
+	 */
+	Flux<GridFSFile> find(Query query);
+
+	/**
+	 * Returns a {@link Mono} emitting a single {@link com.mongodb.client.gridfs.model.GridFSFile} matching the given
+	 * query or {@link Mono#empty()} in case no file matches.
+	 * NOTE: If more than one file matches the given query the resulting {@link Mono} emits an error. If
+	 * you want to obtain the first found file use {@link #findFirst(Query)}.
+	 *
+	 * @param query must not be {@literal null}.
+	 * @return {@link Mono#empty()} if no match found.
+	 */
+	Mono<GridFSFile> findOne(Query query);
+
+	/**
+	 * Returns a {@link Mono} emitting the first {@link com.mongodb.client.gridfs.model.GridFSFile} matching the given
+	 * query or {@link Mono#empty()} in case no file matches.
+	 *
+	 * @param query must not be {@literal null}.
+	 * @return {@link Mono#empty()} if no match found.
+	 */
+	Mono<GridFSFile> findFirst(Query query);
+
+	/**
+	 * Deletes all files matching the given {@link Query}.
+	 *
+	 * @param query must not be {@literal null}.
+	 * @return a {@link Mono} signalling operation completion.
+	 */
+	Mono<Void> delete(Query query);
+
+	/**
+	 * Returns a {@link Mono} emitting the {@link ReactiveGridFsResource} with the given file name.
+	 *
+	 * @param filename must not be {@literal null}.
+	 * @return {@link Mono#empty()} if no match found.
+	 */
+	Mono<ReactiveGridFsResource> getResource(String filename);
+
+	/**
+	 * Returns a {@link Mono} emitting the {@link ReactiveGridFsResource} for a {@link GridFSFile}.
+	 *
+	 * @param file must not be {@literal null}.
+	 * @return {@link Mono#empty()} if no match found.
+	 */
+	Mono<ReactiveGridFsResource> getResource(GridFSFile file);
+
+	/**
+	 * Returns a {@link Flux} emitting all {@link ReactiveGridFsResource}s matching the given file name pattern.
+	 *
+	 * @param filenamePattern must not be {@literal null}.
+	 * @return {@link Flux#empty()} if no match found.
+	 */
+	Flux<ReactiveGridFsResource> getResources(String filenamePattern);
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResource.java
new file mode 100644
index 0000000000..aec7cadef1
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResource.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ */
+package org.springframework.data.mongodb.gridfs;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import java.io.InputStream;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.bson.BsonValue;
+import org.reactivestreams.Publisher;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.buffer.DataBuffer;
+import org.springframework.core.io.buffer.DataBufferFactory;
+import org.springframework.core.io.buffer.DataBufferUtils;
+import org.springframework.core.io.buffer.DefaultDataBufferFactory;
+import org.springframework.data.mongodb.util.BsonUtils;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+
+import com.mongodb.client.gridfs.model.GridFSFile;
+import com.mongodb.reactivestreams.client.gridfs.GridFSDownloadPublisher;
+
+/**
+ * Reactive {@link GridFSFile} based {@link Resource} implementation. Note that the {@link #getDownloadStream() content}
+ * can be consumed only once.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.2
+ */
+public class ReactiveGridFsResource implements GridFsObject<Object, Publisher<DataBuffer>> {
+
+	private final AtomicBoolean consumed = new AtomicBoolean(false);
+
+	private final @Nullable Object id;
+	private final Options options;
+	private final String filename;
+	private final @Nullable GridFSDownloadPublisher downloadPublisher;
+	private final DataBufferFactory dataBufferFactory;
+
+	/**
+	 * Creates a new {@link ReactiveGridFsResource}, absent if the given {@literal downloadPublisher} is
+	 * {@literal null}.
+	 *
+	 * @param filename filename of the resource.
+	 * @param downloadPublisher can be {@literal null}.
+	 */
+	public ReactiveGridFsResource(String filename, @Nullable GridFSDownloadPublisher downloadPublisher) {
+		this(null, filename, Options.none(), downloadPublisher);
+	}
+
+	/**
+	 * Creates a new {@link ReactiveGridFsResource}, absent if the given {@literal downloadPublisher} is
+	 * {@literal null}.
+	 *
+	 * @param id can be {@literal null}.
+	 * @param filename filename of the resource.
+	 * @param options must not be {@literal null}.
+	 * @param downloadPublisher can be {@literal null}.
+	 * @since 3.0
+	 */
+	public ReactiveGridFsResource(@Nullable Object id, String filename, Options options,
+			@Nullable GridFSDownloadPublisher downloadPublisher) {
+		this(id, filename, options, downloadPublisher, new DefaultDataBufferFactory());
+	}
+
+	ReactiveGridFsResource(GridFSFile file, @Nullable GridFSDownloadPublisher downloadPublisher,
+			DataBufferFactory dataBufferFactory) {
+		this(file.getId(), file.getFilename(), Options.from(file), downloadPublisher, dataBufferFactory);
+	}
+
+	/**
+	 * Creates a new {@link ReactiveGridFsResource}, absent if the given {@literal downloadPublisher} is
+	 * {@literal null}.
+	 *
+	 * @param id can be {@literal null}.
+	 * @param filename filename of the resource.
+	 * @param options must not be {@literal null}.
+	 * @param downloadPublisher can be {@literal null}.
+	 * @param dataBufferFactory must not be {@literal null}.
+	 * @since 3.0
+	 */
+	ReactiveGridFsResource(@Nullable Object id, String filename, Options options,
+			@Nullable GridFSDownloadPublisher downloadPublisher, DataBufferFactory dataBufferFactory) {
+
+		this.id = id;
+		this.filename = filename;
+		this.options = options;
+		this.downloadPublisher = downloadPublisher;
+		this.dataBufferFactory = dataBufferFactory;
+	}
+
+	/**
+	 * Obtain an absent {@link ReactiveGridFsResource}.
+	 *
+	 * @param filename filename of the absent resource, must not be {@literal null}.
+	 * @return never {@literal null}.
+	 * @since 2.1
+	 */
+	public static ReactiveGridFsResource absent(String filename) {
+
+		Assert.notNull(filename, "Filename must not be null");
+		return new ReactiveGridFsResource(filename, null);
+	}
+
+	@Override
+	public Object getFileId() {
+		return id instanceof BsonValue bsonValue ?
BsonUtils.toJavaType(bsonValue) : id; + } + + /** + * @see org.springframework.core.io.AbstractResource#getFilename() + */ + public String getFilename() throws IllegalStateException { + return this.filename; + } + + /** + * @return the underlying {@link GridFSFile}. Can be {@literal null} if absent. + * @since 2.2 + */ + public Mono getGridFSFile() { + return downloadPublisher != null ? Mono.from(downloadPublisher.getGridFSFile()) : Mono.empty(); + } + + /** + * Obtain the data as {@link InputStream}.
                    + * NOTE: Buffers data in memory. Use {@link #getDownloadStream()} for large files. + * + * @throws IllegalStateException if the underlying {@link Publisher} has already been consumed. + * @see org.springframework.core.io.InputStreamResource#getInputStream() + * @see #getDownloadStream() + * @see DataBufferUtils#join(Publisher) + * @since 3.0 + */ + public Mono getInputStream() throws IllegalStateException { + + return getDownloadStream() // + .transform(DataBufferUtils::join) // + .as(Mono::from) // + .map(DataBuffer::asInputStream); + } + + /** + * Obtain the download stream emitting chunks of data as they come in.
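+	 * <p>
+	 * A consumption sketch ({@code resource} is a placeholder for an existing {@link ReactiveGridFsResource}; the
+	 * target path is illustrative only). {@code DataBufferUtils.write} releases the emitted buffers while writing
+	 * them to disk:
+	 *
+	 * <pre class="code">
+	 * DataBufferUtils.write(resource.getDownloadStream(), Paths.get("download.bin")).subscribe();
+	 * </pre>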
                    + * + * @return {@link Flux#empty()} if the file does not exist. + * @throws IllegalStateException if the underlying {@link Publisher} has already been consumed. + * @see org.springframework.core.io.InputStreamResource#getInputStream() + * @see #getDownloadStream() + * @see DataBufferUtils#join(Publisher) + * @since 3.0 + */ + public Flux getDownloadStream() { + + if (downloadPublisher == null) { + return Flux.empty(); + } + + return createDownloadStream(downloadPublisher); + } + + @Override + public Flux getContent() { + return getDownloadStream(); + } + + @Override + public Options getOptions() { + return options; + } + + /** + * Obtain the download stream emitting chunks of data with given {@code chunkSize} as they come in. + * + * @param chunkSize the preferred number of bytes per emitted {@link DataBuffer}. + * @return {@link Flux#empty()} if the file does not exist. + * @throws IllegalStateException if the underlying {@link Publisher} has already been consumed. + * @see org.springframework.core.io.InputStreamResource#getInputStream() + * @see #getDownloadStream() + * @see DataBufferUtils#join(Publisher) + * @since 3.0 + */ + public Flux getDownloadStream(int chunkSize) { + + if (downloadPublisher == null) { + return Flux.empty(); + } + + return createDownloadStream(downloadPublisher.bufferSizeBytes(chunkSize)); + } + + private Flux createDownloadStream(GridFSDownloadPublisher publisher) { + + return Flux.from(publisher) // + .map(dataBufferFactory::wrap) // + .doOnSubscribe(it -> this.verifyStreamStillAvailable()); + } + + public boolean exists() { + return downloadPublisher != null; + } + + private void verifyStreamStillAvailable() { + + if (!consumed.compareAndSet(false, true)) { + throw new IllegalStateException("Stream already consumed."); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplate.java new file mode 100644 index 0000000000..305e55aee4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplate.java @@ -0,0 +1,377 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.gridfs; + +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.nio.ByteBuffer; + +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.core.io.buffer.DefaultDataBufferFactory; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.client.gridfs.model.GridFSUploadOptions; +import com.mongodb.reactivestreams.client.gridfs.GridFSBucket; +import com.mongodb.reactivestreams.client.gridfs.GridFSBuckets; +import com.mongodb.reactivestreams.client.gridfs.GridFSFindPublisher; +import com.mongodb.reactivestreams.client.gridfs.GridFSUploadPublisher; + +/** + * {@link ReactiveGridFsOperations} implementation to store content into MongoDB GridFS. Uses by default + * {@link DefaultDataBufferFactory} to create {@link DataBuffer buffers}. + * + * @author Mark Paluch + * @author Nick Stolwijk + * @author Denis Zavedeev + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.2 + */ +public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements ReactiveGridFsOperations { + + private final DataBufferFactory dataBufferFactory; + private final Mono bucketSupplier; + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link ReactiveMongoDatabaseFactory} and + * {@link MongoConverter}. + *

                    + * Note that the {@link GridFSBucket} is obtained only once from + * {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use + * {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from + * the same Template instance. + * + * @param dbFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + */ + public ReactiveGridFsTemplate(ReactiveMongoDatabaseFactory dbFactory, MongoConverter converter) { + this(dbFactory, converter, null); + } + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link ReactiveMongoDatabaseFactory} and + * {@link MongoConverter}. + *

                    + * Note that the {@link GridFSBucket} is obtained only once from + * {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use + * {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from + * the same Template instance. + * + * @param dbFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param bucket can be {@literal null}. + */ + public ReactiveGridFsTemplate(ReactiveMongoDatabaseFactory dbFactory, MongoConverter converter, + @Nullable String bucket) { + this(new DefaultDataBufferFactory(), dbFactory, converter, bucket); + } + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link DataBufferFactory}, + * {@link ReactiveMongoDatabaseFactory} and {@link MongoConverter}. + *

                    + * Note that the {@link GridFSBucket} is obtained only once from + * {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use + * {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from + * the same Template instance. + * + * @param dataBufferFactory must not be {@literal null}. + * @param dbFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param bucket can be {@literal null}. + */ + public ReactiveGridFsTemplate(DataBufferFactory dataBufferFactory, ReactiveMongoDatabaseFactory dbFactory, + MongoConverter converter, @Nullable String bucket) { + this(converter, Mono.defer(Lazy.of(() -> doGetBucket(dbFactory, bucket))), dataBufferFactory); + } + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link MongoConverter}, {@link Mono} emitting a + * {@link ReactiveMongoDatabaseFactory} and {@link DataBufferFactory}. + * + * @param converter must not be {@literal null}. + * @param gridFSBucket must not be {@literal null}. + * @param dataBufferFactory must not be {@literal null}. + * @since 4.2 + */ + public ReactiveGridFsTemplate(MongoConverter converter, Mono gridFSBucket, + DataBufferFactory dataBufferFactory) { + + super(converter); + + Assert.notNull(gridFSBucket, "GridFSBucket Mono must not be null"); + Assert.notNull(dataBufferFactory, "DataBufferFactory must not be null"); + + this.bucketSupplier = gridFSBucket; + this.dataBufferFactory = dataBufferFactory; + } + + @Override + public Mono store(Publisher content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata) { + return store(content, filename, contentType, toDocument(metadata)); + } + + @Override + @SuppressWarnings("unchecked") + public Mono store(GridFsObject> upload) { + + GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(), + upload.getOptions().getMetadata()); + + if (upload.getOptions().getChunkSize() > 0) { + uploadOptions.chunkSizeBytes(upload.getOptions().getChunkSize()); + } + + String filename = upload.getFilename(); + Flux source = Flux.from(upload.getContent()).map(DataBuffer::toByteBuffer); + T fileId = upload.getFileId(); + + if (fileId == null) { + return (Mono) createMono(new AutoIdCreatingUploadCallback(filename, source, uploadOptions)); + } + + UploadCallback callback = new UploadCallback(BsonUtils.simpleToBsonValue(fileId), filename, source, uploadOptions); + return createMono(callback).thenReturn(fileId); + } + + @Override + public Flux find(Query query) { + + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); + + return createFlux(new FindCallback(query, queryObject, sortObject)); + } + + @Override + public Mono findOne(Query query) { + + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); + + return createFlux(new FindLimitCallback(query, queryObject, sortObject, 2)) // + .collectList() // + .handle((files, sink) -> { + + if (files.size() == 1) { + sink.next(files.get(0)); + return; + } + + if (files.size() > 1) { + sink.error(new IncorrectResultSizeDataAccessException( + "Query " + SerializationUtils.serializeToJsonSafely(query) + " returned non unique result.", 1)); + } + }); + } + + @Override + public Mono findFirst(Query query) { + + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = 
getMappedQuery(query.getSortObject()); + + return createFlux(new FindLimitCallback(query, queryObject, sortObject, 1)).next(); + } + + @Override + public Mono delete(Query query) { + return find(query).flatMap(it -> createMono(new DeleteCallback(it.getId()))).then(); + } + + @Override + public Mono getResource(String location) { + + Assert.notNull(location, "Filename must not be null"); + + return findOne(query(whereFilename().is(location))).flatMap(this::getResource) + .defaultIfEmpty(ReactiveGridFsResource.absent(location)); + } + + @Override + public Mono getResource(GridFSFile file) { + + Assert.notNull(file, "GridFSFile must not be null"); + + return doGetBucket() + .map(it -> new ReactiveGridFsResource(file, it.downloadToPublisher(file.getId()), dataBufferFactory)); + } + + @Override + public Flux getResources(String locationPattern) { + + if (!StringUtils.hasText(locationPattern)) { + return Flux.empty(); + } + + AntPath path = new AntPath(locationPattern); + + if (path.isPattern()) { + + Flux files = find(query(whereFilename().regex(path.toRegex()))); + return files.flatMap(this::getResource); + } + + return getResource(locationPattern).flux(); + } + + /** + * Create a reusable Mono for a {@link ReactiveBucketCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Mono} wrapping the {@link ReactiveBucketCallback}. + */ + public Mono createMono(ReactiveBucketCallback callback) { + + Assert.notNull(callback, "ReactiveBucketCallback must not be null"); + + return doGetBucket().flatMap(bucket -> Mono.from(callback.doInBucket(bucket))); + } + + /** + * Create a reusable Flux for a {@link ReactiveBucketCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Flux} wrapping the {@link ReactiveBucketCallback}. + */ + public Flux createFlux(ReactiveBucketCallback callback) { + + Assert.notNull(callback, "ReactiveBucketCallback must not be null"); + + return doGetBucket().flatMapMany(callback::doInBucket); + } + + protected Mono doGetBucket() { + return bucketSupplier; + } + + private static Mono doGetBucket(ReactiveMongoDatabaseFactory dbFactory, @Nullable String bucket) { + + Assert.notNull(dbFactory, "ReactiveMongoDatabaseFactory must not be null"); + + return dbFactory.getMongoDatabase() + .map(db -> bucket == null ? 
+ + protected Mono<GridFSBucket> doGetBucket() { + return bucketSupplier; + } + + private static Mono<GridFSBucket> doGetBucket(ReactiveMongoDatabaseFactory dbFactory, @Nullable String bucket) { + + Assert.notNull(dbFactory, "ReactiveMongoDatabaseFactory must not be null"); + + return dbFactory.getMongoDatabase() + .map(db -> bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket)); + } + + /** + * @param <T> + * @author Mathieu Ouellet + * @since 3.0 + */ + interface ReactiveBucketCallback<T> { + Publisher<T> doInBucket(GridFSBucket bucket); + } + + private static class FindCallback implements ReactiveBucketCallback<GridFSFile> { + + private final Query query; + private final Document queryObject; + private final Document sortObject; + + public FindCallback(Query query, Document queryObject, Document sortObject) { + + this.query = query; + this.queryObject = queryObject; + this.sortObject = sortObject; + } + + @Override + public GridFSFindPublisher doInBucket(GridFSBucket bucket) { + + GridFSFindPublisher findPublisher = bucket.find(queryObject).sort(sortObject); + + if (query.getLimit() > 0) { + findPublisher = findPublisher.limit(query.getLimit()); + } + + if (query.getSkip() > 0) { + findPublisher = findPublisher.skip(Math.toIntExact(query.getSkip())); + } + + Integer cursorBatchSize = query.getMeta().getCursorBatchSize(); + if (cursorBatchSize != null) { + findPublisher = findPublisher.batchSize(cursorBatchSize); + } + + return findPublisher; + } + } + + private static class FindLimitCallback extends FindCallback { + + private final int limit; + + public FindLimitCallback(Query query, Document queryObject, Document sortObject, int limit) { + + super(query, queryObject, sortObject); + this.limit = limit; + } + + @Override + public GridFSFindPublisher doInBucket(GridFSBucket bucket) { + return super.doInBucket(bucket).limit(limit); + } + } + + private record UploadCallback(BsonValue fileId, String filename, Publisher<ByteBuffer> source, + GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback<Void> { + + @Override + public GridFSUploadPublisher<Void> doInBucket(GridFSBucket bucket) { + return bucket.uploadFromPublisher(fileId, filename, source, uploadOptions); + } + } + + private record AutoIdCreatingUploadCallback(String filename, Publisher<ByteBuffer> source, + GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback<ObjectId> { + + @Override + public GridFSUploadPublisher<ObjectId> doInBucket(GridFSBucket bucket) { + return bucket.uploadFromPublisher(filename, source, uploadOptions); + } + } + + private record DeleteCallback(BsonValue id) implements ReactiveBucketCallback<Void> { + + @Override + public Publisher<Void> doInBucket(GridFSBucket bucket) { + return bucket.delete(id); + } + } + +}
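For orientation, a minimal usage sketch of the template above; the dataBufferFactory, dbFactory, converter and content variables are assumed to exist and are not part of this changeset:

ReactiveGridFsTemplate template = new ReactiveGridFsTemplate(dataBufferFactory, dbFactory, converter, null);
Mono<ObjectId> fileId = template.store(content, "sample.txt", "text/plain", new Document("source", "example"));
Mono<ReactiveGridFsResource> resource = template.getResource("sample.txt");

When no file id is supplied, the store call runs through the AutoIdCreatingUploadCallback, letting the driver assign an ObjectId.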
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsUpload.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsUpload.java new file mode 100644 index 0000000000..2f16c3b06e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsUpload.java @@ -0,0 +1,213 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Upload descriptor for a GridFS file upload. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class ReactiveGridFsUpload<ID> implements GridFsObject<ID, Publisher<DataBuffer>> { + + private final @Nullable ID id; + private final Publisher<DataBuffer> dataStream; + private final String filename; + private final Options options; + + private ReactiveGridFsUpload(@Nullable ID id, Publisher<DataBuffer> dataStream, String filename, Options options) { + + Assert.notNull(dataStream, "Data Stream must not be null"); + Assert.notNull(filename, "Filename must not be null"); + Assert.notNull(options, "Options must not be null"); + + this.id = id; + this.dataStream = dataStream; + this.filename = filename; + this.options = options; + } + + /** + * The {@link GridFSFile#getId()} value converted into its simple java type.
+ * A {@link org.bson.BsonString} will be converted to plain {@link String}. + * + * @return can be {@literal null}. + * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId() + */ + @Override + @Nullable + public ID getFileId() { + return id; + } + + @Override + public String getFilename() { + return filename; + } + + @Override + public Publisher<DataBuffer> getContent() { + return dataStream; + } + + @Override + public Options getOptions() { + return options; + } + + /** + * Create a new instance of {@link ReactiveGridFsUpload} for the given {@link Publisher}. + * + * @param source must not be {@literal null}. + * @return new instance of {@link ReactiveGridFsUpload}. + */ + public static ReactiveGridFsUploadBuilder<ObjectId> fromPublisher(Publisher<DataBuffer> source) { + return new ReactiveGridFsUploadBuilder<ObjectId>().content(source); + } + + /** + * Builder to create {@link ReactiveGridFsUpload} in a fluent way. + * + * @param <T> the target id type. + */ + public static class ReactiveGridFsUploadBuilder<T> { + + private @Nullable Object id; + private Publisher<DataBuffer> dataStream; + private String filename; + private Options options = Options.none(); + + private ReactiveGridFsUploadBuilder() {} + + /** + * Define the content of the file to upload. + * + * @param source the upload content. + * @return this. + */ + public ReactiveGridFsUploadBuilder<T> content(Publisher<DataBuffer> source) { + this.dataStream = source; + return this; + } + + /** + * Set the id to use. + * + * @param id the id to save the content to. + * @param <T1> + * @return this. + */ + public <T1> ReactiveGridFsUploadBuilder<T1> id(T1 id) { + + this.id = id; + return (ReactiveGridFsUploadBuilder<T1>) this; + } + + /** + * Set the filename. + * + * @param filename the filename to use. + * @return this. + */ + public ReactiveGridFsUploadBuilder<T> filename(String filename) { + + this.filename = filename; + return this; + } + + /** + * Set additional file information. + * + * @param options must not be {@literal null}. + * @return this. + */ + public ReactiveGridFsUploadBuilder<T> options(Options options) { + + Assert.notNull(options, "Options must not be null"); + + this.options = options; + return this; + } + + /** + * Set the file metadata. + * + * @param metadata must not be {@literal null}. + * @return this. + */ + public ReactiveGridFsUploadBuilder<T> metadata(Document metadata) { + + this.options = this.options.metadata(metadata); + return this; + } + + /** + * Set the upload chunk size in bytes. + * + * @param chunkSize use negative number for default. + * @return this. + */ + public ReactiveGridFsUploadBuilder<T> chunkSize(int chunkSize) { + + this.options = this.options.chunkSize(chunkSize); + return this; + } + + /** + * Set id, filename, metadata and chunk size from given file. + * + * @param gridFSFile must not be {@literal null}. + * @return this. + */ + public ReactiveGridFsUploadBuilder<T> gridFsFile(GridFSFile gridFSFile) { + + Assert.notNull(gridFSFile, "GridFSFile must not be null"); + + this.id = gridFSFile.getId(); + this.filename = gridFSFile.getFilename(); + this.options = this.options.metadata(gridFSFile.getMetadata()); + this.options = this.options.chunkSize(gridFSFile.getChunkSize()); + + return this; + } + + /** + * Set the content type. + * + * @param contentType must not be {@literal null}. + * @return this.
+ */ + public ReactiveGridFsUploadBuilder<T> contentType(String contentType) { + + this.options = this.options.contentType(contentType); + return this; + } + + public ReactiveGridFsUpload<T> build() { + return new ReactiveGridFsUpload(id, dataStream, filename, options); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/package-info.java index b766405421..2f3b5af150 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/package-info.java @@ -1,5 +1,6 @@ /** * Support for MongoDB GridFS feature. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.gridfs;
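A quick sketch of the builder above; the dataBuffers publisher is an assumed variable, not part of this changeset:

ReactiveGridFsUpload<ObjectId> upload = ReactiveGridFsUpload.fromPublisher(dataBuffers)
    .filename("report.pdf")
    .contentType("application/pdf")
    .chunkSize(262144)
    .build();

Passing the upload to ReactiveGridFsTemplate#store(GridFsObject) then reuses the options assembled here.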
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java index 9498fcfb9b..5ffe37a4a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java @@ -1,72 +1,66 @@ -/* - * Copyright 2002-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.monitor; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.core.MongoDbUtils; - -import com.mongodb.CommandResult; -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.MongoException; - -/** - * Base class to encapsulate common configuration settings when connecting to a database - * - * @author Mark Pollack - * @author Oliver Gierke - */ -public abstract class AbstractMonitor { - - private final Logger logger = LoggerFactory.getLogger(getClass()); - - protected Mongo mongo; - private String username; - private String password; - - /** - * Sets the username to use to connect to the Mongo database - * - * @param username The username to use - */ - public void setUsername(String username) { - this.username = username; - } - - /** - * Sets the password to use to authenticate with the Mongo database. - * - * @param password The password to use - */ - public void setPassword(String password) { - this.password = password; - } - - public CommandResult getServerStatus() { - CommandResult result = getDb("admin").command("serverStatus"); - if (!result.ok()) { - logger.error("Could not query for server status. Command Result = " + result); - throw new MongoException("could not query for server status. Command Result = " + result); - } - return result; - } - - public DB getDb(String databaseName) { - return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password)); - } -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; + +import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; +import com.mongodb.connection.ServerDescription; + +/** + * Base class to encapsulate common configuration settings when connecting to a database + * + * @author Mark Pollack + * @author Oliver Gierke + * @author Christoph Strobl + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +public abstract class AbstractMonitor { + + private final MongoClient mongoClient; + + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ + protected AbstractMonitor(MongoClient mongoClient) { + this.mongoClient = mongoClient; + } + + public Document getServerStatus() { + return getDb("admin").runCommand(new Document("serverStatus", 1).append("rangeDeleter", 1).append("repl", 1)); + } + + public MongoDatabase getDb(String databaseName) { + return mongoClient.getDatabase(databaseName); + } + + protected MongoClient getMongoClient() { + return mongoClient; + } + + protected List<ServerAddress> hosts() { + + return mongoClient.getClusterDescription().getServerDescriptions().stream().map(ServerDescription::getAddress) + .collect(Collectors.toList()); + } +}
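Since AbstractMonitor's constructor is protected, a minimal illustrative sketch of reading serverStatus through it (the connection string is an assumption, not part of this changeset):

MongoClient client = MongoClients.create("mongodb://localhost:27017");
Document serverStatus = new AbstractMonitor(client) {}.getServerStatus(); // anonymous subclass runs the serverStatus command
Document connections = (Document) serverStatus.get("connections");

Note that the whole monitor package is deprecated since 4.5 and scheduled for removal.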
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java index 58c1b5778d..15666fa4d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java @@ -1,67 +1,74 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.monitor; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for assertions - * - * @author Mark Pollack - */ -@ManagedResource(description = "Assertion Metrics") -public class AssertMetrics extends AbstractMonitor { - - public AssertMetrics(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Regular") - public int getRegular() { - return getBtree("regular"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Warning") - public int getWarning() { - return getBtree("warning"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Msg") - public int getMsg() { - return getBtree("msg"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "User") - public int getUser() { - return getBtree("user"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Rollovers") - public int getRollovers() { - return getBtree("rollovers"); - } - - private int getBtree(String key) { - DBObject asserts = (DBObject) getServerStatus().get("asserts"); - // Class c = btree.get(key).getClass(); - return (Integer) asserts.get(key); - } - -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; + +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for assertions + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Assertion Metrics") +public class AssertMetrics extends AbstractMonitor { + + /** + * @param mongoClient must not be {@literal null}.
+ * @since 2.2 + */ + public AssertMetrics(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Regular") + public int getRegular() { + return getBtree("regular"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Warning") + public int getWarning() { + return getBtree("warning"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Msg") + public int getMsg() { + return getBtree("msg"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "User") + public int getUser() { + return getBtree("user"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Rollovers") + public int getRollovers() { + return getBtree("rollovers"); + } + + private int getBtree(String key) { + Document asserts = (Document) getServerStatus().get("asserts"); + // Class c = btree.get(key).getClass(); + return (Integer) asserts.get(key); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java index eccd7786b5..2ceb75a4f8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java @@ -1,75 +1,82 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.monitor; - -import java.util.Date; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for Background Flushing - * - * @author Mark Pollack - */ -@ManagedResource(description = "Background Flushing Metrics") -public class BackgroundFlushingMetrics extends AbstractMonitor { - - public BackgroundFlushingMetrics(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Flushes") - public int getFlushes() { - return getFlushingData("flushes", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Total ms", unit = "ms") - public int getTotalMs() { - return getFlushingData("total_ms", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Average ms", unit = "ms") - public double getAverageMs() { - return getFlushingData("average_ms", java.lang.Double.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Last Ms", unit = "ms") - public int getLastMs() { - return getFlushingData("last_ms", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Last finished") - public Date getLastFinished() { - return getLast(); - } - - @SuppressWarnings("unchecked") - private <T> T getFlushingData(String key, Class<T> targetClass) { - DBObject mem = (DBObject) getServerStatus().get("backgroundFlushing"); - return (T) mem.get(key); - } - - private Date getLast() { - DBObject bgFlush = (DBObject) getServerStatus().get("backgroundFlushing"); - Date lastFinished = (Date) bgFlush.get("last_finished"); - return lastFinished; - } - -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import java.util.Date; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; + +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for Background Flushing + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Background Flushing Metrics") +public class BackgroundFlushingMetrics extends AbstractMonitor { + + /** + * @param mongoClient must not be {@literal null}.
+ * @since 2.2 + */ + public BackgroundFlushingMetrics(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Flushes") + public int getFlushes() { + return getFlushingData("flushes", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Total ms", unit = "ms") + public int getTotalMs() { + return getFlushingData("total_ms", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Average ms", unit = "ms") + public double getAverageMs() { + return getFlushingData("average_ms", java.lang.Double.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Last Ms", unit = "ms") + public int getLastMs() { + return getFlushingData("last_ms", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Last finished") + public Date getLastFinished() { + return getLast(); + } + + @SuppressWarnings("unchecked") + private <T> T getFlushingData(String key, Class<T> targetClass) { + Document mem = (Document) getServerStatus().get("backgroundFlushing"); + return (T) mem.get(key); + } + + private Date getLast() { + Document bgFlush = (Document) getServerStatus().get("backgroundFlushing"); + Date lastFinished = (Date) bgFlush.get("last_finished"); + return lastFinished; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java index 90e902e096..671d017e05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java @@ -1,74 +1,81 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.monitor; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for B-tree index counters - * - * @author Mark Pollack - */ -@ManagedResource(description = "Btree Metrics") -public class BtreeIndexCounters extends AbstractMonitor { - - public BtreeIndexCounters(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Accesses") - public int getAccesses() { - return getBtree("accesses"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Hits") - public int getHits() { - return getBtree("hits"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Misses") - public int getMisses() { - return getBtree("misses"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Resets") - public int getResets() { - return getBtree("resets"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Miss Ratio") - public int getMissRatio() { - return getBtree("missRatio"); - } - - private int getBtree(String key) { - DBObject indexCounters = (DBObject) getServerStatus().get("indexCounters"); - if (indexCounters.get("note") != null) { - String message = (String) indexCounters.get("note"); - if (message.contains("not supported")) { - return -1; - } - } - DBObject btree = (DBObject) indexCounters.get("btree"); - // Class c = btree.get(key).getClass(); - return (Integer) btree.get(key); - } - -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; + +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for B-tree index counters + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Btree Metrics") +public class BtreeIndexCounters extends AbstractMonitor { + + /** + * @param mongoClient must not be {@literal null}. 
+ * @since 2.2 + */ + public BtreeIndexCounters(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Accesses") + public int getAccesses() { + return getBtree("accesses"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Hits") + public int getHits() { + return getBtree("hits"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Misses") + public int getMisses() { + return getBtree("misses"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Resets") + public int getResets() { + return getBtree("resets"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Miss Ratio") + public int getMissRatio() { + return getBtree("missRatio"); + } + + private int getBtree(String key) { + Document indexCounters = (Document) getServerStatus().get("indexCounters"); + if (indexCounters.get("note") != null) { + String message = (String) indexCounters.get("note"); + if (message.contains("not supported")) { + return -1; + } + } + Document btree = (Document) indexCounters.get("btree"); + // Class c = btree.get(key).getClass(); + return (Integer) btree.get(key); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java index c44ca30497..0d0eb84b35 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java @@ -1,53 +1,60 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.monitor; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for Connections - * - * @author Mark Pollack - */ -@ManagedResource(description = "Connection metrics") -public class ConnectionMetrics extends AbstractMonitor { - - public ConnectionMetrics(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Current Connections") - public int getCurrent() { - return getConnectionData("current", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Available Connections") - public int getAvailable() { - return getConnectionData("available", java.lang.Integer.class); - } - - @SuppressWarnings("unchecked") - private <T> T getConnectionData(String key, Class<T> targetClass) { - DBObject mem = (DBObject) getServerStatus().get("connections"); - // Class c = mem.get(key).getClass(); - return (T) mem.get(key); - } - -} +/* + * Copyright 2002-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; + +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for Connections + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Connection metrics") +public class ConnectionMetrics extends AbstractMonitor { + + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ + public ConnectionMetrics(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Current Connections") + public int getCurrent() { + return getConnectionData("current", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Available Connections") + public int getAvailable() { + return getConnectionData("available", java.lang.Integer.class); + } + + @SuppressWarnings("unchecked") + private <T> T getConnectionData(String key, Class<T> targetClass) { + Document mem = (Document) getServerStatus().get("connections"); + // Class c = mem.get(key).getClass(); + return (T) mem.get(key); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java index 18bcc0f74d..6997f5fba8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java @@ -1,77 +1,85 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.monitor; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for Global Locks - * - * @author Mark Pollack - */ -@ManagedResource(description = "Global Lock Metrics") -public class GlobalLockMetrics extends AbstractMonitor { - - public GlobalLockMetrics(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Total time") - public double getTotalTime() { - return getGlobalLockData("totalTime", java.lang.Double.class); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Lock time", unit = "s") - public double getLockTime() { - return getGlobalLockData("lockTime", java.lang.Double.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Lock time") - public double getLockTimeRatio() { - return getGlobalLockData("ratio", java.lang.Double.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Current Queue") - public int getCurrentQueueTotal() { - return getCurrentQueue("total"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Reader Queue") - public int getCurrentQueueReaders() { - return getCurrentQueue("readers"); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Writer Queue") - public int getCurrentQueueWriters() { - return getCurrentQueue("writers"); - } - - @SuppressWarnings("unchecked") - private <T> T getGlobalLockData(String key, Class<T> targetClass) { - DBObject globalLock = (DBObject) getServerStatus().get("globalLock"); - return (T) globalLock.get(key); - } - - private int getCurrentQueue(String key) { - DBObject globalLock = (DBObject) getServerStatus().get("globalLock"); - DBObject currentQueue = (DBObject) globalLock.get("currentQueue"); - return (Integer) currentQueue.get(key); - } -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; + +import com.mongodb.DBObject; +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for Global Locks + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Global Lock Metrics") +public class GlobalLockMetrics extends AbstractMonitor { + + /** + * @param mongoClient must not be {@literal null}.
+ * @since 2.2 + */ + public GlobalLockMetrics(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Total time") + public double getTotalTime() { + return getGlobalLockData("totalTime", java.lang.Double.class); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Lock time", unit = "s") + public double getLockTime() { + return getGlobalLockData("lockTime", java.lang.Double.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Lock time") + public double getLockTimeRatio() { + return getGlobalLockData("ratio", java.lang.Double.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Current Queue") + public int getCurrentQueueTotal() { + return getCurrentQueue("total"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Reader Queue") + public int getCurrentQueueReaders() { + return getCurrentQueue("readers"); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Writer Queue") + public int getCurrentQueueWriters() { + return getCurrentQueue("writers"); + } + + @SuppressWarnings("unchecked") + private <T> T getGlobalLockData(String key, Class<T> targetClass) { + DBObject globalLock = (DBObject) getServerStatus().get("globalLock"); + return (T) globalLock.get(key); + } + + private int getCurrentQueue(String key) { + Document globalLock = (Document) getServerStatus().get("globalLock"); + Document currentQueue = (Document) globalLock.get("currentQueue"); + return (Integer) currentQueue.get(key); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java index d68d72d457..4dbdebb26f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java @@ -1,68 +1,75 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.monitor; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for Memory - * - * @author Mark Pollack - */ -@ManagedResource(description = "Memory Metrics") -public class MemoryMetrics extends AbstractMonitor { - - public MemoryMetrics(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Memory address size") - public int getBits() { - return getMemData("bits", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Resident in Physical Memory", unit = "MB") - public int getResidentSpace() { - return getMemData("resident", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Virtual Address Space", unit = "MB") - public int getVirtualAddressSpace() { - return getMemData("virtual", java.lang.Integer.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Is memory info supported on this platform") - public boolean getMemoryInfoSupported() { - return getMemData("supported", java.lang.Boolean.class); - } - - @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Memory Mapped Space", unit = "MB") - public int getMemoryMappedSpace() { - return getMemData("mapped", java.lang.Integer.class); - } - - @SuppressWarnings("unchecked") - private <T> T getMemData(String key, Class<T> targetClass) { - DBObject mem = (DBObject) getServerStatus().get("mem"); - // Class c = mem.get(key).getClass(); - return (T) mem.get(key); - } - -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.monitor; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; + +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for Memory + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Memory Metrics") +public class MemoryMetrics extends AbstractMonitor { + + /** + * @param mongoClient + * @since 2.2 + */ + public MemoryMetrics(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Memory address size") + public int getBits() { + return getMemData("bits", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Resident in Physical Memory", unit = "MB") + public int getResidentSpace() { + return getMemData("resident", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Virtual Address Space", unit = "MB") + public int getVirtualAddressSpace() { + return getMemData("virtual", java.lang.Integer.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Is memory info supported on this platform") + public boolean getMemoryInfoSupported() { + return getMemData("supported", java.lang.Boolean.class); + } + + @ManagedMetric(metricType = MetricType.GAUGE, displayName = "Memory Mapped Space", unit = "MB") + public int getMemoryMappedSpace() { + return getMemData("mapped", java.lang.Integer.class); + } + + @SuppressWarnings("unchecked") + private <T> T getMemData(String key, Class<T> targetClass) { + Document mem = (Document) getServerStatus().get("mem"); + // Class c = mem.get(key).getClass(); + return (T) mem.get(key); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java index b74ee0dad2..1624501490 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java @@ -1,70 +1,78 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.monitor; - -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -/** - * JMX Metrics for Operation counters - * - * @author Mark Pollack - */ -@ManagedResource(description = "Operation Counters") -public class OperationCounters extends AbstractMonitor { - - public OperationCounters(Mongo mongo) { - this.mongo = mongo; - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Insert operation count") - public int getInsertCount() { - return getOpCounter("insert"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Query operation count") - public int getQueryCount() { - return getOpCounter("query"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Update operation count") - public int getUpdateCount() { - return getOpCounter("update"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Delete operation count") - public int getDeleteCount() { - return getOpCounter("delete"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "GetMore operation count") - public int getGetMoreCount() { - return getOpCounter("getmore"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Command operation count") - public int getCommandCount() { - return getOpCounter("command"); - } - - private int getOpCounter(String key) { - DBObject opCounters = (DBObject) getServerStatus().get("opcounters"); - return (Integer) opCounters.get(key); - } -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.monitor; + +import org.bson.Document; +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; +import org.springframework.util.NumberUtils; + +import com.mongodb.client.MongoClient; + +/** + * JMX Metrics for Operation counters + * + * @author Mark Pollack + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Operation Counters") +public class OperationCounters extends AbstractMonitor { + + /** + * @param mongoClient + * @since 2.2 + */ + public OperationCounters(MongoClient mongoClient) { + super(mongoClient); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Insert operation count") + public int getInsertCount() { + return getOpCounter("insert"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Query operation count") + public int getQueryCount() { + return getOpCounter("query"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Update operation count") + public int getUpdateCount() { + return getOpCounter("update"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Delete operation count") + public int getDeleteCount() { + return getOpCounter("delete"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "GetMore operation count") + public int getGetMoreCount() { + return getOpCounter("getmore"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Command operation count") + public int getCommandCount() { + return getOpCounter("command"); + } + + private int getOpCounter(String key) { + Document opCounters = (Document) getServerStatus().get("opcounters"); + return NumberUtils.convertNumberToTargetClass((Number) opCounters.get(key), Integer.class); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java index 98c9714c17..3aedf3f29f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java @@ -1,76 +1,83 @@ -/* - * Copyright 2012-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.monitor; - -import java.net.UnknownHostException; - -import org.springframework.jmx.export.annotation.ManagedMetric; -import org.springframework.jmx.export.annotation.ManagedOperation; -import org.springframework.jmx.export.annotation.ManagedResource; -import org.springframework.jmx.support.MetricType; - -import com.mongodb.Mongo; - -/** - * Expose basic server information via JMX - * - * @author Mark Pollack - * @author Thomas Darimont - * @author Christoph Strobl - */ -@ManagedResource(description = "Server Information") -public class ServerInfo extends AbstractMonitor { - - public ServerInfo(Mongo mongo) { - this.mongo = mongo; - } - - /** - * Returns the hostname of the used server reported by MongoDB. - * - * @return the reported hostname can also be an IP address. - * @throws UnknownHostException - */ - @ManagedOperation(description = "Server host name") - public String getHostName() throws UnknownHostException { - - /* - * UnknownHostException is not necessary anymore, but clients could have - * called this method in a try..catch(UnknownHostException) already - */ - return mongo.getAddress().getHost(); - } - - @ManagedMetric(displayName = "Uptime Estimate") - public double getUptimeEstimate() { - return (Double) getServerStatus().get("uptimeEstimate"); - } - - @ManagedOperation(description = "MongoDB Server Version") - public String getVersion() { - return (String) getServerStatus().get("version"); - } - - @ManagedOperation(description = "Local Time") - public String getLocalTime() { - return (String) getServerStatus().get("localTime"); - } - - @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Server uptime in seconds", unit = "seconds") - public double getUptime() { - return (Double) getServerStatus().get("uptime"); - } -} +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import java.net.UnknownHostException; + +import org.springframework.jmx.export.annotation.ManagedMetric; +import org.springframework.jmx.export.annotation.ManagedOperation; +import org.springframework.jmx.export.annotation.ManagedResource; +import org.springframework.jmx.support.MetricType; +import org.springframework.util.StringUtils; + +import com.mongodb.client.MongoClient; + +/** + * Expose basic server information via JMX + * + * @author Mark Pollack + * @author Thomas Darimont + * @author Christoph Strobl + * @deprecated since 4.5 + */ +@Deprecated(since = "4.5", forRemoval = true) +@ManagedResource(description = "Server Information") +public class ServerInfo extends AbstractMonitor { + + /** + * @param mongoClient + * @since 2.2 + */ + protected ServerInfo(MongoClient mongoClient) { + super(mongoClient); + } + + /** + * Returns the hostname of the used server reported by MongoDB. + * + * @return the reported hostname can also be an IP address. 
+ * @throws UnknownHostException + */ + @ManagedOperation(description = "Server host name") + public String getHostName() throws UnknownHostException { + + /* + * UnknownHostException is not necessary anymore, but clients could have + * called this method in a try..catch(UnknownHostException) already + */ + return StringUtils.collectionToDelimitedString(hosts(), ","); + } + + @ManagedMetric(displayName = "Uptime Estimate") + public double getUptimeEstimate() { + return (Double) getServerStatus().get("uptimeEstimate"); + } + + @ManagedOperation(description = "MongoDB Server Version") + public String getVersion() { + return (String) getServerStatus().get("version"); + } + + @ManagedOperation(description = "Local Time") + public String getLocalTime() { + return (String) getServerStatus().get("localTime"); + } + + @ManagedMetric(metricType = MetricType.COUNTER, displayName = "Server uptime in seconds", unit = "seconds") + public double getUptime() { + return (Double) getServerStatus().get("uptime"); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java index db25cc4872..1e1c221b64 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java @@ -1,5 +1,7 @@ /** * MongoDB specific JMX monitoring support. */ +@Deprecated(since = "4.5", forRemoval = true) +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.monitor;
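Because the monitor package is now deprecated for removal, the same serverStatus data can be read without these JMX classes; a hedged sketch assuming an existing MongoTemplate bean named mongoTemplate:

Document status = mongoTemplate.executeCommand(new Document("serverStatus", 1));
Integer currentConnections = ((Document) status.get("connections")).getInteger("current");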
+ */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; +import reactor.core.CoreSubscriber; + +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; + +import org.reactivestreams.Subscriber; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary; +import org.springframework.util.ClassUtils; + +import com.mongodb.ContextProvider; +import com.mongodb.RequestContext; +import com.mongodb.client.SynchronousContextProvider; +import com.mongodb.reactivestreams.client.ReactiveContextProvider; + +/** + * Factory to create a {@link ContextProvider} to propagate the request context across tasks. Requires either + * {@link SynchronousContextProvider} or {@link ReactiveContextProvider} to be present. + * + * @author Mark Paluch + * @since 4.0 + */ +public class ContextProviderFactory { + + private static final boolean SYNCHRONOUS_PRESENT = ClassUtils + .isPresent("com.mongodb.client.SynchronousContextProvider", ContextProviderFactory.class.getClassLoader()); + + private static final boolean REACTIVE_PRESENT = ClassUtils.isPresent( + "com.mongodb.reactivestreams.client.ReactiveContextProvider", ContextProviderFactory.class.getClassLoader()) + && ReactiveWrappers.isAvailable(ReactiveLibrary.PROJECT_REACTOR); + + /** + * Create a {@link ContextProvider} given {@link ObservationRegistry}. The factory method attempts to create a + * {@link ContextProvider} capable of propagating request contexts across imperative or reactive usage, depending on + * which providers are present on the class path. + * + * @param observationRegistry must not be {@literal null}. + * @return the {@link ContextProvider} to use. + */ + public static ContextProvider create(ObservationRegistry observationRegistry) { + + if (SYNCHRONOUS_PRESENT && REACTIVE_PRESENT) { + return new CompositeContextProvider(observationRegistry); + } + + if (SYNCHRONOUS_PRESENT) { + return new DefaultSynchronousContextProvider(observationRegistry); + } + + if (REACTIVE_PRESENT) { + return DefaultReactiveContextProvider.INSTANCE; + } + + throw new IllegalStateException( + "Cannot create ContextProvider. 
Neither SynchronousContextProvider nor ReactiveContextProvider is on the class path."); + } + + record DefaultSynchronousContextProvider( + ObservationRegistry observationRegistry) implements SynchronousContextProvider { + + @Override + public RequestContext getContext() { + + MapRequestContext requestContext = new MapRequestContext(); + + Observation currentObservation = observationRegistry.getCurrentObservation(); + if (currentObservation != null) { + requestContext.put(ObservationThreadLocalAccessor.KEY, currentObservation); + } + + return requestContext; + } + + } + + enum DefaultReactiveContextProvider implements ReactiveContextProvider { + + INSTANCE; + + @Override + public RequestContext getContext(Subscriber<?> subscriber) { + + if (subscriber instanceof CoreSubscriber<?> cs) { + + Map<Object, Object> map = cs.currentContext().stream() + .collect(Collectors.toConcurrentMap(Entry::getKey, Entry::getValue)); + + return new MapRequestContext(map); + } + + return new MapRequestContext(); + } + } + + record CompositeContextProvider(DefaultSynchronousContextProvider synchronousContextProvider) + implements + SynchronousContextProvider, + ReactiveContextProvider { + + CompositeContextProvider(ObservationRegistry observationRegistry) { + this(new DefaultSynchronousContextProvider(observationRegistry)); + } + + @Override + public RequestContext getContext() { + return synchronousContextProvider.getContext(); + } + + @Override + public RequestContext getContext(Subscriber<?> subscriber) { + return DefaultReactiveContextProvider.INSTANCE.getContext(subscriber); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/DefaultMongoHandlerObservationConvention.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/DefaultMongoHandlerObservationConvention.java new file mode 100644 index 0000000000..b823ce223b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/DefaultMongoHandlerObservationConvention.java @@ -0,0 +1,121 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.common.KeyValues; + +import java.net.InetSocketAddress; + +import org.springframework.data.mongodb.observability.MongoObservation.LowCardinalityCommandKeyNames; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ConnectionString; +import com.mongodb.ServerAddress; +import com.mongodb.connection.ConnectionDescription; +import com.mongodb.connection.ConnectionId; +import com.mongodb.event.CommandStartedEvent; + +/** + * Default {@link MongoHandlerObservationConvention} implementation. 
+ * + * @author Greg Turnquist + * @author Mark Paluch + * @since 4.0 + */ +class DefaultMongoHandlerObservationConvention implements MongoHandlerObservationConvention { + + @Override + public KeyValues getLowCardinalityKeyValues(MongoHandlerContext context) { + + KeyValues keyValues = KeyValues.of(LowCardinalityCommandKeyNames.DB_SYSTEM.withValue("mongodb"), + LowCardinalityCommandKeyNames.MONGODB_COMMAND.withValue(context.getCommandName())); + + ConnectionString connectionString = context.getConnectionString(); + if (connectionString != null) { + + keyValues = keyValues + .and(LowCardinalityCommandKeyNames.DB_CONNECTION_STRING.withValue(connectionString.getConnectionString())); + + String user = connectionString.getUsername(); + + if (!ObjectUtils.isEmpty(user)) { + keyValues = keyValues.and(LowCardinalityCommandKeyNames.DB_USER.withValue(user)); + } + + } + + if (!ObjectUtils.isEmpty(context.getDatabaseName())) { + keyValues = keyValues.and(LowCardinalityCommandKeyNames.DB_NAME.withValue(context.getDatabaseName())); + } + + if (!ObjectUtils.isEmpty(context.getCollectionName())) { + keyValues = keyValues + .and(LowCardinalityCommandKeyNames.MONGODB_COLLECTION.withValue(context.getCollectionName())); + } + + ConnectionDescription connectionDescription = context.getCommandStartedEvent().getConnectionDescription(); + + if (connectionDescription != null) { + + ServerAddress serverAddress = connectionDescription.getServerAddress(); + + if (serverAddress != null) { + + keyValues = keyValues.and(LowCardinalityCommandKeyNames.NET_TRANSPORT.withValue("IP.TCP"), + LowCardinalityCommandKeyNames.NET_PEER_NAME.withValue(serverAddress.getHost()), + LowCardinalityCommandKeyNames.NET_PEER_PORT.withValue("" + serverAddress.getPort())); + + InetSocketAddress socketAddress = MongoCompatibilityAdapter.serverAddressAdapter(serverAddress) + .getSocketAddress(); + + if (socketAddress != null) { + + keyValues = keyValues.and( + LowCardinalityCommandKeyNames.NET_SOCK_PEER_ADDR.withValue(socketAddress.getHostName()), + LowCardinalityCommandKeyNames.NET_SOCK_PEER_PORT.withValue("" + socketAddress.getPort())); + } + } + + ConnectionId connectionId = connectionDescription.getConnectionId(); + if (connectionId != null) { + keyValues = keyValues.and(LowCardinalityCommandKeyNames.MONGODB_CLUSTER_ID + .withValue(connectionId.getServerId().getClusterId().getValue())); + } + } + + return keyValues; + } + + @Override + public KeyValues getHighCardinalityKeyValues(MongoHandlerContext context) { + return KeyValues.empty(); + } + + @Override + public String getContextualName(MongoHandlerContext context) { + + String collectionName = context.getCollectionName(); + CommandStartedEvent commandStartedEvent = context.getCommandStartedEvent(); + + if (ObjectUtils.isEmpty(collectionName)) { + return commandStartedEvent.getCommandName(); + } + + return collectionName + "." + commandStartedEvent.getCommandName(); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MapRequestContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MapRequestContext.java new file mode 100644 index 0000000000..854e1481fc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MapRequestContext.java @@ -0,0 +1,77 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; + +import com.mongodb.RequestContext; + +/** + * A {@link Map}-based {@link RequestContext}. + * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @since 4.0 + */ +class MapRequestContext implements RequestContext { + + private final Map<Object, Object> map; + + public MapRequestContext() { + this(new HashMap<>()); + } + + public MapRequestContext(Map<Object, Object> context) { + this.map = context; + } + + @Override + public <T> T get(Object key) { + return (T) map.get(key); + } + + @Override + public boolean hasKey(Object key) { + return map.containsKey(key); + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + @Override + public void put(Object key, Object value) { + map.put(key, value); + } + + @Override + public void delete(Object key) { + map.remove(key); + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Stream<Map.Entry<Object, Object>> stream() { + return map.entrySet().stream(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerContext.java new file mode 100644 index 0000000000..cc58aac56e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerContext.java @@ -0,0 +1,150 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.transport.Kind; +import io.micrometer.observation.transport.SenderContext; + +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.bson.BsonDocument; +import org.bson.BsonValue; + +import org.springframework.lang.Nullable; + +import com.mongodb.ConnectionString; +import com.mongodb.RequestContext; +import com.mongodb.event.CommandFailedEvent; +import com.mongodb.event.CommandStartedEvent; +import com.mongodb.event.CommandSucceededEvent; + +/** + * An {@link Observation.Context} that contains MongoDB events. 
+ * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @author Mark Paluch + * @since 4.0 + */ +public class MongoHandlerContext extends SenderContext<Object> { + + /** + * @see <a href="https://docs.mongodb.com/manual/reference/command">https://docs.mongodb.com/manual/reference/command</a> for + * the command reference + */ + private static final Set<String> COMMANDS_WITH_COLLECTION_NAME = new LinkedHashSet<>( + Arrays.asList("aggregate", "count", "distinct", "mapReduce", "geoSearch", "delete", "find", "findAndModify", + "insert", "update", "collMod", "compact", "convertToCapped", "create", "createIndexes", "drop", "dropIndexes", + "killCursors", "listIndexes", "reIndex")); + + private final @Nullable ConnectionString connectionString; + private final CommandStartedEvent commandStartedEvent; + private final RequestContext requestContext; + private final String collectionName; + + private CommandSucceededEvent commandSucceededEvent; + private CommandFailedEvent commandFailedEvent; + + public MongoHandlerContext(@Nullable ConnectionString connectionString, CommandStartedEvent commandStartedEvent, + RequestContext requestContext) { + + super((carrier, key, value) -> {}, Kind.CLIENT); + this.connectionString = connectionString; + this.commandStartedEvent = commandStartedEvent; + this.requestContext = requestContext; + this.collectionName = getCollectionName(commandStartedEvent); + } + + public CommandStartedEvent getCommandStartedEvent() { + return this.commandStartedEvent; + } + + public RequestContext getRequestContext() { + return this.requestContext; + } + + public String getDatabaseName() { + return commandStartedEvent.getDatabaseName(); + } + + public String getCollectionName() { + return this.collectionName; + } + + public String getCommandName() { + return commandStartedEvent.getCommandName(); + } + + @Nullable + public ConnectionString getConnectionString() { + return connectionString; + } + + void setCommandSucceededEvent(CommandSucceededEvent commandSucceededEvent) { + this.commandSucceededEvent = commandSucceededEvent; + } + + void setCommandFailedEvent(CommandFailedEvent commandFailedEvent) { + this.commandFailedEvent = commandFailedEvent; + } + + /** + * Transform the command name into a collection name. + * + * @param event the {@link CommandStartedEvent} + * @return the name of the collection based on the command + */ + @Nullable + private static String getCollectionName(CommandStartedEvent event) { + + String commandName = event.getCommandName(); + BsonDocument command = event.getCommand(); + + if (COMMANDS_WITH_COLLECTION_NAME.contains(commandName)) { + + String collectionName = getNonEmptyBsonString(command.get(commandName)); + + if (collectionName != null) { + return collectionName; + } + } + + // Some other commands, like getMore, have a field like {"collection": collectionName}. + return command == null ? "" : getNonEmptyBsonString(command.get("collection")); + } + + /** + * Utility method to convert {@link BsonValue} into a plain string. + * + * @return trimmed string from {@code bsonValue} or null if the trimmed string was empty or the value wasn't a string + */ + @Nullable + private static String getNonEmptyBsonString(@Nullable BsonValue bsonValue) { + + if (bsonValue == null || !bsonValue.isString()) { + return null; + } + + String stringValue = bsonValue.asString().getValue().trim(); + + return stringValue.isEmpty() ? 
null : stringValue; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerObservationConvention.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerObservationConvention.java new file mode 100644 index 0000000000..7d1100c582 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerObservationConvention.java @@ -0,0 +1,33 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationConvention; + +/** + * {@link ObservationConvention} for {@link MongoHandlerContext}. + * + * @author Greg Turnquist + * @since 4.0 + */ +public interface MongoHandlerObservationConvention extends ObservationConvention<MongoHandlerContext> { + + @Override + default boolean supportsContext(Observation.Context context) { + return context instanceof MongoHandlerContext; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservation.java new file mode 100644 index 0000000000..9dfc292521 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservation.java @@ -0,0 +1,178 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.common.docs.KeyName; +import io.micrometer.observation.docs.ObservationDocumentation; + +/** + * A MongoDB-based {@link io.micrometer.observation.Observation}. + * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @since 4.0 + */ +enum MongoObservation implements ObservationDocumentation { + + /** + * Timer created around a MongoDB command execution. + */ + MONGODB_COMMAND_OBSERVATION { + + @Override + public String getName() { + return "spring.data.mongodb.command"; + } + + @Override + public KeyName[] getLowCardinalityKeyNames() { + return LowCardinalityCommandKeyNames.values(); + } + + @Override + public KeyName[] getHighCardinalityKeyNames() { + return new KeyName[0]; + } + + }; + + /** + * Enums related to low cardinality key names for MongoDB commands. 
+ */ + enum LowCardinalityCommandKeyNames implements KeyName { + + /** + * MongoDB database system. + */ + DB_SYSTEM { + @Override + public String asString() { + return "db.system"; + } + }, + + /** + * MongoDB connection string. + */ + DB_CONNECTION_STRING { + @Override + public String asString() { + return "db.connection_string"; + } + }, + + /** + * Network transport. + */ + NET_TRANSPORT { + @Override + public String asString() { + return "net.transport"; + } + }, + + /** + * Name of the database host. + */ + NET_PEER_NAME { + @Override + public String asString() { + return "net.peer.name"; + } + }, + + /** + * Logical remote port number. + */ + NET_PEER_PORT { + @Override + public String asString() { + return "net.peer.port"; + } + }, + + /** + * Mongo peer address. + */ + NET_SOCK_PEER_ADDR { + @Override + public String asString() { + return "net.sock.peer.addr"; + } + }, + + /** + * Mongo peer port. + */ + NET_SOCK_PEER_PORT { + @Override + public String asString() { + return "net.sock.peer.port"; + } + }, + + /** + * MongoDB user. + */ + DB_USER { + @Override + public String asString() { + return "db.user"; + } + }, + + /** + * MongoDB database name. + */ + DB_NAME { + @Override + public String asString() { + return "db.name"; + } + }, + + /** + * MongoDB collection name. + */ + MONGODB_COLLECTION { + @Override + public String asString() { + return "db.mongodb.collection"; + } + }, + + /** + * MongoDB cluster identifier. + */ + MONGODB_CLUSTER_ID { + @Override + public String asString() { + return "spring.data.mongodb.cluster_id"; + } + }, + + /** + * MongoDB command value. + */ + MONGODB_COMMAND { + @Override + public String asString() { + return "db.operation"; + } + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservationCommandListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservationCommandListener.java new file mode 100644 index 0000000000..9360a95de2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservationCommandListener.java @@ -0,0 +1,219 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; + +import java.util.function.BiConsumer; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ConnectionString; +import com.mongodb.RequestContext; +import com.mongodb.event.CommandFailedEvent; +import com.mongodb.event.CommandListener; +import com.mongodb.event.CommandStartedEvent; +import com.mongodb.event.CommandSucceededEvent; + +/** + * Implement MongoDB's {@link CommandListener} using Micrometer's {@link Observation} API. + * + * @author OpenZipkin Brave Authors + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @author François Kha + * @since 4.0 + */ +public class MongoObservationCommandListener implements CommandListener { + + private static final Log log = LogFactory.getLog(MongoObservationCommandListener.class); + + private final ObservationRegistry observationRegistry; + private final @Nullable ConnectionString connectionString; + + private final MongoHandlerObservationConvention observationConvention; + + /** + * Create a new {@link MongoObservationCommandListener} to record {@link Observation}s. + * + * @param observationRegistry must not be {@literal null} + */ + public MongoObservationCommandListener(ObservationRegistry observationRegistry) { + this(observationRegistry, null); + } + + /** + * Create a new {@link MongoObservationCommandListener} to record {@link Observation}s. This constructor attaches the + * {@link ConnectionString} to every {@link Observation}. + * + * @param observationRegistry must not be {@literal null} + * @param connectionString can be {@literal null} + */ + public MongoObservationCommandListener(ObservationRegistry observationRegistry, + @Nullable ConnectionString connectionString) { + this(observationRegistry, connectionString, new DefaultMongoHandlerObservationConvention()); + } + + /** + * Create a new {@link MongoObservationCommandListener} to record {@link Observation}s. This constructor attaches the + * {@link ConnectionString} to every {@link Observation} and uses the given {@link MongoHandlerObservationConvention}. 
+ * + * @param observationRegistry must not be {@literal null} + * @param connectionString can be {@literal null} + * @param observationConvention must not be {@literal null} + * @since 4.3 + */ + public MongoObservationCommandListener(ObservationRegistry observationRegistry, + @Nullable ConnectionString connectionString, MongoHandlerObservationConvention observationConvention) { + + Assert.notNull(observationRegistry, "ObservationRegistry must not be null"); + Assert.notNull(observationConvention, "ObservationConvention must not be null"); + + this.observationRegistry = observationRegistry; + this.connectionString = connectionString; + this.observationConvention = observationConvention; + } + + @Override + public void commandStarted(CommandStartedEvent event) { + + if (log.isDebugEnabled()) { + log.debug("Instrumenting the command started event"); + } + + String databaseName = event.getDatabaseName(); + + if ("admin".equals(databaseName)) { + return; // don't instrument commands like "endSessions" + } + + RequestContext requestContext = event.getRequestContext(); + + if (requestContext == null) { + return; + } + + Observation parent = observationFromContext(requestContext); + + if (log.isDebugEnabled()) { + log.debug("Found the following observation passed from the mongo context [" + parent + "]"); + } + + MongoHandlerContext observationContext = new MongoHandlerContext(connectionString, event, requestContext); + observationContext.setRemoteServiceName("mongo"); + + Observation observation = MongoObservation.MONGODB_COMMAND_OBSERVATION + .observation(this.observationRegistry, () -> observationContext) // + .observationConvention(this.observationConvention); + + if (parent != null) { + observation.parentObservation(parent); + } + + observation.start(); + + requestContext.put(ObservationThreadLocalAccessor.KEY, observation); + + if (log.isDebugEnabled()) { + log.debug( + "Created a child observation [" + observation + "] for Mongo instrumentation and put it in Mongo context"); + } + } + + @Override + public void commandSucceeded(CommandSucceededEvent event) { + + doInObservation(event.getRequestContext(), (observation, context) -> { + + context.setCommandSucceededEvent(event); + + if (log.isDebugEnabled()) { + log.debug("Command succeeded - will stop observation [" + observation + "]"); + } + + observation.stop(); + }); + } + + @Override + public void commandFailed(CommandFailedEvent event) { + + doInObservation(event.getRequestContext(), (observation, context) -> { + + context.setCommandFailedEvent(event); + + if (log.isDebugEnabled()) { + log.debug("Command failed - will stop observation [" + observation + "]"); + } + + observation.error(event.getThrowable()); + observation.stop(); + }); + } + + /** + * Performs the given action for the {@link Observation} and {@link MongoHandlerContext} if there is an ongoing Mongo + * Observation. Exceptions thrown by the action are relayed to the caller. + * + * @param requestContext the context to extract the Observation from. + * @param action the action to invoke. + */ + private void doInObservation(@Nullable RequestContext requestContext, + BiConsumer action) { + + if (requestContext == null) { + return; + } + + Observation observation = requestContext.getOrDefault(ObservationThreadLocalAccessor.KEY, null); + if (observation == null || !(observation.getContext() instanceof MongoHandlerContext context)) { + return; + } + + action.accept(observation, context); + } + + /** + * Extract the {@link Observation} from MongoDB's {@link RequestContext}. 
+ * + * @param context the MongoDB request context. + * @return the {@link Observation} found in the request context or {@literal null} if there is none. + */ + @Nullable + private static Observation observationFromContext(RequestContext context) { + + Observation observation = context.getOrDefault(ObservationThreadLocalAccessor.KEY, null); + + if (observation != null) { + + if (log.isDebugEnabled()) { + log.debug("Found an observation in Mongo context [" + observation + "]"); + } + return observation; + } + + if (log.isDebugEnabled()) { + log.debug("No observation was found - will not create any child observations"); + } + + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/package-info.java new file mode 100644 index 0000000000..d240e12f9e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/package-info.java @@ -0,0 +1,5 @@ +/** + * Infrastructure to provide driver observability using Micrometer. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.observability; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/package-info.java index 1a6fe39500..900342bbcb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/package-info.java @@ -1,5 +1,5 @@ /** * Spring Data's MongoDB abstraction. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb; - diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Aggregation.java new file mode 100644 index 0000000000..871f89d041 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Aggregation.java @@ -0,0 +1,148 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.QueryAnnotation; +import org.springframework.data.mongodb.core.annotation.Collation; + +/** + * The {@link Aggregation} annotation can be used to annotate a {@link org.springframework.data.repository.Repository} + * query method so that it runs the {@link Aggregation#pipeline()} on invocation. + *

                    + * Pipeline stages are mapped against the {@link org.springframework.data.repository.Repository} domain type to consider + * {@link org.springframework.data.mongodb.core.mapping.Field field} mappings and may contain simple placeholders + * {@code ?0} as well as {@link org.springframework.expression.spel.standard.SpelExpression SpelExpressions}. + *

                    + * Query method {@link org.springframework.data.domain.Sort} and {@link org.springframework.data.domain.Pageable} + * arguments are applied at the end of the pipeline or can be defined manually as part of it. + * + * @author Christoph Strobl + * @since 2.2 + */ +@Collation +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@QueryAnnotation +@ReadPreference +public @interface Aggregation { + + /** + * Alias for {@link #pipeline()}. Defines the aggregation pipeline to apply. + * + * @return an empty array by default. + * @see #pipeline() + */ + @AliasFor("pipeline") + String[] value() default {}; + + /** + * Defines the aggregation pipeline to apply. + * + *

                    +	 *
                    +	 * // aggregation resulting in collection with single value
                    +	 * @Aggregation("{ '$project': { '_id' : '$lastname' } }")
                    +	 * List<String> findAllLastnames();
                    +	 *
                    +	 * // aggregation with parameter replacement
                    +	 * @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
                    +	 * List<PersonAggregate> groupByLastnameAnd(String property);
                    +	 *
                    +	 * // aggregation with sort in pipeline
                    +	 * @Aggregation(pipeline = {"{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }", "{ '$sort' : { 'lastname' : -1 } }"})
                    +	 * List<PersonAggregate> groupByLastnameAnd(String property);
                    +	 *
                    +	 * // Sort parameter is used for sorting results
                    +	 * @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
                    +	 * List<PersonAggregate> groupByLastnameAnd(String property, Sort sort);
                    +	 *
                    +	 * // Pageable parameter used for sort, skip and limit
                    +	 * @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
                    +	 * List<PersonAggregate> groupByLastnameAnd(String property, Pageable page);
                    +	 *
                    +	 * // Single value result aggregation.
                    +	 * @Aggregation("{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }")
                    +	 * Long sumAge();
                    +	 *
                    +	 * // Single value wrapped in container object
                    +	 * @Aggregation("{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } })
                    +	 * SumAge sumAgeAndReturnAggregationResultWrapperWithConcreteType();
                    +	 *
                    +	 * // Raw aggregation result
                    +	 * @Aggregation("{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } })
                    +	 * AggregationResults<org.bson.Document>> sumAgeAndReturnAggregationResultWrapper();
                    +	 * 
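A minimal sketch of how the examples above attach to a repository declaration; the PersonRepository, Person and PersonAggregate names are illustrative only and not part of this patch:

	public interface PersonRepository extends MongoRepository<Person, String> {

		// hypothetical method; '?0' is replaced with the runtime value of 'property'
		@Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
		List<PersonAggregate> groupByLastnameAnd(String property);
	}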
                    + * + * @return an empty array by default. + */ + @AliasFor("value") + String[] pipeline() default {}; + + /** + * Defines the collation to apply when executing the aggregation. + * + *
                    +	 * // Fixed value
                    +	 * @Aggregation(pipeline = "...", collation = "en_US")
                    +	 * List<Entry> findAllByFixedCollation();
                    +	 *
                    +	 * // Fixed value as Document
                    +	 * @Aggregation(pipeline = "...", collation = "{ 'locale' :  'en_US' }")
                    +	 * List<Entry> findAllByFixedJsonCollation();
                    +	 *
                    +	 * // Dynamic value as String
                    +	 * @Aggregation(pipeline = "...", collation = "?0")
                    +	 * List<Entry> findAllByDynamicCollation(String collation);
                    +	 *
                    +	 * // Dynamic value as Document
                    +	 * @Aggregation(pipeline = "...", collation = "{ 'locale' :  ?0 }")
                    +	 * List<Entry> findAllByDynamicJsonCollation(String collation);
                    +	 *
                    +	 * // SpEL expression
                    +	 * @Aggregation(pipeline = "...", collation = "?#{[0]}")
                    +	 * List<Entry> findAllByDynamicSpElCollation(String collation);
                    +	 * 
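As a usage note for the dynamic variants above, the collation value is resolved from the method argument at invocation time; a hedged sketch with illustrative names:

	// resolves the ?0 placeholder to { 'locale' : 'de_AT' } for this call
	List<Entry> entries = repository.findAllByDynamicJsonCollation("de_AT");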
                    + * + * @return an empty {@link String} by default. + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; + + /** + * The mode of the read preference to use. This attribute ({@code @Aggregation(pipeline = { ... }, readPreference = + * "secondary")}) is an alias for: + * + *
+	 * @Aggregation(pipeline = { ... })
                    +	 * @ReadPreference("secondary")
                    +	 * List<PersonAggregate> groupByLastnameAnd(String property);
                    +	 * 
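The mode string is matched against the driver's read preference names such as primary, primaryPreferred, secondary, secondaryPreferred and nearest; an illustrative sketch (pipeline and types are placeholders, not part of this patch):

	// hypothetical aggregation routed to secondary members where possible
	@Aggregation(pipeline = "{ '$group' : { '_id' : '$lastname' } }", readPreference = "secondaryPreferred")
	List<PersonAggregate> groupByLastname();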
+ * + * @return the read preference mode. + * @since 4.2 + * @see ReadPreference#value() + */ + @AliasFor(annotation = ReadPreference.class, attribute = "value") + String readPreference() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java new file mode 100644 index 0000000000..c8f6cf4d0d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java @@ -0,0 +1,48 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare finder count queries directly on repository methods. Both attributes allow using a placeholder + * notation of {@code ?0}, {@code ?1} and so on. + * + * @author Fırat KÜÇÜK + * @author Oliver Gierke + * @since 1.10 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@Query(count = true) public @interface CountQuery { + + /** + * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the + * method name then. Alias for {@link Query#value}. + * + * @return an empty String by default. + */ + @AliasFor(annotation = Query.class) + String value() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java new file mode 100644 index 0000000000..b1d620046f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java @@ -0,0 +1,48 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare finder delete queries directly on repository methods. Both attributes allow using a placeholder + * notation of {@code ?0}, {@code ?1} and so on. + * + * @author Fırat KÜÇÜK + * @author Oliver Gierke + * @since 1.10 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@Query(delete = true) +public @interface DeleteQuery { + + /** + * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the + * method name then. Alias for {@link Query#value}. + * + * @return empty {@link String} by default. + */ + @AliasFor(annotation = Query.class) + String value() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java new file mode 100644 index 0000000000..0d057340a5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java @@ -0,0 +1,47 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare finder exists queries directly on repository methods. Both attributes allow using a placeholder + * notation of {@code ?0}, {@code ?1} and so on. + * + * @author Mark Paluch + * @since 1.10 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@Query(exists = true) +public @interface ExistsQuery { + + /** + * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the + * method name then. Alias for {@link Query#value}. + * + * @return empty {@link String} by default. + */ + @AliasFor(annotation = Query.class) + String value() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Hint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Hint.java new file mode 100644 index 0000000000..50db722b15 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Hint.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare index hints for repository query, update and aggregate operations. The index is specified by + * its name. + * + * @author Christoph Strobl + * @since 4.1 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface Hint { + + /** + * The name of the index to use. In case of an {@literal aggregation} the index is evaluated against the initial + * collection or view. + * + * @return the index name. + */ + String value() default ""; + + /** + * The name of the index to use. In case of an {@literal aggregation} the index is evaluated against the initial + * collection or view. + * + * @return the index name. + */ + @AliasFor("value") + String indexName() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java index 61890370bf..37109426f9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2016 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.QueryAnnotation; /** @@ -38,41 +37,43 @@ /** * Set the maximum time limit in milliseconds for processing operations. * - * @deprecated since 1.10 because of spelling issues. Please use {@link #maxExecutionTimeMs()} instead. - * @return - */ - @AliasFor("maxExecutionTimeMs") - @Deprecated - long maxExcecutionTime() default -1; - - /** - * Set the maximum time limit in milliseconds for processing operations. - * - * @return + * @return {@literal -1} by default. * @since 1.10 */ - @AliasFor("maxExcecutionTime") long maxExecutionTimeMs() default -1; /** - * Only scan the specified number of documents. + * Sets the number of documents to return per batch.
+ * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch, indicating to the server that the client will not ask for a subsequent one. * - * @return + * @return {@literal 0 (zero)} by default. + * @since 2.1 */ - long maxScanDocuments() default -1; + int cursorBatchSize() default 0; /** * Add a comment to the query. * - * @return + * @return empty {@link String} by default. */ String comment() default ""; /** - * Using snapshot prevents the cursor from returning a document more than once. + * Set {@link org.springframework.data.mongodb.core.query.Meta.CursorOption} to be used when executing the query. + * + * @return never {@literal null}. + * @since 1.10 + */ + org.springframework.data.mongodb.core.query.Meta.CursorOption[] flags() default {}; + + /** + * When set to {@literal true}, aggregation stages can write data to disk. * - * @return + * @return {@literal false} by default. + * @since 3.0 + * @see Aggregation */ - boolean snapshot() default false; + boolean allowDiskUse() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java index 59b013d37a..5a80e90cd2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2016 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,13 @@ */ package org.springframework.data.mongodb.repository; -import java.io.Serializable; import java.util.List; import org.springframework.data.domain.Example; import org.springframework.data.domain.Sort; +import org.springframework.data.repository.ListCrudRepository; +import org.springframework.data.repository.ListPagingAndSortingRepository; import org.springframework.data.repository.NoRepositoryBean; -import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.repository.query.QueryByExampleExecutor; /** @@ -31,33 +31,16 @@ * @author Christoph Strobl * @author Thomas Darimont * @author Mark Paluch + * @author Khaled Baklouti */ @NoRepositoryBean -public interface MongoRepository<T, ID extends Serializable> - extends PagingAndSortingRepository<T, ID>, QueryByExampleExecutor<T> { - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#save(java.lang.Iterable) - */ - <S extends T> List<S> save(Iterable<S> entites); - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAll() - */ - List<T> findAll(); - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort) - */ - List<T> findAll(Sort sort); +public interface MongoRepository<T, ID> + extends ListCrudRepository<T, ID>, ListPagingAndSortingRepository<T, ID>, QueryByExampleExecutor<T> { /** - * Inserts the given a given entity. 
Assumes the instance to be new to be able to apply insertion optimizations. Use - * the returned instance for further operations as the save operation might have changed the entity instance - * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use the + * returned instance for further operations as the save operation might have changed the entity instance completely. + * Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. * * @param entity must not be {@literal null}. * @return the saved entity @@ -67,7 +50,7 @@ public interface MongoRepository /** * Inserts the given entities. Assumes the given entities to have not been persisted yet and thus will optimize the - * insert over a call to {@link #save(Iterable)}. Prefer using {@link #save(Iterable)} to avoid the usage of store + * insert over a call to {@link #saveAll(Iterable)}. Prefer using {@link #saveAll(Iterable)} to avoid the usage of store * specific API. * * @param entities must not be {@literal null}. @@ -76,14 +59,32 @@ public interface MongoRepository */ List insert(Iterable entities); - /* (non-Javadoc) + /** + * Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link List} is + * returned.
                    + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
                    + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example) */ + @Override List findAll(Example example); - /* (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) + /** + * Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be + * found an empty {@link List} is returned.
                    + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
                    + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * + * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, + * org.springframework.data.domain.Sort) */ + @Override List findAll(Example example, Sort sort); - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java index adcc73899a..f0da9965a3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,7 +28,7 @@ * Annotation to be used for disambiguing method parameters that shall be used to trigger geo near queries. By default * those parameters are found without the need for additional annotation if they are the only parameters of the * according type (e.g. {@link Point}, {@code double[]}, {@link Distance}). - * + * * @author Oliver Gierke */ @Documented diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java index 97fb25ff16..fa15ff5af0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,52 +21,147 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.QueryAnnotation; +import org.springframework.data.mongodb.core.annotation.Collation; /** * Annotation to declare finder queries directly on repository methods. Both attributes allow using a placeholder * notation of {@code ?0}, {@code ?1} and so on. 
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ +@Collation @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) @Documented @QueryAnnotation +@Hint +@ReadPreference public @interface Query { /** * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the * method name then. - * - * @return + * + * @return empty {@link String} by default. */ String value() default ""; /** * Defines the fields that should be returned for the given query. Note that only these fields will make it into the * domain object returned. - * - * @return + * + * @return empty {@link String} by default. */ String fields() default ""; /** * Returns whether the query defined should be executed as count projection. - * + * * @since 1.3 - * @return + * @return {@literal false} by default. */ boolean count() default false; + /** + * Returns whether the query defined should be executed as exists projection. + * + * @since 1.10 + * @return {@literal false} by default. + */ + boolean exists() default false; + /** * Returns whether the query should delete matching documents. - * + * * @since 1.5 - * @return + * @return {@literal false} by default. */ boolean delete() default false; + + /** + * Defines a default sort order for the given query. NOTE: The so set defaults can be altered / + * overwritten using an explicit {@link org.springframework.data.domain.Sort} argument of the query method. + * + *
                    +	 * 
                    +	 *
                    +	 * 		@Query(sort = "{ age : -1 }") // order by age descending
                    +	 * 		List<Person> findByFirstname(String firstname);
                    +	 * 
                    +	 * 
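As the javadoc notes, the annotated default sort can be overridden by an explicit Sort argument at call time; a hypothetical pair of declarations illustrating both cases (Person is an assumption for the sketch):

    // the annotated default sort applies
    @Query(sort = "{ age : -1 }")
    List<Person> findByLastname(String lastname);

    // an explicit Sort argument overrides the annotated default
    @Query(sort = "{ age : -1 }")
    List<Person> findByFirstname(String firstname, Sort sort);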
                    + * + * @return empty {@link String} by default. + * @since 2.1 + */ + String sort() default ""; + + /** + * Defines the collation to apply when executing the query. + * + *
                    +	 * // Fixed value
                    +	 * @Query(collation = "en_US")
                    +	 * List<Entry> findAllByFixedCollation();
                    +	 *
                    +	 * // Fixed value as Document
                    +	 * @Query(collation = "{ 'locale' :  'en_US' }")
                    +	 * List<Entry> findAllByFixedJsonCollation();
                    +	 *
                    +	 * // Dynamic value as String
                    +	 * @Query(collation = "?0")
                    +	 * List<Entry> findAllByDynamicCollation(String collation);
                    +	 *
                    +	 * // Dynamic value as Document
                    +	 * @Query(collation = "{ 'locale' :  ?0 }")
                    +	 * List<Entry> findAllByDynamicJsonCollation(String collation);
                    +	 *
                    +	 * // SpEL expression
                    +	 * @Query(collation = "?#{[0]}")
                    +	 * List<Entry> findAllByDynamicSpElCollation(String collation);
                    +	 * 
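Because collation() is declared below as an @AliasFor the @Collation meta-annotation, the attribute form and the meta-annotation form should be interchangeable; a sketch of the two equivalent declarations (Entry and the method names are illustrative):

    // attribute form
    @Query(value = "{ 'name' : ?0 }", collation = "en_US")
    List<Entry> findByName(String name);

    // equivalent meta-annotation form
    @Collation("en_US")
    @Query("{ 'name' : ?0 }")
    List<Entry> findByNameWithMetaAnnotation(String name);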
                    + * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; + + /** + * The name of the index to use. {@code @Query(value = "...", hint = "lastname-idx")} can be used as shortcut for: + * + *
                    +	 * @Query(...)
                    +	 * @Hint("lastname-idx")
+	 * List<User> findAllByLastname(String lastname);
                    +	 * 
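Note that the named index must exist on the collection for the hint to resolve; a hypothetical setup creating it through MongoOperations (the template variable and the User type are assumptions for the sketch):

    // create the index the hint refers to; the index name must match the hint exactly
    template.indexOps(User.class)
            .ensureIndex(new Index().on("lastname", Sort.Direction.ASC).named("lastname-idx"));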
                    + * + * @return the index name. + * @since 4.1 + * @see Hint#indexName() + */ + @AliasFor(annotation = Hint.class, attribute = "indexName") + String hint() default ""; + + /** + * The mode of the read preference to use. This attribute + * ({@code @Query(value = "...", readPreference = "secondary")}) is an alias for: + * + *
                    +	 * @Query(...)
                    +	 * @ReadPreference("secondary")
                    +	 * List<User> findAllByLastname(String lastname);
                    +	 * 
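Since the @ReadPreference annotation introduced further below also targets types, a repository-wide default combined with a per-method setting is conceivable; a hypothetical sketch (the User type and repository are illustrative):

    // type-level default read preference for all query methods of the repository
    @ReadPreference("secondaryPreferred")
    interface UserRepository extends MongoRepository<User, String> {

        // the method-level attribute would be expected to take precedence
        @Query(value = "{ 'lastname' : ?0 }", readPreference = "secondary")
        List<User> findAllByLastname(String lastname);
    }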
+ * + * @return the read preference mode. + * @since 4.2 + * @see ReadPreference#value() + */ + @AliasFor(annotation = ReadPreference.class, attribute = "value") + String readPreference() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java new file mode 100644 index 0000000000..f8a2b34d11 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java @@ -0,0 +1,98 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.NoRepositoryBean; +import org.springframework.data.repository.query.ReactiveQueryByExampleExecutor; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.data.repository.reactive.ReactiveSortingRepository; + +/** + * Mongo specific {@link org.springframework.data.repository.Repository} interface with reactive support. + * + * @author Mark Paluch + * @since 2.0 + */ +@NoRepositoryBean +public interface ReactiveMongoRepository<T, ID> + extends ReactiveCrudRepository<T, ID>, ReactiveSortingRepository<T, ID>, ReactiveQueryByExampleExecutor<T> { + + /** + * Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use the + * returned instance for further operations as the save operation might have changed the entity instance completely. + * Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * + * @param entity must not be {@literal null}. + * @return the saved entity + */ + <S extends T> Mono<S> insert(S entity); + + /** + * Inserts the given entities. Assumes the instances to be new to be able to apply insertion optimizations. Use the + * returned instances for further operations as the save operation might have changed the entity instances completely. + * Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * + * @param entities must not be {@literal null}. + * @return the saved entities + */ + <S extends T> Flux<S> insert(Iterable<S> entities); + + /** + * Inserts the given entities. Assumes the instances to be new to be able to apply insertion optimizations. Use the + * returned instances for further operations as the save operation might have changed the entity instances completely. + * Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * + * @param entities must not be {@literal null}.
+ * @return the saved entities + */ + <S extends T> Flux<S> insert(Publisher<S> entities); + + /** + * Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link Flux} is + * returned.<br/>
                    + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
                    + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * + * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example) + */ + @Override + Flux findAll(Example example); + + /** + * Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be + * found an empty {@link Flux} is returned.
                    + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
                    + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * + * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example, + * org.springframework.data.domain.Sort) + */ + @Override + Flux findAll(Example example, Sort sort); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReadPreference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReadPreference.java new file mode 100644 index 0000000000..ddb4a67d1c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReadPreference.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to declare read preference for repository and query. + * + * @author Jorge Rodríguez + * @author Christoph Strobl + * @since 4.2 + * @see com.mongodb.ReadPreference + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface ReadPreference { + + /** + * Configure the read preference mode. + * + * @return read preference mode. + */ + String value() default ""; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java new file mode 100644 index 0000000000..7bd22059c5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java @@ -0,0 +1,48 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.QueryAnnotation; + +/** + * Annotation to declare an infinite stream using MongoDB's {@link com.mongodb.CursorType#TailableAwait tailable} + * cursors. An infinite stream can only be used with capped collections. Objects are emitted through the stream as data + * is inserted into the collection. An infinite stream can only be used with streams that emit more than one element, + * such as {@link reactor.core.publisher.Flux}. + *

                    + * The stream may become dead, or invalid, if either the query returns no match or the cursor returns the document at + * the "end" of the collection and then the application deletes that document. + *

+ * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose() disposed}, otherwise the stream + * will linger and exhaust resources. + * + * @author Mark Paluch + * @see Tailable Cursors + * @since 2.0 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@QueryAnnotation +public @interface Tailable { + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Update.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Update.java new file mode 100644 index 0000000000..9bc62aa258 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Update.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare update operators directly on repository methods. Both attributes allow using a placeholder + * notation of {@code ?0}, {@code ?1} and so on. The update will be applied to documents matching either the method-name + * derived or the annotated query, but not to any custom implementation methods. + * + * @author Christoph Strobl + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface Update { + + /** + * Takes a MongoDB JSON string to define the actual update to be executed. + * + * @return the MongoDB JSON string representation of the update. Empty string by default. + * @see #update() + */ + @AliasFor("update") + String value() default ""; + + /** + * Takes a MongoDB JSON string to define the actual update to be executed. + * + * @return the MongoDB JSON string representation of the update. Empty string by default. + * @see https://docs.mongodb.com/manual/tutorial/update-documents/ + */ + @AliasFor("value") + String update() default ""; + + /** + * Takes a MongoDB JSON string representation of an aggregation pipeline to define the update stages to be executed. + *<br/>

+ * This allows, for example, defining update statements that can evaluate conditionals based on a field value. + * + * @return the MongoDB JSON string representation of the update pipeline. Empty array by default. + * @see https://docs.mongodb.com/manual/tutorial/update-documents-with-aggregation-pipeline + */ + String[] pipeline() default {}; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/AotMongoRepositoryPostProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/AotMongoRepositoryPostProcessor.java new file mode 100644 index 0000000000..d49726f724 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/AotMongoRepositoryPostProcessor.java @@ -0,0 +1,52 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.aot; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.data.mongodb.aot.LazyLoadingProxyAotProcessor; +import org.springframework.data.mongodb.aot.MongoAotPredicates; +import org.springframework.data.repository.config.AotRepositoryContext; +import org.springframework.data.repository.config.RepositoryRegistrationAotProcessor; +import org.springframework.data.util.TypeContributor; +import org.springframework.data.util.TypeUtils; + +/** + * @author Christoph Strobl + */ +public class AotMongoRepositoryPostProcessor extends RepositoryRegistrationAotProcessor { + + private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor(); + + @Override + protected void contribute(AotRepositoryContext repositoryContext, GenerationContext generationContext) { + // do some custom type registration here + super.contribute(repositoryContext, generationContext); + + repositoryContext.getResolvedTypes().stream().filter(MongoAotPredicates.IS_SIMPLE_TYPE.negate()).forEach(type -> { + TypeContributor.contribute(type, it -> true, generationContext); + lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type, generationContext); + }); + } + + @Override + protected void contributeType(Class<?> type, GenerationContext generationContext) { + + if (TypeUtils.type(type).isPartOf("org.springframework.data.mongodb", "com.mongodb")) { + return; + } + super.contributeType(type, generationContext); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHints.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHints.java new file mode 100644 index 0000000000..b1ba6ea3f0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHints.java @@ -0,0 +1,96 @@ +/* + * Copyright 2022-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.aot; + +import static org.springframework.data.mongodb.aot.MongoAotPredicates.*; + +import java.util.List; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.aot.MongoAotPredicates; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadata; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.repository.support.ReactiveQuerydslMongoPredicateExecutor; +import org.springframework.data.querydsl.QuerydslUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +class RepositoryRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + List.of(TypeReference.of("org.springframework.data.mongodb.repository.support.SimpleMongoRepository")), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + if (isAopPresent(classLoader)) { + + // required for pushing ReadPreference,... into the default repository implementation + hints.proxies().registerJdkProxy(CrudMethodMetadata.class, // + org.springframework.aop.SpringProxy.class, // + org.springframework.aop.framework.Advised.class, // + org.springframework.core.DecoratingProxy.class); + } + + if (isReactorPresent()) { + + hints.reflection().registerTypes( + List.of( + TypeReference.of("org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository")), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + + if (QuerydslUtils.QUERY_DSL_PRESENT) { + registerQuerydslHints(hints, classLoader); + } + } + + /** + * Register hints for Querydsl integration. + * + * @param hints must not be {@literal null}. + * @param classLoader can be {@literal null}. 
+ * @since 4.0.2 + */ + private static void registerQuerydslHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + if (isReactorPresent()) { + hints.reflection().registerType(ReactiveQuerydslMongoPredicateExecutor.class, + MemberCategory.INVOKE_PUBLIC_METHODS, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS); + + } + + if (MongoAotPredicates.isSyncClientPresent(classLoader)) { + hints.reflection().registerType(QuerydslMongoPredicateExecutor.class, MemberCategory.INVOKE_PUBLIC_METHODS, + MemberCategory.INVOKE_DECLARED_CONSTRUCTORS); + } + } + + private static boolean isAopPresent(@Nullable ClassLoader classLoader) { + return ClassUtils.isPresent("org.springframework.aop.Pointcut", classLoader); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/package-info.java new file mode 100644 index 0000000000..9016519d9b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/package-info.java @@ -0,0 +1,5 @@ +/** + * Ahead-Of-Time processors for MongoDB repositories. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.repository.aot; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java index 0a43c72b2a..0b2515af52 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,11 +16,12 @@ package org.springframework.data.mongodb.repository.cdi; import java.lang.annotation.Annotation; +import java.util.Optional; import java.util.Set; -import javax.enterprise.context.spi.CreationalContext; -import javax.enterprise.inject.spi.Bean; -import javax.enterprise.inject.spi.BeanManager; +import jakarta.enterprise.context.spi.CreationalContext; +import jakarta.enterprise.inject.spi.Bean; +import jakarta.enterprise.inject.spi.BeanManager; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; @@ -30,7 +31,7 @@ /** * {@link CdiRepositoryBean} to create Mongo repository instances. - * + * * @author Oliver Gierke * @author Mark Paluch */ @@ -40,33 +41,28 @@ public class MongoRepositoryBean extends CdiRepositoryBean { /** * Creates a new {@link MongoRepositoryBean}. - * + * * @param operations must not be {@literal null}. * @param qualifiers must not be {@literal null}. * @param repositoryType must not be {@literal null}. * @param beanManager must not be {@literal null}. 
* @param detector detector for the custom {@link org.springframework.data.repository.Repository} implementations - * {@link CustomRepositoryImplementationDetector}, can be {@literal null}. + * {@link CustomRepositoryImplementationDetector}, can be {@link Optional#empty()}. */ public MongoRepositoryBean(Bean operations, Set qualifiers, Class repositoryType, - BeanManager beanManager, CustomRepositoryImplementationDetector detector) { + BeanManager beanManager, Optional detector) { super(qualifiers, repositoryType, beanManager, detector); - Assert.notNull(operations); + Assert.notNull(operations, "MongoOperations bean must not be null"); this.operations = operations; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.cdi.CdiRepositoryBean#create(javax.enterprise.context.spi.CreationalContext, java.lang.Class) - */ @Override - protected T create(CreationalContext creationalContext, Class repositoryType, Object customImplementation) { + protected T create(CreationalContext creationalContext, Class repositoryType) { MongoOperations mongoOperations = getDependencyInstance(operations, MongoOperations.class); - MongoRepositoryFactory factory = new MongoRepositoryFactory(mongoOperations); - return factory.getRepository(repositoryType, customImplementation); + return create(() -> new MongoRepositoryFactory(mongoOperations), repositoryType); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java index 6902326045..c74e9c3f2f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,30 +21,32 @@ import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.Set; -import javax.enterprise.event.Observes; -import javax.enterprise.inject.UnsatisfiedResolutionException; -import javax.enterprise.inject.spi.AfterBeanDiscovery; -import javax.enterprise.inject.spi.Bean; -import javax.enterprise.inject.spi.BeanManager; -import javax.enterprise.inject.spi.ProcessBean; +import jakarta.enterprise.event.Observes; +import jakarta.enterprise.inject.UnsatisfiedResolutionException; +import jakarta.enterprise.inject.spi.AfterBeanDiscovery; +import jakarta.enterprise.inject.spi.Bean; +import jakarta.enterprise.inject.spi.BeanManager; +import jakarta.enterprise.inject.spi.ProcessBean; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.repository.cdi.CdiRepositoryBean; import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport; /** * CDI extension to export Mongo repositories. - * + * * @author Oliver Gierke * @author Mark Paluch */ public class MongoRepositoryExtension extends CdiRepositoryExtensionSupport { - private static final Logger LOG = LoggerFactory.getLogger(MongoRepositoryExtension.class); + private static final Log LOG = LogFactory.getLog(MongoRepositoryExtension.class); private final Map, Bean> mongoOperations = new HashMap, Bean>(); @@ -65,7 +67,7 @@ void processBean(@Observes ProcessBean processBean) { } // Store the EntityManager bean using its qualifiers. - mongoOperations.put(new HashSet(bean.getQualifiers()), (Bean) bean); + mongoOperations.put(new HashSet<>(bean.getQualifiers()), (Bean) bean); } } } @@ -92,7 +94,7 @@ void afterBeanDiscovery(@Observes AfterBeanDiscovery afterBeanDiscovery, BeanMan /** * Creates a {@link CdiRepositoryBean} for the repository of the given type. - * + * * @param the type of the repository. * @param repositoryType the class representing the repository. * @param qualifiers the qualifiers to be applied to the bean. @@ -112,6 +114,6 @@ private CdiRepositoryBean createRepositoryBean(Class repositoryType, S // Construct and return the repository bean. return new MongoRepositoryBean(mongoOperations, qualifiers, repositoryType, beanManager, - getCustomImplementationDetector()); + Optional.of(getCustomImplementationDetector())); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/package-info.java index ca8bb77acd..a2cbf659dd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/package-info.java @@ -1,5 +1,6 @@ /** * CDI support for MongoDB specific repository implementation. 
*/ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.repository.cdi; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java index 7873693ec9..b6f693e16d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import java.lang.annotation.Target; import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Import; import org.springframework.data.mongodb.core.MongoTemplate; @@ -34,7 +35,7 @@ /** * Annotation to activate MongoDB repositories. If no base package is configured through either {@link #value()}, * {@link #basePackages()} or {@link #basePackageClasses()} it will trigger scanning of the package of the annotated class. - * + * * @author Oliver Gierke * @author Thomas Darimont */ @@ -47,7 +48,8 @@ /** * Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.: - * {@code @EnableMongoRepositories("org.my.pkg")} instead of {@code @EnableMongoRepositories(basePackages="org.my.pkg")}. + * {@code @EnableMongoRepositories("org.my.pkg")} instead of + * {@code @EnableMongoRepositories(basePackages="org.my.pkg")}. */ String[] value() default {}; @@ -79,60 +81,69 @@ * Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning * for {@code PersonRepositoryImpl}. - * - * @return + * + * @return {@literal Impl} by default. */ String repositoryImplementationPostfix() default "Impl"; /** * Configures the location of where to find the Spring Data named queries properties file. Will default to * {@code META-INF/mongo-named-queries.properties}. - * - * @return + * + * @return empty {@link String} by default. */ String namedQueriesLocation() default ""; /** * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to * {@link Key#CREATE_IF_NOT_FOUND}. - * - * @return + * + * @return {@link Key#CREATE_IF_NOT_FOUND} by default. */ Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND; /** * Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to * {@link MongoRepositoryFactoryBean}. - * - * @return + * + * @return {@link MongoRepositoryFactoryBean} by default.
*/ Class repositoryFactoryBeanClass() default MongoRepositoryFactoryBean.class; /** * Configure the repository base class to be used to create repository proxies for this particular configuration. - * - * @return + * + * @return {@link DefaultRepositoryBaseClass} by default. * @since 1.8 */ Class repositoryBaseClass() default DefaultRepositoryBaseClass.class; + /** + * Configure a specific {@link BeanNameGenerator} to be used when creating the repository beans. + * @return the {@link BeanNameGenerator} to be used or the base {@link BeanNameGenerator} interface to indicate context default. + * @since 4.4 + */ + Class nameGenerator() default BeanNameGenerator.class; + /** * Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected. - * - * @return + * + * @return {@literal mongoTemplate} by default. */ String mongoTemplateRef() default "mongoTemplate"; /** * Whether to automatically create indexes for query methods defined in the repository interface. - * - * @return + * + * @return {@literal false} by default. */ boolean createIndexesForQueryMethods() default false; /** * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the * repositories infrastructure. + * + * @return {@literal false} by default. */ boolean considerNestedRepositories() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java new file mode 100644 index 0000000000..9973bc7bdf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java @@ -0,0 +1,151 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.support.BeanNameGenerator; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Import; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean; +import org.springframework.data.repository.config.DefaultRepositoryBaseClass; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.QueryLookupStrategy.Key; + +/** + * Annotation to activate reactive MongoDB repositories. If no base package is configured through either + * {@link #value()}, {@link #basePackages()} or {@link #basePackageClasses()} it will trigger scanning of the package of the + * annotated class. + * + * @author Mark Paluch + * @since 2.0 + */ +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Inherited +@Import(ReactiveMongoRepositoriesRegistrar.class) +public @interface EnableReactiveMongoRepositories { + + /** + * Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.: + * {@code @EnableReactiveMongoRepositories("org.my.pkg")} instead of + * {@code @EnableReactiveMongoRepositories(basePackages="org.my.pkg")}. + */ + String[] value() default {}; + + /** + * Base packages to scan for annotated components. {@link #value()} is an alias for (and mutually exclusive with) this + * attribute. Use {@link #basePackageClasses()} for a type-safe alternative to String-based package names. + */ + String[] basePackages() default {}; + + /** + * Type-safe alternative to {@link #basePackages()} for specifying the packages to scan for annotated components. The + * package of each class specified will be scanned. Consider creating a special no-op marker class or interface in + * each package that serves no purpose other than being referenced by this attribute. + */ + Class<?>[] basePackageClasses() default {}; + + /** + * Specifies which types are eligible for component scanning. Further narrows the set of candidate components from + * everything in {@link #basePackages()} to everything in the base packages that matches the given filter or filters. + */ + Filter[] includeFilters() default {}; + + /** + * Specifies which types are not eligible for component scanning. + */ + Filter[] excludeFilters() default {}; + + /** + * Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So + * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning + * for {@code PersonRepositoryImpl}. + * + * @return {@literal Impl} by default. + */ + String repositoryImplementationPostfix() default "Impl"; + + /** + * Configures the location of where to find the Spring Data named queries properties file. Will default to + * {@code META-INF/mongo-named-queries.properties}. + * + * @return empty {@link String} by default.
+ */ + String namedQueriesLocation() default ""; + + /** + * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to + * {@link Key#CREATE_IF_NOT_FOUND}. + * + * @return {@link Key#CREATE_IF_NOT_FOUND} by default. + */ + Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND; + + /** + * Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to + * {@link ReactiveMongoRepositoryFactoryBean}. + * + * @return {@link ReactiveMongoRepositoryFactoryBean} by default. + */ + Class<?> repositoryFactoryBeanClass() default ReactiveMongoRepositoryFactoryBean.class; + + /** + * Configure the repository base class to be used to create repository proxies for this particular configuration. + * + * @return {@link DefaultRepositoryBaseClass} by default. + */ + Class<?> repositoryBaseClass() default DefaultRepositoryBaseClass.class; + + /** + * Configure a specific {@link BeanNameGenerator} to be used when creating the repository beans. + * @return the {@link BeanNameGenerator} to be used or the base {@link BeanNameGenerator} interface to indicate context default. + * @since 4.4 + */ + Class<? extends BeanNameGenerator> nameGenerator() default BeanNameGenerator.class; + + /** + * Configures the name of the {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} bean to be used with the repositories detected. + * + * @return {@literal reactiveMongoTemplate} by default. + */ + String reactiveMongoTemplateRef() default "reactiveMongoTemplate"; + + /** + * Whether to automatically create indexes for query methods defined in the repository interface. + * + * @return {@literal false} by default. + */ + boolean createIndexesForQueryMethods() default false; + + /** + * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the + * repositories infrastructure. + * + * @return {@literal false} by default. + */ + boolean considerNestedRepositories() default false; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java index 96b8c317ab..508ca16f50 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,24 +23,16 @@ /** * Mongo-specific {@link ImportBeanDefinitionRegistrar}.
- * + * * @author Oliver Gierke */ class MongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableMongoRepositories.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension() - */ @Override protected RepositoryConfigurationExtension getExtension() { return new MongoRepositoryConfigurationExtension(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java index 6e059b8ae6..2d852a0e07 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,15 +22,11 @@ /** * {@link NamespaceHandler} to register repository configuration. - * + * * @author Oliver Gierke */ public class MongoRepositoryConfigNamespaceHandler extends MongoNamespaceHandler { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.config.MongoNamespaceHandler#init() - */ @Override public void init() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java index 2279379787..9db7be0069 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,96 +19,66 @@ import java.util.Collection; import java.util.Collections; -import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.beans.factory.aot.BeanRegistrationAotProcessor; import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.core.annotation.AnnotationAttributes; import org.springframework.data.config.ParsingUtils; -import org.springframework.data.mongodb.config.BeanNames; +import org.springframework.data.mongodb.repository.aot.AotMongoRepositoryPostProcessor; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; -import org.springframework.data.repository.config.RepositoryConfigurationExtension; import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport; -import org.springframework.data.repository.config.RepositoryConfigurationSource; import org.springframework.data.repository.config.XmlRepositoryConfigurationSource; +import org.springframework.data.repository.core.RepositoryMetadata; + import org.w3c.dom.Element; /** - * {@link RepositoryConfigurationExtension} for MongoDB. - * + * {@link org.springframework.data.repository.config.RepositoryConfigurationExtension} for MongoDB. 
+ * * @author Oliver Gierke + * @author Mark Paluch */ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurationExtensionSupport { private static final String MONGO_TEMPLATE_REF = "mongo-template-ref"; private static final String CREATE_QUERY_INDEXES = "create-query-indexes"; - private boolean fallbackMappingContextCreated = false; - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModuleName() - */ @Override public String getModuleName() { return "MongoDB"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModulePrefix() - */ @Override - protected String getModulePrefix() { + public String getModulePrefix() { return "mongo"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getRepositoryFactoryClassName() - */ - public String getRepositoryFactoryClassName() { + public String getRepositoryFactoryBeanClassName() { return MongoRepositoryFactoryBean.class.getName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingAnnotations() - */ @Override - protected Collection> getIdentifyingAnnotations() { - return Collections.> singleton(Document.class); + public String getModuleIdentifier() { + return getModulePrefix(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingTypes() - */ @Override - protected Collection> getIdentifyingTypes() { - return Collections.> singleton(MongoRepository.class); + public Collection> getIdentifyingAnnotations() { + return Collections.singleton(Document.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.RepositoryConfigurationSource) - */ @Override - public void postProcess(BeanDefinitionBuilder builder, RepositoryConfigurationSource source) { + public Class getRepositoryAotProcessor() { + return AotMongoRepositoryPostProcessor.class; + } - if (fallbackMappingContextCreated) { - builder.addPropertyReference("mappingContext", BeanNames.MAPPING_CONTEXT_BEAN_NAME); - } + @Override + protected Collection> getIdentifyingTypes() { + return Collections.singleton(MongoRepository.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.XmlRepositoryConfigurationSource) - */ @Override public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) { @@ -118,10 +88,6 @@ public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfiguratio ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods"); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource) - */ @Override public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) { @@ -131,22 +97,8 @@ public void 
postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfi builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods")); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource) - */ @Override - public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) { - - super.registerBeansForRoot(registry, configurationSource); - - if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) { - - RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class); - definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE); - definition.setSource(configurationSource.getSource()); - - registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition); - } + protected boolean useRepositoryConfiguration(RepositoryMetadata metadata) { + return !metadata.isReactiveRepository(); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java new file mode 100644 index 0000000000..2c8384be93 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java @@ -0,0 +1,42 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository.config; + +import java.lang.annotation.Annotation; + +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport; +import org.springframework.data.repository.config.RepositoryConfigurationExtension; + +/** + * Mongo-specific {@link ImportBeanDefinitionRegistrar}. 
+ * + * @author Mark Paluch + * @since 2.0 + */ +class ReactiveMongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport { + + @Override + protected Class getAnnotation() { + return EnableReactiveMongoRepositories.class; + } + + @Override + protected RepositoryConfigurationExtension getExtension() { + return new ReactiveMongoRepositoryConfigurationExtension(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java new file mode 100644 index 0000000000..817cc397c2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java @@ -0,0 +1,81 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.config; + +import java.util.Collection; +import java.util.Collections; + +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.core.annotation.AnnotationAttributes; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean; +import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; +import org.springframework.data.repository.config.RepositoryConfigurationExtension; +import org.springframework.data.repository.config.XmlRepositoryConfigurationSource; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.w3c.dom.Element; + +/** + * Reactive {@link RepositoryConfigurationExtension} for MongoDB. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Oliver Gierke + * @since 2.0 + */ +public class ReactiveMongoRepositoryConfigurationExtension extends MongoRepositoryConfigurationExtension { + + private static final String MONGO_TEMPLATE_REF = "reactive-mongo-template-ref"; + private static final String CREATE_QUERY_INDEXES = "create-query-indexes"; + + @Override + public String getModuleName() { + return "Reactive MongoDB"; + } + + public String getRepositoryFactoryClassName() { + return ReactiveMongoRepositoryFactoryBean.class.getName(); + } + + @Override + protected Collection> getIdentifyingTypes() { + return Collections.singleton(ReactiveMongoRepository.class); + } + + @Override + public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) { + + Element element = config.getElement(); + + ParsingUtils.setPropertyReference(builder, element, MONGO_TEMPLATE_REF, "reactiveMongoOperations"); + ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods"); + } + + @Override + public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) { + + AnnotationAttributes attributes = config.getAttributes(); + + builder.addPropertyReference("reactiveMongoOperations", attributes.getString("reactiveMongoTemplateRef")); + builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods")); + } + + @Override + protected boolean useRepositoryConfiguration(RepositoryMetadata metadata) { + return metadata.isReactiveRepository(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/package-info.java index 30e2aeb82a..d0d9b07081 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/package-info.java @@ -1,5 +1,6 @@ /** * Support infrastructure for the configuration of MongoDB specific repositories. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.repository.config; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/package-info.java index 50507d7b44..8deddfe939 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/package-info.java @@ -1,5 +1,6 @@ /** * MongoDB specific repository implementation. */ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.repository; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java index d6b65be255..4d0d604a27 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2016 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,116 +15,229 @@ */ package org.springframework.data.mongodb.repository.query; -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.EntityInstantiators; +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueEvaluationContextProvider; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.ExecutableUpdate; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.BasicUpdate; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.repository.query.MongoQueryExecution.CollectionExecution; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Update; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.GeoNearExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution; -import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingConverter; -import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingExecution; -import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SingleEntityExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SlicedExecution; -import org.springframework.data.mongodb.repository.query.MongoQueryExecution.StreamExecution; +import org.springframework.data.mongodb.repository.query.MongoQueryExecution.UpdateExecution; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; import org.springframework.data.repository.query.RepositoryQuery; import 
org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.MongoDatabase; /** * Base class for {@link RepositoryQuery} implementations for Mongo. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author Jorge Rodríguez */ public abstract class AbstractMongoQuery implements RepositoryQuery { private final MongoQueryMethod method; private final MongoOperations operations; - private final EntityInstantiators instantiators; + private final ExecutableFind executableFind; + private final ExecutableUpdate executableUpdate; + private final Lazy codec = Lazy + .of(() -> new ParameterBindingDocumentCodec(getCodecRegistry())); + private final ValueExpressionDelegate valueExpressionDelegate; + private final ValueEvaluationContextProvider valueEvaluationContextProvider; /** * Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}. - * + * * @param method must not be {@literal null}. * @param operations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated use the constructor version with {@link ValueExpressionDelegate} */ - public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations) { + @Deprecated(since = "4.4.0") + public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations, ExpressionParser expressionParser, + QueryMethodEvaluationContextProvider evaluationContextProvider) { - Assert.notNull(operations, "MongoOperations must not be null!"); - Assert.notNull(method, "MongoQueryMethod must not be null!"); + Assert.notNull(operations, "MongoOperations must not be null"); + Assert.notNull(method, "MongoQueryMethod must not be null"); + Assert.notNull(expressionParser, "SpelExpressionParser must not be null"); + Assert.notNull(evaluationContextProvider, "QueryMethodEvaluationContextProvider must not be null"); this.method = method; this.operations = operations; - this.instantiators = new EntityInstantiators(); + + MongoEntityMetadata metadata = method.getEntityInformation(); + Class type = metadata.getCollectionEntity().getType(); + + this.executableFind = operations.query(type); + this.executableUpdate = operations.update(type); + this.valueExpressionDelegate = new ValueExpressionDelegate(new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), evaluationContextProvider.getEvaluationContextProvider()), ValueExpressionParser.create(() -> expressionParser)); + this.valueEvaluationContextProvider = valueExpressionDelegate.createValueContextProvider(method.getParameters()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.RepositoryQuery#getQueryMethod() + /** + * Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}. + * + * @param method must not be {@literal null}. 
+ * @param operations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 + */ + public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations, ValueExpressionDelegate delegate) { + + Assert.notNull(operations, "MongoOperations must not be null"); + Assert.notNull(method, "MongoQueryMethod must not be null"); + + this.method = method; + this.operations = operations; + + MongoEntityMetadata metadata = method.getEntityInformation(); + Class type = metadata.getCollectionEntity().getType(); + + this.executableFind = operations.query(type); + this.executableUpdate = operations.update(type); + this.valueExpressionDelegate = delegate; + this.valueEvaluationContextProvider = delegate.createValueContextProvider(method.getParameters()); + } + + @Override public MongoQueryMethod getQueryMethod() { return method; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.RepositoryQuery#execute(java.lang.Object[]) - */ + @Override public Object execute(Object[] parameters) { - MongoParameterAccessor accessor = new MongoParametersParameterAccessor(method, parameters); - Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), accessor)); - - applyQueryMetaAttributesWhenPresent(query); + ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(operations.getConverter(), + new MongoParametersParameterAccessor(method, parameters)); ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor); - String collection = method.getEntityInformation().getCollectionName(); + Class typeToRead = processor.getReturnedType().getTypeToRead(); + + return processor.processResult(doExecute(method, processor, accessor, typeToRead)); + } + + /** + * Execute the {@link RepositoryQuery} of the given method with the parameters provided by the + * {@link ConvertingParameterAccessor accessor}. + * + * @param method the {@link MongoQueryMethod} invoked. Never {@literal null}. + * @param processor {@link ResultProcessor} for post processing. Never {@literal null}. + * @param accessor for providing invocation arguments. Never {@literal null}. + * @param typeToRead the desired component target type. Can be {@literal null}. + */ + @Nullable + protected Object doExecute(MongoQueryMethod method, ResultProcessor processor, ConvertingParameterAccessor accessor, + @Nullable Class typeToRead) { + + Query query = createQuery(accessor); + + applyQueryMetaAttributesWhenPresent(query); + query = applyAnnotatedDefaultSortIfPresent(query); + query = applyAnnotatedCollationIfPresent(query, accessor); + query = applyHintIfPresent(query); + query = applyAnnotatedReadPreferenceIfPresent(query); - MongoQueryExecution execution = getExecution(query, accessor, - new ResultProcessingConverter(processor, operations, instantiators)); + FindWithQuery find = typeToRead == null // + ? executableFind // + : executableFind.as(typeToRead); - return execution.execute(query, processor.getReturnedType().getDomainType(), collection); + return getExecution(accessor, find).execute(query); } /** - * Returns the execution instance to use. - * + * If present apply the {@link com.mongodb.ReadPreference} from the {@link org.springframework.data.mongodb.repository.ReadPreference} annotation. + * * @param query must not be {@literal null}. - * @param parameters must not be {@literal null}. - * @param accessor must not be {@literal null}. - * @return + * @return never {@literal null}.
+ * @since 4.2 + */ - private MongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor, - Converter resultProcessing) { + private Query applyAnnotatedReadPreferenceIfPresent(Query query) { - if (method.isStreamQuery()) { - return new StreamExecution(operations, resultProcessing); + if (!method.hasAnnotatedReadPreference()) { + return query; } - return new ResultProcessingExecution(getExecutionToWrap(query, accessor), resultProcessing); + return query.withReadPreference(com.mongodb.ReadPreference.valueOf(method.getAnnotatedReadPreference())); } - private MongoQueryExecution getExecutionToWrap(Query query, MongoParameterAccessor accessor) { + private MongoQueryExecution getExecution(ConvertingParameterAccessor accessor, FindWithQuery operation) { if (isDeleteQuery()) { return new DeleteExecution(operations, method); - } else if (method.isGeoNearQuery() && method.isPageQuery()) { - return new PagingGeoNearExecution(operations, accessor, method.getReturnType(), this); + } + + if (method.isModifyingQuery()) { + if (isLimiting()) { + throw new IllegalStateException( + String.format("Update method must not be limiting; Offending method: %s", method)); + } + return new UpdateExecution(executableUpdate, method, () -> createUpdate(accessor), accessor); + } + + if (method.isGeoNearQuery() && method.isPageQuery()) { + return new PagingGeoNearExecution(operation, method, accessor, this); } else if (method.isGeoNearQuery()) { - return new GeoNearExecution(operations, accessor, method.getReturnType()); + return new GeoNearExecution(operation, method, accessor); } else if (method.isSliceQuery()) { - return new SlicedExecution(operations, accessor.getPageable()); + return new SlicedExecution(operation, accessor.getPageable()); + } else if (method.isStreamQuery()) { + return q -> operation.matching(q).stream(); } else if (method.isCollectionQuery()) { - return new CollectionExecution(operations, accessor.getPageable()); + return q -> operation.matching(q.with(accessor.getPageable()).with(accessor.getSort())).all(); + } else if (method.isScrollQuery()) { + return q -> operation.matching(q.with(accessor.getPageable()).with(accessor.getSort())) + .scroll(accessor.getScrollPosition()); } else if (method.isPageQuery()) { - return new PagedExecution(operations, accessor.getPageable()); + return new PagedExecution(operation, accessor.getPageable()); + } else if (isCountQuery()) { + return q -> operation.matching(q).count(); + } else if (isExistsQuery()) { + return q -> operation.matching(q).exists(); } else { - return new SingleEntityExecution(operations, isCountQuery()); + return q -> { + TerminatingFind find = operation.matching(q); + return isLimiting() ? find.firstValue() : find.oneValue(); + }; } } @@ -137,11 +250,59 @@ Query applyQueryMetaAttributesWhenPresent(Query query) { return query; } + /** + * Add a default sort derived from {@link org.springframework.data.mongodb.repository.Query#sort()} to the given + * {@link Query} if present. + * + * @param query the {@link Query} to potentially apply the sort to. + * @return the query with potential default sort applied. + * @since 2.1 + */ + Query applyAnnotatedDefaultSortIfPresent(Query query) { + + if (!method.hasAnnotatedSort()) { + return query; + } + + return QueryUtils.decorateSort(query, Document.parse(method.getAnnotatedSort())); + } + + /** + * If present apply a {@link org.springframework.data.mongodb.core.query.Collation} derived from the + * {@link org.springframework.data.repository.query.QueryMethod} to the given {@link Query}.
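The getExecution(...) dispatch above maps repository method shapes onto executions: deletes, annotated updates (rejected when limiting), geo-near with and without paging, slices, streams, plain collections, scroll, pages, counts, exists checks, and a limiting or single-entity read as the fallback. As a hedged sketch of which derived signatures would select those branches (Person and all method names here are invented for illustration, not part of this patch):

import java.util.stream.Stream;

import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.repository.Repository;

// Hypothetical mapped entity, for illustration only.
class Person {
	@Id String id;
	String firstname, lastname, email;
}

// Each signature selects a different branch of getExecution(...).
interface PersonRepository extends Repository<Person, String> {

	long deleteByLastname(String lastname);                            // DeleteExecution
	Slice<Person> findByLastname(String lastname, Pageable pageable);  // SlicedExecution
	Stream<Person> findAllByLastname(String lastname);                 // stream() branch
	Page<Person> findByFirstname(String firstname, Pageable pageable); // PagedExecution
	long countByLastname(String lastname);                             // count() branch
	boolean existsByEmail(String email);                               // exists() branch
	Person findFirstByOrderByLastnameAsc();                            // limiting read, firstValue()
}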
+ * + * @param query must not be {@literal null}. + * @param accessor the {@link ParameterAccessor} used to obtain parameter placeholder replacement values. + * @return + * @since 2.2 + */ + Query applyAnnotatedCollationIfPresent(Query query, ConvertingParameterAccessor accessor) { + + return QueryUtils.applyCollation(query, method.hasAnnotatedCollation() ? method.getAnnotatedCollation() : null, + accessor, getExpressionEvaluatorFor(accessor)); + } + + /** + * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}. + * @since 4.1 + */ + Query applyHintIfPresent(Query query) { + + if (!method.hasAnnotatedHint()) { + return query; + } + + return query.withHint(method.getAnnotatedHint()); + } + /** * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be * triggered. - * + * * @param accessor must not be {@literal null}. * @return */ @@ -149,9 +310,129 @@ protected Query createCountQuery(ConvertingParameterAccessor accessor) { return applyQueryMetaAttributesWhenPresent(createQuery(accessor)); } + /** + * Retrieves the {@link UpdateDefinition update} from the given + * {@link org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getUpdate() accessor} or creates + * one by parsing the annotated statement extracted from {@link Update}. + * + * @param accessor never {@literal null}. + * @return the computed {@link UpdateDefinition}. + * @throws IllegalStateException if no update could be found. + * @since 3.4 + */ + protected UpdateDefinition createUpdate(ConvertingParameterAccessor accessor) { + + if (accessor.getUpdate() != null) { + return accessor.getUpdate(); + } + + if (method.hasAnnotatedUpdate()) { + + Update updateSource = method.getUpdateSource(); + if (StringUtils.hasText(updateSource.update())) { + return new BasicUpdate(bindParameters(updateSource.update(), accessor)); + } + if (!ObjectUtils.isEmpty(updateSource.pipeline())) { + return AggregationUpdate.from(parseAggregationPipeline(updateSource.pipeline(), accessor)); + } + } + + throw new IllegalStateException(String.format("No Update provided for method %s.", method)); + } + + /** + * Parse the given aggregation pipeline stages, applying values to placeholders, to compute the actual list of + * {@link AggregationOperation operations}. + * + * @param sourcePipeline must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the parsed aggregation pipeline.
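To make the annotation-driven hooks above concrete, here is a minimal sketch of query methods exercising the annotated sort, collation, hint, and update paths; AccountRepository, the entity, the index name, and the field names are assumptions for illustration:

import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.Hint;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.mongodb.repository.Update;
import org.springframework.data.repository.Repository;

// Hypothetical mapped entity.
class Account {
	@Id String id;
	String owner, status;
	long balance;
}

interface AccountRepository extends Repository<Account, String> {

	// Picked up by applyAnnotatedDefaultSortIfPresent, applyAnnotatedCollationIfPresent
	// and applyHintIfPresent before execution.
	@Query(sort = "{ 'balance' : -1 }", collation = "en_US")
	@Hint("balance_idx")
	List<Account> findByStatus(String status);

	// createUpdate(...) parses the annotated statement; ?1 binds the second argument.
	@Update("{ '$inc' : { 'balance' : ?1 } }")
	long findAndIncrementBalanceByOwner(String owner, long delta);
}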
+ * @since 3.4 + */ + protected List parseAggregationPipeline(String[] sourcePipeline, + ConvertingParameterAccessor accessor) { + + List stages = new ArrayList<>(sourcePipeline.length); + for (String source : sourcePipeline) { + stages.add(computePipelineStage(source, accessor)); + } + return stages; + } + + private AggregationOperation computePipelineStage(String source, ConvertingParameterAccessor accessor) { + return new StringAggregationOperation(source, getQueryMethod().getDomainClass(), + (it) -> bindParameters(it, accessor)); + } + + protected Document decode(String source, ParameterBindingContext bindingContext) { + return getParameterBindingCodec().decode(source, bindingContext); + } + + private Document bindParameters(String source, ConvertingParameterAccessor accessor) { + return decode(source, prepareBindingContext(source, accessor)); + } + + /** + * Create the {@link ParameterBindingContext binding context} used for SpEL evaluation. + * + * @param source the JSON source. + * @param accessor value provider for parameter binding. + * @return never {@literal null}. + * @since 3.4 + */ + protected ParameterBindingContext prepareBindingContext(String source, ConvertingParameterAccessor accessor) { + + ValueExpressionEvaluator evaluator = getExpressionEvaluatorFor(accessor); + return new ParameterBindingContext(accessor::getBindableValue, evaluator); + } + + /** + * Obtain the {@link ParameterBindingDocumentCodec} used for parsing JSON expressions. + * + * @return never {@literal null}. + * @since 3.4 + */ + protected ParameterBindingDocumentCodec getParameterBindingCodec() { + return codec.get(); + } + + /** + * Obtain the {@link EvaluationContext} suitable to evaluate expressions backed by the given dependencies. + * + * @param dependencies must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the {@link SpELExpressionEvaluator}. + * @since 2.4 + */ + protected SpELExpressionEvaluator getSpELExpressionEvaluatorFor(ExpressionDependencies dependencies, + ConvertingParameterAccessor accessor) { + + return new DefaultSpELExpressionEvaluator(new SpelExpressionParser(), valueEvaluationContextProvider.getEvaluationContext(accessor.getValues(), dependencies).getEvaluationContext()); + } + + /** + * Obtain a {@link ValueExpressionEvaluator} suitable to evaluate expressions. + * + * @param accessor must not be {@literal null}. + * @return the {@link ValueExpressionEvaluator}. + * @since 4.4.0 + */ + protected ValueExpressionEvaluator getExpressionEvaluatorFor(MongoParameterAccessor accessor) { + return new ValueExpressionDelegateValueExpressionEvaluator(valueExpressionDelegate, (ValueExpression expression) -> + valueEvaluationContextProvider.getEvaluationContext(accessor.getValues(), expression.getExpressionDependencies())); + } + + /** + * @return the {@link CodecRegistry} used. + * @since 2.4 + */ + protected CodecRegistry getCodecRegistry() { + return operations.execute(MongoDatabase::getCodecRegistry); + } + /** * Creates a {@link Query} instance using the given {@link ParameterAccessor}. - * + * * @param accessor must not be {@literal null}. * @return */ @@ -159,16 +440,33 @@ protected Query createCountQuery(ConvertingParameterAccessor accessor) { /** * Returns whether the query should get a count projection applied. - * + * * @return */ protected abstract boolean isCountQuery(); + /** + * Returns whether the query should get an exists projection applied.
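The ParameterBindingDocumentCodec and the evaluator obtained above are what resolve positional placeholders and expressions inside annotated JSON before it is decoded into a Document. A hedged sketch (Order, its fields, and the repository are invented):

import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.repository.Repository;

// Hypothetical mapped entity.
class Order {
	@Id String id;
	String customerId;
	double total;
}

interface OrderRepository extends Repository<Order, String> {

	// ?0 resolves through accessor.getBindableValue(0); the ?#{...} expression runs
	// through the evaluator returned by getExpressionEvaluatorFor(accessor).
	@Query("{ 'customerId' : ?0, 'total' : { '$gt' : ?#{[1]} } }")
	List<Order> findByCustomerIdAndMinTotal(String customerId, double minTotal);
}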
+ * + * @return + * @since 1.10 + */ + protected abstract boolean isExistsQuery(); + /** * Return whether the query should delete matching documents. - * + * * @return * @since 1.5 */ protected abstract boolean isDeleteQuery(); + + /** + * Return whether the query has an explicit limit set. + * + * @return + * @since 2.0.4 + */ + protected abstract boolean isLimiting(); + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java new file mode 100644 index 0000000000..a5754a4e46 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java @@ -0,0 +1,568 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.reactivestreams.Publisher; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ReactiveValueEvaluationContextProvider; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueEvaluationContextProvider; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mapping.model.EntityInstantiators; +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection; +import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.BasicUpdate; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Update; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.DeleteExecution; +import
org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingConverter; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.UpdateExecution; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoClientSettings; + +/** + * Base class for reactive {@link RepositoryQuery} implementations for MongoDB. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Jorge Rodríguez + * @since 2.0 + */ +public abstract class AbstractReactiveMongoQuery implements RepositoryQuery { + + private final ReactiveMongoQueryMethod method; + private final ReactiveMongoOperations operations; + private final EntityInstantiators instantiators; + private final FindWithProjection findOperationWithProjection; + private final ReactiveUpdate updateOps; + private final ValueExpressionDelegate valueExpressionDelegate; + private final ReactiveValueEvaluationContextProvider valueEvaluationContextProvider; + + /** + * Creates a new {@link AbstractReactiveMongoQuery} from the given {@link MongoQueryMethod} and + * {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. 
+ * @deprecated use the constructor version with {@link ValueExpressionDelegate} + */ + @Deprecated(since = "4.4.0") + public AbstractReactiveMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations, + ExpressionParser expressionParser, ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + + Assert.notNull(method, "MongoQueryMethod must not be null"); + Assert.notNull(operations, "ReactiveMongoOperations must not be null"); + Assert.notNull(expressionParser, "SpelExpressionParser must not be null"); + Assert.notNull(evaluationContextProvider, "ReactiveEvaluationContextExtension must not be null"); + + this.method = method; + this.operations = operations; + this.instantiators = new EntityInstantiators(); + this.valueExpressionDelegate = new ValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), + evaluationContextProvider.getEvaluationContextProvider()), + ValueExpressionParser.create(() -> expressionParser)); + + MongoEntityMetadata metadata = method.getEntityInformation(); + Class type = metadata.getCollectionEntity().getType(); + + this.findOperationWithProjection = operations.query(type); + this.updateOps = operations.update(type); + ValueEvaluationContextProvider valueContextProvider = valueExpressionDelegate + .createValueContextProvider(method.getParameters()); + Assert.isInstanceOf(ReactiveValueEvaluationContextProvider.class, valueContextProvider, + "ValueEvaluationContextProvider must be reactive"); + this.valueEvaluationContextProvider = (ReactiveValueEvaluationContextProvider) valueContextProvider; + } + + /** + * Creates a new {@link AbstractReactiveMongoQuery} from the given {@link MongoQueryMethod} and + * {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 + */ + public AbstractReactiveMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations, + ValueExpressionDelegate delegate) { + + Assert.notNull(method, "MongoQueryMethod must not be null"); + Assert.notNull(operations, "ReactiveMongoOperations must not be null"); + Assert.notNull(delegate, "ValueExpressionDelegate must not be null"); + + this.method = method; + this.operations = operations; + this.instantiators = new EntityInstantiators(); + this.valueExpressionDelegate = delegate; + + MongoEntityMetadata metadata = method.getEntityInformation(); + Class type = metadata.getCollectionEntity().getType(); + + this.findOperationWithProjection = operations.query(type); + this.updateOps = operations.update(type); + ValueEvaluationContextProvider valueContextProvider = valueExpressionDelegate + .createValueContextProvider(method.getParameters()); + Assert.isInstanceOf(ReactiveValueEvaluationContextProvider.class, valueContextProvider, + "ValueEvaluationContextProvider must be reactive"); + this.valueEvaluationContextProvider = (ReactiveValueEvaluationContextProvider) valueContextProvider; + } + + @Override + public MongoQueryMethod getQueryMethod() { + return method; + } + + @Override + public Publisher execute(Object[] parameters) { + + return method.hasReactiveWrapperParameter() ? 
executeDeferred(parameters) + : execute(new MongoParametersParameterAccessor(method, parameters)); + } + + @SuppressWarnings("unchecked") + private Publisher executeDeferred(Object[] parameters) { + + ReactiveMongoParameterAccessor parameterAccessor = new ReactiveMongoParameterAccessor(method, parameters); + + return parameterAccessor.resolveParameters().flatMapMany(this::execute); + } + + private Publisher execute(MongoParameterAccessor parameterAccessor) { + + ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(operations.getConverter(), + parameterAccessor); + + ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor); + Class typeToRead = processor.getReturnedType().getTypeToRead(); + + return doExecute(method, processor, accessor, typeToRead); + } + + /** + * Execute the {@link RepositoryQuery} of the given method with the parameters provided by the + * {@link ConvertingParameterAccessor accessor}. + * + * @param method the {@link ReactiveMongoQueryMethod} invoked. Never {@literal null}. + * @param processor {@link ResultProcessor} for post processing. Never {@literal null}. + * @param accessor for providing invocation arguments. Never {@literal null}. + * @param typeToRead the desired component target type. Can be {@literal null}. + */ + protected Publisher doExecute(ReactiveMongoQueryMethod method, ResultProcessor processor, + ConvertingParameterAccessor accessor, @Nullable Class typeToRead) { + + return createQuery(accessor).flatMapMany(it -> { + + Query query = it; + applyQueryMetaAttributesWhenPresent(query); + query = applyAnnotatedDefaultSortIfPresent(query); + query = applyAnnotatedCollationIfPresent(query, accessor); + query = applyHintIfPresent(query); + query = applyAnnotatedReadPreferenceIfPresent(query); + + FindWithQuery find = typeToRead == null // + ? findOperationWithProjection // + : findOperationWithProjection.as(typeToRead); + + String collection = method.getEntityInformation().getCollectionName(); + + ReactiveMongoQueryExecution execution = getExecution(accessor, + getResultProcessing(processor), find); + return execution.execute(query, processor.getReturnedType().getDomainType(), collection); + }); + } + + ResultProcessingConverter getResultProcessing(ResultProcessor processor) { + return new ResultProcessingConverter(processor, operations, instantiators); + } + + /** + * Returns the execution instance to use. + * + * @param accessor must not be {@literal null}. + * @param resultProcessing must not be {@literal null}.
+ * @return + */ + private ReactiveMongoQueryExecution getExecution(MongoParameterAccessor accessor, + Converter resultProcessing, FindWithQuery operation) { + return new ResultProcessingExecution(getExecutionToWrap(accessor, operation), resultProcessing); + } + + private ReactiveMongoQueryExecution getExecutionToWrap(MongoParameterAccessor accessor, FindWithQuery operation) { + + if (isDeleteQuery()) { + return new DeleteExecution(operations, method); + } else if (method.isModifyingQuery()) { + + if (isLimiting()) { + throw new IllegalStateException( + String.format("Update method must not be limiting; Offending method: %s", method)); + } + + return new UpdateExecution(updateOps, method, accessor, createUpdate(accessor)); + } else if (method.isGeoNearQuery()) { + return new GeoNearExecution(operations, accessor, method.getReturnType()); + } else if (isTailable(method)) { + return (q, t, c) -> operation.matching(q.with(accessor.getPageable())).tail(); + } else if (method.isCollectionQuery()) { + return (q, t, c) -> operation.matching(q.with(accessor.getPageable())).all(); + } else if (method.isScrollQuery()) { + return (q, t, c) -> operation.matching(q.with(accessor.getPageable()).with(accessor.getSort())) + .scroll(accessor.getScrollPosition()); + } else if (isCountQuery()) { + return (q, t, c) -> operation.matching(q).count(); + } else if (isExistsQuery()) { + return (q, t, c) -> operation.matching(q).exists(); + } else { + return (q, t, c) -> { + + TerminatingFind find = operation.matching(q); + + if (isCountQuery()) { + return find.count(); + } + + return isLimiting() ? find.first() : find.one(); + }; + } + } + + private boolean isTailable(MongoQueryMethod method) { + return method.getTailableAnnotation() != null; + } + + Query applyQueryMetaAttributesWhenPresent(Query query) { + + if (method.hasQueryMetaAttributes()) { + query.setMeta(method.getQueryMetaAttributes()); + } + + return query; + } + + /** + * Add a default sort derived from {@link org.springframework.data.mongodb.repository.Query#sort()} to the given + * {@link Query} if present. + * + * @param query the {@link Query} to potentially apply the sort to. + * @return the query with potential default sort applied. + * @since 2.1 + */ + Query applyAnnotatedDefaultSortIfPresent(Query query) { + + if (!method.hasAnnotatedSort()) { + return query; + } + + return QueryUtils.decorateSort(query, Document.parse(method.getAnnotatedSort())); + } + + /** + * If present apply a {@link org.springframework.data.mongodb.core.query.Collation} derived from the + * {@link org.springframework.data.repository.query.QueryMethod} to the given {@link Query}. + * + * @param query must not be {@literal null}. + * @param accessor the {@link ParameterAccessor} used to obtain parameter placeholder replacement values. + * @return + * @since 2.2 + */ + Query applyAnnotatedCollationIfPresent(Query query, ConvertingParameterAccessor accessor) { + + return QueryUtils.applyCollation(query, method.hasAnnotatedCollation() ? method.getAnnotatedCollation() : null, + accessor, getValueExpressionEvaluator(accessor)); + } + + /** + * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}.
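The reactive dispatch above mirrors the imperative one but adds tailable cursors. A minimal sketch of a reactive repository hitting those branches; Event, the capped collection, and the method names are assumptions:

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.Tailable;
import org.springframework.data.repository.Repository;

// Hypothetical mapped entity stored in a capped collection.
class Event {
	@Id String id;
	String type, origin;
}

interface EventRepository extends Repository<Event, String> {

	@Tailable // isTailable(method) routes this to the tail() branch; requires a capped collection
	Flux<Event> findByType(String type);

	Flux<Event> findByOrigin(String origin);      // collection query, all()
	Mono<Long> countByOrigin(String origin);      // count() branch
	Mono<Boolean> existsByOrigin(String origin);  // exists() branch
}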
+ * @since 4.1 + */ + Query applyHintIfPresent(Query query) { + + if (!method.hasAnnotatedHint()) { + return query; + } + + return query.withHint(method.getAnnotatedHint()); + } + + /** + * If present apply the {@link com.mongodb.ReadPreference} from the + * {@link org.springframework.data.mongodb.repository.ReadPreference} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + private Query applyAnnotatedReadPreferenceIfPresent(Query query) { + + if (!method.hasAnnotatedReadPreference()) { + return query; + } + + return query.withReadPreference(com.mongodb.ReadPreference.valueOf(method.getAnnotatedReadPreference())); + } + + /** + * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to + * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be + * triggered. + * + * @param accessor must not be {@literal null}. + * @return + */ + protected Mono createCountQuery(ConvertingParameterAccessor accessor) { + return createQuery(accessor).map(this::applyQueryMetaAttributesWhenPresent); + } + + /** + * Retrieves the {@link UpdateDefinition update} from the given + * {@link org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getUpdate() accessor} or creates + * one by parsing the annotated statement extracted from {@link Update}. + * + * @param accessor never {@literal null}. + * @return the computed {@link UpdateDefinition}. + * @throws IllegalStateException if no update could be found. + * @since 3.4 + */ + protected Mono createUpdate(MongoParameterAccessor accessor) { + + if (accessor.getUpdate() != null) { + return Mono.just(accessor.getUpdate()); + } + + if (method.hasAnnotatedUpdate()) { + Update updateSource = method.getUpdateSource(); + if (StringUtils.hasText(updateSource.update())) { + + String updateJson = updateSource.update(); + return getParameterBindingCodec() // + .flatMap(codec -> expressionEvaluator(updateJson, accessor, codec) // + .map(evaluator -> decode(evaluator, updateJson, accessor, codec))) // + .map(BasicUpdate::fromDocument); + } + if (!ObjectUtils.isEmpty(updateSource.pipeline())) { + return parseAggregationPipeline(updateSource.pipeline(), accessor).map(AggregationUpdate::from); + } + } + + throw new IllegalStateException(String.format("No Update provided for method %s.", method)); + } + + /** + * Parse the given aggregation pipeline stages, applying values to placeholders, to compute the actual list of + * {@link AggregationOperation operations}. + * + * @param pipeline must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the parsed aggregation pipeline.
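As in the imperative variant, an annotated update may also be an aggregation pipeline, in which case the stages are parsed as described next. A hedged example (Customer and the field names are invented):

import reactor.core.publisher.Mono;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.Update;
import org.springframework.data.repository.Repository;

// Hypothetical mapped entity.
class Customer {
	@Id String id;
	String region, status;
}

interface CustomerRepository extends Repository<Customer, String> {

	// updateSource.pipeline() is non-empty, so createUpdate(...) builds an
	// AggregationUpdate from the parsed stages; ?1 binds the second argument.
	@Update(pipeline = "{ '$set' : { 'status' : ?1 } }")
	Mono<Long> findAndUpdateStatusByRegion(String region, String status);
}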
+ * @since 3.4 + */ + protected Mono> parseAggregationPipeline(String[] pipeline, + MongoParameterAccessor accessor) { + + return getCodecRegistry().map(ParameterBindingDocumentCodec::new).flatMap(codec -> { + + List> stages = new ArrayList<>(pipeline.length); + for (String source : pipeline) { + stages.add(computePipelineStage(source, accessor, codec)); + } + return Flux.concat(stages).collectList(); + }); + } + + private Mono computePipelineStage(String source, MongoParameterAccessor accessor, + ParameterBindingDocumentCodec codec) { + + return expressionEvaluator(source, accessor, codec).map(evaluator -> new StringAggregationOperation(source, + AbstractReactiveMongoQuery.this.getQueryMethod().getDomainClass(), + bsonString -> AbstractReactiveMongoQuery.this.decode(evaluator, bsonString, accessor, codec))); + } + + private Mono> expressionEvaluator(String source, + MongoParameterAccessor accessor, ParameterBindingDocumentCodec codec) { + + ExpressionDependencies dependencies = codec.captureExpressionDependencies(source, accessor::getBindableValue, + valueExpressionDelegate.getValueExpressionParser()); + return getValueExpressionEvaluatorLater(dependencies, accessor).zipWith(Mono.just(codec)); + } + + private Document decode(Tuple2 expressionEvaluator, + String source, MongoParameterAccessor accessor, ParameterBindingDocumentCodec codec) { + + ParameterBindingContext bindingContext = new ParameterBindingContext(accessor::getBindableValue, + expressionEvaluator.getT1()); + return codec.decode(source, bindingContext); + } + + /** + * Obtain the {@link ParameterBindingDocumentCodec} used for parsing JSON expressions. + * + * @return never {@literal null}. + * @since 3.4 + */ + protected Mono getParameterBindingCodec() { + return getCodecRegistry().map(ParameterBindingDocumentCodec::new); + } + + /** + * Obtain a {@link Mono publisher} emitting the {@link SpELExpressionEvaluator} suitable to evaluate expressions + * backed by the given dependencies. + * + * @param dependencies must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return a {@link Mono} emitting the {@link SpELExpressionEvaluator} when ready. + * @since 3.4 + * @deprecated since 4.4.0, use + * {@link #getValueExpressionEvaluatorLater(ExpressionDependencies, MongoParameterAccessor)} instead + */ + @Deprecated(since = "4.4.0") + protected Mono getSpelEvaluatorFor(ExpressionDependencies dependencies, + MongoParameterAccessor accessor) { + return valueEvaluationContextProvider.getEvaluationContextLater(accessor.getValues(), dependencies) + .map(evaluationContext -> (SpELExpressionEvaluator) new DefaultSpELExpressionEvaluator( + new SpelExpressionParser(), evaluationContext.getEvaluationContext())) + .defaultIfEmpty(DefaultSpELExpressionEvaluator.unsupported()); + } + + /** + * Obtain a {@link ValueExpressionEvaluator} suitable to evaluate expressions. + * + * @param accessor must not be {@literal null}. 
+ * @since 4.3 + */ + ValueExpressionEvaluator getValueExpressionEvaluator(MongoParameterAccessor accessor) { + + return new ValueExpressionEvaluator() { + + @Override + public T evaluate(String expressionString) { + ValueExpression expression = valueExpressionDelegate.parse(expressionString); + ValueEvaluationContext evaluationContext = valueEvaluationContextProvider + .getEvaluationContext(accessor.getValues(), expression.getExpressionDependencies()); + return (T) expression.evaluate(evaluationContext); + } + }; + } + + /** + * Obtain a {@link Mono publisher} emitting the {@link ValueExpressionEvaluator} suitable to evaluate expressions + * backed by the given dependencies. + * + * @param dependencies must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return a {@link Mono} emitting the {@link ValueExpressionEvaluator} when ready. + * @since 4.3 + */ + protected Mono getValueExpressionEvaluatorLater(ExpressionDependencies dependencies, + MongoParameterAccessor accessor) { + + return valueEvaluationContextProvider.getEvaluationContextLater(accessor.getValues(), dependencies) + .map(evaluationContext -> new ValueExpressionDelegateValueExpressionEvaluator(valueExpressionDelegate, + valueExpression -> evaluationContext)); + } + + /** + * @return a {@link Mono} emitting the {@link CodecRegistry} when ready. + * @since 2.4 + */ + protected Mono getCodecRegistry() { + + return Mono.from(operations.execute(db -> Mono.just(db.getCodecRegistry()))) + .defaultIfEmpty(MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Creates a {@link Query} instance using the given {@link ParameterAccessor}. + * + * @param accessor must not be {@literal null}. + * @return + */ + protected abstract Mono createQuery(ConvertingParameterAccessor accessor); + + /** + * Returns whether the query should get a count projection applied. + * + * @return + */ + protected abstract boolean isCountQuery(); + + /** + * Returns whether the query should get an exists projection applied. + * + * @return + * @since 2.0.9 + */ + protected abstract boolean isExistsQuery(); + + /** + * Return whether the query should delete matching documents. + * + * @return + * @since 1.5 + */ + protected abstract boolean isDeleteQuery(); + + /** + * Return whether the query has an explicit limit set. + * + * @return + * @since 2.0.4 + */ + protected abstract boolean isLimiting(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java new file mode 100644 index 0000000000..6eb6a5da89 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java @@ -0,0 +1,376 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.repository.query; + +import java.time.Duration; +import java.util.Map; +import java.util.function.Function; +import java.util.function.IntUnaryOperator; +import java.util.function.LongUnaryOperator; + +import org.bson.Document; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ReadPreference; + +/** + * Internal utility class to help avoid duplicate code required in both the reactive and the sync {@link Aggregation} + * support offered by repositories. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + * @since 2.2 + */ +abstract class AggregationUtils { + + private AggregationUtils() {} + + /** + * Apply a collation extracted from the given {@literal collationExpression} to the given + * {@link org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder}. Potentially replace parameter + * placeholders with values from the {@link ConvertingParameterAccessor accessor}. + * + * @param builder must not be {@literal null}. + * @param collationExpression must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the {@link Query} having proper {@link Collation}. + * @see AggregationOptions#getCollation() + */ + static AggregationOptions.Builder applyCollation(AggregationOptions.Builder builder, + @Nullable String collationExpression, ConvertingParameterAccessor accessor, ValueExpressionEvaluator evaluator) { + + Collation collation = CollationUtils.computeCollation(collationExpression, accessor, evaluator); + return collation == null ? builder : builder.collation(collation); + } + + /** + * Apply {@link Meta#getComment()} and {@link Meta#getCursorBatchSize()}. + * + * @param builder must not be {@literal null}. + * @param queryMethod must not be {@literal null}. 
+ */ + static AggregationOptions.Builder applyMeta(AggregationOptions.Builder builder, MongoQueryMethod queryMethod) { + + Meta meta = queryMethod.getQueryMetaAttributes(); + + if (meta.hasComment()) { + builder.comment(meta.getComment()); + } + + if (meta.getCursorBatchSize() != null) { + builder.cursorBatchSize(meta.getCursorBatchSize()); + } + + if (meta.hasMaxTime()) { + builder.maxTime(Duration.ofMillis(meta.getRequiredMaxTimeMsec())); + } + + if (meta.getAllowDiskUse() != null) { + builder.allowDiskUse(meta.getAllowDiskUse()); + } + + return builder; + } + + /** + * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation. + * + * @param builder must not be {@literal null}. + * @return never {@literal null}. + * @since 4.1 + */ + static AggregationOptions.Builder applyHint(AggregationOptions.Builder builder, MongoQueryMethod queryMethod) { + + if (!queryMethod.hasAnnotatedHint()) { + return builder; + } + + return builder.hint(queryMethod.getAnnotatedHint()); + } + + /** + * If present apply the preference from the {@link org.springframework.data.mongodb.repository.ReadPreference} + * annotation. + * + * @param builder must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + static AggregationOptions.Builder applyReadPreference(AggregationOptions.Builder builder, + MongoQueryMethod queryMethod) { + + if (!queryMethod.hasAnnotatedReadPreference()) { + return builder; + } + + return builder.readPreference(ReadPreference.valueOf(queryMethod.getAnnotatedReadPreference())); + } + + static AggregationOptions computeOptions(MongoQueryMethod method, ConvertingParameterAccessor accessor, + AggregationPipeline pipeline, ValueExpressionEvaluator evaluator) { + + AggregationOptions.Builder builder = Aggregation.newAggregationOptions(); + + AggregationUtils.applyCollation(builder, method.getAnnotatedCollation(), accessor, evaluator); + AggregationUtils.applyMeta(builder, method); + AggregationUtils.applyHint(builder, method); + AggregationUtils.applyReadPreference(builder, method); + + TypeInformation returnType = method.getReturnType(); + if (returnType.getComponentType() != null) { + returnType = returnType.getRequiredComponentType(); + } + if (ReflectionUtils.isVoid(returnType.getType()) && pipeline.isOutOrMerge()) { + builder.skipOutput(); + } + + return builder.build(); + } + + /** + * Prepares the AggregationPipeline including type discovery and calling {@link AggregationCallback} to run the + * aggregation. + */ + @Nullable + static T doAggregate(AggregationPipeline pipeline, MongoQueryMethod method, ResultProcessor processor, + ConvertingParameterAccessor accessor, + Function evaluatorFunction, AggregationCallback callback) { + + Class sourceType = method.getDomainClass(); + ReturnedType returnedType = processor.getReturnedType(); + // 🙈Interface Projections do not happen on the Aggregation level but through our repository infrastructure. + // Non-projections and raw results (AggregationResults<…>) are handled here. Interface projections read a Document + // and DTO projections read the returned type. + // We also support simple return types (String) that are read from a Document + TypeInformation returnType = method.getReturnType(); + Class returnElementType = (returnType.getComponentType() != null ? 
returnType.getRequiredComponentType() + : returnType).getType(); + Class entityType; + + boolean isRawAggregationResult = ClassUtils.isAssignable(AggregationResults.class, method.getReturnedObjectType()); + + if (returnElementType.equals(Document.class)) { + entityType = sourceType; + } else { + entityType = returnElementType; + } + + AggregationUtils.appendSortIfPresent(pipeline, accessor, entityType); + + if (method.isSliceQuery()) { + AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor, LongUnaryOperator.identity(), + limit -> limit + 1); + } else { + AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor); + } + + AggregationOptions options = AggregationUtils.computeOptions(method, accessor, pipeline, + evaluatorFunction.apply(accessor)); + TypedAggregation aggregation = new TypedAggregation<>(sourceType, pipeline.getOperations(), options); + + boolean isSimpleReturnType = MongoSimpleTypes.HOLDER.isSimpleType(returnElementType); + Class typeToRead; + + if (isSimpleReturnType) { + typeToRead = Document.class; + } else if (isRawAggregationResult) { + typeToRead = returnElementType; + } else { + + if (returnedType.isProjecting()) { + typeToRead = returnedType.getReturnedType().isInterface() ? Document.class : returnedType.getReturnedType(); + } else { + typeToRead = entityType; + } + } + + return callback.doAggregate(aggregation, sourceType, typeToRead, returnElementType, isSimpleReturnType, + isRawAggregationResult); + } + + static AggregationPipeline computePipeline(AbstractMongoQuery mongoQuery, MongoQueryMethod method, + ConvertingParameterAccessor accessor) { + return new AggregationPipeline(mongoQuery.parseAggregationPipeline(method.getAnnotatedAggregation(), accessor)); + } + + /** + * Append {@code $sort} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is present. + * + * @param aggregationPipeline + * @param accessor + * @param targetType + */ + static void appendSortIfPresent(AggregationPipeline aggregationPipeline, ConvertingParameterAccessor accessor, + @Nullable Class targetType) { + + if (accessor.getSort().isUnsorted()) { + return; + } + + aggregationPipeline.add(ctx -> { + + Document sort = new Document(); + for (Order order : accessor.getSort()) { + sort.append(order.getProperty(), order.isAscending() ? 1 : -1); + } + + return ctx.getMappedObject(new Document("$sort", sort), targetType); + }); + } + + /** + * Append {@code $skip} and {@code $limit} aggregation stages if {@link ConvertingParameterAccessor#getPageable()} is + * present. + * + * @param aggregationPipeline + * @param accessor + */ + static void appendLimitAndOffsetIfPresent(AggregationPipeline aggregationPipeline, + ConvertingParameterAccessor accessor) { + appendLimitAndOffsetIfPresent(aggregationPipeline, accessor, LongUnaryOperator.identity(), + IntUnaryOperator.identity()); + } + + /** + * Append {@code $skip} and {@code $limit} aggregation stages if {@link ConvertingParameterAccessor#getPageable()} is + * present.
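As a worked example of the offset and limit operators above (numbers invented): PageRequest.of(2, 20) contributes { $skip: 40 } and { $limit: 20 }, while a slice query passes limit -> limit + 1 so that the extra element reveals whether a further slice exists. Roughly:

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

// Minimal sketch of the $skip/$limit arithmetic used by appendLimitAndOffsetIfPresent.
class SkipLimitSketch {

	public static void main(String[] args) {

		Pageable pageable = PageRequest.of(2, 20); // page index 2, page size 20

		long skip = pageable.getOffset();   // 40 -> { $skip: 40 }, only appended when > 0
		int limit = pageable.getPageSize(); // 20 -> { $limit: 20 }

		int sliceLimit = limit + 1;         // slice queries read one extra element -> { $limit: 21 }

		System.out.printf("$skip=%d, $limit=%d, slice $limit=%d%n", skip, limit, sliceLimit);
	}
}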
+ * + * @param aggregationPipeline + * @param accessor + * @param offsetOperator + * @param limitOperator + * @since 3.3 + */ + static void appendLimitAndOffsetIfPresent(AggregationPipeline aggregationPipeline, + ConvertingParameterAccessor accessor, LongUnaryOperator offsetOperator, IntUnaryOperator limitOperator) { + + Pageable pageable = accessor.getPageable(); + if (pageable.isUnpaged()) { + return; + } + + if (pageable.getOffset() > 0) { + aggregationPipeline.add(Aggregation.skip(offsetOperator.applyAsLong(pageable.getOffset()))); + } + + aggregationPipeline.add(Aggregation.limit(limitOperator.applyAsInt(pageable.getPageSize()))); + } + + /** + * Extract a single entry from the given {@link Document}.
+ * <ol> + * <li>empty source: {@literal null}</li> + * <li>single entry: convert that one</li> + * <li>single entry when ignoring the {@literal _id} field: convert that one</li> + * <li>multiple entries: first value assignable to the target type</li> + * <li>no match: IllegalArgumentException</li> + * </ol>
                    + * + * @param + * @param source + * @param targetType + * @param converter + * @return can be {@literal null} if source {@link Document#isEmpty() is empty}. + * @throws IllegalArgumentException when none of the above rules is met. + */ + @Nullable + static T extractSimpleTypeResult(@Nullable Document source, Class targetType, MongoConverter converter) { + + if (ObjectUtils.isEmpty(source)) { + return null; + } + + if (source.size() == 1) { + return getPotentiallyConvertedSimpleTypeValue(converter, source.values().iterator().next(), targetType); + } + + Document intermediate = new Document(source); + intermediate.remove(FieldName.ID.name()); + + if (intermediate.size() == 1) { + return getPotentiallyConvertedSimpleTypeValue(converter, intermediate.values().iterator().next(), targetType); + } + + for (Map.Entry entry : intermediate.entrySet()) { + if (entry != null && ClassUtils.isAssignable(targetType, entry.getValue().getClass())) { + return targetType.cast(entry.getValue()); + } + } + + throw new IllegalArgumentException( + String.format("o_O no entry of type %s found in %s.", targetType.getSimpleName(), source.toJson())); + } + + @Nullable + @SuppressWarnings("unchecked") + private static T getPotentiallyConvertedSimpleTypeValue(MongoConverter converter, @Nullable Object value, + Class targetType) { + + if (value == null) { + return null; + } + + if (ClassUtils.isAssignableValue(targetType, value)) { + return (T) value; + } + + return converter.getConversionService().convert(value, targetType); + } + + /** + * Interface to invoke an aggregation along with source, intermediate, and target types. + * + * @param + */ + interface AggregationCallback { + + /** + * @param aggregation + * @param domainType + * @param typeToRead + * @param elementType + * @param simpleType whether the aggregation returns {@link Document} or a + * {@link org.springframework.data.mapping.model.SimpleTypeHolder simple type}. + * @param rawResult whether the aggregation returns {@link AggregationResults}. + * @return + */ + @Nullable + T doAggregate(TypedAggregation aggregation, Class domainType, Class typeToRead, Class elementType, + boolean simpleType, boolean rawResult); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/BooleanUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/BooleanUtil.java new file mode 100644 index 0000000000..74249b40d5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/BooleanUtil.java @@ -0,0 +1,49 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +/** + * Utility class containing methods to interact with boolean values. 
+ * + * @author Mark Paluch + * @since 2.0.9 + */ +final class BooleanUtil { + + private BooleanUtil() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + /** + * Count the number of {@literal true} values. + * + * @param values + * @return the number of values that are {@literal true}. + */ + static int countBooleanTrueValues(boolean... values) { + + int count = 0; + + for (boolean value : values) { + + if (value) { + count++; + } + } + + return count; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/CollationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/CollationUtils.java new file mode 100644 index 0000000000..2aac6b77a8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/CollationUtils.java @@ -0,0 +1,107 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import java.util.Locale; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.Document; + +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.lang.Nullable; +import org.springframework.util.NumberUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Internal utility class to help avoid duplicate code required in both the reactive and the sync {@link Collation} + * support offered by repositories. + * + * @author Christoph Strobl + * @since 2.2 + */ +abstract class CollationUtils { + + private static final ParameterBindingDocumentCodec CODEC = new ParameterBindingDocumentCodec(); + private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)"); + + private CollationUtils() { + } + + /** + * Compute the {@link Collation} by inspecting the {@link ConvertingParameterAccessor#getCollation() parameter + * accessor} or parsing a potentially given {@literal collationExpression}. + * + * @param collationExpression + * @param accessor + * @param expressionEvaluator + * @return can be {@literal null} if neither {@link ConvertingParameterAccessor#getCollation()} nor + * {@literal collationExpression} are present. 
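BooleanUtil.countBooleanTrueValues exists so callers can validate mutually exclusive flags in a single pass. A minimal usage sketch; the flag names and the error message are illustrative, not taken from the patch:

    boolean isCount = true, isExists = false, isDelete = false;

    // Reject declarations that claim more than one exclusive execution mode.
    if (BooleanUtil.countBooleanTrueValues(isCount, isExists, isDelete) > 1) {
        throw new IllegalStateException("A query method can be a count, exists or delete query - not a combination");
    }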
+ */ + @Nullable + static Collation computeCollation(@Nullable String collationExpression, ConvertingParameterAccessor accessor, + ValueExpressionEvaluator expressionEvaluator) { + + if (accessor.getCollation() != null) { + return accessor.getCollation(); + } + + if (!StringUtils.hasText(collationExpression)) { + return null; + } + + if (collationExpression.stripLeading().startsWith("{")) { + + ParameterBindingContext bindingContext = ParameterBindingContext.forExpressions(accessor::getBindableValue, + expressionEvaluator); + + return Collation.from(CODEC.decode(collationExpression, bindingContext)); + } + + Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(collationExpression); + if (!matcher.find()) { + return Collation.parse(collationExpression); + } + + String placeholder = matcher.group(); + Object placeholderValue = accessor.getBindableValue(computeParameterIndex(placeholder)); + + if (collationExpression.startsWith("?")) { + + if (placeholderValue instanceof String) { + return Collation.parse(placeholderValue.toString()); + } + if (placeholderValue instanceof Locale locale) { + return Collation.of(locale); + } + if (placeholderValue instanceof Document document) { + return Collation.from(document); + } + throw new IllegalArgumentException(String.format("Collation must be a String, Locale or Document but was %s", + ObjectUtils.nullSafeClassName(placeholderValue))); + } + + return Collation.parse(collationExpression.replace(placeholder, placeholderValue.toString())); + } + + private static int computeParameterIndex(String parameter) { + return NumberUtils.parseNumber(parameter.replace("?", ""), Integer.class); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java index 15d0cd25e6..dbf87f2f2e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
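computeCollation above accepts three expression shapes: a JSON collation document (optionally containing ?0-style placeholders), a lone ?0 placeholder bound to a String, Locale, or Document argument, and a plain collation string such as en_US. A hedged sketch of repository declarations exercising each shape; the method names and the Person domain type are illustrative:

    interface PersonRepository extends Repository<Person, String> {

        @Query(collation = "en_US")                  // plain collation string, parsed directly
        List<Person> findByFirstname(String firstname);

        @Query(collation = "?1")                     // whole collation taken from a String/Locale/Document argument
        List<Person> findByLastname(String lastname, Object collation);

        @Query(collation = "{ 'locale' : '?1' }")    // placeholder bound inside a collation document
        List<Person> findByCity(String city, String locale);
    }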
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,16 +21,21 @@ import java.util.Iterator; import java.util.List; +import org.springframework.data.domain.Limit; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParameterAccessor; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; @@ -38,10 +43,11 @@ /** * Custom {@link ParameterAccessor} that uses a {@link MongoWriter} to serialize parameters into Mongo format. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ public class ConvertingParameterAccessor implements MongoParameterAccessor { @@ -50,110 +56,92 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor { /** * Creates a new {@link ConvertingParameterAccessor} with the given {@link MongoWriter} and delegate. - * + * * @param writer must not be {@literal null}. * @param delegate must not be {@literal null}. 
*/ public ConvertingParameterAccessor(MongoWriter writer, MongoParameterAccessor delegate) { - Assert.notNull(writer); - Assert.notNull(delegate); + Assert.notNull(writer, "MongoWriter must not be null"); + Assert.notNull(delegate, "MongoParameterAccessor must not be null"); this.writer = writer; this.delegate = delegate; } - /* - * (non-Javadoc) - * - * @see java.lang.Iterable#iterator() - */ public PotentiallyConvertingIterator iterator() { return new ConvertingIterator(delegate.iterator()); } - /* - * (non-Javadoc) - * - * @see org.springframework.data.repository.query.ParameterAccessor#getPageable() - */ + @Override + public ScrollPosition getScrollPosition() { + return delegate.getScrollPosition(); + } + public Pageable getPageable() { return delegate.getPageable(); } - /* - * (non-Javadoc) - * - * @see org.springframework.data.repository.query.ParameterAccessor#getSort() - */ public Sort getSort() { return delegate.getSort(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection() - */ @Override - public Class getDynamicProjection() { - return delegate.getDynamicProjection(); + public Class findDynamicProjection() { + return delegate.findDynamicProjection(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int) - */ public Object getBindableValue(int index) { return getConvertedValue(delegate.getBindableValue(index), null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getDistanceRange() - */ @Override public Range getDistanceRange() { return delegate.getDistanceRange(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getGeoNearLocation() - */ public Point getGeoNearLocation() { return delegate.getGeoNearLocation(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getFullText() - */ public TextCriteria getFullText() { return delegate.getFullText(); } + @Override + public Collation getCollation() { + return delegate.getCollation(); + } + + @Override + public UpdateDefinition getUpdate() { + return delegate.getUpdate(); + } + + @Override + public Limit getLimit() { + return delegate.getLimit(); + } + /** * Converts the given value with the underlying {@link MongoWriter}. - * + * * @param value can be {@literal null}. * @param typeInformation can be {@literal null}. - * @return + * @return can be {@literal null}. */ - private Object getConvertedValue(Object value, TypeInformation typeInformation) { + @Nullable + private Object getConvertedValue(Object value, @Nullable TypeInformation typeInformation) { return writer.convertToMongoType(value, typeInformation == null ? null : typeInformation.getActualType()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#hasBindableNullValue() - */ public boolean hasBindableNullValue() { return delegate.hasBindableNullValue(); } /** * Custom {@link Iterator} to convert items before returning them. - * + * * @author Oliver Gierke */ private class ConvertingIterator implements PotentiallyConvertingIterator { @@ -162,33 +150,21 @@ private class ConvertingIterator implements PotentiallyConvertingIterator { /** * Creates a new {@link ConvertingIterator} for the given delegate. 
- * + * * @param delegate */ public ConvertingIterator(Iterator delegate) { this.delegate = delegate; } - /* - * (non-Javadoc) - * @see java.util.Iterator#hasNext() - */ public boolean hasNext() { return delegate.hasNext(); } - /* - * (non-Javadoc) - * @see java.util.Iterator#next() - */ public Object next() { return delegate.next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ConvertingParameterAccessor.PotentiallConvertingIterator#nextConverted() - */ public Object nextConverted(MongoPersistentProperty property) { Object next = next(); @@ -200,8 +176,10 @@ public Object nextConverted(MongoPersistentProperty property) { if (property.isAssociation()) { if (next.getClass().isArray() || next instanceof Iterable) { - List dbRefs = new ArrayList(); - for (Object element : asCollection(next)) { + Collection values = asCollection(next); + + List dbRefs = new ArrayList<>(values.size()); + for (Object element : values) { dbRefs.add(writer.toDBRef(element, property)); } @@ -214,10 +192,6 @@ public Object nextConverted(MongoPersistentProperty property) { return getConvertedValue(next, property.getTypeInformation()); } - /* - * (non-Javadoc) - * @see java.util.Iterator#remove() - */ public void remove() { delegate.remove(); } @@ -227,19 +201,22 @@ public void remove() { * Returns the given object as {@link Collection}. Will do a copy of it if it implements {@link Iterable} or is an * array. Will return an empty {@link Collection} in case {@literal null} is given. Will wrap all other types into a * single-element collection. - * - * @param source - * @return + * + * @param source can be {@literal null}, returns an empty {@link List} in that case. + * @return never {@literal null}. */ - private static Collection asCollection(Object source) { + private static Collection asCollection(@Nullable Object source) { - if (source instanceof Iterable) { + if (source instanceof Iterable iterable) { - List result = new ArrayList(); - for (Object element : (Iterable) source) { - result.add(element); + if(source instanceof Collection collection) { + return new ArrayList<>(collection); } + List result = new ArrayList<>(); + for (Object element : iterable) { + result.add(element); + } return result; } @@ -250,10 +227,6 @@ private static Collection asCollection(Object source) { return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues() - */ @Override public Object[] getValues() { return delegate.getValues(); @@ -261,17 +234,16 @@ public Object[] getValues() { /** * Custom {@link Iterator} that adds a method to access elements in a converted manner. - * + * * @author Oliver Gierke */ public interface PotentiallyConvertingIterator extends Iterator { /** * Returns the next element which has already been converted. - * + * * @return */ Object nextConverted(MongoPersistentProperty property); } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DefaultSpELExpressionEvaluator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DefaultSpELExpressionEvaluator.java new file mode 100644 index 0000000000..16a1e55226 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DefaultSpELExpressionEvaluator.java @@ -0,0 +1,69 @@ +/* + * Copyright 2020-2025 the original author or authors. 
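The reworked asCollection above now short-circuits Collection inputs with a sized copy instead of iterating element by element. Its contract, as stated in the Javadoc, sketched with illustrative values (customIterable stands for any non-Collection Iterable):

    asCollection(null);                        // -> empty collection
    asCollection(List.of("a", "b"));           // Collection -> defensive ArrayList copy (new fast path)
    asCollection(customIterable);              // other Iterable -> copied element by element
    asCollection(new double[] { 1.0, 2.0 });   // array -> CollectionUtils.arrayToList(...)
    asCollection("single");                    // anything else -> singleton collection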
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.ExpressionParser; + +/** + * Simple {@link SpELExpressionEvaluator} implementation using {@link ExpressionParser} and {@link EvaluationContext}. + * + * @author Mark Paluch + * @since 3.1 + */ +class DefaultSpELExpressionEvaluator implements SpELExpressionEvaluator { + + private final ExpressionParser parser; + private final EvaluationContext context; + + DefaultSpELExpressionEvaluator(ExpressionParser parser, EvaluationContext context) { + this.parser = parser; + this.context = context; + } + + /** + * Return a {@link SpELExpressionEvaluator} that does not support expression evaluation. + * + * @return a {@link SpELExpressionEvaluator} that does not support expression evaluation. + * @since 3.1 + */ + public static SpELExpressionEvaluator unsupported() { + return NoOpExpressionEvaluator.INSTANCE; + } + + @Override + @SuppressWarnings("unchecked") + public T evaluate(String expression) { + return (T) parser.parseExpression(expression).getValue(context, Object.class); + } + + /** + * {@link SpELExpressionEvaluator} that does not support SpEL evaluation. + * + * @author Mark Paluch + * @since 3.1 + */ + enum NoOpExpressionEvaluator implements SpELExpressionEvaluator { + + INSTANCE; + + @Override + public T evaluate(String expression) { + throw new UnsupportedOperationException("Expression evaluation not supported"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DtoInstantiatingConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DtoInstantiatingConverter.java deleted file mode 100644 index 0aa06e5e58..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DtoInstantiatingConverter.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2015-2016 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
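A minimal wiring sketch for the DefaultSpELExpressionEvaluator introduced above, using SpelExpressionParser and StandardEvaluationContext from spring-expression; since the class is package-private, this assumes code living in the same package:

    ExpressionParser parser = new SpelExpressionParser();
    EvaluationContext context = new StandardEvaluationContext();

    SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(parser, context);
    Integer three = evaluator.evaluate("1 + 2");   // parses the expression, then evaluates it against the context

    // Where SpEL support is intentionally unavailable, the no-op instance fails fast:
    DefaultSpELExpressionEvaluator.unsupported().evaluate("1 + 2");   // throws UnsupportedOperationException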
- */ -package org.springframework.data.mongodb.repository.query; - -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.EntityInstantiator; -import org.springframework.data.convert.EntityInstantiators; -import org.springframework.data.mapping.PersistentEntity; -import org.springframework.data.mapping.PersistentProperty; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PreferredConstructor; -import org.springframework.data.mapping.PreferredConstructor.Parameter; -import org.springframework.data.mapping.SimplePropertyHandler; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.ParameterValueProvider; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.util.Assert; - -/** - * {@link Converter} to instantiate DTOs from fully equipped domain objects. - * - * @author Oliver Gierke - */ -class DtoInstantiatingConverter implements Converter { - - private final Class targetType; - private final MappingContext, ? extends PersistentProperty> context; - private final EntityInstantiator instantiator; - - /** - * Creates a new {@link Converter} to instantiate DTOs. - * - * @param dtoType must not be {@literal null}. - * @param context must not be {@literal null}. - * @param instantiators must not be {@literal null}. - */ - public DtoInstantiatingConverter(Class dtoType, - MappingContext, MongoPersistentProperty> context, - EntityInstantiators instantiator) { - - Assert.notNull(dtoType, "DTO type must not be null!"); - Assert.notNull(context, "MappingContext must not be null!"); - Assert.notNull(instantiator, "EntityInstantiators must not be null!"); - - this.targetType = dtoType; - this.context = context; - this.instantiator = instantiator.getInstantiatorFor(context.getPersistentEntity(dtoType)); - } - - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ - @Override - public Object convert(Object source) { - - if (targetType.isInterface()) { - return source; - } - - final PersistentEntity sourceEntity = context.getPersistentEntity(source.getClass()); - final PersistentPropertyAccessor sourceAccessor = sourceEntity.getPropertyAccessor(source); - final PersistentEntity targetEntity = context.getPersistentEntity(targetType); - final PreferredConstructor> constructor = targetEntity - .getPersistenceConstructor(); - - @SuppressWarnings({ "rawtypes", "unchecked" }) - Object dto = instantiator.createInstance(targetEntity, new ParameterValueProvider() { - - @Override - public Object getParameterValue(Parameter parameter) { - return sourceAccessor.getProperty(sourceEntity.getPersistentProperty(parameter.getName())); - } - }); - - final PersistentPropertyAccessor dtoAccessor = targetEntity.getPropertyAccessor(dto); - - targetEntity.doWithProperties(new SimplePropertyHandler() { - - @Override - public void doWithPersistentProperty(PersistentProperty property) { - - if (constructor.isConstructorParameter(property)) { - return; - } - - dtoAccessor.setProperty(property, - sourceAccessor.getProperty(sourceEntity.getPersistentProperty(property.getName()))); - } - }); - - return dto; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ExpressionEvaluatingParameterBinder.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ExpressionEvaluatingParameterBinder.java deleted file mode 100644 index 10c34bb391..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ExpressionEvaluatingParameterBinder.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.query; - -import java.util.Collections; -import java.util.List; - -import javax.xml.bind.DatatypeConverter; - -import org.bson.BSON; -import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding; -import org.springframework.data.repository.query.EvaluationContextProvider; -import org.springframework.expression.EvaluationContext; -import org.springframework.expression.Expression; -import org.springframework.expression.spel.standard.SpelExpressionParser; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -import com.mongodb.util.JSON; - -/** - * {@link ExpressionEvaluatingParameterBinder} allows to evaluate, convert and bind parameters to placholders within a - * {@link String}. - * - * @author Christoph Strobl - * @author Thomas Darimont - * @author Oliver Gierke - * @since 1.9 - */ -class ExpressionEvaluatingParameterBinder { - - private final SpelExpressionParser expressionParser; - private final EvaluationContextProvider evaluationContextProvider; - - /** - * Creates new {@link ExpressionEvaluatingParameterBinder} - * - * @param expressionParser must not be {@literal null}. - * @param evaluationContextProvider must not be {@literal null}. - */ - public ExpressionEvaluatingParameterBinder(SpelExpressionParser expressionParser, - EvaluationContextProvider evaluationContextProvider) { - - Assert.notNull(expressionParser, "ExpressionParser must not be null!"); - Assert.notNull(evaluationContextProvider, "EvaluationContextProvider must not be null!"); - - this.expressionParser = expressionParser; - this.evaluationContextProvider = evaluationContextProvider; - } - - /** - * Bind values provided by {@link MongoParameterAccessor} to placeholders in {@literal raw} while considering - * potential conversions and parameter types. - * - * @param raw can be {@literal null} or empty. - * @param accessor must not be {@literal null}. - * @param bindingContext must not be {@literal null}. - * @return {@literal null} if given {@code raw} value is empty. - */ - public String bind(String raw, MongoParameterAccessor accessor, BindingContext bindingContext) { - - if (!StringUtils.hasText(raw)) { - return null; - } - - return replacePlaceholders(raw, accessor, bindingContext); - } - - /** - * Replaced the parameter placeholders with the actual parameter values from the given {@link ParameterBinding}s. - * - * @param input must not be {@literal null} or empty. 
- * @param accessor must not be {@literal null}. - * @param bindings must not be {@literal null}. - * @return - */ - private String replacePlaceholders(String input, MongoParameterAccessor accessor, BindingContext bindingContext) { - - if (!bindingContext.hasBindings()) { - return input; - } - - boolean isCompletlyParameterizedQuery = input.matches("^\\?\\d+$"); - StringBuilder result = new StringBuilder(input); - - for (ParameterBinding binding : bindingContext.getBindings()) { - - String parameter = binding.getParameter(); - int idx = result.indexOf(parameter); - - if (idx == -1) { - continue; - } - - String valueForBinding = getParameterValueForBinding(accessor, bindingContext.getParameters(), binding); - - int start = idx; - int end = idx + parameter.length(); - - // If the value to bind is an object literal we need to remove the quoting around the expression insertion point. - if (valueForBinding.startsWith("{") && !isCompletlyParameterizedQuery) { - - // Is the insertion point actually surrounded by quotes? - char beforeStart = result.charAt(start - 1); - char afterEnd = result.charAt(end); - - if ((beforeStart == '\'' || beforeStart == '"') && (afterEnd == '\'' || afterEnd == '"')) { - - // Skip preceding and following quote - start -= 1; - end += 1; - } - } - - result.replace(start, end, valueForBinding); - } - - return result.toString(); - } - - /** - * Returns the serialized value to be used for the given {@link ParameterBinding}. - * - * @param accessor must not be {@literal null}. - * @param parameters - * @param binding must not be {@literal null}. - * @return - */ - private String getParameterValueForBinding(MongoParameterAccessor accessor, MongoParameters parameters, - ParameterBinding binding) { - - Object value = binding.isExpression() - ? evaluateExpression(binding.getExpression(), parameters, accessor.getValues()) - : accessor.getBindableValue(binding.getParameterIndex()); - - if (value instanceof String && binding.isQuoted()) { - return (String) value; - } - - if (value instanceof byte[]) { - - String base64representation = DatatypeConverter.printBase64Binary((byte[]) value); - - if (!binding.isQuoted()) { - return "{ '$binary' : '" + base64representation + "', '$type' : " + BSON.B_GENERAL + "}"; - } - - return base64representation; - } - - return JSON.serialize(value); - } - - /** - * Evaluates the given {@code expressionString}. - * - * @param expressionString must not be {@literal null} or empty. - * @param parameters must not be {@literal null}. - * @param parameterValues must not be {@literal null}. - * @return - */ - private Object evaluateExpression(String expressionString, MongoParameters parameters, Object[] parameterValues) { - - EvaluationContext evaluationContext = evaluationContextProvider.getEvaluationContext(parameters, parameterValues); - Expression expression = expressionParser.parseExpression(expressionString); - - return expression.getValue(evaluationContext, Object.class); - } - - /** - * @author Christoph Strobl - * @since 1.9 - */ - static class BindingContext { - - final MongoParameters parameters; - final List bindings; - - /** - * Creates new {@link BindingContext}. - * - * @param parameters - * @param bindings - */ - public BindingContext(MongoParameters parameters, List bindings) { - - this.parameters = parameters; - this.bindings = bindings; - } - - /** - * @return {@literal true} when list of bindings is not empty. 
- */ - boolean hasBindings() { - return !CollectionUtils.isEmpty(bindings); - } - - /** - * Get unmodifiable list of {@link ParameterBinding}s. - * - * @return never {@literal null}. - */ - public List getBindings() { - return Collections.unmodifiableList(bindings); - } - - /** - * Get the associated {@link MongoParameters}. - * - * @return - */ - public MongoParameters getParameters() { - return parameters; - } - - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java index a68896c03d..8678e5a74c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,70 @@ */ package org.springframework.data.mongodb.repository.query; -import java.io.Serializable; - +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.repository.core.EntityInformation; +import org.springframework.lang.Nullable; /** * Mongo specific {@link EntityInformation}. - * + * * @author Oliver Gierke + * @author Mark Paluch */ -public interface MongoEntityInformation extends EntityInformation { +public interface MongoEntityInformation extends EntityInformation { /** * Returns the name of the collection the entity shall be persisted to. - * + * * @return */ String getCollectionName(); /** * Returns the attribute that the id will be persisted to. - * + * * @return */ String getIdAttribute(); -} \ No newline at end of file + + /** + * Returns whether the entity uses optimistic locking. + * + * @return true if the entity defines a {@link org.springframework.data.annotation.Version} property. + * @since 2.2 + */ + default boolean isVersioned() { + return false; + } + + /** + * Returns the version value for the entity or {@literal null} if the entity is not {@link #isVersioned() versioned}. + * + * @param entity must not be {@literal null} + * @return can be {@literal null}. + * @since 2.2 + */ + @Nullable + default Object getVersion(T entity) { + return null; + } + + /** + * Returns whether the entity defines a specific collation. + * + * @return {@literal true} if the entity defines a collation. + * @since 2.2 + */ + default boolean hasCollation() { + return getCollation() != null; + } + + /** + * Return the collation for the entity or {@literal null} if {@link #hasCollation() not defined}. + * + * @return can be {@literal null}. 
+ * @since 2.2 + */ + @Nullable + Collation getCollation(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java index 8d951b9e92..9aa8af8cfe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,28 @@ */ package org.springframework.data.mongodb.repository.query; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.repository.core.EntityMetadata; /** * Extension of {@link EntityMetadata} to additionally expose the collection name an entity shall be persisted to. - * + * * @author Oliver Gierke */ public interface MongoEntityMetadata extends EntityMetadata { /** * Returns the name of the collection the entity shall be persisted to. - * + * * @return */ String getCollectionName(); + + /** + * Returns the {@link MongoPersistentEntity} that supposed to determine the collection to be queried. + * + * @return + * @since 2.0.4 + */ + MongoPersistentEntity getCollectionEntity(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java index 6c67778e30..5db853e810 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,21 +18,26 @@ import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.lang.Nullable; /** * Mongo-specific {@link ParameterAccessor} exposing a maximum distance parameter. 
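The new default methods on MongoEntityInformation let existing implementations compile unchanged while callers code against the richer contract. A hedged usage sketch; information, entity, and query are hypothetical variables:

    if (information.isVersioned()) {
        Object version = information.getVersion(entity);   // may be null for new, not-yet-persisted entities
        // ... apply optimistic-locking semantics to the write
    }

    if (information.hasCollation()) {                      // default implementation: getCollation() != null
        query.collation(information.getCollation());
    }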
- * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ public interface MongoParameterAccessor extends ParameterAccessor { /** * Returns a {@link Distance} to be applied to Mongo geo queries. - * + * * @return the maximum distance to apply to the geo query or {@literal null} if there's no {@link Distance} parameter * at all or the given value for it was {@literal null}. */ @@ -40,24 +45,44 @@ public interface MongoParameterAccessor extends ParameterAccessor { /** * Returns the {@link Point} to use for a geo-near query. - * + * * @return */ + @Nullable Point getGeoNearLocation(); /** * Returns the {@link TextCriteria} to be used for full text query. - * + * * @return null if not set. * @since 1.6 */ + @Nullable TextCriteria getFullText(); + /** + * Returns the {@link Collation} to be used for the query. + * + * @return {@literal null} if not set. + * @since 2.2 + */ + @Nullable + Collation getCollation(); + /** * Returns the raw parameter values of the underlying query method. - * + * * @return * @since 1.8 */ Object[] getValues(); + + /** + * Returns the {@link Update} to be used for an update execution. + * + * @return {@literal null} if not present. + * @since 3.4 + */ + @Nullable + UpdateDefinition getUpdate(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java index 1e36047b94..1f66d5b77d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
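The getUpdate() accessor added to MongoParameterAccessor above backs repository update execution. A sketch of the two declaration styles that surface an UpdateDefinition; the repository and method names are illustrative, and the parameter-based form is inferred from the UpdateDefinition parameter detection added in MongoParameters:

    interface PersonRepository extends CrudRepository<Person, String> {

        // Update supplied via annotation, with ?1 bound from the method arguments.
        @Update("{ '$inc' : { 'visits' : ?1 } }")
        long findAndIncrementVisitsByLastname(String lastname, int increment);

        // Update supplied as an argument; resolved through the UpdateDefinition parameter index.
        long findAndModifyByLastname(String lastname, UpdateDefinition update);
    }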
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.repository.query; +import java.io.Serializable; import java.lang.reflect.Method; import java.util.Arrays; import java.util.List; @@ -22,57 +23,92 @@ import org.springframework.core.MethodParameter; import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoPage; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Near; import org.springframework.data.mongodb.repository.query.MongoParameters.MongoParameter; import org.springframework.data.repository.query.Parameter; import org.springframework.data.repository.query.Parameters; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.repository.query.ParametersSource; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; /** * Custom extension of {@link Parameters} discovering additional - * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch + * @author Thomas Darimont */ public class MongoParameters extends Parameters { + private static final List> GEO_NEAR_RESULTS = Arrays.asList(GeoResult.class, + GeoResults.class, GeoPage.class); + private final int rangeIndex; private final int maxDistanceIndex; - private final Integer fullTextIndex; + private final @Nullable Integer fullTextIndex; + private final @Nullable Integer nearIndex; + private final @Nullable Integer collationIndex; + private final int updateIndex; + private final TypeInformation domainType; - private Integer nearIndex; + /** + * Creates a new {@link MongoParameters} instance from the given {@link Method} and {@link MongoQueryMethod}. + * + * @param parametersSource must not be {@literal null}. + * @since 4.5 + */ + public MongoParameters(ParametersSource parametersSource) { + this(parametersSource, isGeoNearQuery(parametersSource.getMethod())); + } /** * Creates a new {@link MongoParameters} instance from the given {@link Method} and {@link MongoQueryMethod}. - * - * @param method must not be {@literal null}. - * @param queryMethod must not be {@literal null}. + * + * @param parametersSource must not be {@literal null}. + * @param isGeoNearMethod indicate if this is a geo-spatial query method */ - public MongoParameters(Method method, boolean isGeoNearMethod) { + public MongoParameters(ParametersSource parametersSource, boolean isGeoNearMethod) { + this(parametersSource, new NearIndex(parametersSource, isGeoNearMethod)); + } - super(method); + /** + * Creates a new {@link MongoParameters} instance from the given {@link Method} and {@link MongoQueryMethod}. + * + * @param parametersSource must not be {@literal null}. + * @param nearIndex the near parameter index. 
+ */ + private MongoParameters(ParametersSource parametersSource, NearIndex nearIndex) { + + super(parametersSource, methodParameter -> new MongoParameter(methodParameter, + parametersSource.getDomainTypeInformation(), nearIndex.nearIndex)); + + Method method = parametersSource.getMethod(); List> parameterTypes = Arrays.asList(method.getParameterTypes()); + this.domainType = parametersSource.getDomainTypeInformation(); this.fullTextIndex = parameterTypes.indexOf(TextCriteria.class); - ClassTypeInformation declaringClassInfo = ClassTypeInformation.from(method.getDeclaringClass()); + TypeInformation declaringClassInfo = TypeInformation.of(parametersSource.getContainingClass()); List> parameterTypeInfo = declaringClassInfo.getParameterTypes(method); this.rangeIndex = getTypeIndex(parameterTypeInfo, Range.class, Distance.class); this.maxDistanceIndex = this.rangeIndex == -1 ? getTypeIndex(parameterTypeInfo, Distance.class, null) : -1; - - if (this.nearIndex == null && isGeoNearMethod) { - this.nearIndex = getNearIndex(parameterTypes); - } else if (this.nearIndex == null) { - this.nearIndex = -1; - } + this.collationIndex = getTypeIndex(parameterTypeInfo, Collation.class, null); + this.updateIndex = QueryUtils.indexOfAssignableParameter(UpdateDefinition.class, parameterTypes); + this.nearIndex = nearIndex.nearIndex; } - private MongoParameters(List parameters, int maxDistanceIndex, Integer nearIndex, - Integer fullTextIndex, int rangeIndex) { + private MongoParameters(List parameters, int maxDistanceIndex, @Nullable Integer nearIndex, + @Nullable Integer fullTextIndex, int rangeIndex, @Nullable Integer collationIndex, int updateIndex, + TypeInformation domainType) { super(parameters); @@ -80,9 +116,45 @@ private MongoParameters(List parameters, int maxDistanceIndex, I this.fullTextIndex = fullTextIndex; this.maxDistanceIndex = maxDistanceIndex; this.rangeIndex = rangeIndex; + this.collationIndex = collationIndex; + this.updateIndex = updateIndex; + this.domainType = domainType; + } + + static boolean isGeoNearQuery(Method method) { + + Class returnType = method.getReturnType(); + + for (Class type : GEO_NEAR_RESULTS) { + if (type.isAssignableFrom(returnType)) { + return true; + } + } + + if (Iterable.class.isAssignableFrom(returnType)) { + TypeInformation from = TypeInformation.fromReturnTypeOf(method); + return GeoResult.class.equals(from.getRequiredComponentType().getType()); + } + + return false; + } + + static class NearIndex { + + private final @Nullable Integer nearIndex; + + public NearIndex(ParametersSource parametersSource, boolean isGeoNearMethod) { + + int index = findNearIndexInParameters(parametersSource.getMethod()); + if (index == -1 && isGeoNearMethod) { + index = getNearIndex(Arrays.asList(parametersSource.getMethod().getParameterTypes())); + } + + this.nearIndex = index; + } } - private final int getNearIndex(List> parameterTypes) { + private static int getNearIndex(List> parameterTypes) { for (Class reference : Arrays.asList(Point.class, double[].class)) { @@ -95,51 +167,42 @@ private final int getNearIndex(List> parameterTypes) { if (nearIndex == parameterTypes.lastIndexOf(reference)) { return nearIndex; } else { - throw new IllegalStateException("Multiple Point parameters found but none annotated with @Near!"); + throw new IllegalStateException("Multiple Point parameters found but none annotated with @Near"); } } return -1; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.Parameters#createParameter(org.springframework.core.MethodParameter) - */ - 
@Override - protected MongoParameter createParameter(MethodParameter parameter) { + static int findNearIndexInParameters(Method method) { - MongoParameter mongoParameter = new MongoParameter(parameter); + int index = -1; + for (java.lang.reflect.Parameter p : method.getParameters()) { - // Detect manually annotated @Near Point and reject multiple annotated ones - if (this.nearIndex == null && mongoParameter.isManuallyAnnotatedNearParameter()) { - this.nearIndex = mongoParameter.getIndex(); - } else if (mongoParameter.isManuallyAnnotatedNearParameter()) { - throw new IllegalStateException(String.format( - "Found multiple @Near annotations ond method %s! Only one allowed!", parameter.getMethod().toString())); - } + MethodParameter methodParameter = MethodParameter.forParameter(p); + + if ((Point.class.isAssignableFrom(methodParameter.getParameterType()) + || methodParameter.getParameterType().equals(double[].class)) + && methodParameter.hasParameterAnnotation(Near.class)) { + if (index == -1) { + index = methodParameter.getParameterIndex(); + } else { + throw new IllegalStateException( + String.format("Found multiple @Near annotations ond method %s; Only one allowed", method)); + } - return mongoParameter; + } + } + return index; } public int getDistanceRangeIndex() { return -1; } - /** - * Returns the index of a {@link Distance} parameter to be used for geo queries. - * - * @return - * @deprecated since 1.7. Please use {@link #getMaxDistanceIndex()} instead. - */ - @Deprecated - public int getDistanceIndex() { - return getMaxDistanceIndex(); - } - /** * Returns the index of the {@link Distance} parameter to be used for max distance in geo queries. - * + * * @return * @since 1.7 */ @@ -149,7 +212,7 @@ public int getMaxDistanceIndex() { /** * Returns the index of the parameter to be used to start a geo-near query from. - * + * * @return */ public int getNearIndex() { @@ -157,13 +220,13 @@ public int getNearIndex() { } /** - * Returns ths inde of the parameter to be used as a textquery param - * + * Returns the index of the parameter to be used as a text query param + * * @return * @since 1.6 */ public int getFullTextParameterIndex() { - return fullTextIndex != null ? fullTextIndex.intValue() : -1; + return fullTextIndex != null ? fullTextIndex : -1; } /** @@ -171,7 +234,7 @@ public int getFullTextParameterIndex() { * @since 1.6 */ public boolean hasFullTextParameter() { - return this.fullTextIndex != null && this.fullTextIndex.intValue() >= 0; + return this.fullTextIndex != null && this.fullTextIndex >= 0; } /** @@ -182,16 +245,33 @@ public int getRangeIndex() { return rangeIndex; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.Parameters#createFrom(java.util.List) + /** + * Returns the index of the {@link Collation} parameter or -1 if not present. + * + * @return -1 if not set. + * @since 2.2 */ + public int getCollationParameterIndex() { + return collationIndex != null ? collationIndex : -1; + } + + /** + * Returns the index of the {@link UpdateDefinition} parameter or -1 if not present. + * + * @return -1 if not present. 
+ * @since 3.4 + */ + public int getUpdateIndex() { + return updateIndex; + } + @Override protected MongoParameters createFrom(List parameters) { - return new MongoParameters(parameters, this.maxDistanceIndex, this.nearIndex, this.fullTextIndex, this.rangeIndex); + return new MongoParameters(parameters, this.maxDistanceIndex, this.nearIndex, this.fullTextIndex, this.rangeIndex, + this.collationIndex, this.updateIndex, this.domainType); } - private int getTypeIndex(List> parameterTypes, Class type, Class componentType) { + private int getTypeIndex(List> parameterTypes, Class type, @Nullable Class componentType) { for (int i = 0; i < parameterTypes.size(); i++) { @@ -201,7 +281,7 @@ private int getTypeIndex(List> parameterTypes, Class type, if (componentType == null) { return i; - } else if (componentType.equals(candidate.getComponentType().getType())) { + } else if (componentType.equals(candidate.getRequiredComponentType().getType())) { return i; } } @@ -212,39 +292,37 @@ private int getTypeIndex(List> parameterTypes, Class type, /** * Custom {@link Parameter} implementation adding parameters of type {@link Distance} to the special ones. - * + * * @author Oliver Gierke */ - class MongoParameter extends Parameter { + static class MongoParameter extends Parameter { private final MethodParameter parameter; + private final @Nullable Integer nearIndex; /** * Creates a new {@link MongoParameter}. - * + * * @param parameter must not be {@literal null}. + * @param domainType must not be {@literal null}. */ - MongoParameter(MethodParameter parameter) { - super(parameter); + MongoParameter(MethodParameter parameter, TypeInformation domainType, @Nullable Integer nearIndex) { + super(parameter, domainType); this.parameter = parameter; + this.nearIndex = nearIndex; if (!isPoint() && hasNearAnnotation()) { - throw new IllegalArgumentException("Near annotation is only allowed at Point parameter!"); + throw new IllegalArgumentException("Near annotation is only allowed at Point parameter"); } } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.Parameter#isSpecialParameter() - */ @Override public boolean isSpecialParameter() { return super.isSpecialParameter() || Distance.class.isAssignableFrom(getType()) || isNearParameter() - || TextCriteria.class.isAssignableFrom(getType()); + || TextCriteria.class.isAssignableFrom(getType()) || Collation.class.isAssignableFrom(getType()); } private boolean isNearParameter() { - Integer nearIndex = MongoParameters.this.nearIndex; return nearIndex != null && nearIndex.equals(getIndex()); } @@ -259,7 +337,6 @@ private boolean isPoint() { private boolean hasNearAnnotation() { return parameter.getParameterAnnotation(Near.class) != null; } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java index 7f8ac79b4f..ac1931e10c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
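isGeoNearQuery(Method) in MongoParameters above classifies methods by return type: direct GeoResult, GeoResults, or GeoPage returns qualify, as does any Iterable whose component type is GeoResult. Illustrative signatures under an assumed Person domain type with a location property:

    interface PersonRepository extends Repository<Person, String> {

        GeoResults<Person> findByLocationNear(Point point, Distance maxDistance);            // GeoResults -> geo-near

        GeoPage<Person> findByLocationNear(Point point, Distance max, Pageable pageable);    // GeoPage -> geo-near

        List<GeoResult<Person>> findByLocationNear(Point point);                             // Iterable of GeoResult -> geo-near

        List<Person> findByLocationWithin(Circle circle);                                    // not a geo-near query
    }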
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,34 @@ */ package org.springframework.data.mongodb.repository.query; -import java.util.Arrays; -import java.util.List; - import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Term; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParametersParameterAccessor; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; /** * Mongo-specific {@link ParametersParameterAccessor} to allow access to the {@link Distance} parameter. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ public class MongoParametersParameterAccessor extends ParametersParameterAccessor implements MongoParameterAccessor { - private final MongoQueryMethod method; - private final List values; + final MongoQueryMethod method; /** * Creates a new {@link MongoParametersParameterAccessor}. - * + * * @param method must not be {@literal null}. * @param values must not be {@literal null}. */ @@ -50,7 +51,6 @@ public MongoParametersParameterAccessor(MongoQueryMethod method, Object[] values super(method.getParameters(), values); this.method = method; - this.values = Arrays.asList(values); } public Range getDistanceRange() { @@ -64,15 +64,12 @@ public Range getDistanceRange() { } int maxDistanceIndex = mongoParameters.getMaxDistanceIndex(); - Distance maxDistance = maxDistanceIndex == -1 ? null : (Distance) getValue(maxDistanceIndex); + Bound maxDistance = maxDistanceIndex == -1 ? 
Bound.unbounded() + : Bound.inclusive((Distance) getValue(maxDistanceIndex)); - return new Range(null, maxDistance); + return Range.of(Bound.unbounded(), maxDistance); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getGeoNearLocation() - */ public Point getGeoNearLocation() { int nearIndex = method.getParameters().getNearIndex(); @@ -87,10 +84,9 @@ public Point getGeoNearLocation() { return null; } - if (value instanceof double[]) { - double[] typedValue = (double[]) value; + if (value instanceof double[] typedValue) { if (typedValue.length != 2) { - throw new IllegalArgumentException("The given double[] must have exactly 2 elements!"); + throw new IllegalArgumentException("The given double[] must have exactly 2 elements"); } else { return new Point(typedValue[0], typedValue[1]); } @@ -99,10 +95,7 @@ public Point getGeoNearLocation() { return (Point) value; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getFullText() - */ + @Nullable @Override public TextCriteria getFullText() { int index = method.getParameters().getFullTextParameterIndex(); @@ -113,16 +106,16 @@ protected TextCriteria potentiallyConvertFullText(Object fullText) { Assert.notNull(fullText, "Fulltext parameter must not be 'null'."); - if (fullText instanceof String) { - return TextCriteria.forDefaultLanguage().matching((String) fullText); + if (fullText instanceof String stringValue) { + return TextCriteria.forDefaultLanguage().matching(stringValue); } - if (fullText instanceof Term) { - return TextCriteria.forDefaultLanguage().matching((Term) fullText); + if (fullText instanceof Term term) { + return TextCriteria.forDefaultLanguage().matching(term); } - if (fullText instanceof TextCriteria) { - return ((TextCriteria) fullText); + if (fullText instanceof TextCriteria textCriteria) { + return textCriteria; } throw new IllegalArgumentException( @@ -130,12 +123,25 @@ protected TextCriteria potentiallyConvertFullText(Object fullText) { ClassUtils.getShortName(fullText.getClass()))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues() - */ + @Override + public Collation getCollation() { + + if (method.getParameters().getCollationParameterIndex() == -1) { + return null; + } + + return getValue(method.getParameters().getCollationParameterIndex()); + } + @Override public Object[] getValues() { - return values.toArray(); + return super.getValues(); + } + + @Override + public UpdateDefinition getUpdate() { + + int updateIndex = method.getParameters().getUpdateIndex(); + return updateIndex == -1 ? null : (UpdateDefinition) getValue(updateIndex); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java index 083b778d7e..66a8870623 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
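getDistanceRange above now models a missing maximum distance with Bound.unbounded() rather than null, so callers unwrap Optionals instead of null-checking. A small sketch against the same Spring Data Commons API; the values and the criteria variable are illustrative:

    Bound<Distance> upper = Bound.inclusive(new Distance(10, Metrics.KILOMETERS));
    Range<Distance> range = Range.of(Bound.unbounded(), upper);   // no lower bound, inclusive upper bound

    range.getUpperBound().getValue()                              // Optional<Distance>
            .map(Distance::getNormalizedValue)
            .ifPresent(max -> criteria.maxDistance(max));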
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,54 +20,63 @@ import java.util.Arrays; import java.util.Collection; import java.util.Iterator; +import java.util.Optional; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonRegularExpression; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Shape; +import org.springframework.data.mapping.PersistentPropertyPath; import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.PersistentPropertyPath; +import org.springframework.data.mongodb.core.geo.GeoJson; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.MetricConversion; import org.springframework.data.mongodb.core.query.MongoRegexCreator; +import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor.PotentiallyConvertingIterator; import org.springframework.data.repository.query.parser.AbstractQueryCreator; import org.springframework.data.repository.query.parser.Part; import org.springframework.data.repository.query.parser.Part.IgnoreCaseType; import org.springframework.data.repository.query.parser.Part.Type; import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; /** * Custom query creator to create Mongo criterias. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Edward Prentice */ class MongoQueryCreator extends AbstractQueryCreator { - private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class); - private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}"); - private final MongoParameterAccessor accessor; - private final boolean isGeoNearQuery; + private static final Log LOG = LogFactory.getLog(MongoQueryCreator.class); + private final MongoParameterAccessor accessor; private final MappingContext context; + private final boolean isGeoNearQuery; /** * Creates a new {@link MongoQueryCreator} from the given {@link PartTree}, {@link ConvertingParameterAccessor} and * {@link MappingContext}. 
- * + * * @param tree * @param accessor * @param context @@ -80,7 +89,7 @@ public MongoQueryCreator(PartTree tree, ConvertingParameterAccessor accessor, /** * Creates a new {@link MongoQueryCreator} from the given {@link PartTree}, {@link ConvertingParameterAccessor} and * {@link MappingContext}. - * + * * @param tree * @param accessor * @param context @@ -91,17 +100,13 @@ public MongoQueryCreator(PartTree tree, ConvertingParameterAccessor accessor, super(tree, accessor); - Assert.notNull(context); + Assert.notNull(context, "MappingContext must not be null"); this.accessor = accessor; this.isGeoNearQuery = isGeoNearQuery; this.context = context; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#create(org.springframework.data.repository.query.parser.Part, java.util.Iterator) - */ @Override protected Criteria create(Part part, Iterator iterator) { @@ -111,15 +116,10 @@ protected Criteria create(Part part, Iterator iterator) { PersistentPropertyPath path = context.getPersistentPropertyPath(part.getProperty()); MongoPersistentProperty property = path.getLeafProperty(); - Criteria criteria = from(part, property, where(path.toDotPath()), (PotentiallyConvertingIterator) iterator); - return criteria; + return from(part, property, where(path.toDotPath()), iterator); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#and(org.springframework.data.repository.query.parser.Part, java.lang.Object, java.util.Iterator) - */ @Override protected Criteria and(Part part, Criteria base, Iterator iterator) { @@ -130,13 +130,9 @@ protected Criteria and(Part part, Criteria base, Iterator iterator) { PersistentPropertyPath path = context.getPersistentPropertyPath(part.getProperty()); MongoPersistentProperty property = path.getLeafProperty(); - return from(part, property, base.and(path.toDotPath()), (PotentiallyConvertingIterator) iterator); + return from(part, property, base.and(path.toDotPath()), iterator); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#or(java.lang.Object, java.lang.Object) - */ @Override protected Criteria or(Criteria base, Criteria criteria) { @@ -144,10 +140,6 @@ protected Criteria or(Criteria base, Criteria criteria) { return result.orOperator(base, criteria); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#complete(java.lang.Object, org.springframework.data.domain.Sort) - */ @Override protected Query complete(Criteria criteria, Sort sort) { @@ -162,7 +154,7 @@ protected Query complete(Criteria criteria, Sort sort) { /** * Populates the given {@link CriteriaDefinition} depending on the {@link Part} given. 
- * + * * @param part * @param property * @param criteria @@ -185,24 +177,28 @@ private Criteria from(Part part, MongoPersistentProperty property, Criteria crit case LESS_THAN_EQUAL: return criteria.lte(parameters.next()); case BETWEEN: - return criteria.gt(parameters.next()).lt(parameters.next()); + return computeBetweenPart(criteria, parameters); case IS_NOT_NULL: return criteria.ne(null); case IS_NULL: return criteria.is(null); case NOT_IN: - return criteria.nin(nextAsArray(parameters)); + return criteria.nin(nextAsList(parameters, part)); case IN: - return criteria.in(nextAsArray(parameters)); + return criteria.in(nextAsList(parameters, part)); case LIKE: case STARTING_WITH: case ENDING_WITH: case CONTAINING: return createContainingCriteria(part, property, criteria, parameters); + case NOT_LIKE: + return createContainingCriteria(part, property, criteria.not(), parameters); case NOT_CONTAINING: - return createContainingCriteria(part, property, criteria, parameters).not(); + return createContainingCriteria(part, property, criteria.not(), parameters); case REGEX: - return criteria.regex(parameters.next().toString()); + + Object param = parameters.next(); + return param instanceof Pattern pattern ? criteria.regex(pattern) : criteria.regex(param.toString()); case EXISTS: return criteria.exists((Boolean) parameters.next()); case TRUE: @@ -212,63 +208,65 @@ private Criteria from(Part part, MongoPersistentProperty property, Criteria crit case NEAR: Range range = accessor.getDistanceRange(); - Distance distance = range.getUpperBound(); - Distance minDistance = range.getLowerBound(); + Optional distance = range.getUpperBound().getValue(); + Optional minDistance = range.getLowerBound().getValue(); Point point = accessor.getGeoNearLocation(); - point = point == null ? nextAs(parameters, Point.class) : point; + Point pointToUse = point == null ? nextAs(parameters, Point.class) : point; boolean isSpherical = isSpherical(property); - if (distance == null) { - return isSpherical ? criteria.nearSphere(point) : criteria.near(point); - } else { - if (isSpherical || !Metrics.NEUTRAL.equals(distance.getMetric())) { - criteria.nearSphere(point); + return distance.map(it -> { + + if (isSpherical || !Metrics.NEUTRAL.equals(it.getMetric())) { + criteria.nearSphere(pointToUse); } else { - criteria.near(point); + criteria.near(pointToUse); } - criteria.maxDistance(distance.getNormalizedValue()); - if (minDistance != null) { - criteria.minDistance(minDistance.getNormalizedValue()); + + if (pointToUse instanceof GeoJson) { // using GeoJson distance is in meters. + + criteria.maxDistance(MetricConversion.getDistanceInMeters(it)); + minDistance.map(MetricConversion::getDistanceInMeters).ifPresent(criteria::minDistance); + } else { + criteria.maxDistance(it.getNormalizedValue()); + minDistance.map(Distance::getNormalizedValue).ifPresent(criteria::minDistance); } - } - return criteria; + + return criteria; + + }).orElseGet(() -> isSpherical ? criteria.nearSphere(pointToUse) : criteria.near(pointToUse)); + case WITHIN: Object parameter = parameters.next(); return criteria.within((Shape) parameter); case SIMPLE_PROPERTY: - return isSimpleComparisionPossible(part) ? criteria.is(parameters.next()) + return isSimpleComparisonPossible(part) ? criteria.is(parameters.next()) : createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, false); case NEGATING_SIMPLE_PROPERTY: - return isSimpleComparisionPossible(part) ? criteria.ne(parameters.next()) + return isSimpleComparisonPossible(part) ? 
criteria.ne(parameters.next()) : createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, true); default: - throw new IllegalArgumentException("Unsupported keyword!"); + throw new IllegalArgumentException("Unsupported keyword"); } } - private boolean isSimpleComparisionPossible(Part part) { + private boolean isSimpleComparisonPossible(Part part) { - switch (part.shouldIgnoreCase()) { - case NEVER: - return true; - case WHEN_POSSIBLE: - return part.getProperty().getType() != String.class; - case ALWAYS: - return false; - default: - return true; - } + return switch (part.shouldIgnoreCase()) { + case NEVER -> true; + case WHEN_POSSIBLE -> part.getProperty().getType() != String.class; + case ALWAYS -> false; + }; } /** * Creates and extends the given criteria with a like-regex if necessary. - * + * * @param part * @param property * @param criteria @@ -296,7 +294,7 @@ private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProper criteria = criteria.not(); } - return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString()); + return addAppropriateLikeRegexTo(criteria, part, parameters.next()); case NEVER: // intentional no-op @@ -310,7 +308,7 @@ private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProper * If the target property of the comparison is of type String, then the operator checks for match using regular * expression. If the target property of the comparison is a {@link Collection} then the operator evaluates to true if * it finds an exact match within any member of the {@link Collection}. - * + * * @param part * @param property * @param criteria @@ -321,29 +319,36 @@ private Criteria createContainingCriteria(Part part, MongoPersistentProperty pro Iterator parameters) { if (property.isCollectionLike()) { - return criteria.in(nextAsArray(parameters)); + return criteria.in(nextAsList(parameters, part)); } - return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString()); + return addAppropriateLikeRegexTo(criteria, part, parameters.next()); } /** * Creates an appropriate like-regex and appends it to the given criteria. - * + * * @param criteria * @param part * @param value * @return the criteria extended with the regex. */ - private Criteria addAppropriateLikeRegexTo(Criteria criteria, Part part, String value) { + private Criteria addAppropriateLikeRegexTo(Criteria criteria, Part part, Object value) { + + if (value == null) { + + throw new IllegalArgumentException(String.format( + "Argument for creating $regex pattern for property '%s' must not be null", part.getProperty().getSegment())); + } - return criteria.regex(toLikeRegex(value, part), toRegexOptions(part)); + return criteria.regex(toLikeRegex(value.toString(), part), toRegexOptions(part)); } /** * @param part * @return the regex options or {@literal null}. */ + @Nullable private String toRegexOptions(Part part) { String regexOptions = null; @@ -358,7 +363,7 @@ private String toRegexOptions(Part part) { /** * Returns the next element from the given {@link Iterator} expecting it to be of a certain type. 
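The NEAR branch above distinguishes plain coordinates from GeoJSON points (whose distances are expressed in meters) and maps the bounds of a distance Range onto $minDistance and $maxDistance. A sketch, assuming a hypothetical Restaurant type with a geo-indexed location property:

import org.springframework.data.domain.Range;
import org.springframework.data.domain.Range.Bound;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.repository.Repository;

interface RestaurantRepository extends Repository<Restaurant, String> {

    // lower bound becomes $minDistance, upper bound becomes $maxDistance
    GeoResults<Restaurant> findByLocationNear(Point location, Range<Distance> distance);
}

// usage:
// Range<Distance> band = Range.of(Bound.inclusive(new Distance(1, Metrics.KILOMETERS)),
//     Bound.inclusive(new Distance(5, Metrics.KILOMETERS)));
// repository.findByLocationNear(new Point(-73.99, 40.73), band);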
- * + * * @param * @param iterator * @param type @@ -367,35 +372,116 @@ private String toRegexOptions(Part part) { */ @SuppressWarnings("unchecked") private T nextAs(Iterator iterator, Class type) { + Object parameter = iterator.next(); - if (parameter.getClass().isAssignableFrom(type)) { + + if (ClassUtils.isAssignable(type, parameter.getClass())) { return (T) parameter; } throw new IllegalArgumentException( - String.format("Expected parameter type of %s but got %s!", type, parameter.getClass())); + String.format("Expected parameter type of %s but got %s", type, parameter.getClass())); } - private Object[] nextAsArray(Iterator iterator) { + private java.util.List nextAsList(Iterator iterator, Part part) { + + Streamable streamable = asStreamable(iterator.next()); + if (!isSimpleComparisonPossible(part)) { + + MatchMode matchMode = toMatchMode(part.getType()); + String regexOptions = toRegexOptions(part); - Object next = iterator.next(); + streamable = streamable.map(it -> { + if (it instanceof String value) { - if (next instanceof Collection) { - return ((Collection) next).toArray(); - } else if (next != null && next.getClass().isArray()) { - return (Object[]) next; + return new BsonRegularExpression(MongoRegexCreator.INSTANCE.toRegularExpression(value, matchMode), + regexOptions); + } + return it; + }); } - return new Object[] { next }; + return streamable.toList(); + } + + private Streamable asStreamable(Object value) { + + if (value instanceof Collection collection) { + return Streamable.of(collection); + } else if (ObjectUtils.isArray(value)) { + return Streamable.of((Object[]) value); + } + return Streamable.of(value); } private String toLikeRegex(String source, Part part) { - return MongoRegexCreator.INSTANCE.toRegularExpression(source, part.getType()); + return MongoRegexCreator.INSTANCE.toRegularExpression(source, toMatchMode(part.getType())); } private boolean isSpherical(MongoPersistentProperty property) { - GeoSpatialIndexed index = property.findAnnotation(GeoSpatialIndexed.class); - return index != null && index.type().equals(GeoSpatialIndexType.GEO_2DSPHERE); + if (property.isAnnotationPresent(GeoSpatialIndexed.class)) { + GeoSpatialIndexed index = property.findAnnotation(GeoSpatialIndexed.class); + return index.type().equals(GeoSpatialIndexType.GEO_2DSPHERE); + } + + return false; + } + + /** + * Compute a {@link Type#BETWEEN} typed {@link Part} using {@link Criteria#gt(Object) $gt}, + * {@link Criteria#gte(Object) $gte}, {@link Criteria#lt(Object) $lt} and {@link Criteria#lte(Object) $lte}. + *
                    + * In case the first {@literal value} is actually a {@link Range} the lower and upper bounds of the {@link Range} are + * used according to their {@link Bound#isInclusive() inclusion} definition. Otherwise the {@literal value} is used + * for {@literal $gt} and {@link Iterator#next() parameters.next()} as {@literal $lt}. + * + * @param criteria must not be {@literal null}. + * @param parameters must not be {@literal null}. + * @return + * @since 2.2 + */ + private static Criteria computeBetweenPart(Criteria criteria, Iterator parameters) { + + Object value = parameters.next(); + if (!(value instanceof Range range)) { + return criteria.gt(value).lt(parameters.next()); + } + + Optional min = range.getLowerBound().getValue(); + Optional max = range.getUpperBound().getValue(); + + min.ifPresent(it -> { + + if (range.getLowerBound().isInclusive()) { + criteria.gte(it); + } else { + criteria.gt(it); + } + }); + + max.ifPresent(it -> { + + if (range.getUpperBound().isInclusive()) { + criteria.lte(it); + } else { + criteria.lt(it); + } + }); + + return criteria; + } + + private static MatchMode toMatchMode(Type type) { + + return switch (type) { + case NOT_CONTAINING, CONTAINING -> MatchMode.CONTAINING; + case STARTING_WITH -> MatchMode.STARTING_WITH; + case ENDING_WITH -> MatchMode.ENDING_WITH; + case LIKE, NOT_LIKE -> MatchMode.LIKE; + case REGEX -> MatchMode.REGEX; + case NEGATING_SIMPLE_PROPERTY, SIMPLE_PROPERTY, IN -> MatchMode.EXACT; + default -> MatchMode.DEFAULT; + }; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java index c4274977b0..dd2b78de59 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java @@ -1,11 +1,11 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
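In practice computeBetweenPart lets a derived Between query honor bound inclusiveness when handed a Range, while the two-argument form keeps the exclusive $gt/$lt behavior, as in this sketch (hypothetical Person type):

import java.util.List;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Range.Bound;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

    List<Person> findByAgeBetween(Range<Integer> range); // bounds choose $gt/$gte and $lt/$lte

    List<Person> findByAgeBetween(int lower, int upper); // two arguments: $gt lower, $lt upper
}

// usage: repository.findByAgeBetween(Range.of(Bound.inclusive(18), Bound.exclusive(65)));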
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,10 @@ */ package org.springframework.data.mongodb.repository.query; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -import java.util.Collections; import java.util.List; -import java.util.function.Function; +import java.util.function.Supplier; -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.EntityInstantiators; -import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; import org.springframework.data.domain.Slice; @@ -34,69 +28,68 @@ import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ExecutableFindOperation; +import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.ExecutableUpdate; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.repository.query.ResultProcessor; -import org.springframework.data.repository.query.ReturnedType; -import org.springframework.data.util.CloseableIterator; -import org.springframework.data.util.StreamUtils; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.util.SliceUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; import org.springframework.util.ClassUtils; -import com.mongodb.WriteResult; +import com.mongodb.client.result.DeleteResult; +/** + * Set of classes to contain query execution strategies. Depending (mostly) on the return type of a + * {@link org.springframework.data.repository.query.QueryMethod} an {@link AbstractMongoQuery} can be executed in various + * flavors. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + */ +@FunctionalInterface interface MongoQueryExecution { - Object execute(Query query, Class type, String collection); + @Nullable + Object execute(Query query); /** - * {@link MongoQueryExecution} for collection returning queries. - * + * {@link MongoQueryExecution} for {@link Slice} query methods. 
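The SlicedExecution described next fetches one document more than the requested page size, so Slice#hasNext() can be answered without an extra count round trip. A usage sketch (hypothetical Person type):

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

    Slice<Person> findByLastname(String lastname, Pageable pageable);
}

// a page size of 20 reads 21 documents; the 21st only signals hasNext()
// Slice<Person> slice = repository.findByLastname("Matthews", PageRequest.of(0, 20));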
+ * * @author Oliver Gierke + * @author Christoph Strobl + * @since 1.5 */ - @RequiredArgsConstructor - static final class CollectionExecution implements MongoQueryExecution { + final class SlicedExecution implements MongoQueryExecution { - private final @NonNull MongoOperations operations; + private final FindWithQuery find; private final Pageable pageable; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - @Override - public Object execute(Query query, Class type, String collection) { - return operations.find(query.with(pageable), type, collection); - } - } + public SlicedExecution(ExecutableFindOperation.FindWithQuery find, Pageable pageable) { - /** - * {@link MongoQueryExecution} for {@link Slice} query methods. - * - * @author Oliver Gierke - * @author Christoph Strobl - * @since 1.5 - */ - @RequiredArgsConstructor - static final class SlicedExecution implements MongoQueryExecution { + Assert.notNull(find, "Find must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); - private final @NonNull MongoOperations operations; - private final @NonNull Pageable pageable; + this.find = find; + this.pageable = pageable; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override @SuppressWarnings({ "unchecked", "rawtypes" }) - public Object execute(Query query, Class type, String collection) { + public Object execute(Query query) { int pageSize = pageable.getPageSize(); // Apply Pageable but tweak limit to peek into next page - Query modifiedQuery = query.with(pageable).limit(pageSize + 1); - List result = operations.find(modifiedQuery, type, collection); + Query modifiedQuery = SliceUtils.limitResult(query, pageable).with(pageable.getSort()); + List result = find.matching(modifiedQuery).all(); boolean hasNext = result.size() > pageSize; @@ -106,92 +99,80 @@ public Object execute(Query query, Class type, String collection) { /** * {@link MongoQueryExecution} for pagination queries. - * + * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ - @RequiredArgsConstructor - static final class PagedExecution implements MongoQueryExecution { + final class PagedExecution implements MongoQueryExecution { + + private final FindWithQuery operation; + private final Pageable pageable; - private final @NonNull MongoOperations operations; - private final @NonNull Pageable pageable; + public PagedExecution(ExecutableFindOperation.FindWithQuery operation, Pageable pageable) { + + Assert.notNull(operation, "Operation must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + + this.operation = operation; + this.pageable = pageable; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - @SuppressWarnings({ "rawtypes", "unchecked" }) - public Object execute(Query query, Class type, String collection) { + public Object execute(Query query) { int overallLimit = query.getLimit(); - long count = operations.count(query, type, collection); - count = overallLimit != 0 ? 
Math.min(count, query.getLimit()) : count; - boolean pageableOutOfScope = pageable.getOffset() > count; - - if (pageableOutOfScope) { - return new PageImpl(Collections.emptyList(), pageable, count); - } + TerminatingFind matching = operation.matching(query); // Apply raw pagination - query = query.with(pageable); + query.with(pageable); // Adjust limit if page would exceed the overall limit if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) { - query.limit(overallLimit - pageable.getOffset()); + query.limit((int) (overallLimit - pageable.getOffset())); } - List result = operations.find(query, type, collection); - return new PageImpl(result, pageable, count); - } - } - - /** - * {@link MongoQueryExecution} to return a single entity. - * - * @author Oliver Gierke - */ - @RequiredArgsConstructor - static final class SingleEntityExecution implements MongoQueryExecution { - - private final MongoOperations operations; - private final boolean countProjection; + return PageableExecutionUtils.getPage(matching.all(), pageable, () -> { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - @Override - public Object execute(Query query, Class type, String collection) { - return countProjection ? operations.count(query, type, collection) : operations.findOne(query, type, collection); + long count = operation.matching(Query.of(query).skip(-1).limit(-1)).count(); + return overallLimit != 0 ? Math.min(count, overallLimit) : count; + }); } } /** * {@link MongoQueryExecution} to execute geo-near queries. - * + * * @author Oliver Gierke */ - @RequiredArgsConstructor - static class GeoNearExecution implements MongoQueryExecution { + class GeoNearExecution implements MongoQueryExecution { - private final MongoOperations operations; + private final FindWithQuery operation; + private final MongoQueryMethod method; private final MongoParameterAccessor accessor; - private final TypeInformation returnType; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + public GeoNearExecution(ExecutableFindOperation.FindWithQuery operation, MongoQueryMethod method, + MongoParameterAccessor accessor) { + + Assert.notNull(operation, "Operation must not be null"); + Assert.notNull(method, "Method must not be null"); + Assert.notNull(accessor, "Accessor must not be null"); + + this.operation = operation; + this.method = method; + this.accessor = accessor; + } + @Override - public Object execute(Query query, Class type, String collection) { + public Object execute(Query query) { - GeoResults results = doExecuteQuery(query, type, collection); - return isListOfGeoResult() ? results.getContent() : results; + GeoResults results = doExecuteQuery(query); + return isListOfGeoResult(method.getReturnType()) ? 
results.getContent() : results; } @SuppressWarnings("unchecked") - protected GeoResults doExecuteQuery(Query query, Class type, String collection) { + GeoResults doExecuteQuery(Query query) { Point nearLocation = accessor.getGeoNearLocation(); NearQuery nearQuery = NearQuery.near(nearLocation); @@ -201,181 +182,135 @@ protected GeoResults doExecuteQuery(Query query, Class type, String c } Range distances = accessor.getDistanceRange(); - Distance maxDistance = distances.getUpperBound(); - - if (maxDistance != null) { - nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric()); - } - - Distance minDistance = distances.getLowerBound(); - - if (minDistance != null) { - nearQuery.minDistance(minDistance).in(minDistance.getMetric()); - } + distances.getLowerBound().getValue().ifPresent(it -> nearQuery.minDistance(it).in(it.getMetric())); + distances.getUpperBound().getValue().ifPresent(it -> nearQuery.maxDistance(it).in(it.getMetric())); Pageable pageable = accessor.getPageable(); + nearQuery.with(pageable); - if (pageable != null) { - nearQuery.with(pageable); - } - - return (GeoResults) operations.geoNear(nearQuery, type, collection); + return (GeoResults) operation.near(nearQuery).all(); } - private boolean isListOfGeoResult() { + private static boolean isListOfGeoResult(TypeInformation returnType) { if (!returnType.getType().equals(List.class)) { return false; } TypeInformation componentType = returnType.getComponentType(); - return componentType == null ? false : GeoResult.class.equals(componentType.getType()); + return componentType != null && GeoResult.class.equals(componentType.getType()); } } - static final class PagingGeoNearExecution extends GeoNearExecution { + /** + * {@link MongoQueryExecution} to execute geo-near queries with paging. + * + * @author Oliver Gierke + * @author Mark Paluch + */ + final class PagingGeoNearExecution extends GeoNearExecution { - private final MongoOperations operations; - private final MongoParameterAccessor accessor; + private final FindWithQuery operation; + private final ConvertingParameterAccessor accessor; private final AbstractMongoQuery mongoQuery; - public PagingGeoNearExecution(MongoOperations operations, MongoParameterAccessor accessor, - TypeInformation returnType, AbstractMongoQuery query) { + PagingGeoNearExecution(FindWithQuery operation, MongoQueryMethod method, ConvertingParameterAccessor accessor, + AbstractMongoQuery query) { - super(operations, accessor, returnType); + super(operation, method, accessor); this.accessor = accessor; - this.operations = operations; + this.operation = operation; this.mongoQuery = query; } - /** - * Executes the given {@link Query} to return a page. - * - * @param query must not be {@literal null}. - * @param countQuery must not be {@literal null}. 
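Together, GeoNearExecution and its paging variant below back signatures like this sketch (hypothetical Restaurant type); the page total comes from a separate count query rather than from the $geoNear results themselves:

import org.springframework.data.domain.Pageable;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoPage;
import org.springframework.data.geo.Point;
import org.springframework.data.repository.Repository;

interface RestaurantRepository extends Repository<Restaurant, String> {

    GeoPage<Restaurant> findByLocationNear(Point location, Distance maxDistance, Pageable pageable);
}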
- * @return - */ @Override - public Object execute(Query query, Class type, String collection) { + public Object execute(Query query) { + + GeoResults geoResults = doExecuteQuery(query); + + Page> page = PageableExecutionUtils.getPage(geoResults.getContent(), accessor.getPageable(), + () -> { - ConvertingParameterAccessor parameterAccessor = new ConvertingParameterAccessor(operations.getConverter(), - accessor); - Query countQuery = mongoQuery.applyQueryMetaAttributesWhenPresent(mongoQuery.createCountQuery(parameterAccessor)); - long count = operations.count(countQuery, collection); + Query countQuery = mongoQuery.createCountQuery(accessor); + countQuery = mongoQuery.applyQueryMetaAttributesWhenPresent(countQuery); - return new GeoPage(doExecuteQuery(query, type, collection), accessor.getPageable(), count); + return operation.matching(countQuery).count(); + }); + + // transform to GeoPage after applying optimization + return new GeoPage<>(geoResults, accessor.getPageable(), page.getTotalElements()); } } /** * {@link MongoQueryExecution} removing documents matching the query. - * + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Artyom Gabeev + * @author Christoph Strobl * @since 1.5 */ - @RequiredArgsConstructor - static final class DeleteExecution implements MongoQueryExecution { + final class DeleteExecution implements MongoQueryExecution { private final MongoOperations operations; private final MongoQueryMethod method; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - @Override - public Object execute(Query query, Class type, String collection) { + public DeleteExecution(MongoOperations operations, MongoQueryMethod method) { - if (method.isCollectionQuery()) { - return operations.findAllAndRemove(query, type, collection); - } + Assert.notNull(operations, "Operations must not be null"); + Assert.notNull(method, "Method must not be null"); - WriteResult writeResult = operations.remove(query, type, collection); - return writeResult != null ? writeResult.getN() : 0L; + this.operations = operations; + this.method = method; } - } - /** - * @author Thomas Darimont - * @since 1.7 - */ - @RequiredArgsConstructor - static final class StreamExecution implements MongoQueryExecution { - - private final @NonNull MongoOperations operations; - private final @NonNull Converter resultProcessing; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - @SuppressWarnings("unchecked") - public Object execute(Query query, Class type, String collection) { + public Object execute(Query query) { - return StreamUtils.createStreamFromIterator((CloseableIterator) operations.stream(query, type)) - .map(new Function() { - - @Override - public Object apply(Object t) { - return resultProcessing.convert(t); - } - }); - } - } + String collectionName = method.getEntityInformation().getCollectionName(); + Class type = method.getEntityInformation().getJavaType(); - /** - * An {@link MongoQueryExecution} that wraps the results of the given delegate with the given result processing. 
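The DeleteExecution completed below picks its strategy from the method signature, roughly as in this sketch (hypothetical Person type):

import java.util.List;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

    List<Person> deleteByLastname(String lastname); // findAllAndRemove: returns the removed documents

    long removeByLastname(String lastname); // remove: returns DeleteResult#getDeletedCount()
}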
- * - * @author Oliver Gierke - * @since 1.9 - */ - @RequiredArgsConstructor - static final class ResultProcessingExecution implements MongoQueryExecution { + if (method.isCollectionQuery()) { + return operations.findAllAndRemove(query, type, collectionName); + } - private final @NonNull MongoQueryExecution delegate; - private final @NonNull Converter converter; + if (method.isQueryForEntity() && !ClassUtils.isPrimitiveOrWrapper(method.getReturnedObjectType())) { + return operations.findAndRemove(query, type, collectionName); + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - @Override - public Object execute(Query query, Class type, String collection) { - return converter.convert(delegate.execute(query, type, collection)); + DeleteResult writeResult = operations.remove(query, type, collectionName); + return writeResult.wasAcknowledged() ? writeResult.getDeletedCount() : 0L; } } /** - * A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}. + * {@link MongoQueryExecution} updating documents matching the query. * - * @author Oliver Gierke - * @since 1.9 + * @author Christoph Strobl + * @since 3.4 */ - @RequiredArgsConstructor - static final class ResultProcessingConverter implements Converter { - - private final @NonNull ResultProcessor processor; - private final @NonNull MongoOperations operations; - private final @NonNull EntityInstantiators instantiators; + final class UpdateExecution implements MongoQueryExecution { - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ - @Override - public Object convert(Object source) { + private final ExecutableUpdate updateOps; + private Supplier updateDefinitionSupplier; + private final MongoParameterAccessor accessor; - ReturnedType returnedType = processor.getReturnedType(); + UpdateExecution(ExecutableUpdate updateOps, MongoQueryMethod method, Supplier updateSupplier, + MongoParameterAccessor accessor) { - if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) { - return source; - } + this.updateOps = updateOps; + this.updateDefinitionSupplier = updateSupplier; + this.accessor = accessor; + } - Converter converter = new DtoInstantiatingConverter(returnedType.getReturnedType(), - operations.getConverter().getMappingContext(), instantiators); + @Override + public Object execute(Query query) { - return processor.processResult(source, converter); + return updateOps.matching(query.with(accessor.getSort())) // + .apply(updateDefinitionSupplier.get()) // + .all().getModifiedCount(); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java index afbecf83d3..d3fe22b4ef 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,50 +15,61 @@ */ package org.springframework.data.mongodb.repository.query; -import java.io.Serializable; +import java.lang.annotation.Annotation; import java.lang.reflect.Method; -import java.util.Arrays; -import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; import org.springframework.core.annotation.AnnotatedElementUtils; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.data.geo.GeoPage; -import org.springframework.data.geo.GeoResult; -import org.springframework.data.geo.GeoResults; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.annotation.Collation; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Hint; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.Tailable; +import org.springframework.data.mongodb.repository.Update; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.query.ParametersSource; import org.springframework.data.repository.query.QueryMethod; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReflectionUtils; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ConcurrentReferenceHashMap; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** * Mongo specific implementation of {@link QueryMethod}. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ public class MongoQueryMethod extends QueryMethod { - @SuppressWarnings("unchecked") private static final List> GEO_NEAR_RESULTS = Arrays - .asList(GeoResult.class, GeoResults.class, GeoPage.class); - private final Method method; private final MappingContext, MongoPersistentProperty> mappingContext; + private final Map, Optional> annotationCache; - private MongoEntityMetadata metadata; + private @Nullable MongoEntityMetadata metadata; + private final Lazy isModifying = Lazy.of(this::resolveModifyingQueryIndicators); /** * Creates a new {@link MongoQueryMethod} from the given {@link Method}. - * + * * @param method must not be {@literal null}. * @param metadata must not be {@literal null}. * @param projectionFactory must not be {@literal null}. 
@@ -66,60 +77,71 @@ public class MongoQueryMethod extends QueryMethod { */ public MongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory, MappingContext, MongoPersistentProperty> mappingContext) { + this(method, metadata, projectionFactory, mappingContext, MongoParameters::new); + } + + /** + * Creates a new {@link MongoQueryMethod} from the given {@link Method}. + * + * @param method must not be {@literal null}. + * @param metadata must not be {@literal null}. + * @param projectionFactory must not be {@literal null}. + * @param mappingContext must not be {@literal null}. + */ + MongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory, + MappingContext, MongoPersistentProperty> mappingContext, + Function parametersFunction) { - super(method, metadata, projectionFactory); + super(method, metadata, projectionFactory, parametersFunction); - Assert.notNull(mappingContext, "MappingContext must not be null!"); + Assert.notNull(mappingContext, "MappingContext must not be null"); this.method = method; this.mappingContext = mappingContext; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#getParameters(java.lang.reflect.Method) - */ - @Override - protected MongoParameters createParameters(Method method) { - return new MongoParameters(method, isGeoNearQuery(method)); + this.annotationCache = new ConcurrentReferenceHashMap<>(); } /** * Returns whether the method has an annotated query. - * + * * @return */ public boolean hasAnnotatedQuery() { - return getAnnotatedQuery() != null; + return findAnnotatedQuery().isPresent(); } /** * Returns the query string declared in a {@link Query} annotation or {@literal null} if neither the annotation found * nor the attribute was specified. - * + * * @return */ + @Nullable String getAnnotatedQuery() { + return findAnnotatedQuery().orElse(null); + } - String query = (String) AnnotationUtils.getValue(getQueryAnnotation()); - return StringUtils.hasText(query) ? query : null; + private Optional findAnnotatedQuery() { + + return lookupQueryAnnotation() // + .map(Query::value) // + .filter(StringUtils::hasText); } /** * Returns the field specification to be used for the query. - * + * * @return */ + @Nullable String getFieldSpecification() { - String value = (String) AnnotationUtils.getValue(getQueryAnnotation(), "fields"); - return StringUtils.hasText(value) ? value : null; + return lookupQueryAnnotation() // + .map(Query::fields) // + .filter(StringUtils::hasText) // + .orElse(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#getEntityInformation() - */ @Override @SuppressWarnings("unchecked") public MongoEntityMetadata getEntityInformation() { @@ -132,29 +154,28 @@ public MongoEntityMetadata getEntityInformation() { if (ClassUtils.isPrimitiveOrWrapper(returnedObjectType)) { this.metadata = new SimpleMongoEntityMetadata((Class) domainClass, - mappingContext.getPersistentEntity(domainClass)); + mappingContext.getRequiredPersistentEntity(domainClass)); } else { MongoPersistentEntity returnedEntity = mappingContext.getPersistentEntity(returnedObjectType); - MongoPersistentEntity managedEntity = mappingContext.getPersistentEntity(domainClass); + MongoPersistentEntity managedEntity = mappingContext.getRequiredPersistentEntity(domainClass); returnedEntity = returnedEntity == null || returnedEntity.getType().isInterface() ? 
managedEntity : returnedEntity; MongoPersistentEntity collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity : managedEntity; - this.metadata = new SimpleMongoEntityMetadata((Class) returnedEntity.getType(), - collectionEntity); + this.metadata = new SimpleMongoEntityMetadata<>((Class) returnedEntity.getType(), collectionEntity); } } return this.metadata; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#getParameters() - */ + protected Class getDomainClass() { + return super.getDomainClass(); + } + @Override public MongoParameters getParameters() { return (MongoParameters) super.getParameters(); @@ -162,42 +183,29 @@ public MongoParameters getParameters() { /** * Returns whether the query is a geo near query. - * + * * @return */ public boolean isGeoNearQuery() { - return isGeoNearQuery(this.method); - } - - private boolean isGeoNearQuery(Method method) { - - Class returnType = method.getReturnType(); - - for (Class type : GEO_NEAR_RESULTS) { - if (type.isAssignableFrom(returnType)) { - return true; - } - } - - if (Iterable.class.isAssignableFrom(returnType)) { - TypeInformation from = ClassTypeInformation.fromReturnTypeOf(method); - return GeoResult.class.equals(from.getComponentType().getType()); - } - - return false; + return MongoParameters.isGeoNearQuery(this.method); } /** * Returns the {@link Query} annotation that is applied to the method or {@code null} if none available. - * + * * @return */ + @Nullable Query getQueryAnnotation() { - return AnnotatedElementUtils.findMergedAnnotation(method, Query.class); + return lookupQueryAnnotation().orElse(null); + } + + Optional lookupQueryAnnotation() { + return doFindAnnotation(Query.class); } TypeInformation getReturnType() { - return ClassTypeInformation.fromReturnTypeOf(method); + return TypeInformation.fromReturnTypeOf(method); } /** @@ -210,17 +218,29 @@ public boolean hasQueryMetaAttributes() { /** * Returns the {@link Meta} annotation that is applied to the method or {@code null} if not available. - * + * * @return * @since 1.6 */ + @Nullable Meta getMetaAnnotation() { - return AnnotatedElementUtils.findMergedAnnotation(method, Meta.class); + return doFindAnnotation(Meta.class).orElse(null); + } + + /** + * Returns the {@link Tailable} annotation that is applied to the method or {@code null} if not available. + * + * @return + * @since 2.0 + */ + @Nullable + Tailable getTailableAnnotation() { + return doFindAnnotation(Tailable.class).orElse(null); } /** * Returns the {@link org.springframework.data.mongodb.core.query.Meta} attributes to be applied. - * + * * @return never {@literal null}. 
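The annotated query, field specification, and meta attributes this class exposes typically originate from declarations like the following sketch (hypothetical Person type):

import java.util.List;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

    @Query(value = "{ 'lastname' : ?0 }", fields = "{ 'firstname' : 1, 'lastname' : 1 }")
    @Meta(maxExecutionTimeMs = 500, cursorBatchSize = 64, comment = "lastname lookup")
    List<Person> findByLastname(String lastname);
}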
* @since 1.6 */ @@ -232,22 +252,273 @@ public org.springframework.data.mongodb.core.query.Meta getQueryMetaAttributes() } org.springframework.data.mongodb.core.query.Meta metaAttributes = new org.springframework.data.mongodb.core.query.Meta(); - if (meta.maxExcecutionTime() > 0) { - metaAttributes.setMaxTimeMsec(meta.maxExcecutionTime()); + if (meta.maxExecutionTimeMs() > 0) { + metaAttributes.setMaxTimeMsec(meta.maxExecutionTimeMs()); } - if (meta.maxScanDocuments() > 0) { - metaAttributes.setMaxScan(meta.maxScanDocuments()); + if (meta.cursorBatchSize() != 0) { + metaAttributes.setCursorBatchSize(meta.cursorBatchSize()); } if (StringUtils.hasText(meta.comment())) { metaAttributes.setComment(meta.comment()); } - if (meta.snapshot()) { - metaAttributes.setSnapshot(meta.snapshot()); + if (!ObjectUtils.isEmpty(meta.flags())) { + + for (org.springframework.data.mongodb.core.query.Meta.CursorOption option : meta.flags()) { + metaAttributes.addFlag(option); + } + } + + if (meta.allowDiskUse()) { + metaAttributes.setAllowDiskUse(meta.allowDiskUse()); } return metaAttributes; } + + /** + * Check if the query method is decorated with a non-empty {@link Query#sort()}. + * + * @return true if method annotated with {@link Query} having a non-empty sort attribute. + * @since 2.1 + */ + public boolean hasAnnotatedSort() { + return lookupQueryAnnotation().map(Query::sort).filter(StringUtils::hasText).isPresent(); + } + + /** + * Get the sort value, used as default, extracted from the {@link Query} annotation. + * + * @return the {@link Query#sort()} value. + * @throws IllegalStateException if method not annotated with {@link Query}. Make sure to check + * {@link #hasAnnotatedSort()} first. + * @since 2.1 + */ + public String getAnnotatedSort() { + + return lookupQueryAnnotation().map(Query::sort).orElseThrow(() -> new IllegalStateException( + "Expected to find @Query annotation but did not; Make sure to check hasAnnotatedSort() before.")); + } + + /** + * Check if the query method is decorated with a non-empty {@link ReadPreference}. + * + * @return true if the method is annotated with {@link ReadPreference} having a non-empty value attribute. + * @since 4.2 + */ + public boolean hasAnnotatedReadPreference() { + return doFindReadPreferenceAnnotation().map(ReadPreference::value).filter(StringUtils::hasText).isPresent(); + } + + /** + * Get the {@link com.mongodb.ReadPreference} extracted from the {@link ReadPreference} annotation. + * + * @return the name of the {@link ReadPreference}. + * @throws IllegalStateException if method not annotated with {@link ReadPreference}. Make sure to check + * {@link #hasAnnotatedReadPreference()} first. + * @since 4.2 + */ + public String getAnnotatedReadPreference() { + + return doFindReadPreferenceAnnotation().map(ReadPreference::value).orElseThrow(() -> new IllegalStateException( + "Expected to find @ReadPreference annotation but did not; Make sure to check hasAnnotatedReadPreference() before.")); + } + + /** + * Get {@link com.mongodb.ReadPreference#getName() name} from query. First check if the method is annotated. If not, + * check if the class is annotated. So if the method and the class are annotated with @ReadPreference, the method + * annotation takes precedence. 
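Read preference resolution therefore behaves as in this sketch (hypothetical repository; the preference names are illustrative):

import java.util.List;
import org.springframework.data.mongodb.repository.ReadPreference;
import org.springframework.data.repository.Repository;

@ReadPreference("primaryPreferred") // class-level default
interface PersonRepository extends Repository<Person, String> {

    @ReadPreference("secondaryPreferred") // the method-level annotation takes precedence
    List<Person> findByLastname(String lastname);
}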
+ * + * @return the {@link ReadPreference} + * @since 4.2 + */ + private Optional doFindReadPreferenceAnnotation() { + return doFindAnnotation(ReadPreference.class).or(() -> doFindAnnotationInClass(ReadPreference.class)); + } + + /** + * Check if the query method is decorated with a non-empty {@link Query#collation()} or + * {@link Aggregation#collation()}. + * + * @return true if method annotated with {@link Query} or {@link Aggregation} having a non-empty collation attribute. + * @since 2.2 + */ + public boolean hasAnnotatedCollation() { + return doFindAnnotation(Collation.class).map(Collation::value).filter(StringUtils::hasText).isPresent(); + } + + /** + * Get the collation value extracted from the {@link Query} or {@link Aggregation} annotation. + * + * @return the {@link Query#collation()} or {@link Aggregation#collation()} value. + * @throws IllegalStateException if method not annotated with {@link Query} or {@link Aggregation}. Make sure to check + * {@link #hasAnnotatedCollation()} first. + * @since 2.2 + */ + public String getAnnotatedCollation() { + + return doFindAnnotation(Collation.class).map(Collation::value) // + .orElseThrow(() -> new IllegalStateException( + "Expected to find @Collation annotation but did not; Make sure to check hasAnnotatedCollation() before.")); + } + + /** + * Returns whether the method has an annotated aggregation. + * + * @return true if {@link Aggregation} is present. + * @since 2.2 + */ + public boolean hasAnnotatedAggregation() { + return findAnnotatedAggregation().isPresent(); + } + + /** + * Returns the aggregation pipeline declared in an {@link Aggregation} annotation. + * + * @return the aggregation pipeline. + * @throws IllegalStateException if method not annotated with {@link Aggregation}. Make sure to check + * {@link #hasAnnotatedAggregation()} first. + * @since 2.2 + */ + public String[] getAnnotatedAggregation() { + return findAnnotatedAggregation().orElseThrow(() -> new IllegalStateException( + "Expected to find @Aggregation annotation but did not; Make sure to check hasAnnotatedAggregation() before.")); + } + + /** + * @return {@literal true} if the {@link Hint} annotation is present and the index name is not empty. + * @since 4.1 + */ + public boolean hasAnnotatedHint() { + return doFindAnnotation(Hint.class).map(Hint::indexName).filter(StringUtils::hasText).isPresent(); + } + + /** + * Returns the index name declared via a {@link Hint} annotation. + * + * @return the index name (might be empty). 
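A declaration sketch for the aggregation and hint lookups above (hypothetical Person type and index name):

import java.util.List;
import org.springframework.data.mongodb.repository.Aggregation;
import org.springframework.data.mongodb.repository.Hint;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

    // one pipeline stage per string; a single "[{...}, {...}]" array string is rejected by verify()
    @Aggregation({ "{ '$match' : { 'lastname' : ?0 } }", "{ '$count' : 'total' }" })
    long countByLastname(String lastname);

    @Hint("lastname-idx") // hypothetical index name
    List<Person> findByLastname(String lastname);
}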
+ * @throws IllegalStateException if the method is not annotated with {@link Hint} + * @since 4.1 + */ + public String getAnnotatedHint() { + return doFindAnnotation(Hint.class).map(Hint::indexName).orElseThrow(() -> new IllegalStateException( + "Expected to find @Hint annotation but did not; Make sure to check hasAnnotatedHint() before.")); + } + + private Optional findAnnotatedAggregation() { + + return lookupAggregationAnnotation() // + .map(Aggregation::pipeline) // + .filter(it -> !ObjectUtils.isEmpty(it)); + } + + Optional lookupAggregationAnnotation() { + return doFindAnnotation(Aggregation.class); + } + + Optional lookupUpdateAnnotation() { + return doFindAnnotation(Update.class); + } + + @SuppressWarnings("unchecked") + private Optional doFindAnnotation(Class annotationType) { + + return (Optional) this.annotationCache.computeIfAbsent(annotationType, + it -> Optional.ofNullable(AnnotatedElementUtils.findMergedAnnotation(method, it))); + } + + @SuppressWarnings("unchecked") + private Optional doFindAnnotationInClass(Class annotationType) { + + Optional mergedAnnotation = Optional + .ofNullable(AnnotatedElementUtils.findMergedAnnotation(method.getDeclaringClass(), annotationType)); + annotationCache.put(annotationType, mergedAnnotation); + + return (Optional) mergedAnnotation; + } + + @Override + public boolean isModifyingQuery() { + return isModifying.get(); + } + + private boolean resolveModifyingQueryIndicators() { + return hasAnnotatedUpdate() + || QueryUtils.indexOfAssignableParameter(UpdateDefinition.class, method.getParameterTypes()) != -1; + } + + /** + * @return {@literal true} if {@link Update} annotation is present. + * @since 3.4 + */ + public boolean hasAnnotatedUpdate() { + return lookupUpdateAnnotation().isPresent(); + } + + /** + * @return the {@link Update} or {@literal null} if not present. + * @since 3.4 + */ + public Update getUpdateSource() { + return lookupUpdateAnnotation().orElse(null); + } + + /** + * Verify the actual {@link QueryMethod} is valid in terms of supported return and parameter types. + * + * @since 3.4 + * @throws IllegalStateException + */ + public void verify() { + + if (isModifyingQuery()) { + + if (isCollectionQuery() || isScrollQuery() || isSliceQuery() || isPageQuery() || isGeoNearQuery() + || !isNumericOrVoidReturnValue()) { // + throw new IllegalStateException( + String.format( + "Update method may be void or return a numeric value (the number of updated documents)." + + " Offending Method: %s.%s", + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + + if (hasAnnotatedUpdate()) { // must define either an update or an update pipeline + if (!StringUtils.hasText(getUpdateSource().update()) && ObjectUtils.isEmpty(getUpdateSource().pipeline())) { + throw new IllegalStateException( + String.format( + "Update method must define either 'Update#update' or 'Update#pipeline' attribute;" + + " Offending Method: %s.%s", + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + } + } + + if (hasAnnotatedAggregation()) { + for (String stage : getAnnotatedAggregation()) { + if (BsonUtils.isJsonArray(stage)) { + throw new IllegalStateException(String.format( + """ + Invalid aggregation pipeline. Please split the definition from @Aggregation("[{...}, {...}]") to @Aggregation({ "{...}", "{...}" }). 
+ Offending Method: %s.%s + """, + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + } + } + } + + private boolean isNumericOrVoidReturnValue() { + + Class resultType = getReturnedObjectType(); + if (ReactiveWrappers.usesReactiveType(resultType)) { + resultType = getReturnType().getComponentType().getType(); + } + + boolean isUpdateCountReturnType = ClassUtils.isAssignable(Number.class, resultType); + boolean isVoidReturnType = ReflectionUtils.isVoid(resultType); + + return isUpdateCountReturnType || isVoidReturnType; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java index 349df7a2c9..afabf9c37e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2015 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,11 @@ */ package org.springframework.data.mongodb.repository.query; +import org.bson.Document; +import org.bson.json.JsonParseException; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; @@ -24,20 +29,23 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; import org.springframework.data.repository.query.RepositoryQuery; import org.springframework.data.repository.query.ResultProcessor; import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.ValueExpressionDelegate; import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.expression.ExpressionParser; import org.springframework.util.StringUtils; -import com.mongodb.util.JSONParseException; - /** * {@link RepositoryQuery} implementation for Mongo. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ public class PartTreeMongoQuery extends AbstractMongoQuery { @@ -48,13 +56,36 @@ public class PartTreeMongoQuery extends AbstractMongoQuery { /** * Creates a new {@link PartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. - * + * * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. 
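The constraints enforced by verify() admit update methods like this sketch (hypothetical Person type); a modifying method must return void or a number, here the count of modified documents:

import org.springframework.data.mongodb.repository.Update;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

    @Update("{ '$inc' : { 'visits' : ?1 } }")
    long findAndIncrementVisitsByLastname(String lastname, int increment);
}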
+ * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4, use the constructors accepting {@link QueryMethodValueEvaluationContextAccessor} instead. */ - public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations) { + @Deprecated(since = "4.4.0") + public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, ExpressionParser expressionParser, + QueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); - super(method, mongoOperations); + this.processor = method.getResultProcessor(); + this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); + this.isGeoNearQuery = method.isGeoNearQuery(); + this.context = mongoOperations.getConverter().getMappingContext(); + } + + /** + * Creates a new {@link PartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 + */ + public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + + super(method, mongoOperations, delegate); this.processor = method.getResultProcessor(); this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); @@ -64,17 +95,13 @@ public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperatio /** * Return the {@link PartTree} backing the query. - * + * * @return the tree */ public PartTree getTree() { return tree; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, boolean) - */ @Override protected Query createQuery(ConvertingParameterAccessor accessor) { @@ -96,7 +123,7 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { ReturnedType returnedType = processor.withDynamicProjection(accessor).getReturnedType(); - if (returnedType.isProjecting()) { + if (returnedType.needsCustomConstruction()) { Field fields = query.fields(); @@ -110,41 +137,39 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { try { - BasicQuery result = new BasicQuery(query.getQueryObject().toString(), fieldSpec); + BasicQuery result = new BasicQuery(query.getQueryObject(), Document.parse(fieldSpec)); result.setSortObject(query.getSortObject()); return result; - } catch (JSONParseException o_O) { - throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()), + } catch (JsonParseException o_O) { + throw new IllegalStateException(String.format("Invalid query or field specification in %s", getQueryMethod()), o_O); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createCountQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) - */ @Override protected Query createCountQuery(ConvertingParameterAccessor accessor) { return new MongoQueryCreator(tree, accessor, context, false).createQuery(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() - */ @Override protected boolean isCountQuery() { return tree.isCountProjection(); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery() - */ + @Override + protected boolean isExistsQuery() { + return tree.isExistsProjection(); + } + @Override protected boolean isDeleteQuery() { return tree.isDelete(); } + + @Override + protected boolean isLimiting() { + return tree.isLimiting(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java index 0d92fe7b09..431510f11b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2013 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,110 @@ */ package org.springframework.data.mongodb.repository.query; -import org.springframework.data.domain.Sort.Order; +import java.util.Arrays; +import java.util.List; -import com.mongodb.DBCursor; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; /** - * Collection of utility methods to apply sorting and pagination to a {@link DBCursor}. - * - * @author Oliver Gierke + * Internal utility class to help avoid duplicate code required in both the reactive and the sync {@link Query} support + * offered by repositories. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @currentRead Assassin's Apprentice - Robin Hobb */ -@Deprecated -public abstract class QueryUtils { +class QueryUtils { + + protected static final Log LOGGER = LogFactory.getLog(QueryUtils.class); + + /** + * Decorate {@link Query} and add a default sort expression to the given {@link Query}. Attributes of the given + * {@code sort} may be overwritten by the sort explicitly defined by the {@link Query} itself. + * + * @param query the {@link Query} to decorate. + * @param defaultSort the default sort expression to apply to the query. + * @return the query having the given {@code sort} applied. + */ + static Query decorateSort(Query query, Document defaultSort) { + + if (defaultSort.isEmpty()) { + return query; + } + + BasicQuery defaultSortQuery = query instanceof BasicQuery bq ? 
bq : new BasicQuery(query);
+
+ Document combinedSort = new Document(defaultSort);
+ combinedSort.putAll(defaultSortQuery.getSortObject());
+ defaultSortQuery.setSortObject(combinedSort);
+
+ return defaultSortQuery;
+ }
- private QueryUtils() {
+ /**
+ * Apply a collation extracted from the given {@literal collationExpression} to the given {@link Query}. Potentially
+ * replace parameter placeholders with values from the {@link ConvertingParameterAccessor accessor}.
+ *
+ * @param query must not be {@literal null}.
+ * @param collationExpression must not be {@literal null}.
+ * @param accessor must not be {@literal null}.
+ * @param expressionEvaluator must not be {@literal null}.
+ * @return the {@link Query} having proper {@link Collation}.
+ * @see Query#collation(Collation)
+ * @since 2.2
+ */
+ static Query applyCollation(Query query, @Nullable String collationExpression, ConvertingParameterAccessor accessor,
+ ValueExpressionEvaluator expressionEvaluator) {
+
+ Collation collation = CollationUtils.computeCollation(collationExpression, accessor, expressionEvaluator);
+ return collation == null ? query : query.collation(collation);
+ }
+ /**
+ * Get the first index of the parameter that can be assigned to the given type.
+ *
+ * @param type the type to look for.
+ * @param parameters the actual parameters.
+ * @return -1 if not found.
+ * @since 3.4
+ */
+ static int indexOfAssignableParameter(Class<?> type, Class<?>[] parameters) {
+ return indexOfAssignableParameter(type, Arrays.asList(parameters));
}
/**
- * Turns an {@link Order} into an {@link org.springframework.data.mongodb.core.query.Order}.
- *
- * @deprecated use {@link Order} directly.
- * @param order
- * @return
+ * Get the first index of the parameter that can be assigned to the given type.
+ *
+ * @param type the type to look for.
+ * @param parameters the actual parameters.
+ * @return -1 if not found.
+ * @since 3.4
*/
- @Deprecated
- public static org.springframework.data.mongodb.core.query.Order toOrder(Order order) {
- return order.isAscending() ? org.springframework.data.mongodb.core.query.Order.ASCENDING
- : org.springframework.data.mongodb.core.query.Order.DESCENDING;
+ static int indexOfAssignableParameter(Class<?> type, List<Class<?>> parameters) {
+
+ if (parameters.isEmpty()) {
+ return -1;
+ }
+
+ int i = 0;
+ for (Class<?> parameterType : parameters) {
+ if (ClassUtils.isAssignable(type, parameterType)) {
+ return i;
+ }
+ i++;
+ }
+ return -1;
}
+
} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java new file mode 100644 index 0000000000..324f01d61f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java @@ -0,0 +1,123 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
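(A note on decorateSort above: because org.bson.Document.putAll overwrites existing keys, the sort defined on the query itself wins over the defaults on a per-field basis. A minimal, self-contained sketch of that merge semantics:

Document defaultSort = new Document("lastname", 1).append("firstname", 1);
Document querySort = new Document("lastname", -1);

Document combined = new Document(defaultSort); // start from the defaults
combined.putAll(querySort); // per-key override by the query-defined sort

// combined is now { "lastname" : -1, "firstname" : 1 }
)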
+ */
+package org.springframework.data.mongodb.repository.query;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.reactivestreams.Publisher;
+import org.springframework.data.repository.util.ReactiveWrapperConverters;
+import org.springframework.data.util.ReactiveWrappers;
+
+/**
+ * Reactive {@link org.springframework.data.repository.query.ParametersParameterAccessor} implementation that subscribes
+ * to reactive parameter wrapper types upon creation. This class performs synchronization when accessing parameters.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+class ReactiveMongoParameterAccessor extends MongoParametersParameterAccessor {
+
+ private final Object[] values;
+
+ public ReactiveMongoParameterAccessor(MongoQueryMethod method, Object[] values) {
+
+ super(method, values);
+ this.values = values;
+ }
+
+ /* (non-Javadoc)
+ * @see org.springframework.data.mongodb.repository.query.MongoParametersParameterAccessor#getValues()
+ */
+ @Override
+ public Object[] getValues() {
+
+ Object[] result = new Object[super.getValues().length];
+ for (int i = 0; i < result.length; i++) {
+ result[i] = getValue(i);
+ }
+ return result;
+ }
+
+ public Object getBindableValue(int index) {
+ return getValue(getParameters().getBindableParameter(index).getIndex());
+ }
+
+ /**
+ * Resolve parameters that were provided through reactive wrapper types. Flux is collected into a list, values from
+ * Mono's are used directly.
+ *
+ * @return a {@link Mono} emitting this accessor with all reactive wrapper values resolved.
+ */
+ @SuppressWarnings("unchecked")
+ public Mono<ReactiveMongoParameterAccessor> resolveParameters() {
+
+ boolean hasReactiveWrapper = false;
+
+ for (Object value : values) {
+ if (value == null || !ReactiveWrappers.supports(value.getClass())) {
+ continue;
+ }
+
+ hasReactiveWrapper = true;
+ break;
+ }
+
+ if (!hasReactiveWrapper) {
+ return Mono.just(this);
+ }
+
+ Object[] resolved = new Object[values.length];
+ Map<Integer, Optional<Object>> holder = new ConcurrentHashMap<>();
+ List<Publisher<?>> publishers = new ArrayList<>();
+
+ for (int i = 0; i < values.length; i++) {
+
+ Object value = resolved[i] = values[i];
+ if (value == null || !ReactiveWrappers.supports(value.getClass())) {
+ continue;
+ }
+
+ if (ReactiveWrappers.isSingleValueType(value.getClass())) {
+
+ int index = i;
+ publishers.add(ReactiveWrapperConverters.toWrapper(value, Mono.class) //
+ .map(Optional::of) //
+ .defaultIfEmpty(Optional.empty()) //
+ .doOnNext(it -> holder.put(index, (Optional<Object>) it)));
+ } else {
+
+ int index = i;
+ publishers.add(ReactiveWrapperConverters.toWrapper(value, Flux.class) //
+ .collectList() //
+ .doOnNext(it -> holder.put(index, Optional.of(it))));
+ }
+ }
+
+ return Flux.merge(publishers).then().thenReturn(resolved).map(values -> {
+ holder.forEach((index, v) -> values[index] = v.orElse(null));
+ return new ReactiveMongoParameterAccessor(method, values);
+ });
+ }
+} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java new file mode 100644 index 0000000000..d18c6a989c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java @@ -0,0 +1,256 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
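(The resolveParameters() flow above can be pictured in isolation: a single-value wrapper contributes its value directly, a multi-value wrapper is collected into a List, and the merged completion signal yields the resolved argument array. A plain Reactor sketch of the same idea; the argument values are made up:

Mono<Object[]> resolved = Mono.zip(
		Mono.just((Object) "Matthews"), // Mono argument: value used as-is
		Flux.just("Dave", "Oliver").collectList().cast(Object.class)) // Flux argument: collected into a List
	.map(tuple -> new Object[] { tuple.getT1(), tuple.getT2() });
)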
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.query;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import org.reactivestreams.Publisher;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.data.convert.DtoInstantiatingConverter;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Range;
+import org.springframework.data.geo.Distance;
+import org.springframework.data.geo.GeoResult;
+import org.springframework.data.geo.Point;
+import org.springframework.data.mapping.model.EntityInstantiators;
+import org.springframework.data.mongodb.core.ReactiveMongoOperations;
+import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate;
+import org.springframework.data.mongodb.core.query.NearQuery;
+import org.springframework.data.mongodb.core.query.Query;
+import org.springframework.data.mongodb.core.query.UpdateDefinition;
+import org.springframework.data.repository.query.ResultProcessor;
+import org.springframework.data.repository.query.ReturnedType;
+import org.springframework.data.util.ReactiveWrappers;
+import org.springframework.data.util.ReflectionUtils;
+import org.springframework.data.util.TypeInformation;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+import org.springframework.util.ClassUtils;
+
+import com.mongodb.client.result.UpdateResult;
+
+/**
+ * Set of classes to contain query execution strategies. Depending (mostly) on the return type of a
+ * {@link org.springframework.data.repository.query.QueryMethod} a {@link AbstractReactiveMongoQuery} can be executed in
+ * various flavors.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+interface ReactiveMongoQueryExecution {
+
+ Publisher<? extends Object> execute(Query query, Class<?> type, String collection);
+
+ /**
+ * {@link ReactiveMongoQueryExecution} to execute geo-near queries.
+ *
+ * @author Mark Paluch
+ */
+ final class GeoNearExecution implements ReactiveMongoQueryExecution {
+
+ private final ReactiveMongoOperations operations;
+ private final MongoParameterAccessor accessor;
+ private final TypeInformation<?> returnType;
+
+ public GeoNearExecution(ReactiveMongoOperations operations, MongoParameterAccessor accessor,
+ TypeInformation<?> returnType) {
+
+ this.operations = operations;
+ this.accessor = accessor;
+ this.returnType = returnType;
+ }
+
+ @Override
+ public Publisher<? extends Object> execute(Query query, Class<?> type, String collection) {
+
+ Flux<GeoResult<Object>> results = doExecuteQuery(query, type, collection);
+ return isStreamOfGeoResult() ? results : results.map(GeoResult::getContent);
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ private Flux<GeoResult<Object>> doExecuteQuery(@Nullable Query query, Class<?> type, String collection) {
+
+ Point nearLocation = accessor.getGeoNearLocation();
+ NearQuery nearQuery = NearQuery.near(nearLocation);
+
+ if (query != null) {
+ nearQuery.query(query);
+ }
+
+ Range<Distance> distances = accessor.getDistanceRange();
+ distances.getUpperBound().getValue().ifPresent(it -> nearQuery.maxDistance(it).in(it.getMetric()));
+ distances.getLowerBound().getValue().ifPresent(it -> nearQuery.minDistance(it).in(it.getMetric()));
+
+ Pageable pageable = accessor.getPageable();
+ nearQuery.with(pageable);
+
+ return (Flux) operations.geoNear(nearQuery, type, collection);
+ }
+
+ private boolean isStreamOfGeoResult() {
+
+ if (!ReactiveWrappers.supports(returnType.getType())) {
+ return false;
+ }
+
+ TypeInformation<?> componentType = returnType.getComponentType();
+ return (componentType != null) && GeoResult.class.equals(componentType.getType());
+ }
+ }
+
+ /**
+ * {@link ReactiveMongoQueryExecution} removing documents matching the query.
+ *
+ * @author Mark Paluch
+ * @author Artyom Gabeev
+ */
+ final class DeleteExecution implements ReactiveMongoQueryExecution {
+
+ private final ReactiveMongoOperations operations;
+ private final MongoQueryMethod method;
+
+ public DeleteExecution(ReactiveMongoOperations operations, MongoQueryMethod method) {
+ this.operations = operations;
+ this.method = method;
+ }
+
+ @Override
+ public Publisher<? extends Object> execute(Query query, Class<?> type, String collection) {
+
+ if (method.isCollectionQuery()) {
+ return operations.findAllAndRemove(query, type, collection);
+ }
+
+ if (method.isQueryForEntity() && !ClassUtils.isPrimitiveOrWrapper(method.getReturnedObjectType())) {
+ return operations.findAndRemove(query, type, collection);
+ }
+
+ return operations.remove(query, type, collection)
+ .map(deleteResult -> deleteResult.wasAcknowledged() ? deleteResult.getDeletedCount() : 0L);
+ }
+ }
+
+ /**
+ * {@link ReactiveMongoQueryExecution} updating documents matching the query.
+ *
+ * @author Christoph Strobl
+ * @since 3.4
+ */
+ final class UpdateExecution implements ReactiveMongoQueryExecution {
+
+ private final ReactiveUpdate<?> updateOps;
+ private final MongoParameterAccessor accessor;
+ private Mono<UpdateDefinition> update;
+
+ UpdateExecution(ReactiveUpdate<?> updateOps, ReactiveMongoQueryMethod method, MongoParameterAccessor accessor,
+ Mono<UpdateDefinition> update) {
+
+ this.updateOps = updateOps;
+ this.accessor = accessor;
+ this.update = update;
+ }
+
+ @Override
+ public Publisher<? extends Object> execute(Query query, Class<?> type, String collection) {
+
+ return update.flatMap(it -> updateOps.inCollection(collection) //
+ .matching(query.with(accessor.getSort())) // actually we could do it unsorted
+ .apply(it) //
+ .all() //
+ .map(UpdateResult::getModifiedCount));
+ }
+ }
+
+ /**
+ * A {@link ReactiveMongoQueryExecution} that wraps the results of the given delegate with the given result
+ * processing.
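(DeleteExecution above encodes the repository-facing contract for derived delete queries: a collection return type emits the removed documents, an entity return type emits the single removed document, and a numeric return type emits the acknowledged delete count. As a sketch, with Person as a hypothetical domain type:

interface ReactivePersonRepository extends Repository<Person, String> {

	Flux<Person> deleteByLastname(String lastname); // findAllAndRemove: emits the removed documents

	Mono<Person> deletePersonByEmail(String email); // findAndRemove: emits the removed document

	Mono<Long> removeByLastname(String lastname); // remove: emits the number of deleted documents
}
)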
+ */
+ final class ResultProcessingExecution implements ReactiveMongoQueryExecution {
+
+ private final ReactiveMongoQueryExecution delegate;
+ private final Converter<Object, Object> converter;
+
+ public ResultProcessingExecution(ReactiveMongoQueryExecution delegate, Converter<Object, Object> converter) {
+
+ Assert.notNull(delegate, "Delegate must not be null");
+ Assert.notNull(converter, "Converter must not be null");
+
+ this.delegate = delegate;
+ this.converter = converter;
+ }
+
+ @Override
+ public Publisher<? extends Object> execute(Query query, Class<?> type, String collection) {
+ return (Publisher) converter.convert(delegate.execute(query, type, collection));
+ }
+ }
+
+ /**
+ * A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}.
+ *
+ * @author Mark Paluch
+ */
+ final class ResultProcessingConverter implements Converter<Object, Object> {
+
+ private final ResultProcessor processor;
+ private final ReactiveMongoOperations operations;
+ private final EntityInstantiators instantiators;
+
+ public ResultProcessingConverter(ResultProcessor processor, ReactiveMongoOperations operations,
+ EntityInstantiators instantiators) {
+
+ Assert.notNull(processor, "Processor must not be null");
+ Assert.notNull(operations, "Operations must not be null");
+ Assert.notNull(instantiators, "Instantiators must not be null");
+
+ this.processor = processor;
+ this.operations = operations;
+ this.instantiators = instantiators;
+ }
+
+ @Override
+ public Object convert(Object source) {
+
+ ReturnedType returnedType = processor.getReturnedType();
+
+ if (ReflectionUtils.isVoid(returnedType.getReturnedType())) {
+
+ if (source instanceof Mono mono) {
+ return mono.then();
+ }
+
+ if (source instanceof Publisher publisher) {
+ return Flux.from(publisher).then();
+ }
+ }
+
+ if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) {
+ return source;
+ }
+
+ if (!operations.getConverter().getMappingContext().hasPersistentEntityFor(returnedType.getReturnedType())) {
+ return source;
+ }
+
+ Converter<Object, Object> converter = new DtoInstantiatingConverter(returnedType.getReturnedType(),
+ operations.getConverter().getMappingContext(), instantiators);
+
+ return processor.processResult(source, converter);
+ }
+ }
+} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java new file mode 100644 index 0000000000..16354c2ff0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java @@ -0,0 +1,162 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
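(ResultProcessingConverter above leaves void, primitive, and unmapped return types untouched and only runs DtoInstantiatingConverter for returned types the mapping context can construct. A hypothetical closed projection that would take that path; PersonSummary and Person are stand-in types:

// Hypothetical DTO: only the constructor-declared properties are copied from the source entity.
class PersonSummary {

	private final String firstname;
	private final String lastname;

	PersonSummary(String firstname, String lastname) {
		this.firstname = firstname;
		this.lastname = lastname;
	}
}

interface ReactivePersonRepository extends Repository<Person, String> {
	Flux<PersonSummary> findByLastname(String lastname); // results post-processed into the DTO
}
)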
+ */
+package org.springframework.data.mongodb.repository.query;
+
+import java.lang.reflect.Method;
+
+import org.springframework.dao.InvalidDataAccessApiUsageException;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.domain.Sort;
+import org.springframework.data.geo.GeoResult;
+import org.springframework.data.mapping.context.MappingContext;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+import org.springframework.data.mongodb.repository.query.MongoParameters.MongoParameter;
+import org.springframework.data.projection.ProjectionFactory;
+import org.springframework.data.repository.core.RepositoryMetadata;
+import org.springframework.data.repository.util.ReactiveWrapperConverters;
+import org.springframework.data.util.Lazy;
+import org.springframework.data.util.ReactiveWrappers;
+import org.springframework.data.util.ReflectionUtils;
+import org.springframework.data.util.TypeInformation;
+import org.springframework.util.ClassUtils;
+
+/**
+ * Reactive specific implementation of {@link MongoQueryMethod}.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+public class ReactiveMongoQueryMethod extends MongoQueryMethod {
+
+ private static final TypeInformation<Page> PAGE_TYPE = TypeInformation.of(Page.class);
+ private static final TypeInformation<Slice> SLICE_TYPE = TypeInformation.of(Slice.class);
+
+ private final Method method;
+ private final Lazy<Boolean> isCollectionQuery;
+
+ /**
+ * Creates a new {@link ReactiveMongoQueryMethod} from the given {@link Method}.
+ *
+ * @param method must not be {@literal null}.
+ * @param metadata must not be {@literal null}.
+ * @param projectionFactory must not be {@literal null}.
+ * @param mappingContext must not be {@literal null}.
+ */
+ public ReactiveMongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory,
+ MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
+
+ super(method, metadata, projectionFactory, mappingContext, parametersSource -> {
+ return new MongoParameters(parametersSource,
+ MongoParameters.isGeoNearQuery(parametersSource.getMethod()) || isGeoNearQuery(parametersSource.getMethod()));
+ });
+
+ this.method = method;
+ this.isCollectionQuery = Lazy.of(() -> (!(isPageQuery() || isSliceQuery() || isScrollQuery())
+ && ReactiveWrappers.isMultiValueType(metadata.getReturnType(method).getType()) || super.isCollectionQuery()));
+ }
+
+ @Override
+ public boolean isCollectionQuery() {
+ return isCollectionQuery.get();
+ }
+
+ @Override
+ public boolean isGeoNearQuery() {
+ return isGeoNearQuery(method);
+ }
+
+ private static boolean isGeoNearQuery(Method method) {
+
+ if (ReactiveWrappers.supports(method.getReturnType())) {
+ TypeInformation<?> from = TypeInformation.fromReturnTypeOf(method);
+ return GeoResult.class.equals(from.getRequiredComponentType().getType());
+ }
+
+ return false;
+ }
+
+ @Override
+ public boolean isModifyingQuery() {
+ return super.isModifyingQuery();
+ }
+
+ @Override
+ public boolean isQueryForEntity() {
+ return super.isQueryForEntity();
+ }
+
+ @Override
+ public boolean isStreamQuery() {
+ return true;
+ }
+
+ /**
+ * Check if the given {@link org.springframework.data.repository.query.QueryMethod} receives a reactive parameter
+ * wrapper as one of its parameters.
+ *
+ * @return {@literal true} if at least one method parameter is a reactive wrapper type.
+ */
+ public boolean hasReactiveWrapperParameter() {
+
+ for (MongoParameter mongoParameter : getParameters()) {
+ if (ReactiveWrapperConverters.supports(mongoParameter.getType())) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public void verify() {
+
+ if (ReflectionUtils.hasParameterOfType(method, Pageable.class)) {
+
+ TypeInformation<?> returnType = TypeInformation.fromReturnTypeOf(method);
+
+ boolean multiWrapper = ReactiveWrappers.isMultiValueType(returnType.getType());
+ boolean singleWrapperWithWrappedPageableResult = ReactiveWrappers.isSingleValueType(returnType.getType())
+ && (PAGE_TYPE.isAssignableFrom(returnType.getRequiredComponentType())
+ || SLICE_TYPE.isAssignableFrom(returnType.getRequiredComponentType()));
+
+ if (ReflectionUtils.hasParameterOfType(method, Sort.class)) {
+ throw new IllegalStateException(String.format("Method must not have Pageable *and* Sort parameter;"
+ + " Use sorting capabilities on Pageable instead; Offending method: %s", method));
+ }
+
+ if (isScrollQuery()) {
+ return;
+ }
+
+ if (singleWrapperWithWrappedPageableResult) {
+ throw new InvalidDataAccessApiUsageException(
+ String.format("'%s.%s' must not use sliced or paged execution; Please use Flux.buffer(size, skip).",
+ ClassUtils.getShortName(method.getDeclaringClass()), method.getName()));
+ }
+
+ if (!multiWrapper) {
+ throw new IllegalStateException(String.format(
+ "Method has to use either a multi-item reactive wrapper return type or a wrapped Page/Slice type; Offending method: %s",
+ method));
+ }
+ }
+
+ super.verify();
+ }
+} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java new file mode 100644 index 0000000000..5787cca5a5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java @@ -0,0 +1,175 @@
+/*
+ * Copyright 2016-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
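(The verify() checks above translate into concrete signature rules; together with the GeoResult detection in isGeoNearQuery(Method), a sketch of what passes and what is rejected, with Person as a hypothetical domain type:

interface ReactivePersonRepository extends Repository<Person, String> {

	Flux<Person> findByLastname(String lastname, Pageable pageable); // OK: multi-item wrapper

	// Component type GeoResult marks this method as a geo-near query.
	Flux<GeoResult<Person>> findByLocationNear(Point location, Distance maxDistance); // OK

	// Mono<Page<Person>> findByLastname(String lastname, Pageable pageable); // rejected: paged/sliced execution
	// Flux<Person> findByLastname(String lastname, Pageable pageable, Sort sort); // rejected: Pageable *and* Sort
}
)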
+ */ +package org.springframework.data.mongodb.repository.query; + +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.bson.json.JsonParseException; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.expression.ExpressionParser; +import org.springframework.util.StringUtils; + +/** + * Reactive PartTree {@link RepositoryQuery} implementation for Mongo. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public class ReactivePartTreeMongoQuery extends AbstractReactiveMongoQuery { + + private final PartTree tree; + private final boolean isGeoNearQuery; + private final MappingContext context; + private final ResultProcessor processor; + + /** + * Creates a new {@link ReactivePartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link QueryMethodValueEvaluationContextAccessor} instead. + */ + @Deprecated(since = "4.4.0") + public ReactivePartTreeMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ExpressionParser expressionParser, ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); + + this.processor = method.getResultProcessor(); + this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); + this.isGeoNearQuery = method.isGeoNearQuery(); + this.context = mongoOperations.getConverter().getMappingContext(); + } + + /** + * Creates a new {@link ReactivePartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public ReactivePartTreeMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + + super(method, mongoOperations, delegate); + + this.processor = method.getResultProcessor(); + this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); + this.isGeoNearQuery = method.isGeoNearQuery(); + this.context = mongoOperations.getConverter().getMappingContext(); + } + + /** + * Return the {@link PartTree} backing the query. + * + * @return the tree + */ + public PartTree getTree() { + return tree; + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + return Mono.fromSupplier(() -> createQueryInternal(accessor, false)); + } + + @Override + protected Mono createCountQuery(ConvertingParameterAccessor accessor) { + return Mono.fromSupplier(() -> createQueryInternal(accessor, true)); + } + + private Query createQueryInternal(ConvertingParameterAccessor accessor, boolean isCountQuery) { + + MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, !isCountQuery && isGeoNearQuery); + Query query = creator.createQuery(); + + if (isCountQuery) { + return query; + } + + if (tree.isLimiting()) { + query.limit(tree.getMaxResults()); + } + + TextCriteria textCriteria = accessor.getFullText(); + if (textCriteria != null) { + query.addCriteria(textCriteria); + } + + String fieldSpec = getQueryMethod().getFieldSpecification(); + + if (!StringUtils.hasText(fieldSpec)) { + + ReturnedType returnedType = processor.withDynamicProjection(accessor).getReturnedType(); + + if (returnedType.isProjecting()) { + returnedType.getInputProperties().forEach(query.fields()::include); + } + + return query; + } + + try { + + BasicQuery result = new BasicQuery(query.getQueryObject(), Document.parse(fieldSpec)); + result.setSortObject(query.getSortObject()); + + return result; + } catch (JsonParseException o_O) { + throw new IllegalStateException(String.format("Invalid query or field specification in %s", getQueryMethod()), + o_O); + } + } + + @Override + protected boolean isCountQuery() { + return tree.isCountProjection(); + } + + @Override + protected boolean isExistsQuery() { + return tree.isExistsProjection(); + } + + @Override + protected boolean isDeleteQuery() { + return tree.isDelete(); + } + + @Override + protected boolean isLimiting() { + return tree.isLimiting(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregation.java new file mode 100644 index 0000000000..ff01d8f8a3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregation.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
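(createQueryInternal above applies tree.getMaxResults() whenever the part tree is limiting, which a Top/First keyword in the method name triggers; Person is a hypothetical domain type:

interface ReactivePersonRepository extends Repository<Person, String> {

	// tree.isLimiting() == true: the derived query gets .limit(3)
	Flux<Person> findTop3ByLastnameOrderByFirstnameAsc(String lastname);
}
)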
+ */ +package org.springframework.data.mongodb.repository.query; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.expression.ExpressionParser; +import org.springframework.lang.Nullable; + +/** + * A reactive {@link org.springframework.data.repository.query.RepositoryQuery} to use a plain JSON String to create an + * {@link AggregationOperation aggregation} pipeline to actually execute. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +public class ReactiveStringBasedAggregation extends AbstractReactiveMongoQuery { + + private final ReactiveMongoOperations reactiveMongoOperations; + private final MongoConverter mongoConverter; + + /** + * @param method must not be {@literal null}. + * @param reactiveMongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. + */ + @Deprecated(since = "4.4.0") + public ReactiveStringBasedAggregation(ReactiveMongoQueryMethod method, + ReactiveMongoOperations reactiveMongoOperations, ExpressionParser expressionParser, + ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + + super(method, reactiveMongoOperations, expressionParser, evaluationContextProvider); + + this.reactiveMongoOperations = reactiveMongoOperations; + this.mongoConverter = reactiveMongoOperations.getConverter(); + } + + /** + * @param method must not be {@literal null}. + * @param reactiveMongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public ReactiveStringBasedAggregation(ReactiveMongoQueryMethod method, + ReactiveMongoOperations reactiveMongoOperations, ValueExpressionDelegate delegate) { + + super(method, reactiveMongoOperations, delegate); + + this.reactiveMongoOperations = reactiveMongoOperations; + this.mongoConverter = reactiveMongoOperations.getConverter(); + } + + @Override + @SuppressWarnings("ReactiveStreamsNullableInLambdaInTransform") + protected Publisher doExecute(ReactiveMongoQueryMethod method, ResultProcessor processor, + ConvertingParameterAccessor accessor, @Nullable Class ignored) { + + return computePipeline(accessor).flatMapMany(it -> { + + return AggregationUtils.doAggregate(new AggregationPipeline(it), method, processor, accessor, + this::getValueExpressionEvaluator, + (aggregation, sourceType, typeToRead, elementType, simpleType, rawResult) -> { + + Flux flux = reactiveMongoOperations.aggregate(aggregation, typeToRead); + if (ReflectionUtils.isVoid(elementType)) { + return flux.then(); + } + + ReactiveMongoQueryExecution.ResultProcessingConverter resultProcessing = getResultProcessing(processor); + + if (simpleType && !rawResult && !elementType.equals(Document.class)) { + + flux = flux.handle((item, sink) -> { + + Object result = AggregationUtils.extractSimpleTypeResult((Document) item, elementType, mongoConverter); + + if (result != null) { + sink.next(result); + } + }); + } + + flux = flux.map(resultProcessing::convert); + + return method.isCollectionQuery() ? flux : flux.next(); + }); + }); + } + + private boolean isSimpleReturnType(Class targetType) { + return MongoSimpleTypes.HOLDER.isSimpleType(targetType); + } + + private Mono> computePipeline(ConvertingParameterAccessor accessor) { + return parseAggregationPipeline(getQueryMethod().getAnnotatedAggregation(), accessor); + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + throw new UnsupportedOperationException("No query support for aggregation"); + } + + @Override + protected boolean isCountQuery() { + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; + } + + @Override + protected boolean isDeleteQuery() { + return false; + } + + @Override + protected boolean isLimiting() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java new file mode 100644 index 0000000000..0e980fcfaf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java @@ -0,0 +1,233 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
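(A usage sketch for ReactiveStringBasedAggregation, with a hypothetical domain model; the pipeline stages and placeholder binding follow the @Aggregation annotation contract referenced in the class Javadoc:

interface ReactivePersonRepository extends Repository<Person, String> {

	// Single-stage pipeline; ?0 binds the method argument.
	@Aggregation("{ '$match' : { 'lastname' : ?0 } }")
	Flux<Person> findByLastname(String lastname);

	// Simple-type result extracted from the single result Document.
	@Aggregation(pipeline = { "{ '$group' : { '_id' : null, 'total' : { '$sum' : '$age' } } }" })
	Mono<Long> sumAge();
}
)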
+ */ +package org.springframework.data.mongodb.repository.query; + +import reactor.core.publisher.Mono; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; + +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.repository.query.ReactiveExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.NonNull; +import org.springframework.util.Assert; + +/** + * Query to use a plain JSON String to create the {@link Query} to actually execute. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + */ +public class ReactiveStringBasedMongoQuery extends AbstractReactiveMongoQuery { + + private static final String COUNT_EXISTS_AND_DELETE = "Manually defined query for %s cannot be a count and exists or delete query at the same time"; + private static final Log LOG = LogFactory.getLog(ReactiveStringBasedMongoQuery.class); + + private final String query; + private final String fieldSpec; + + private final ValueExpressionParser expressionParser; + + private final boolean isCountQuery; + private final boolean isExistsQuery; + private final boolean isDeleteQuery; + + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link MongoQueryMethod} and + * {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. + */ + @Deprecated(since = "4.4.0") + public ReactiveStringBasedMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ExpressionParser expressionParser, ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider); + } + + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, + * {@link MongoOperations}, {@link SpelExpressionParser} and + * {@link ReactiveExtensionAwareQueryMethodEvaluationContextProvider}. + * + * @param query must not be {@literal null}. + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. 
+ */ + @Deprecated(since = "4.4.0") + public ReactiveStringBasedMongoQuery(String query, ReactiveMongoQueryMethod method, + ReactiveMongoOperations mongoOperations, ExpressionParser expressionParser, + ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); + + Assert.notNull(query, "Query must not be null"); + + this.query = query; + this.expressionParser = ValueExpressionParser.create(() -> expressionParser); + this.fieldSpec = method.getFieldSpecification(); + + if (method.hasAnnotatedQuery()) { + + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); + + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); + + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { + + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; + } + } + + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link MongoQueryMethod}, + * {@link MongoOperations} and {@link ValueExpressionDelegate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 + */ + public ReactiveStringBasedMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + this(method.getAnnotatedQuery(), method, mongoOperations, delegate); + } + + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, + * {@link MongoOperations}, {@link ValueExpressionDelegate}. + * + * @param query must not be {@literal null}. + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public ReactiveStringBasedMongoQuery(@NonNull String query, ReactiveMongoQueryMethod method, + ReactiveMongoOperations mongoOperations, ValueExpressionDelegate delegate) { + + super(method, mongoOperations, delegate); + + Assert.notNull(query, "Query must not be null"); + + this.query = query; + this.expressionParser = delegate.getValueExpressionParser(); + this.fieldSpec = method.getFieldSpecification(); + + if (method.hasAnnotatedQuery()) { + + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); + + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); + + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { + + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; + } + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + + return getCodecRegistry().map(ParameterBindingDocumentCodec::new).flatMap(codec -> { + + Mono queryObject = getBindingContext(query, accessor, codec) + .map(context -> codec.decode(query, context)); + Mono fieldsObject = getBindingContext(fieldSpec, accessor, codec) + .map(context -> codec.decode(fieldSpec, context)); + + return queryObject.zipWith(fieldsObject).map(tuple -> { + + Query query = new BasicQuery(tuple.getT1(), tuple.getT2()).with(accessor.getSort()); + + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject())); + } + + return query; + }); + }); + } + + private Mono getBindingContext(String json, ConvertingParameterAccessor accessor, + ParameterBindingDocumentCodec codec) { + + ExpressionDependencies dependencies = codec.captureExpressionDependencies(json, accessor::getBindableValue, + expressionParser); + + return getValueExpressionEvaluatorLater(dependencies, accessor) + .map(it -> new ParameterBindingContext(accessor::getBindableValue, it)); + } + + @Override + protected boolean isCountQuery() { + return isCountQuery; + } + + @Override + protected boolean isExistsQuery() { + return isExistsQuery; + } + + @Override + protected boolean isDeleteQuery() { + return this.isDeleteQuery; + } + + @Override + protected boolean isLimiting() { + return false; + } + + private static boolean hasAmbiguousProjectionFlags(boolean isCountQuery, boolean isExistsQuery, + boolean isDeleteQuery) { + return BooleanUtil.countBooleanTrueValues(isCountQuery, isExistsQuery, isDeleteQuery) > 1; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java index 888802995e..2c2af25e10 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
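(Usage sketch for ReactiveStringBasedMongoQuery above: the annotated JSON is decoded through ParameterBindingDocumentCodec, so positional placeholders and expression parameters are bound before the query runs; the domain type is hypothetical:

interface ReactivePersonRepository extends Repository<Person, String> {

	// value is the query document, fields the optional projection document.
	@Query(value = "{ 'lastname' : ?0 }", fields = "{ 'firstname' : 1, 'lastname' : 1 }")
	Flux<Person> findByLastname(String lastname);

	// Expression placeholders are detected via captureExpressionDependencies and bound the same way.
	@Query("{ 'lastname' : ?#{[0]} }")
	Flux<Person> findByLastnameExpression(String lastname);
}
)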
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -20,7 +20,7 @@
/**
* Bean based implementation of {@link MongoEntityMetadata}.
- *
+ *
* @author Oliver Gierke
*/
class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
@@ -31,32 +31,28 @@ class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
/**
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and {@link MongoPersistentEntity} to use for
* collection lookups.
- *
+ *
* @param type must not be {@literal null}.
* @param collectionEntity must not be {@literal null} or empty.
*/
public SimpleMongoEntityMetadata(Class<T> type, MongoPersistentEntity<?> collectionEntity) {
- Assert.notNull(type, "Type must not be null!");
- Assert.notNull(collectionEntity, "Collection entity must not be null or empty!");
+ Assert.notNull(type, "Type must not be null");
+ Assert.notNull(collectionEntity, "Collection entity must not be null or empty");
this.type = type;
this.collectionEntity = collectionEntity;
}
- /*
- * (non-Javadoc)
- * @see org.springframework.data.repository.core.EntityMetadata#getJavaType()
- */
public Class<T> getJavaType() {
return type;
}
- /*
- * (non-Javadoc)
- * @see org.springframework.data.mongodb.repository.query.MongoEntityMetadata#getCollectionName()
- */
public String getCollectionName() {
return collectionEntity.getCollection();
}
+
+ public MongoPersistentEntity<?> getCollectionEntity() {
+ return this.collectionEntity;
+ }
} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringAggregationOperation.java new file mode 100644 index 0000000000..724c8f29ef --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringAggregationOperation.java @@ -0,0 +1,61 @@
+/*
+ * Copyright 2024-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.query;
+
+import java.util.function.Function;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.bson.Document;
+import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
+import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
+import org.springframework.lang.Nullable;
+
+/**
+ * String-based aggregation operation for a repository query method.
+ *
+ * @author Christoph Strobl
+ * @since 4.3.1
+ */
+class StringAggregationOperation implements AggregationOperation {
+
+ private static final Pattern OPERATOR_PATTERN = Pattern.compile("\\$\\w+");
+
+ private final String source;
+ private final Class<?> domainType;
+ private final Function<String, Document> bindFunction;
+ private final @Nullable String operator;
+
+ StringAggregationOperation(String source, Class<?> domainType, Function<String, Document> bindFunction) {
+
+ this.source = source;
+ this.domainType = domainType;
+ this.bindFunction = bindFunction;
+
+ Matcher matcher = OPERATOR_PATTERN.matcher(source);
+ this.operator = matcher.find() ? matcher.group() : null;
+ }
+
+ @Override
+ public Document toDocument(AggregationOperationContext context) {
+ return context.getMappedObject(bindFunction.apply(source), domainType);
+ }
+
+ @Override
+ public String getOperator() {
+ return operator != null ? operator : AggregationOperation.super.getOperator();
+ }
+} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java new file mode 100644 index 0000000000..7ad5d78fa6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java @@ -0,0 +1,194 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.query;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Stream;
+
+import org.bson.Document;
+
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.SliceImpl;
+import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
+import org.springframework.data.mongodb.core.MongoOperations;
+import org.springframework.data.mongodb.core.aggregation.AggregationResults;
+import org.springframework.data.mongodb.core.convert.MongoConverter;
+import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
+import org.springframework.data.mongodb.core.query.Query;
+import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
+import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor;
+import org.springframework.data.repository.query.ResultProcessor;
+import org.springframework.data.repository.query.ValueExpressionDelegate;
+import org.springframework.data.util.ReflectionUtils;
+import org.springframework.expression.ExpressionParser;
+import org.springframework.lang.Nullable;
+
+/**
+ * {@link AbstractMongoQuery} implementation to run string-based aggregations using
+ * {@link org.springframework.data.mongodb.repository.Aggregation}.
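(The OPERATOR_PATTERN lookup in StringAggregationOperation simply grabs the first $-prefixed token of the stage; in isolation:

Pattern operatorPattern = Pattern.compile("\\$\\w+");
Matcher matcher = operatorPattern.matcher("{ $match : { 'lastname' : ?0 } }");

String operator = matcher.find() ? matcher.group() : null; // "$match"
)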
+ * + * @author Christoph Strobl + * @author Divya Srivastava + * @author Mark Paluch + * @since 2.2 + */ +public class StringBasedAggregation extends AbstractMongoQuery { + + private final MongoOperations mongoOperations; + private final MongoConverter mongoConverter; + + /** + * Creates a new {@link StringBasedAggregation} from the given {@link MongoQueryMethod} and {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link QueryMethodValueEvaluationContextAccessor} instead. + */ + @Deprecated(since = "4.4.0") + public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOperations, + ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); + + if (method.isPageQuery()) { + throw new InvalidMongoDbApiUsageException(String.format( + "Repository aggregation method '%s' does not support '%s' return type; Please use 'Slice' or 'List' instead", + method.getName(), method.getReturnType().getType().getSimpleName())); + } + + this.mongoOperations = mongoOperations; + this.mongoConverter = mongoOperations.getConverter(); + } + + /** + * Creates a new {@link StringBasedAggregation} from the given {@link MongoQueryMethod} and {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 + */ + public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + super(method, mongoOperations, delegate); + + if (method.isPageQuery()) { + throw new InvalidMongoDbApiUsageException(String.format( + "Repository aggregation method '%s' does not support '%s' return type; Please use 'Slice' or 'List' instead", + method.getName(), method.getReturnType().getType().getSimpleName())); + } + + this.mongoOperations = mongoOperations; + this.mongoConverter = mongoOperations.getConverter(); + } + + @SuppressWarnings("unchecked") + @Override + @Nullable + protected Object doExecute(MongoQueryMethod method, ResultProcessor processor, ConvertingParameterAccessor accessor, + @Nullable Class ignore) { + + return AggregationUtils.doAggregate(AggregationUtils.computePipeline(this, method, accessor), method, processor, + accessor, this::getExpressionEvaluatorFor, + (aggregation, sourceType, typeToRead, elementType, simpleType, rawResult) -> { + + if (method.isStreamQuery()) { + + Stream stream = mongoOperations.aggregateStream(aggregation, typeToRead); + + if (!simpleType || elementType.equals(Document.class)) { + return stream; + } + + return stream + .map(it -> AggregationUtils.extractSimpleTypeResult((Document) it, elementType, mongoConverter)); + } + + AggregationResults result = (AggregationResults) mongoOperations.aggregate(aggregation, + typeToRead); + + if (ReflectionUtils.isVoid(elementType)) { + return null; + } + + if (rawResult) { + return result; + } + + List results = result.getMappedResults(); + if (method.isCollectionQuery()) { + return simpleType ? 
convertResults(elementType, (List) results) : results; + } + + if (method.isSliceQuery()) { + + Pageable pageable = accessor.getPageable(); + int pageSize = pageable.getPageSize(); + List resultsToUse = simpleType ? convertResults(elementType, (List) results) + : (List) results; + boolean hasNext = resultsToUse.size() > pageSize; + return new SliceImpl<>(hasNext ? resultsToUse.subList(0, pageSize) : resultsToUse, pageable, hasNext); + } + + Object uniqueResult = result.getUniqueMappedResult(); + + return simpleType + ? AggregationUtils.extractSimpleTypeResult((Document) uniqueResult, elementType, mongoConverter) + : uniqueResult; + }); + } + + private List convertResults(Class targetType, List mappedResults) { + + List list = new ArrayList<>(mappedResults.size()); + for (Document it : mappedResults) { + Object extractSimpleTypeResult = AggregationUtils.extractSimpleTypeResult(it, targetType, mongoConverter); + list.add(extractSimpleTypeResult); + } + return list; + } + + private boolean isSimpleReturnType(Class targetType) { + return MongoSimpleTypes.HOLDER.isSimpleType(targetType); + } + + @Override + protected Query createQuery(ConvertingParameterAccessor accessor) { + throw new UnsupportedOperationException("No query support for aggregation"); + } + + @Override + protected boolean isCountQuery() { + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; + } + + @Override + protected boolean isDeleteQuery() { + return false; + } + + @Override + protected boolean isLimiting() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java index 33d2e15e7d..abc158f88a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,109 +15,142 @@ */ package org.springframework.data.mongodb.repository.query; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext; -import org.springframework.data.repository.query.EvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.ExpressionParser; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import com.mongodb.DBObject; -import com.mongodb.DBRef; -import com.mongodb.util.JSON; /** * Query to use a plain JSON String to create the {@link Query} to actually execute. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ public class StringBasedMongoQuery extends AbstractMongoQuery { - private static final String COUND_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!"; - private static final Logger LOG = LoggerFactory.getLogger(StringBasedMongoQuery.class); - private static final ParameterBindingParser BINDING_PARSER = ParameterBindingParser.INSTANCE; + private static final String COUNT_EXISTS_AND_DELETE = "Manually defined query for %s cannot be a count and exists or delete query at the same time"; + private static final Log LOG = LogFactory.getLog(StringBasedMongoQuery.class); private final String query; private final String fieldSpec; + private final boolean isCountQuery; + private final boolean isExistsQuery; private final boolean isDeleteQuery; - private final List queryParameterBindings; - private final List fieldSpecParameterBindings; - private final ExpressionEvaluatingParameterBinder parameterBinder; /** - * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}. - * + * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod}, {@link MongoOperations}, + * {@link SpelExpressionParser} and {@link QueryMethodEvaluationContextProvider}. + * * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. * @param expressionParser must not be {@literal null}. * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. 
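+ * <p>
+ * A migration sketch for callers, assuming the default delegate suffices ({@code ValueExpressionDelegate.create()}
+ * is used here for illustration):
+ * <pre>{@code
+ * new StringBasedMongoQuery(method, mongoOperations, ValueExpressionDelegate.create());
+ * }</pre>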
*/ + @Deprecated(since = "4.4.0") public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, - SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) { - this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider); + ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); + + String query = method.getAnnotatedQuery(); + Assert.notNull(query, "Query must not be null"); + + this.query = query; + this.fieldSpec = method.getFieldSpecification(); + + if (method.hasAnnotatedQuery()) { + + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); + + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); + + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { + + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; + } + } + + /** + * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod}, {@link MongoOperations}, + * {@link ValueExpressionDelegate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionSupport must not be {@literal null}. + * @since 4.4.0 + */ + public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, + ValueExpressionDelegate expressionSupport) { + this(method.getAnnotatedQuery(), method, mongoOperations, expressionSupport); } /** * Creates a new {@link StringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, - * {@link MongoOperations}, {@link SpelExpressionParser} and {@link EvaluationContextProvider}. + * {@link MongoOperations}, {@link ValueExpressionDelegate}, {@link QueryMethodValueEvaluationContextAccessor}. * * @param query must not be {@literal null}. * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. - * @param expressionParser must not be {@literal null}. + * @param expressionSupport must not be {@literal null}. + * @since 4.3 */ public StringBasedMongoQuery(String query, MongoQueryMethod method, MongoOperations mongoOperations, - SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) { + ValueExpressionDelegate expressionSupport) { - super(method, mongoOperations); + super(method, mongoOperations, expressionSupport); - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(expressionParser, "SpelExpressionParser must not be null!"); + Assert.notNull(query, "Query must not be null"); - this.queryParameterBindings = new ArrayList(); - this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query, - this.queryParameterBindings); + this.query = query; + this.fieldSpec = method.getFieldSpecification(); - this.fieldSpecParameterBindings = new ArrayList(); - this.fieldSpec = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings( - method.getFieldSpecification(), this.fieldSpecParameterBindings); + if (method.hasAnnotatedQuery()) { - this.isCountQuery = method.hasAnnotatedQuery() ? 
method.getQueryAnnotation().count() : false; - this.isDeleteQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().delete() : false; + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); - if (isCountQuery && isDeleteQuery) { - throw new IllegalArgumentException(String.format(COUND_AND_DELETE, method)); - } + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); + + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { - this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider); + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) - */ @Override protected Query createQuery(ConvertingParameterAccessor accessor) { - String queryString = parameterBinder.bind(this.query, accessor, new BindingContext(getQueryMethod() - .getParameters(), queryParameterBindings)); - String fieldsString = parameterBinder.bind(this.fieldSpec, accessor, new BindingContext(getQueryMethod() - .getParameters(), fieldSpecParameterBindings)); + Document queryObject = decode(this.query, prepareBindingContext(this.query, accessor)); + Document fieldsObject = decode(this.fieldSpec, prepareBindingContext(this.fieldSpec, accessor)); - Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort()); + Query query = new BasicQuery(queryObject, fieldsObject).with(accessor.getSort()); if (LOG.isDebugEnabled()) { LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject())); @@ -126,239 +159,28 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { return query; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() - */ @Override protected boolean isCountQuery() { return isCountQuery; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery() - */ + @Override + protected boolean isExistsQuery() { + return isExistsQuery; + } + @Override protected boolean isDeleteQuery() { return this.isDeleteQuery; } - /** - * A parser that extracts the parameter bindings from a given query string. - * - * @author Thomas Darimont - */ - private static enum ParameterBindingParser { - - INSTANCE; - - private static final String EXPRESSION_PARAM_QUOTE = "'"; - private static final String EXPRESSION_PARAM_PREFIX = "?expr"; - private static final String INDEX_BASED_EXPRESSION_PARAM_START = "?#{"; - private static final String NAME_BASED_EXPRESSION_PARAM_START = ":#{"; - private static final char CURRLY_BRACE_OPEN = '{'; - private static final char CURRLY_BRACE_CLOSE = '}'; - private static final String PARAMETER_PREFIX = "_param_"; - private static final String PARSEABLE_PARAMETER = "\"" + PARAMETER_PREFIX + "$1\""; - private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)"); - private static final Pattern PARSEABLE_BINDING_PATTERN = Pattern.compile("\"?" 
+ PARAMETER_PREFIX + "(\\d+)\"?"); - - private final static int PARAMETER_INDEX_GROUP = 1; - - /** - * Returns a list of {@link ParameterBinding}s found in the given {@code input} or an - * {@link Collections#emptyList()}. - * - * @param input can be {@literal null} or empty. - * @param bindings must not be {@literal null}. - * @return - */ - public String parseAndCollectParameterBindingsFromQueryIntoBindings(String input, List bindings) { - - if (!StringUtils.hasText(input)) { - return input; - } - - Assert.notNull(bindings, "Parameter bindings must not be null!"); - - String transformedInput = transformQueryAndCollectExpressionParametersIntoBindings(input, bindings); - String parseableInput = makeParameterReferencesParseable(transformedInput); - - collectParameterReferencesIntoBindings(bindings, JSON.parse(parseableInput)); - - return transformedInput; - } - - private static String transformQueryAndCollectExpressionParametersIntoBindings(String input, - List bindings) { - - StringBuilder result = new StringBuilder(); - - int startIndex = 0; - int currentPos = 0; - int exprIndex = 0; - - while (currentPos < input.length()) { - - int indexOfExpressionParameter = getIndexOfExpressionParameter(input, currentPos); - - // no expression parameter found - if (indexOfExpressionParameter < 0) { - break; - } - - int exprStart = indexOfExpressionParameter + 3; - currentPos = exprStart; - - // eat parameter expression - int curlyBraceOpenCnt = 1; - - while (curlyBraceOpenCnt > 0) { - switch (input.charAt(currentPos++)) { - case CURRLY_BRACE_OPEN: - curlyBraceOpenCnt++; - break; - case CURRLY_BRACE_CLOSE: - curlyBraceOpenCnt--; - break; - default: - } - } - - result.append(input.subSequence(startIndex, indexOfExpressionParameter)); - result.append(EXPRESSION_PARAM_QUOTE).append(EXPRESSION_PARAM_PREFIX); - result.append(exprIndex); - result.append(EXPRESSION_PARAM_QUOTE); - - bindings.add(new ParameterBinding(exprIndex, true, input.substring(exprStart, currentPos - 1))); - - startIndex = currentPos; - - exprIndex++; - } - - return result.append(input.subSequence(currentPos, input.length())).toString(); - } - - private static String makeParameterReferencesParseable(String input) { - - Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(input); - return matcher.replaceAll(PARSEABLE_PARAMETER); - } - - private static void collectParameterReferencesIntoBindings(List bindings, Object value) { - - if (value instanceof String) { - - String string = ((String) value).trim(); - potentiallyAddBinding(string, bindings); - - } else if (value instanceof Pattern) { - - String string = ((Pattern) value).toString().trim(); - Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string); - - while (valueMatcher.find()) { - - int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP)); - - /* - * The pattern is used as a direct parameter replacement, e.g. 'field': ?1, - * therefore we treat it as not quoted to remain backwards compatible. 
- */ - boolean quoted = !string.equals(PARAMETER_PREFIX + paramIndex); - - bindings.add(new ParameterBinding(paramIndex, quoted)); - } - - } else if (value instanceof DBRef) { - - DBRef dbref = (DBRef) value; - - potentiallyAddBinding(dbref.getCollectionName(), bindings); - potentiallyAddBinding(dbref.getId().toString(), bindings); - - } else if (value instanceof DBObject) { - - DBObject dbo = (DBObject) value; - - for (String field : dbo.keySet()) { - collectParameterReferencesIntoBindings(bindings, field); - collectParameterReferencesIntoBindings(bindings, dbo.get(field)); - } - } - } - - private static void potentiallyAddBinding(String source, List bindings) { - - Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(source); - - while (valueMatcher.find()) { - - int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP)); - boolean quoted = (source.startsWith("'") && source.endsWith("'")) - || (source.startsWith("\"") && source.endsWith("\"")); - - bindings.add(new ParameterBinding(paramIndex, quoted)); - } - } - - private static int getIndexOfExpressionParameter(String input, int position) { - - int indexOfExpressionParameter = input.indexOf(INDEX_BASED_EXPRESSION_PARAM_START, position); - - return indexOfExpressionParameter < 0 ? input.indexOf(NAME_BASED_EXPRESSION_PARAM_START, position) - : indexOfExpressionParameter; - } + @Override + protected boolean isLimiting() { + return false; } - /** - * A generic parameter binding with name or position information. - * - * @author Thomas Darimont - */ - static class ParameterBinding { - - private final int parameterIndex; - private final boolean quoted; - private final String expression; - - /** - * Creates a new {@link ParameterBinding} with the given {@code parameterIndex} and {@code quoted} information. - * - * @param parameterIndex - * @param quoted whether or not the parameter is already quoted. - */ - public ParameterBinding(int parameterIndex, boolean quoted) { - this(parameterIndex, quoted, null); - } - - public ParameterBinding(int parameterIndex, boolean quoted, String expression) { - - this.parameterIndex = parameterIndex; - this.quoted = quoted; - this.expression = expression; - } - - public boolean isQuoted() { - return quoted; - } - - public int getParameterIndex() { - return parameterIndex; - } - - public String getParameter() { - return "?" + (isExpression() ? "expr" : "") + parameterIndex; - } - - public String getExpression() { - return expression; - } - - public boolean isExpression() { - return this.expression != null; - } + private static boolean hasAmbiguousProjectionFlags(boolean isCountQuery, boolean isExistsQuery, + boolean isDeleteQuery) { + return BooleanUtil.countBooleanTrueValues(isCountQuery, isExistsQuery, isDeleteQuery) > 1; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ValueExpressionDelegateValueExpressionEvaluator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ValueExpressionDelegateValueExpressionEvaluator.java new file mode 100644 index 0000000000..c479f3faa9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ValueExpressionDelegateValueExpressionEvaluator.java @@ -0,0 +1,41 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.query;
+
+import java.util.function.Function;
+
+import org.springframework.data.expression.ValueEvaluationContext;
+import org.springframework.data.expression.ValueExpression;
+import org.springframework.data.mapping.model.ValueExpressionEvaluator;
+import org.springframework.data.repository.query.ValueExpressionDelegate;
+
+class ValueExpressionDelegateValueExpressionEvaluator implements ValueExpressionEvaluator {
+
+ private final ValueExpressionDelegate delegate;
+ private final Function<ValueExpression, ValueEvaluationContext> expressionToContext;
+
+ ValueExpressionDelegateValueExpressionEvaluator(ValueExpressionDelegate delegate,
+ Function<ValueExpression, ValueEvaluationContext> expressionToContext) {
+ this.delegate = delegate;
+ this.expressionToContext = expressionToContext;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> T evaluate(String expressionString) {
+ ValueExpression expression = delegate.parse(expressionString);
+ return (T) expression.evaluate(expressionToContext.apply(expression));
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/package-info.java
index f31ea0222d..20c77e22aa 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/package-info.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/package-info.java
@@ -1,5 +1,6 @@
 /**
  * Query derivation mechanism for MongoDB specific repositories.
  */
+@org.springframework.lang.NonNullApi
 package org.springframework.data.mongodb.repository.query;
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadata.java
new file mode 100644
index 0000000000..6f1049e01f
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadata.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2023-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.support;
+
+import java.util.Optional;
+
+import com.mongodb.ReadPreference;
+
+/**
+ * Interface to abstract {@link CrudMethodMetadata} that provides the {@link ReadPreference} to be used for query
+ * execution.
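+ * <p>
+ * The metadata typically originates from an annotated repository method, e.g. (an illustrative declaration, not
+ * part of this interface):
+ * <pre>{@code
+ * @ReadPreference("secondaryPreferred")
+ * List<Person> findByLastname(String lastname);
+ * }</pre>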
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.2 + */ +public interface CrudMethodMetadata { + + /** + * Returns the {@link ReadPreference} to be used. + * + * @return the {@link ReadPreference} to be used. + */ + Optional getReadPreference(); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadataPostProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadataPostProcessor.java new file mode 100644 index 0000000000..f59a995170 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadataPostProcessor.java @@ -0,0 +1,232 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.lang.reflect.AnnotatedElement; +import java.lang.reflect.Method; +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.springframework.aop.TargetSource; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.beans.factory.BeanClassLoaderAware; +import org.springframework.core.NamedThreadLocal; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.data.repository.core.RepositoryInformation; +import org.springframework.data.repository.core.support.RepositoryProxyPostProcessor; +import org.springframework.lang.Nullable; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.ReadPreference; + +/** + * {@link RepositoryProxyPostProcessor} that sets up interceptors to read metadata information from the invoked method. + * This is necessary to allow redeclaration of CRUD methods in repository interfaces and configure read preference + * information or query hints on them. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.2 + */ +class CrudMethodMetadataPostProcessor implements RepositoryProxyPostProcessor, BeanClassLoaderAware { + + private @Nullable ClassLoader classLoader = ClassUtils.getDefaultClassLoader(); + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + @Override + public void postProcess(ProxyFactory factory, RepositoryInformation repositoryInformation) { + factory.addAdvice(new CrudMethodMetadataPopulatingMethodInterceptor(repositoryInformation)); + } + + /** + * Returns a {@link CrudMethodMetadata} proxy that will lookup the actual target object by obtaining a thread bound + * instance from the {@link TransactionSynchronizationManager} later. + */ + CrudMethodMetadata getCrudMethodMetadata() { + + ProxyFactory factory = new ProxyFactory(); + + factory.addInterface(CrudMethodMetadata.class); + factory.setTargetSource(new ThreadBoundTargetSource()); + + return (CrudMethodMetadata) factory.getProxy(this.classLoader); + } + + /** + * {@link MethodInterceptor} to build and cache {@link DefaultCrudMethodMetadata} instances for the invoked methods. + * Will bind the found information to a {@link TransactionSynchronizationManager} for later lookup. + * + * @see DefaultCrudMethodMetadata + */ + static class CrudMethodMetadataPopulatingMethodInterceptor implements MethodInterceptor { + + private static final ThreadLocal currentInvocation = new NamedThreadLocal<>( + "Current AOP method invocation"); + + private final ConcurrentMap metadataCache = new ConcurrentHashMap<>(); + private final Set implementations = new HashSet<>(); + private final RepositoryInformation repositoryInformation; + + CrudMethodMetadataPopulatingMethodInterceptor(RepositoryInformation repositoryInformation) { + + this.repositoryInformation = repositoryInformation; + + ReflectionUtils.doWithMethods(repositoryInformation.getRepositoryInterface(), implementations::add, + method -> !repositoryInformation.isQueryMethod(method)); + } + + /** + * Return the AOP Alliance {@link MethodInvocation} object associated with the current invocation. + * + * @return the invocation object associated with the current invocation. + * @throws IllegalStateException if there is no AOP invocation in progress, or if the + * {@link CrudMethodMetadataPopulatingMethodInterceptor} was not added to this interceptor chain. 
+ */ + static MethodInvocation currentInvocation() throws IllegalStateException { + + MethodInvocation invocation = currentInvocation.get(); + + if (invocation != null) { + return invocation; + } + + throw new IllegalStateException( + "No MethodInvocation found: Check that an AOP invocation is in progress, and that the " + + "CrudMethodMetadataPopulatingMethodInterceptor is upfront in the interceptor chain."); + } + + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + + Method method = invocation.getMethod(); + + if (!implementations.contains(method)) { + return invocation.proceed(); + } + + MethodInvocation oldInvocation = currentInvocation.get(); + currentInvocation.set(invocation); + + try { + + CrudMethodMetadata metadata = (CrudMethodMetadata) TransactionSynchronizationManager.getResource(method); + + if (metadata != null) { + return invocation.proceed(); + } + + CrudMethodMetadata methodMetadata = metadataCache.get(method); + + if (methodMetadata == null) { + + methodMetadata = new DefaultCrudMethodMetadata(repositoryInformation.getRepositoryInterface(), method); + CrudMethodMetadata tmp = metadataCache.putIfAbsent(method, methodMetadata); + + if (tmp != null) { + methodMetadata = tmp; + } + } + + TransactionSynchronizationManager.bindResource(method, methodMetadata); + + try { + return invocation.proceed(); + } finally { + TransactionSynchronizationManager.unbindResource(method); + } + } finally { + currentInvocation.set(oldInvocation); + } + } + } + + /** + * Default implementation of {@link CrudMethodMetadata} that will inspect the backing method for annotations. + */ + static class DefaultCrudMethodMetadata implements CrudMethodMetadata { + + private final Optional readPreference; + + /** + * Creates a new {@link DefaultCrudMethodMetadata} for the given {@link Method}. + * + * @param repositoryInterface the target repository interface. + * @param method must not be {@literal null}. + */ + DefaultCrudMethodMetadata(Class repositoryInterface, Method method) { + + Assert.notNull(repositoryInterface, "Repository interface must not be null"); + Assert.notNull(method, "Method must not be null"); + + this.readPreference = findReadPreference(method, repositoryInterface); + } + + private static Optional findReadPreference(AnnotatedElement... 
annotatedElements) { + + for (AnnotatedElement element : annotatedElements) { + + org.springframework.data.mongodb.repository.ReadPreference preference = AnnotatedElementUtils + .findMergedAnnotation(element, org.springframework.data.mongodb.repository.ReadPreference.class); + + if (preference != null) { + return Optional.of(com.mongodb.ReadPreference.valueOf(preference.value())); + } + } + + return Optional.empty(); + } + + @Override + public Optional getReadPreference() { + return readPreference; + } + } + + private static class ThreadBoundTargetSource implements TargetSource { + + @Override + public Class getTargetClass() { + return CrudMethodMetadata.class; + } + + @Override + public boolean isStatic() { + return false; + } + + @Override + public Object getTarget() { + + MethodInvocation invocation = CrudMethodMetadataPopulatingMethodInterceptor.currentInvocation(); + return TransactionSynchronizationManager.getResource(invocation.getMethod()); + } + + @Override + public void releaseTarget(Object target) {} + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/FetchableFluentQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/FetchableFluentQuerySupport.java new file mode 100644 index 0000000000..8590768b8b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/FetchableFluentQuerySupport.java @@ -0,0 +1,116 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +/** + * Support class for {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} implementations. 
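+ * <p>
+ * An illustrative fluent call exercising the projection, sort and limit state captured here (repository, predicate
+ * and property names are assumptions, not part of this change):
+ * <pre>{@code
+ * List<Person> people = repository.findBy(person.lastname.eq("Matthews"),
+ *     query -> query.project("firstname").sortBy(Sort.by("firstname")).limit(10).all());
+ * }</pre>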
+ *
+ * @author Mark Paluch
+ * @since 3.3
+ */
+abstract class FetchableFluentQuerySupport<P, T> implements FluentQuery.FetchableFluentQuery<T> {
+
+ private final P predicate;
+ private final Sort sort;
+
+ private final int limit;
+
+ private final Class<T> resultType;
+ private final List<String> fieldsToInclude;
+
+ FetchableFluentQuerySupport(P predicate, Sort sort, int limit, Class<T> resultType, List<String> fieldsToInclude) {
+ this.predicate = predicate;
+ this.sort = sort;
+ this.limit = limit;
+ this.resultType = resultType;
+ this.fieldsToInclude = fieldsToInclude;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery#sortBy(org.springframework.data.domain.Sort)
+ */
+ @Override
+ public FluentQuery.FetchableFluentQuery<T> sortBy(Sort sort) {
+
+ Assert.notNull(sort, "Sort must not be null");
+
+ return create(predicate, sort, limit, resultType, fieldsToInclude);
+ }
+
+ @Override
+ public FluentQuery.FetchableFluentQuery<T> limit(int limit) {
+
+ Assert.isTrue(limit > 0, "Limit must be greater than zero");
+
+ return create(predicate, sort, limit, resultType, fieldsToInclude);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery#as(java.lang.Class)
+ */
+ @Override
+ public <R> FluentQuery.FetchableFluentQuery<R> as(Class<R> projection) {
+
+ Assert.notNull(projection, "Projection target type must not be null");
+
+ return create(predicate, sort, limit, projection, fieldsToInclude);
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery#project(java.util.Collection)
+ */
+ @Override
+ public FluentQuery.FetchableFluentQuery<T> project(Collection<String> properties) {
+
+ Assert.notNull(properties, "Projection properties must not be null");
+
+ return create(predicate, sort, limit, resultType, new ArrayList<>(properties));
+ }
+
+ protected abstract <R> FetchableFluentQuerySupport<P, R> create(P predicate, Sort sort, int limit,
+ Class<R> resultType, List<String> fieldsToInclude);
+
+ P getPredicate() {
+ return predicate;
+ }
+
+ Sort getSort() {
+ return sort;
+ }
+
+ int getLimit() {
+ return limit;
+ }
+
+ Class<T> getResultType() {
+ return resultType;
+ }
+
+ List<String> getFieldsToInclude() {
+ return fieldsToInclude;
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java
index 4ae998db9d..23f161890d 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2013 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,21 @@ */ package org.springframework.data.mongodb.repository.support; -import java.util.Arrays; -import java.util.HashSet; +import java.lang.reflect.Field; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.index.IndexOperationsProvider; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.repository.query.MongoEntityMetadata; import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; import org.springframework.data.repository.core.support.QueryCreationListener; @@ -32,70 +37,124 @@ import org.springframework.data.repository.query.parser.Part.Type; import org.springframework.data.repository.query.parser.PartTree; import org.springframework.util.Assert; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.MongoException; /** * {@link QueryCreationListener} inspecting {@link PartTreeMongoQuery}s and creating an index for the properties it * refers to. - * + * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ class IndexEnsuringQueryCreationListener implements QueryCreationListener { - private static final Set GEOSPATIAL_TYPES = new HashSet(Arrays.asList(Type.NEAR, Type.WITHIN)); - private static final Logger LOG = LoggerFactory.getLogger(IndexEnsuringQueryCreationListener.class); + private static final Set GEOSPATIAL_TYPES = Set.of(Type.NEAR, Type.WITHIN); + private static final Log LOG = LogFactory.getLog(IndexEnsuringQueryCreationListener.class); - private final MongoOperations operations; + private final IndexOperationsProvider indexOperationsProvider; /** * Creates a new {@link IndexEnsuringQueryCreationListener} using the given {@link MongoOperations}. - * - * @param operations must not be {@literal null}. + * + * @param indexOperationsProvider must not be {@literal null}. 
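+ * <p>
+ * For example, creating a query from a derived finder such as {@code findByLastnameAndFirstname(String, String)}
+ * makes this listener ensure a compound index on {@code lastname} and {@code firstname}, named after the
+ * repository method (the method name is chosen for illustration).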
*/
- public IndexEnsuringQueryCreationListener(MongoOperations operations) {
+ public IndexEnsuringQueryCreationListener(IndexOperationsProvider indexOperationsProvider) {
- Assert.notNull(operations);
- this.operations = operations;
+ Assert.notNull(indexOperationsProvider, "IndexOperationsProvider must not be null");
+ this.indexOperationsProvider = indexOperationsProvider;
}
- /*
- * (non-Javadoc)
- * @see org.springframework.data.repository.core.support.QueryCreationListener#onCreation(org.springframework.data.repository.query.RepositoryQuery)
- */
public void onCreation(PartTreeMongoQuery query) {
PartTree tree = query.getTree();
+
+ if (!tree.hasPredicate()) {
+ return;
+ }
+
Index index = new Index();
index.named(query.getQueryMethod().getName());
Sort sort = tree.getSort();
for (Part part : tree.getParts()) {
+
if (GEOSPATIAL_TYPES.contains(part.getType())) {
return;
}
+ if (isIndexOnUnwrappedType(part)) {
+ return;
+ }
+
String property = part.getProperty().toDotPath();
Direction order = toDirection(sort, property);
index.on(property, order);
}
// Add fixed sorting criteria to index
- if (sort != null) {
- for (Sort.Order order : sort) {
+ if (sort.isSorted()) {
+ for (Order order : sort) {
index.on(order.getProperty(), order.getDirection());
}
}
+ if (query.getQueryMethod().hasAnnotatedCollation()) {
+
+ String collation = query.getQueryMethod().getAnnotatedCollation();
+ if (!collation.contains("?")) {
+ index = index.collation(Collation.parse(collation));
+ }
+ }
+
MongoEntityMetadata<?> metadata = query.getQueryMethod().getEntityInformation();
- operations.indexOps(metadata.getCollectionName()).ensureIndex(index);
- LOG.debug(String.format("Created %s!", index));
+ try {
+ indexOperationsProvider.indexOps(metadata.getCollectionName(), metadata.getJavaType()).ensureIndex(index);
+ } catch (DataIntegrityViolationException e) {
+
+ if (e.getCause() instanceof MongoException mongoException) {
+
+ /*
+ * As of MongoDB 4.2, index creation raises an error when creating an index for the very same keys under a
+ * different name, whereas previous versions silently ignored this.
+ * Because an index is by default named after the repository finder method, it is not uncommon that an index
+ * for the very same property combination already exists under a different name.
+ * That is why the error is ignored here.
+ *
+ * For details please see: https://docs.mongodb.com/master/release-notes/4.2-compatibility/#indexes
+ */
+ if (mongoException.getCode() != 85) {
+ throw e;
+ }
+ }
+ }
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(String.format("Created %s", index));
+ }
+ }
+
+ public boolean isIndexOnUnwrappedType(Part part) {
+
+ // TODO we could do it for nested fields in the
+ Field field = ReflectionUtils.findField(part.getProperty().getOwningType().getType(),
+ part.getProperty().getSegment());
+
+ if (field == null) {
+ return false;
+ }
+
+ return AnnotatedElementUtils.hasAnnotation(field, Unwrapped.class);
}
private static Direction toDirection(Sort sort, String property) {
- if (sort == null) {
+ if (sort.isUnsorted()) {
return Direction.DESC;
}
- org.springframework.data.domain.Sort.Order order = sort.getOrderFor(property);
+ Order order = sort.getOrderFor(property);
return order == null ? Direction.DESC : order.isAscending() ?
Direction.ASC : Direction.DESC; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java index 7d6191fbd0..1d876289be 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,33 @@ */ package org.springframework.data.mongodb.repository.support; -import java.io.Serializable; - import org.bson.types.ObjectId; +import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; import org.springframework.data.repository.core.support.PersistentEntityInformation; +import org.springframework.lang.Nullable; /** * {@link MongoEntityInformation} implementation using a {@link MongoPersistentEntity} instance to lookup the necessary * information. Can be configured with a custom collection to be returned which will trump the one returned by the * {@link MongoPersistentEntity} if given. - * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ -public class MappingMongoEntityInformation extends PersistentEntityInformation +public class MappingMongoEntityInformation extends PersistentEntityInformation implements MongoEntityInformation { private final MongoPersistentEntity entityMetadata; - private final String customCollectionName; + private final @Nullable String customCollectionName; private final Class fallbackIdType; /** * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity}. - * + * * @param entity must not be {@literal null}. */ public MappingMongoEntityInformation(MongoPersistentEntity entity) { @@ -49,18 +51,18 @@ public MappingMongoEntityInformation(MongoPersistentEntity entity) { /** * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity} and fallback * identifier type. - * + * * @param entity must not be {@literal null}. * @param fallbackIdType can be {@literal null}. */ - public MappingMongoEntityInformation(MongoPersistentEntity entity, Class fallbackIdType) { - this(entity, (String) null, fallbackIdType); + public MappingMongoEntityInformation(MongoPersistentEntity entity, @Nullable Class fallbackIdType) { + this(entity, null, fallbackIdType); } /** * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity} and custom * collection name. - * + * * @param entity must not be {@literal null}. * @param customCollectionName can be {@literal null}. 
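+ * <p>
+ * For instance, {@code new MappingMongoEntityInformation<>(entity, "people")} makes repositories read and write
+ * the {@code people} collection even if the entity metadata maps to a different one (the collection name is chosen
+ * for illustration).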
*/ @@ -71,14 +73,14 @@ public MappingMongoEntityInformation(MongoPersistentEntity entity, String cus /** * Creates a new {@link MappingMongoEntityInformation} for the given {@link MongoPersistentEntity}, collection name * and identifier type. - * + * * @param entity must not be {@literal null}. * @param customCollectionName can be {@literal null}. * @param idType can be {@literal null}. */ @SuppressWarnings("unchecked") - private MappingMongoEntityInformation(MongoPersistentEntity entity, String customCollectionName, - Class idType) { + private MappingMongoEntityInformation(MongoPersistentEntity entity, @Nullable String customCollectionName, + @Nullable Class idType) { super(entity); @@ -87,32 +89,44 @@ private MappingMongoEntityInformation(MongoPersistentEntity entity, String cu this.fallbackIdType = idType != null ? idType : (Class) ObjectId.class; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoEntityInformation#getCollectionName() - */ public String getCollectionName() { return customCollectionName == null ? entityMetadata.getCollection() : customCollectionName; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoEntityInformation#getIdAttribute() - */ public String getIdAttribute() { - return entityMetadata.getIdProperty().getName(); + return entityMetadata.hasIdProperty() ? entityMetadata.getRequiredIdProperty().getName() : "_id"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.PersistentEntityInformation#getIdType() - */ @Override - @SuppressWarnings("unchecked") public Class getIdType() { if (this.entityMetadata.hasIdProperty()) { return super.getIdType(); } - return fallbackIdType != null ? fallbackIdType : (Class) ObjectId.class; + return fallbackIdType; } + + @Override + public boolean isVersioned() { + return this.entityMetadata.hasVersionProperty(); + } + + @Override + public Object getVersion(T entity) { + + if (!isVersioned()) { + return null; + } + + PersistentPropertyAccessor accessor = this.entityMetadata.getPropertyAccessor(entity); + + return accessor.getProperty(this.entityMetadata.getRequiredVersionProperty()); + } + + @Nullable + public Collation getCollation() { + return this.entityMetadata.getCollation(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java index aa036659b5..3c029ee5aa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,6 +24,7 @@ import javax.tools.Diagnostic; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.lang.Nullable; import com.querydsl.apt.AbstractQuerydslProcessor; import com.querydsl.apt.Configuration; @@ -36,25 +37,22 @@ /** * Annotation processor to create Querydsl query types for QueryDsl annotated classes. - * + * * @author Oliver Gierke + * @author Owen Q */ -@SupportedAnnotationTypes({ "com.mysema.query.annotations.*", "org.springframework.data.mongodb.core.mapping.*" }) +@SupportedAnnotationTypes({ "com.querydsl.core.annotations.*", "org.springframework.data.mongodb.core.mapping.*" }) @SupportedSourceVersion(SourceVersion.RELEASE_6) public class MongoAnnotationProcessor extends AbstractQuerydslProcessor { - /* - * (non-Javadoc) - * @see com.mysema.query.apt.AbstractQuerydslProcessor#createConfiguration(javax.annotation.processing.RoundEnvironment) - */ @Override - protected Configuration createConfiguration(RoundEnvironment roundEnv) { + protected Configuration createConfiguration(@Nullable RoundEnvironment roundEnv) { processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "Running " + getClass().getSimpleName()); - DefaultConfiguration configuration = new DefaultConfiguration(roundEnv, processingEnv.getOptions(), - Collections. emptySet(), QueryEntities.class, Document.class, QuerySupertype.class, - QueryEmbeddable.class, QueryEmbedded.class, QueryTransient.class); + DefaultConfiguration configuration = new DefaultConfiguration(processingEnv, roundEnv, Collections.emptySet(), + QueryEntities.class, Document.class, QuerySupertype.class, QueryEmbeddable.class, QueryEmbedded.class, + QueryTransient.class); configuration.setUnknownAsEmbedded(true); return configuration; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java new file mode 100644 index 0000000000..d0a3f7a1e4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java @@ -0,0 +1,50 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Support class responsible for creating {@link MongoEntityInformation} instances for a given + * {@link MongoPersistentEntity}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 1.10 + */ +final class MongoEntityInformationSupport { + + private MongoEntityInformationSupport() {} + + /** + * Factory method for creating {@link MongoEntityInformation}. + * + * @param entity must not be {@literal null}. + * @param idType can be {@literal null}. + * @return never {@literal null}. + */ + @SuppressWarnings("unchecked") + static MongoEntityInformation entityInformationFor(MongoPersistentEntity entity, + @Nullable Class idType) { + + Assert.notNull(entity, "Entity must not be null"); + + return new MappingMongoEntityInformation<>((MongoPersistentEntity) entity, (Class) idType); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java index 03d75f2c82..baf069c3a4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,15 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.springframework.data.querydsl.QueryDslUtils.*; +import static org.springframework.data.querydsl.QuerydslUtils.*; import java.io.Serializable; import java.lang.reflect.Method; +import java.util.Optional; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.MappingException; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -29,143 +31,165 @@ import org.springframework.data.mongodb.repository.query.MongoEntityInformation; import org.springframework.data.mongodb.repository.query.MongoQueryMethod; import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.StringBasedAggregation; import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery; import org.springframework.data.projection.ProjectionFactory; -import org.springframework.data.querydsl.QueryDslPredicateExecutor; +import org.springframework.data.querydsl.QuerydslPredicateExecutor; import org.springframework.data.repository.core.NamedQueries; import org.springframework.data.repository.core.RepositoryInformation; import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.RepositoryComposition.RepositoryFragments; import org.springframework.data.repository.core.support.RepositoryFactorySupport; -import org.springframework.data.repository.query.EvaluationContextProvider; import org.springframework.data.repository.query.QueryLookupStrategy; import org.springframework.data.repository.query.QueryLookupStrategy.Key; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; import org.springframework.data.repository.query.RepositoryQuery; -import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** * Factory to create {@link MongoRepository} instances. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ public class MongoRepositoryFactory extends RepositoryFactorySupport { - private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); - + private final CrudMethodMetadataPostProcessor crudMethodMetadataPostProcessor = new CrudMethodMetadataPostProcessor(); private final MongoOperations operations; private final MappingContext, MongoPersistentProperty> mappingContext; + @Nullable private QueryMethodValueEvaluationContextAccessor accessor; /** * Creates a new {@link MongoRepositoryFactory} with the given {@link MongoOperations}. - * + * * @param mongoOperations must not be {@literal null}. 
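+ * <p>
+ * Typical standalone bootstrap (repository type and template name are illustrative):
+ * <pre>{@code
+ * MongoRepositoryFactory factory = new MongoRepositoryFactory(mongoTemplate);
+ * PersonRepository repository = factory.getRepository(PersonRepository.class);
+ * }</pre>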
*/ public MongoRepositoryFactory(MongoOperations mongoOperations) { - Assert.notNull(mongoOperations); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.operations = mongoOperations; this.mappingContext = mongoOperations.getConverter().getMappingContext(); + + addRepositoryProxyPostProcessor(crudMethodMetadataPostProcessor); + } + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + + super.setBeanClassLoader(classLoader); + crudMethodMetadataPostProcessor.setBeanClassLoader(classLoader); + } + + @Override + protected ProjectionFactory getProjectionFactory(ClassLoader classLoader, BeanFactory beanFactory) { + return this.operations.getConverter().getProjectionFactory(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getRepositoryBaseClass(org.springframework.data.repository.core.RepositoryMetadata) - */ @Override protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { + return SimpleMongoRepository.class; + } + + @Override + protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata) { + return getRepositoryFragments(metadata, operations); + } + + /** + * Creates {@link RepositoryFragments} based on {@link RepositoryMetadata} to add Mongo-specific extensions. Typically + * adds a {@link QuerydslMongoPredicateExecutor} if the repository interface uses Querydsl. + *
<p>
                    + * Can be overridden by subclasses to customize {@link RepositoryFragments}. + * + * @param metadata repository metadata. + * @param operations the MongoDB operations manager. + * @return + * @since 3.2.1 + */ + protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata, MongoOperations operations) { boolean isQueryDslRepository = QUERY_DSL_PRESENT - && QueryDslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); + && QuerydslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); + + if (isQueryDslRepository) { - return isQueryDslRepository ? QueryDslMongoRepository.class : SimpleMongoRepository.class; + if (metadata.isReactiveRepository()) { + throw new InvalidDataAccessApiUsageException( + "Cannot combine Querydsl and reactive repository support in a single interface"); + } + + return RepositoryFragments + .just(new QuerydslMongoPredicateExecutor<>(getEntityInformation(metadata.getDomainType()), operations)); + } + + return RepositoryFragments.empty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryInformation) - */ @Override protected Object getTargetRepository(RepositoryInformation information) { MongoEntityInformation entityInformation = getEntityInformation(information.getDomainType(), information); - return getTargetRepositoryViaReflection(information, entityInformation, operations); + Object targetRepository = getTargetRepositoryViaReflection(information, entityInformation, operations); + + if (targetRepository instanceof SimpleMongoRepository repository) { + repository.setRepositoryMethodMetadata(crudMethodMetadataPostProcessor.getCrudMethodMetadata()); + } + + return targetRepository; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key, org.springframework.data.repository.query.EvaluationContextProvider) - */ @Override - protected QueryLookupStrategy getQueryLookupStrategy(Key key, EvaluationContextProvider evaluationContextProvider) { - return new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext); + protected Optional getQueryLookupStrategy(@Nullable Key key, + ValueExpressionDelegate valueExpressionDelegate) { + return Optional.of(new MongoQueryLookupStrategy(operations, mappingContext, valueExpressionDelegate)); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getEntityInformation(java.lang.Class) - */ - public MongoEntityInformation getEntityInformation(Class domainClass) { + public MongoEntityInformation getEntityInformation(Class domainClass) { return getEntityInformation(domainClass, null); } - @SuppressWarnings("unchecked") - private MongoEntityInformation getEntityInformation(Class domainClass, - RepositoryInformation information) { - - MongoPersistentEntity entity = mappingContext.getPersistentEntity(domainClass); + private MongoEntityInformation getEntityInformation(Class domainClass, + @Nullable RepositoryMetadata metadata) { - if (entity == null) { - throw new MappingException( - String.format("Could not lookup mapping metadata for domain class %s!", domainClass.getName())); - } - - return new MappingMongoEntityInformation((MongoPersistentEntity) entity, - information != null ? 
(Class) information.getIdType() : null); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(domainClass); + return MongoEntityInformationSupport. entityInformationFor(entity, + metadata != null ? metadata.getIdType() : null); } /** * {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances. - * + * * @author Oliver Gierke * @author Thomas Darimont */ - private static class MongoQueryLookupStrategy implements QueryLookupStrategy { - - private final MongoOperations operations; - private final EvaluationContextProvider evaluationContextProvider; - MappingContext, MongoPersistentProperty> mappingContext; + private record MongoQueryLookupStrategy(MongoOperations operations, + MappingContext, MongoPersistentProperty> mappingContext, + ValueExpressionDelegate expressionSupport) implements QueryLookupStrategy { - public MongoQueryLookupStrategy(MongoOperations operations, EvaluationContextProvider evaluationContextProvider, - MappingContext, MongoPersistentProperty> mappingContext) { - - this.operations = operations; - this.evaluationContextProvider = evaluationContextProvider; - this.mappingContext = mappingContext; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries) - */ @Override public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory, NamedQueries namedQueries) { MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, mappingContext); + queryMethod.verify(); + String namedQueryName = queryMethod.getNamedQueryName(); if (namedQueries.hasQuery(namedQueryName)) { String namedQuery = namedQueries.getQuery(namedQueryName); - return new StringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER, - evaluationContextProvider); + return new StringBasedMongoQuery(namedQuery, queryMethod, operations, expressionSupport); + } else if (queryMethod.hasAnnotatedAggregation()) { + return new StringBasedAggregation(queryMethod, operations, expressionSupport); } else if (queryMethod.hasAnnotatedQuery()) { - return new StringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider); + return new StringBasedMongoQuery(queryMethod, operations, expressionSupport); } else { - return new PartTreeMongoQuery(queryMethod, operations); + return new PartTreeMongoQuery(queryMethod, operations, expressionSupport); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java index abaee0270a..c98d38c5f5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
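The `resolveQuery(...)` method of `MongoQueryLookupStrategy` shown above checks for a named query first, then prefers `@Aggregation` over `@Query`, and finally falls back to query derivation. A sketch of repository methods hitting each branch, assuming a hypothetical `Person` document:

[source,java]
----
import java.util.List;

import org.bson.Document;

import org.springframework.data.mongodb.repository.Aggregation;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;

interface PersonRepository extends MongoRepository<Person, String> {

	// Derived from the method name -> PartTreeMongoQuery.
	List<Person> findByLastname(String lastname);

	// Annotated query -> StringBasedMongoQuery.
	@Query("{ 'lastname' : ?0 }")
	List<Person> findWithQueryByLastname(String lastname);

	// Annotated aggregation -> StringBasedAggregation.
	@Aggregation("{ '$group' : { '_id' : '$lastname', 'names' : { '$sum' : 1 } } }")
	List<Document> countByLastname();
}
----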
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,23 +23,33 @@ import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport; import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** * {@link org.springframework.beans.factory.FactoryBean} to create {@link MongoRepository} instances. - * + * * @author Oliver Gierke */ -public class MongoRepositoryFactoryBean, S, ID extends Serializable> extends - RepositoryFactoryBeanSupport { +public class MongoRepositoryFactoryBean, S, ID extends Serializable> + extends RepositoryFactoryBeanSupport { - private MongoOperations operations; + private @Nullable MongoOperations operations; private boolean createIndexesForQueryMethods = false; private boolean mappingContextConfigured = false; + /** + * Creates a new {@link MongoRepositoryFactoryBean} for the given repository interface. + * + * @param repositoryInterface must not be {@literal null}. + */ + public MongoRepositoryFactoryBean(Class repositoryInterface) { + super(repositoryInterface); + } + /** * Configures the {@link MongoOperations} to be used. - * + * * @param operations the operations to set */ public void setMongoOperations(MongoOperations operations) { @@ -48,38 +58,28 @@ public void setMongoOperations(MongoOperations operations) { /** * Configures whether to automatically create indexes for the properties referenced in a query method. - * + * * @param createIndexesForQueryMethods the createIndexesForQueryMethods to set */ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods) { this.createIndexesForQueryMethods = createIndexesForQueryMethods; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) - */ @Override - protected void setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; } - /* - * (non-Javadoc) - * - * @see - * org.springframework.data.repository.support.RepositoryFactoryBeanSupport - * #createRepositoryFactory() - */ @Override - protected final RepositoryFactorySupport createRepositoryFactory() { + protected RepositoryFactorySupport createRepositoryFactory() { RepositoryFactorySupport factory = getFactoryInstance(operations); if (createIndexesForQueryMethods) { - factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(operations)); + factory.addQueryCreationListener( + new IndexEnsuringQueryCreationListener((collectionName, javaType) -> operations.indexOps(javaType))); } return factory; @@ -87,7 +87,7 @@ protected final RepositoryFactorySupport createRepositoryFactory() { /** * Creates and initializes a {@link RepositoryFactorySupport} instance. 
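Setting `createIndexesForQueryMethods` to `true` on the factory bean registers the `IndexEnsuringQueryCreationListener`, so properties referenced by derived queries get indexes created eagerly. A manual wiring sketch (statements shown as a fragment), assuming the hypothetical `PersonRepository` from before and an existing `mongoTemplate` bean:

[source,java]
----
MongoRepositoryFactoryBean<PersonRepository, Person, String> factoryBean =
		new MongoRepositoryFactoryBean<>(PersonRepository.class);

factoryBean.setMongoOperations(mongoTemplate);
factoryBean.setCreateIndexesForQueryMethods(true);
factoryBean.afterPropertiesSet(); // validates state and initializes the factory

PersonRepository repository = factoryBean.getObject();
----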
- * + * * @param operations * @return */ @@ -95,18 +95,11 @@ protected RepositoryFactorySupport getFactoryInstance(MongoOperations operations return new MongoRepositoryFactory(operations); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.data.repository.support.RepositoryFactoryBeanSupport - * #afterPropertiesSet() - */ @Override public void afterPropertiesSet() { super.afterPropertiesSet(); - Assert.notNull(operations, "MongoTemplate must not be null!"); + Assert.state(operations != null, "MongoTemplate must not be null"); if (!mappingContextConfigured) { setMappingContext(operations.getConverter().getMappingContext()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QueryDslMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QueryDslMongoRepository.java deleted file mode 100644 index 9f759617ca..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QueryDslMongoRepository.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import java.io.Serializable; -import java.util.List; - -import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; -import org.springframework.data.domain.Sort.Order; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.data.querydsl.EntityPathResolver; -import org.springframework.data.querydsl.QSort; -import org.springframework.data.querydsl.QueryDslPredicateExecutor; -import org.springframework.data.querydsl.SimpleEntityPathResolver; -import org.springframework.data.repository.core.EntityInformation; -import org.springframework.data.repository.core.EntityMetadata; -import org.springframework.util.Assert; - -import com.querydsl.core.types.EntityPath; -import com.querydsl.core.types.Expression; -import com.querydsl.core.types.OrderSpecifier; -import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.PathBuilder; -import com.querydsl.mongodb.AbstractMongodbQuery; - -/** - * Special QueryDsl based repository implementation that allows execution {@link Predicate}s in various forms. - * - * @author Oliver Gierke - * @author Thomas Darimont - */ -public class QueryDslMongoRepository extends SimpleMongoRepository - implements QueryDslPredicateExecutor { - - private final PathBuilder builder; - private final EntityInformation entityInformation; - private final MongoOperations mongoOperations; - - /** - * Creates a new {@link QueryDslMongoRepository} for the given {@link EntityMetadata} and {@link MongoTemplate}. 
Uses - * the {@link SimpleEntityPathResolver} to create an {@link EntityPath} for the given domain class. - * - * @param entityInformation must not be {@literal null}. - * @param mongoOperations must not be {@literal null}. - */ - public QueryDslMongoRepository(MongoEntityInformation entityInformation, MongoOperations mongoOperations) { - this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE); - } - - /** - * Creates a new {@link QueryDslMongoRepository} for the given {@link MongoEntityInformation}, {@link MongoTemplate} - * and {@link EntityPathResolver}. - * - * @param entityInformation must not be {@literal null}. - * @param mongoOperations must not be {@literal null}. - * @param resolver must not be {@literal null}. - */ - public QueryDslMongoRepository(MongoEntityInformation entityInformation, MongoOperations mongoOperations, - EntityPathResolver resolver) { - - super(entityInformation, mongoOperations); - - Assert.notNull(resolver); - EntityPath path = resolver.createPath(entityInformation.getJavaType()); - - this.builder = new PathBuilder(path.getType(), path.getMetadata()); - this.entityInformation = entityInformation; - this.mongoOperations = mongoOperations; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findOne(com.mysema.query.types.Predicate) - */ - @Override - public T findOne(Predicate predicate) { - return createQueryFor(predicate).fetchOne(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate) - */ - @Override - public List findAll(Predicate predicate) { - return createQueryFor(predicate).fetchResults().getResults(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate, com.mysema.query.types.OrderSpecifier[]) - */ - @Override - public List findAll(Predicate predicate, OrderSpecifier... orders) { - return createQueryFor(predicate).orderBy(orders).fetchResults().getResults(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate, org.springframework.data.domain.Sort) - */ - @Override - public List findAll(Predicate predicate, Sort sort) { - return applySorting(createQueryFor(predicate), sort).fetchResults().getResults(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.OrderSpecifier[]) - */ - @Override - public Iterable findAll(OrderSpecifier... 
orders) { - return createQuery().orderBy(orders).fetchResults().getResults(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate, org.springframework.data.domain.Pageable) - */ - @Override - public Page findAll(Predicate predicate, Pageable pageable) { - - AbstractMongodbQuery> countQuery = createQueryFor(predicate); - AbstractMongodbQuery> query = createQueryFor(predicate); - - return new PageImpl(applyPagination(query, pageable).fetchResults().getResults(), pageable, - countQuery.fetchCount()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.support.SimpleMongoRepository#findAll(org.springframework.data.domain.Pageable) - */ - @Override - public Page findAll(Pageable pageable) { - - AbstractMongodbQuery> countQuery = createQuery(); - AbstractMongodbQuery> query = createQuery(); - - return new PageImpl(applyPagination(query, pageable).fetchResults().getResults(), pageable, - countQuery.fetchCount()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.support.SimpleMongoRepository#findAll(org.springframework.data.domain.Sort) - */ - @Override - public List findAll(Sort sort) { - return applySorting(createQuery(), sort).fetchResults().getResults(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#count(com.mysema.query.types.Predicate) - */ - @Override - public long count(Predicate predicate) { - return createQueryFor(predicate).fetchCount(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QueryDslPredicateExecutor#exists(com.mysema.query.types.Predicate) - */ - @Override - public boolean exists(Predicate predicate) { - return createQueryFor(predicate).fetchCount() > 0; - } - - /** - * Creates a {@link MongodbQuery} for the given {@link Predicate}. - * - * @param predicate - * @return - */ - private AbstractMongodbQuery> createQueryFor(Predicate predicate) { - return createQuery().where(predicate); - } - - /** - * Creates a {@link MongodbQuery}. - * - * @return - */ - private AbstractMongodbQuery> createQuery() { - return new SpringDataMongodbQuery(mongoOperations, entityInformation.getJavaType()); - } - - /** - * Applies the given {@link Pageable} to the given {@link MongodbQuery}. - * - * @param query - * @param pageable - * @return - */ - private AbstractMongodbQuery> applyPagination( - AbstractMongodbQuery> query, Pageable pageable) { - - if (pageable == null) { - return query; - } - - query = query.offset(pageable.getOffset()).limit(pageable.getPageSize()); - return applySorting(query, pageable.getSort()); - } - - /** - * Applies the given {@link Sort} to the given {@link MongodbQuery}. - * - * @param query - * @param sort - * @return - */ - private AbstractMongodbQuery> applySorting( - AbstractMongodbQuery> query, Sort sort) { - - if (sort == null) { - return query; - } - - // TODO: find better solution than instanceof check - if (sort instanceof QSort) { - - List> orderSpecifiers = ((QSort) sort).getOrderSpecifiers(); - query.orderBy(orderSpecifiers.toArray(new OrderSpecifier[orderSpecifiers.size()])); - - return query; - } - - for (Order order : sort) { - query.orderBy(toOrder(order)); - } - - return query; - } - - /** - * Transforms a plain {@link Order} into a QueryDsl specific {@link OrderSpecifier}. 
- *
- * @param order
- * @return
- */
- @SuppressWarnings({ "rawtypes", "unchecked" })
- private OrderSpecifier<?> toOrder(Order order) {
-
-	Expression<Object> property = builder.get(order.getProperty());
-
-	return new OrderSpecifier(
-			order.isAscending() ? com.querydsl.core.types.Order.ASC : com.querydsl.core.types.Order.DESC, property);
- }
-}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java
new file mode 100644
index 0000000000..ec845510ce
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.support;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Stream;
+
+import org.bson.Document;
+
+import org.springframework.dao.IncorrectResultSizeDataAccessException;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.ScrollPosition;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.domain.Sort;
+import org.springframework.data.domain.Window;
+import org.springframework.data.mongodb.core.MongoOperations;
+import org.springframework.data.mongodb.core.query.BasicQuery;
+import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
+import org.springframework.data.querydsl.EntityPathResolver;
+import org.springframework.data.querydsl.QuerydslPredicateExecutor;
+import org.springframework.data.querydsl.SimpleEntityPathResolver;
+import org.springframework.data.repository.query.FluentQuery;
+import org.springframework.data.support.PageableExecutionUtils;
+import org.springframework.util.Assert;
+
+import com.querydsl.core.NonUniqueResultException;
+import com.querydsl.core.types.EntityPath;
+import com.querydsl.core.types.OrderSpecifier;
+import com.querydsl.core.types.Predicate;
+
+/**
+ * MongoDB-specific {@link QuerydslPredicateExecutor} that allows execution of {@link Predicate}s in various forms.
+ *
+ * @author Oliver Gierke
+ * @author Thomas Darimont
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+public class QuerydslMongoPredicateExecutor<T> extends QuerydslPredicateExecutorSupport<T>
+		implements QuerydslPredicateExecutor<T> {
+
+	private final MongoOperations mongoOperations;
+
+	/**
+	 * Creates a new {@link QuerydslMongoPredicateExecutor} for the given {@link MongoEntityInformation} and
+	 * {@link MongoOperations}. Uses the {@link SimpleEntityPathResolver} to create an {@link EntityPath} for the given
+	 * domain class.
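From an application's point of view, the executor above surfaces by extending `QuerydslPredicateExecutor` on a repository interface. A usage sketch (statements shown as fragments), assuming a hypothetical `Person` document with a Querydsl-generated `QPerson` type:

[source,java]
----
interface PersonRepository extends MongoRepository<Person, String>, QuerydslPredicateExecutor<Person> {}

// Single result; a non-unique match surfaces as IncorrectResultSizeDataAccessException.
Optional<Person> dave = repository.findOne(QPerson.person.lastname.eq("Matthews"));

// Paged execution; PageableExecutionUtils skips the count query where the result size makes it redundant.
Page<Person> adults = repository.findAll(QPerson.person.age.gt(18), PageRequest.of(0, 20));
----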
+ * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + */ + public QuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, + MongoOperations mongoOperations) { + this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE); + } + + /** + * Creates a new {@link QuerydslMongoPredicateExecutor} for the given {@link MongoEntityInformation}, + * {@link MongoOperations} and {@link EntityPathResolver}. + * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param resolver must not be {@literal null}. + */ + public QuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, MongoOperations mongoOperations, + EntityPathResolver resolver) { + + super(mongoOperations.getConverter(), pathBuilderFor(resolver.createPath(entityInformation.getJavaType())), + entityInformation); + this.mongoOperations = mongoOperations; + } + + @Override + public Optional findOne(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + try { + return Optional.ofNullable(createQueryFor(predicate).fetchOne()); + } catch (NonUniqueResultException ex) { + throw new IncorrectResultSizeDataAccessException(ex.getMessage(), 1, ex); + } + } + + @Override + public List findAll(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetch(); + } + + @Override + public List findAll(Predicate predicate, OrderSpecifier... orders) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(orders, "Order specifiers must not be null"); + + return createQueryFor(predicate).orderBy(orders).fetch(); + } + + @Override + public List findAll(Predicate predicate, Sort sort) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(sort, "Sort must not be null"); + + return applySorting(createQueryFor(predicate), sort).fetch(); + } + + @Override + public Iterable findAll(OrderSpecifier... orders) { + + Assert.notNull(orders, "Order specifiers must not be null"); + + return createQuery().orderBy(orders).fetch(); + } + + @Override + public Page findAll(Predicate predicate, Pageable pageable) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + + SpringDataMongodbQuery query = createQueryFor(predicate); + + return PageableExecutionUtils.getPage(applyPagination(query, pageable).fetch(), pageable, query::fetchCount); + } + + @Override + public long count(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchCount(); + } + + @Override + public boolean exists(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchCount() > 0; + } + + @Override + @SuppressWarnings("unchecked") + public R findBy(Predicate predicate, + Function, R> queryFunction) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + return queryFunction.apply(new FluentQuerydsl<>(predicate, (Class) typeInformation().getJavaType())); + } + + /** + * Creates a {@link SpringDataMongodbQuery} for the given {@link Predicate}. 
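The `findBy(Predicate, Function)` method above hands callers the `FluentQuerydsl` implementation that follows. A sketch of the fluent style, assuming `QPerson` again and a hypothetical `PersonSummary` projection interface:

[source,java]
----
List<PersonSummary> summaries = repository.findBy(
		QPerson.person.lastname.eq("Matthews"),
		query -> query.as(PersonSummary.class)        // interface-based projection
				.project("firstname", "lastname")     // limits the fields fetched (see customize(...) below)
				.sortBy(Sort.by("firstname"))
				.all());
----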
+ * + * @param predicate + * @return + */ + private SpringDataMongodbQuery createQueryFor(Predicate predicate) { + return createQuery().where(predicate); + } + + /** + * Creates a {@link SpringDataMongodbQuery}. + * + * @return + */ + private SpringDataMongodbQuery createQuery() { + return new SpringDataMongodbQuery<>(mongoOperations, typeInformation().getJavaType()); + } + + /** + * Applies the given {@link Pageable} to the given {@link SpringDataMongodbQuery}. + * + * @param query + * @param pageable + * @return + */ + private SpringDataMongodbQuery applyPagination(SpringDataMongodbQuery query, Pageable pageable) { + + if (pageable.isPaged()) { + query = query.offset(pageable.getOffset()).limit(pageable.getPageSize()); + } + + return applySorting(query, pageable.getSort()); + } + + /** + * Applies the given {@link Sort} to the given {@link SpringDataMongodbQuery}. + * + * @param query + * @param sort + * @return + */ + private SpringDataMongodbQuery applySorting(SpringDataMongodbQuery query, Sort sort) { + + toOrderSpecifiers(sort).forEach(query::orderBy); + return query; + } + + /** + * {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} using Querydsl + * {@link Predicate}. + * + * @author Mark Paluch + * @since 3.3 + */ + class FluentQuerydsl extends FetchableFluentQuerySupport { + + FluentQuerydsl(Predicate predicate, Class resultType) { + this(predicate, Sort.unsorted(), 0, resultType, Collections.emptyList()); + } + + FluentQuerydsl(Predicate predicate, Sort sort, int limit, Class resultType, List fieldsToInclude) { + super(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + protected FluentQuerydsl create(Predicate predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + return new FluentQuerydsl<>(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public T oneValue() { + return createQuery().fetchOne(); + } + + @Override + public T firstValue() { + return createQuery().fetchFirst(); + } + + @Override + public List all() { + return createQuery().fetch(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Page page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchPage(pageable); + } + + @Override + public Slice slice(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchSlice(pageable); + } + + @Override + public Stream stream() { + return createQuery().stream(); + } + + @Override + public long count() { + return createQuery().fetchCount(); + } + + @Override + public boolean exists() { + return count() > 0; + } + + private SpringDataMongodbQuery createQuery() { + return new SpringDataMongodbQuery<>(mongoOperations, typeInformation().getJavaType(), getResultType(), + mongoOperations.getCollectionName(typeInformation().getJavaType()), this::customize).where(getPredicate()); + } + + private void customize(BasicQuery query) { + + List fieldsToInclude = getFieldsToInclude(); + if (!fieldsToInclude.isEmpty()) { + Document fields = new Document(); + fieldsToInclude.forEach(field -> fields.put(field, 1)); + query.setFieldsObject(fields); + } + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslPredicateExecutorSupport.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslPredicateExecutorSupport.java new file mode 100644 index 0000000000..02c5a67d7e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslPredicateExecutorSupport.java @@ -0,0 +1,92 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.querydsl.QSort; +import org.springframework.data.repository.core.EntityInformation; + +import com.querydsl.core.types.EntityPath; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.dsl.PathBuilder; + +/** + * @author Christoph Strobl + * @since 2.2 + */ +abstract class QuerydslPredicateExecutorSupport { + + private final SpringDataMongodbSerializer serializer; + private final PathBuilder builder; + private final EntityInformation entityInformation; + + QuerydslPredicateExecutorSupport(MongoConverter converter, PathBuilder builder, + EntityInformation entityInformation) { + + this.serializer = new SpringDataMongodbSerializer(converter); + this.builder = builder; + this.entityInformation = entityInformation; + } + + protected static PathBuilder pathBuilderFor(EntityPath path) { + return new PathBuilder<>(path.getType(), path.getMetadata()); + } + + protected EntityInformation typeInformation() { + return entityInformation; + } + + protected SpringDataMongodbSerializer mongodbSerializer() { + return serializer; + } + + /** + * Transforms a plain {@link Order} into a Querydsl specific {@link OrderSpecifier}. + * + * @param order + * @return + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) + protected OrderSpecifier toOrder(Order order) { + + Expression property = builder.get(order.getProperty()); + + return new OrderSpecifier( + order.isAscending() ? com.querydsl.core.types.Order.ASC : com.querydsl.core.types.Order.DESC, property); + } + + /** + * Converts the given {@link Sort} to {@link OrderSpecifier}. 
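The `toOrderSpecifiers(Sort)` method above short-circuits for `QSort`, which already carries typed `OrderSpecifier`s; plain `Sort.Order` instances are translated through the `PathBuilder` in `toOrder(...)`. A small sketch, assuming `QPerson`:

[source,java]
----
// Already typed: the order specifiers are returned as-is.
Sort typed = QSort.by(QPerson.person.lastname.desc());

// Untyped property reference: mapped through toOrder(...) using the PathBuilder.
Sort plain = Sort.by(Sort.Order.desc("lastname"));
----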
+ * + * @param sort + * @return + */ + protected List> toOrderSpecifiers(Sort sort) { + + if (sort instanceof QSort qSort) { + return qSort.getOrderSpecifiers(); + } + + return sort.stream().map(this::toOrder).collect(Collectors.toList()); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java index bce77cd8b8..3d46babd69 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,12 +21,12 @@ import org.springframework.util.Assert; import com.querydsl.core.types.EntityPath; -import com.querydsl.mongodb.AbstractMongodbQuery; /** * Base class to create repository implementations based on Querydsl. - * + * * @author Oliver Gierke + * @author Mark Paluch */ public abstract class QuerydslRepositorySupport { @@ -35,41 +35,42 @@ public abstract class QuerydslRepositorySupport { /** * Creates a new {@link QuerydslRepositorySupport} for the given {@link MongoOperations}. - * + * * @param operations must not be {@literal null}. */ public QuerydslRepositorySupport(MongoOperations operations) { - Assert.notNull(operations); + Assert.notNull(operations, "MongoOperations must not be null"); this.template = operations; this.context = operations.getConverter().getMappingContext(); } /** - * Returns a {@link MongodbQuery} for the given {@link EntityPath}. The collection being queried is derived from the + * Returns a {@link SpringDataMongodbQuery} for the given {@link EntityPath}. The collection being queried is derived from the * entity metadata. - * + * * @param path * @return */ - protected AbstractMongodbQuery> from(final EntityPath path) { - Assert.notNull(path); - MongoPersistentEntity entity = context.getPersistentEntity(path.getType()); + protected SpringDataMongodbQuery from(final EntityPath path) { + + Assert.notNull(path, "EntityPath must not be null"); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(path.getType()); return from(path, entity.getCollection()); } /** - * Returns a {@link MongodbQuery} for the given {@link EntityPath} querying the given collection. - * + * Returns a {@link SpringDataMongodbQuery} for the given {@link EntityPath} querying the given collection. 
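The `from(...)` methods above are the entry point for hand-written repository fragments. A sketch of a custom implementation, assuming `QPerson` and a hypothetical `PersonRepositoryCustom` fragment interface:

[source,java]
----
class PersonRepositoryImpl extends QuerydslRepositorySupport implements PersonRepositoryCustom {

	PersonRepositoryImpl(MongoOperations operations) {
		super(operations);
	}

	@Override
	public List<Person> findAdultsByLastname(String lastname) {

		QPerson person = QPerson.person;

		// The collection name is derived from the Person entity metadata.
		return from(person)
				.where(person.lastname.eq(lastname).and(person.age.goe(18)))
				.fetch();
	}
}
----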
+ * * @param path must not be {@literal null} * @param collection must not be blank or {@literal null} * @return */ - protected AbstractMongodbQuery> from(final EntityPath path, String collection) { + protected SpringDataMongodbQuery from(final EntityPath path, String collection) { - Assert.notNull(path); - Assert.hasText(collection); + Assert.notNull(path, "EntityPath must not be null"); + Assert.hasText(collection, "Collection name must not be null or empty"); return new SpringDataMongodbQuery(template, path.getType(), collection); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveFluentQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveFluentQuerySupport.java new file mode 100644 index 0000000000..1b1c9c3275 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveFluentQuerySupport.java @@ -0,0 +1,114 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +/** + * Support class for {@link org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery} implementations. 
+ * + * @author Mark Paluch + * @since 3.3 + */ +abstract class ReactiveFluentQuerySupport implements FluentQuery.ReactiveFluentQuery { + + private final P predicate; + private final Sort sort; + private final int limit; + private final Class resultType; + private final List fieldsToInclude; + + ReactiveFluentQuerySupport(P predicate, Sort sort, int limit, Class resultType, List fieldsToInclude) { + this.predicate = predicate; + this.sort = sort; + this.limit = limit; + this.resultType = resultType; + this.fieldsToInclude = fieldsToInclude; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery#sortBy(org.springframework.data.domain.Sort) + */ + @Override + public ReactiveFluentQuery sortBy(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); + + return create(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public ReactiveFluentQuery limit(int limit) { + + Assert.isTrue(limit > 0, "Limit must be greater zero"); + + return create(predicate, sort, limit, resultType, fieldsToInclude); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery#as(java.lang.Class) + */ + @Override + public ReactiveFluentQuery as(Class projection) { + + Assert.notNull(projection, "Projection target type must not be null"); + + return create(predicate, sort, limit, projection, fieldsToInclude); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery#project(java.util.Collection) + */ + @Override + public ReactiveFluentQuery project(Collection properties) { + + Assert.notNull(properties, "Projection properties must not be null"); + + return create(predicate, sort, limit, resultType, new ArrayList<>(properties)); + } + + protected abstract ReactiveFluentQuerySupport create(P predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude); + + P getPredicate() { + return predicate; + } + + Sort getSort() { + return sort; + } + + int getLimit() { + return limit; + } + + Class getResultType() { + return resultType; + } + + List getFieldsToInclude() { + return fieldsToInclude; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java new file mode 100644 index 0000000000..3edfcdd2db --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java @@ -0,0 +1,186 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
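`ReactiveFluentQuerySupport` above is the reactive counterpart of `FetchableFluentQuerySupport`; concrete subclasses plug in the actual query execution. From the caller's side the API looks like this sketch, assuming `QPerson` and a repository exposing the reactive `findBy(...)` entry point:

[source,java]
----
Flux<Person> firstTen = repository.findBy(
		QPerson.person.lastname.eq("Matthews"),
		query -> query.sortBy(Sort.by("firstname"))
				.limit(10) // rejects values <= 0, as asserted above
				.all());
----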
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.springframework.data.querydsl.QuerydslUtils.*; + +import java.io.Serializable; +import java.lang.reflect.Method; +import java.util.Optional; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryMethod; +import org.springframework.data.mongodb.repository.query.ReactivePartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.ReactiveStringBasedAggregation; +import org.springframework.data.mongodb.repository.query.ReactiveStringBasedMongoQuery; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor; +import org.springframework.data.repository.core.NamedQueries; +import org.springframework.data.repository.core.RepositoryInformation; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.ReactiveRepositoryFactorySupport; +import org.springframework.data.repository.core.support.RepositoryComposition.RepositoryFragments; +import org.springframework.data.repository.core.support.RepositoryFragment; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.QueryLookupStrategy.Key; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Factory to create {@link org.springframework.data.mongodb.repository.ReactiveMongoRepository} instances. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Oliver Gierke + * @since 2.0 + */ +public class ReactiveMongoRepositoryFactory extends ReactiveRepositoryFactorySupport { + + private final CrudMethodMetadataPostProcessor crudMethodMetadataPostProcessor = new CrudMethodMetadataPostProcessor(); + private final ReactiveMongoOperations operations; + private final MappingContext, MongoPersistentProperty> mappingContext; + @Nullable private QueryMethodValueEvaluationContextAccessor accessor; + + /** + * Creates a new {@link ReactiveMongoRepositoryFactory} with the given {@link ReactiveMongoOperations}. + * + * @param mongoOperations must not be {@literal null}. 
+ */ + public ReactiveMongoRepositoryFactory(ReactiveMongoOperations mongoOperations) { + + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null"); + + this.operations = mongoOperations; + this.mappingContext = mongoOperations.getConverter().getMappingContext(); + + setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + addRepositoryProxyPostProcessor(crudMethodMetadataPostProcessor); + } + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + + super.setBeanClassLoader(classLoader); + crudMethodMetadataPostProcessor.setBeanClassLoader(classLoader); + } + + @Override + protected ProjectionFactory getProjectionFactory(ClassLoader classLoader, BeanFactory beanFactory) { + return this.operations.getConverter().getProjectionFactory(); + } + + @Override + protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { + return SimpleReactiveMongoRepository.class; + } + + @Override + protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata) { + + RepositoryFragments fragments = RepositoryFragments.empty(); + + boolean isQueryDslRepository = QUERY_DSL_PRESENT + && ReactiveQuerydslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); + + if (isQueryDslRepository) { + + MongoEntityInformation entityInformation = getEntityInformation(metadata.getDomainType(), + metadata); + + fragments = fragments.append(RepositoryFragment + .implemented(instantiateClass(ReactiveQuerydslMongoPredicateExecutor.class, entityInformation, operations))); + } + + return fragments; + } + + @Override + protected Object getTargetRepository(RepositoryInformation information) { + + MongoEntityInformation entityInformation = getEntityInformation(information.getDomainType(), + information); + Object targetRepository = getTargetRepositoryViaReflection(information, entityInformation, operations); + + if (targetRepository instanceof SimpleReactiveMongoRepository repository) { + repository.setRepositoryMethodMetadata(crudMethodMetadataPostProcessor.getCrudMethodMetadata()); + } + + return targetRepository; + } + + @Override protected Optional getQueryLookupStrategy(Key key, + ValueExpressionDelegate valueExpressionDelegate) { + return Optional.of(new MongoQueryLookupStrategy(operations, mappingContext, valueExpressionDelegate)); + } + + @Override + public MongoEntityInformation getEntityInformation(Class domainClass) { + return getEntityInformation(domainClass, null); + } + + @SuppressWarnings("unchecked") + private MongoEntityInformation getEntityInformation(Class domainClass, + @Nullable RepositoryMetadata metadata) { + + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(domainClass); + + return new MappingMongoEntityInformation<>((MongoPersistentEntity) entity, + metadata != null ? (Class) metadata.getIdType() : null); + } + + /** + * {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances. 
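The reactive lookup strategy that follows mirrors its imperative twin, resolving to the reactive query implementations instead. A sketch of methods hitting each branch, assuming a hypothetical `Person` document:

[source,java]
----
interface ReactivePersonRepository extends ReactiveMongoRepository<Person, String> {

	// Derived from the method name -> ReactivePartTreeMongoQuery.
	Flux<Person> findByLastname(String lastname);

	// Annotated query -> ReactiveStringBasedMongoQuery.
	@Query("{ 'lastname' : ?0 }")
	Flux<Person> findWithQueryByLastname(String lastname);

	// Annotated aggregation -> ReactiveStringBasedAggregation.
	@Aggregation("{ '$match' : { 'lastname' : ?0 } }")
	Flux<Person> aggregateByLastname(String lastname);
}
----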
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ + private record MongoQueryLookupStrategy(ReactiveMongoOperations operations, + MappingContext, MongoPersistentProperty> mappingContext, + ValueExpressionDelegate delegate) implements QueryLookupStrategy { + + @Override + public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory, + NamedQueries namedQueries) { + + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, metadata, factory, mappingContext); + queryMethod.verify(); + + String namedQueryName = queryMethod.getNamedQueryName(); + + if (namedQueries.hasQuery(namedQueryName)) { + String namedQuery = namedQueries.getQuery(namedQueryName); + return new ReactiveStringBasedMongoQuery(namedQuery, queryMethod, operations, delegate); + } else if (queryMethod.hasAnnotatedAggregation()) { + return new ReactiveStringBasedAggregation(queryMethod, operations, delegate); + } else if (queryMethod.hasAnnotatedQuery()) { + return new ReactiveStringBasedMongoQuery(queryMethod, operations, delegate); + } else { + return new ReactivePartTreeMongoQuery(queryMethod, operations, delegate); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java new file mode 100644 index 0000000000..4f9c0d945c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java @@ -0,0 +1,123 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.io.Serializable; +import java.util.Optional; + +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.index.IndexOperationsAdapter; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport; +import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ReactiveExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * {@link org.springframework.beans.factory.FactoryBean} to create + * {@link org.springframework.data.mongodb.repository.ReactiveMongoRepository} instances. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.0 + * @see org.springframework.data.repository.reactive.ReactiveSortingRepository + * @see org.springframework.data.repository.reactive.RxJava3SortingRepository + */ +public class ReactiveMongoRepositoryFactoryBean, S, ID extends Serializable> + extends RepositoryFactoryBeanSupport { + + private @Nullable ReactiveMongoOperations operations; + private boolean createIndexesForQueryMethods = false; + private boolean mappingContextConfigured = false; + + /** + * Creates a new {@link ReactiveMongoRepositoryFactoryBean} for the given repository interface. + * + * @param repositoryInterface must not be {@literal null}. + */ + public ReactiveMongoRepositoryFactoryBean(Class repositoryInterface) { + super(repositoryInterface); + } + + /** + * Configures the {@link ReactiveMongoOperations} to be used. + * + * @param operations the operations to set + */ + public void setReactiveMongoOperations(@Nullable ReactiveMongoOperations operations) { + this.operations = operations; + } + + /** + * Configures whether to automatically create indexes for the properties referenced in a query method. + * + * @param createIndexesForQueryMethods the createIndexesForQueryMethods to set + */ + public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods) { + this.createIndexesForQueryMethods = createIndexesForQueryMethods; + } + + @Override + public void setMappingContext(MappingContext mappingContext) { + + super.setMappingContext(mappingContext); + this.mappingContextConfigured = true; + } + + @Override + protected RepositoryFactorySupport createRepositoryFactory() { + + RepositoryFactorySupport factory = getFactoryInstance(operations); + + if (createIndexesForQueryMethods) { + factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener( + (collectionName, javaType) -> IndexOperationsAdapter.blocking(operations.indexOps(javaType)))); + } + + return factory; + } + + @Override + protected Optional createDefaultQueryMethodEvaluationContextProvider( + ListableBeanFactory beanFactory) { + return Optional.of(new ReactiveExtensionAwareQueryMethodEvaluationContextProvider(beanFactory)); + } + + /** + * Creates and initializes a {@link RepositoryFactorySupport} instance. + * + * @param operations + * @return + */ + protected RepositoryFactorySupport getFactoryInstance(ReactiveMongoOperations operations) { + return new ReactiveMongoRepositoryFactory(operations); + } + + @Override + public void afterPropertiesSet() { + + super.afterPropertiesSet(); + Assert.state(operations != null, "ReactiveMongoOperations must not be null"); + + if (!mappingContextConfigured) { + setMappingContext(operations.getConverter().getMappingContext()); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageableExecutionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageableExecutionUtils.java new file mode 100644 index 0000000000..fcf420212a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageableExecutionUtils.java @@ -0,0 +1,69 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
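Note how `createRepositoryFactory()` above bridges the reactive `indexOps(...)` into the blocking `IndexEnsuringQueryCreationListener` via `IndexOperationsAdapter.blocking(...)`. Manual wiring looks like this sketch (fragment), assuming an existing `reactiveMongoTemplate` bean and the hypothetical `ReactivePersonRepository` from before:

[source,java]
----
ReactiveMongoRepositoryFactoryBean<ReactivePersonRepository, Person, String> factoryBean =
		new ReactiveMongoRepositoryFactoryBean<>(ReactivePersonRepository.class);

factoryBean.setReactiveMongoOperations(reactiveMongoTemplate);
factoryBean.setCreateIndexesForQueryMethods(true); // index creation blocks on the reactive indexOps
factoryBean.afterPropertiesSet();

ReactivePersonRepository repository = factoryBean.getObject();
----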
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.util.Assert; + +/** + * Support for query execution using {@link Pageable}. Using {@link ReactivePageableExecutionUtils} assumes that data + * queries are cheaper than {@code COUNT} queries and so some cases can take advantage of optimizations. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class ReactivePageableExecutionUtils { + + private ReactivePageableExecutionUtils() {} + + /** + * Constructs a {@link Page} based on the given {@code content}, {@link Pageable} and {@link Mono} applying + * optimizations. The construction of {@link Page} omits a count query if the total can be determined based on the + * result size and {@link Pageable}. + * + * @param content must not be {@literal null}. + * @param pageable must not be {@literal null}. + * @param totalSupplier must not be {@literal null}. + * @return the {@link Page}. + */ + public static Mono> getPage(List content, Pageable pageable, Mono totalSupplier) { + + Assert.notNull(content, "Content must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + Assert.notNull(totalSupplier, "TotalSupplier must not be null"); + + if (pageable.isUnpaged() || pageable.getOffset() == 0) { + + if (pageable.isUnpaged() || pageable.getPageSize() > content.size()) { + return Mono.just(new PageImpl<>(content, pageable, content.size())); + } + + return totalSupplier.map(total -> new PageImpl<>(content, pageable, total)); + } + + if (content.size() != 0 && pageable.getPageSize() > content.size()) { + return Mono.just(new PageImpl<>(content, pageable, pageable.getOffset() + content.size())); + } + + return totalSupplier.map(total -> new PageImpl<>(content, pageable, total)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutor.java new file mode 100644 index 0000000000..9a1482823f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutor.java @@ -0,0 +1,286 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
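The total-count shortcut in `ReactivePageableExecutionUtils.getPage(...)` above deserves a worked example: with page size 10 and offset 20, a result of only 7 documents must be the final page, so the total is `20 + 7 = 27` and the `totalSupplier` is never subscribed. The class is package-private, so the direct call below is for illustration only; `sevenPeople` is a hypothetical `List<Person>` with 7 elements:

[source,java]
----
Mono<Page<Person>> page = ReactivePageableExecutionUtils.getPage(
		sevenPeople,
		PageRequest.of(2, 10), // page index 2 -> offset 20
		Mono.error(new AssertionError("count query not needed"))); // never subscribed

page.subscribe(p -> System.out.println(p.getTotalElements())); // prints 27
----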
+ */ +package org.springframework.data.mongodb.repository.support; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collections; +import java.util.List; +import java.util.function.Function; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.querydsl.EntityPathResolver; +import org.springframework.data.querydsl.QuerydslPredicateExecutor; +import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor; +import org.springframework.data.querydsl.SimpleEntityPathResolver; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +import com.querydsl.core.types.EntityPath; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Predicate; + +/** + * MongoDB-specific {@link QuerydslPredicateExecutor} that allows execution {@link Predicate}s in various forms. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public class ReactiveQuerydslMongoPredicateExecutor extends QuerydslPredicateExecutorSupport + implements ReactiveQuerydslPredicateExecutor { + + private final ReactiveMongoOperations mongoOperations; + + /** + * Creates a new {@link ReactiveQuerydslMongoPredicateExecutor} for the given {@link MongoEntityInformation} and + * {@link ReactiveMongoOperations}. Uses the {@link SimpleEntityPathResolver} to create an {@link EntityPath} for the + * given domain class. + * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + */ + public ReactiveQuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, + ReactiveMongoOperations mongoOperations) { + + this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE); + } + + /** + * Creates a new {@link ReactiveQuerydslMongoPredicateExecutor} for the given {@link MongoEntityInformation}, + * {@link ReactiveMongoOperations} and {@link EntityPathResolver}. + * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param resolver must not be {@literal null}. + */ + public ReactiveQuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, + ReactiveMongoOperations mongoOperations, EntityPathResolver resolver) { + + super(mongoOperations.getConverter(), pathBuilderFor(resolver.createPath(entityInformation.getJavaType())), + entityInformation); + this.mongoOperations = mongoOperations; + } + + @Override + public Mono findOne(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchOne(); + } + + @Override + public Flux findAll(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetch(); + } + + @Override + public Flux findAll(Predicate predicate, OrderSpecifier... 
orders) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(orders, "Order specifiers must not be null"); + + return createQueryFor(predicate).orderBy(orders).fetch(); + } + + @Override + public Flux findAll(Predicate predicate, Sort sort) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(sort, "Sort must not be null"); + + return applySorting(createQueryFor(predicate), sort).fetch(); + } + + @Override + public Flux findAll(OrderSpecifier... orders) { + + Assert.notNull(orders, "Order specifiers must not be null"); + + return createQuery().orderBy(orders).fetch(); + } + + @Override + public Mono count(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchCount(); + } + + @Override + public Mono exists(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchCount().map(it -> it != 0); + } + + @Override + public > P findBy(Predicate predicate, + Function, P> queryFunction) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + return queryFunction.apply(new ReactiveFluentQuerydsl(predicate, (Class) typeInformation().getJavaType())); + } + + /** + * Creates a {@link ReactiveSpringDataMongodbQuery} for the given {@link Predicate}. + * + * @param predicate + * @return + */ + private ReactiveSpringDataMongodbQuery createQueryFor(Predicate predicate) { + return createQuery().where(predicate); + } + + /** + * Creates a {@link ReactiveSpringDataMongodbQuery}. + * + * @return + */ + private ReactiveSpringDataMongodbQuery createQuery() { + + Class javaType = typeInformation().getJavaType(); + return new ReactiveSpringDataMongodbQuery<>(mongoOperations, javaType, javaType, + mongoOperations.getCollectionName(javaType), it -> {}); + } + + /** + * Applies the given {@link Sort} to the given {@link ReactiveSpringDataMongodbQuery}. + * + * @param query + * @param sort + * @return + */ + private ReactiveSpringDataMongodbQuery applySorting(ReactiveSpringDataMongodbQuery query, Sort sort) { + + toOrderSpecifiers(sort).forEach(query::orderBy); + return query; + } + + /** + * {@link org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery} using Querydsl {@link Predicate}. 
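+	 * <p>
+	 * A usage sketch (illustrative only; {@code QPerson} and the {@code executor} instance are assumed and not part of
+	 * this class):
+	 * <pre class="code">
+	 * executor.findBy(QPerson.person.lastname.eq("Matthews"),
+	 * 		query -> query.sortBy(Sort.by("firstname")).page(PageRequest.of(0, 10)));
+	 * </pre>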
+ * + * @since 3.3 + * @author Mark Paluch + */ + class ReactiveFluentQuerydsl extends ReactiveFluentQuerySupport { + + ReactiveFluentQuerydsl(Predicate predicate, Class resultType) { + this(predicate, Sort.unsorted(), 0, resultType, Collections.emptyList()); + } + + ReactiveFluentQuerydsl(Predicate predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + super(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + protected ReactiveFluentQuerydsl create(Predicate predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + return new ReactiveFluentQuerydsl<>(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public Mono one() { + return createQuery().fetchOne(); + } + + @Override + public Mono first() { + return createQuery().fetchFirst(); + } + + @Override + public Flux all() { + return createQuery().fetch(); + } + + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Mono> page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchPage(pageable); + } + + @Override + public Mono> slice(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchSlice(pageable); + } + + @Override + public Mono count() { + return createQuery().fetchCount(); + } + + @Override + public Mono exists() { + return count().map(it -> it > 0).defaultIfEmpty(false); + } + + private ReactiveSpringDataMongodbQuery createQuery() { + + return new ReactiveSpringDataMongodbQuery<>(mongoOperations, typeInformation().getJavaType(), getResultType(), + mongoOperations.getCollectionName(typeInformation().getJavaType()), this::customize).where(getPredicate()); + } + + private void customize(BasicQuery query) { + + List fieldsToInclude = getFieldsToInclude(); + + if (!fieldsToInclude.isEmpty()) { + Document fields = new Document(); + fieldsToInclude.forEach(field -> fields.put(field, 1)); + query.setFieldsObject(fields); + } + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java new file mode 100644 index 0000000000..cf5191fd42 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java @@ -0,0 +1,310 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import org.springframework.data.mongodb.repository.util.SliceUtils; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.Document; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveFindOperation; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.util.StringUtils; + +import com.querydsl.core.JoinExpression; +import com.querydsl.core.QueryMetadata; +import com.querydsl.core.QueryModifiers; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.ExpressionUtils; +import com.querydsl.core.types.Operation; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Path; +import com.querydsl.core.types.Predicate; +import com.querydsl.mongodb.MongodbOps; + +/** + * MongoDB query utilizing {@link ReactiveMongoOperations} for command execution. + * + * @implNote This class uses {@link MongoOperations} to directly convert documents into the target entity type. Also, we + * want entities to participate in lifecycle events and entity callbacks. + * @param result type + * @author Mark Paluch + * @author Christoph Strobl + * @author Rocco Lagrotteria + * @since 2.2 + */ +class ReactiveSpringDataMongodbQuery extends SpringDataMongodbQuerySupport> { + + private final ReactiveMongoOperations mongoOperations; + private final Consumer queryCustomizer; + private final ReactiveFindOperation.FindWithQuery find; + + ReactiveSpringDataMongodbQuery(ReactiveMongoOperations mongoOperations, Class entityClass) { + this(mongoOperations, entityClass, entityClass, null, it -> {}); + } + + @SuppressWarnings("unchecked") + ReactiveSpringDataMongodbQuery(ReactiveMongoOperations mongoOperations, Class domainType, + Class resultType, @Nullable String collection, Consumer queryCustomizer) { + + super(new SpringDataMongodbSerializer(mongoOperations.getConverter())); + + this.mongoOperations = mongoOperations; + this.queryCustomizer = queryCustomizer; + this.find = (StringUtils.hasText(collection) ? mongoOperations.query(domainType).inCollection(collection) + : mongoOperations.query(domainType)).as((Class) resultType); + } + + /** + * Fetch all matching query results. + * + * @return {@link Flux} emitting all query results or {@link Flux#empty()} if there are none. + */ + Flux fetch() { + return createQuery().flatMapMany(it -> find.matching(it).all()); + } + + Mono> scroll(ScrollPosition scrollPosition) { + return createQuery().flatMap(it -> find.matching(it).scroll(scrollPosition)); + } + + /** + * Fetch all matching query results as page. + * + * @return {@link Mono} emitting the requested page. 
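+	 *         The total is supplied via a separate {@code fetchCount()} query that
+	 *         {@link ReactivePageableExecutionUtils} may skip when the page itself already determines the total; for
+	 *         example, 7 results on a first page of size 20 yield a total of 7 without issuing a count query.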
+	 */
+	Mono<Page<K>> fetchPage(Pageable pageable) {
+
+		Mono<List<K>> content = createQuery().map(it -> it.with(pageable)).flatMapMany(it -> find.matching(it).all())
+				.collectList();
+
+		return content.flatMap(it -> ReactivePageableExecutionUtils.getPage(it, pageable, fetchCount()));
+	}
+
+	/**
+	 * Fetch all matching query results as Slice.
+	 *
+	 * @param pageable defines range and sort of requested slice
+	 * @return {@link Mono} emitting the requested Slice.
+	 * @since 4.5
+	 */
+	Mono<Slice<K>> fetchSlice(Pageable pageable) {
+
+		Mono<List<K>> content = createQuery().map(it -> SliceUtils.limitResult(it, pageable).with(pageable.getSort()))
+				.flatMapMany(it -> find.matching(it).all()).collectList();
+
+		return content.map(it -> SliceUtils.sliceResult(it, pageable));
+	}
+
+	/**
+	 * Fetch the one matching query result.
+	 *
+	 * @return {@link Mono} emitting the first query result or {@link Mono#empty()} if there are none.
+	 * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match is found.
+	 */
+	Mono<K> fetchOne() {
+		return createQuery().flatMap(it -> find.matching(it).one());
+	}
+
+	/**
+	 * Fetch the first matching query result.
+	 *
+	 * @return {@link Mono} emitting the first query result or {@link Mono#empty()} if there are none.
+	 * @since 3.3
+	 */
+	Mono<K> fetchFirst() {
+		return createQuery().flatMap(it -> find.matching(it).first());
+	}
+
+	/**
+	 * Fetch the count of matching query results.
+	 *
+	 * @return {@link Mono} emitting the query result count; always emits a count, even if there are no matching items.
+	 */
+	Mono<Long> fetchCount() {
+		return createQuery().flatMap(it -> find.matching(it).count());
+	}
+
+	protected Mono<Query> createQuery() {
+
+		QueryMetadata metadata = getQueryMixin().getMetadata();
+
+		return createQuery(createReactiveFilter(metadata), metadata.getProjection(), metadata.getModifiers(),
+				metadata.getOrderBy());
+	}
+
+	/**
+	 * Creates a MongoDB query that is emitted through a {@link Mono} given a {@link Mono} of {@link Predicate}.
+	 *
+	 * @param filter must not be {@literal null}.
+	 * @param projection can be {@literal null} if no projection is given. The query requests all fields in that case.
+	 * @param modifiers must not be {@literal null}.
+	 * @param orderBy must not be {@literal null}.
+	 * @return {@link Mono} emitting the {@link Query}.
+	 */
+	protected Mono<Query> createQuery(Mono<Predicate> filter, @Nullable Expression<?> projection,
+			QueryModifiers modifiers, List<OrderSpecifier<?>> orderBy) {
+
+		return filter.map(this::createQuery) //
+				.defaultIfEmpty(createQuery(null)) //
+				.map(it -> {
+
+					Document fields = createProjection(projection);
+					BasicQuery basicQuery = new BasicQuery(it, fields == null ? new Document() : fields);
+
+					Integer limit = modifiers.getLimitAsInteger();
+					Integer offset = modifiers.getOffsetAsInteger();
+
+					if (limit != null) {
+						basicQuery.limit(limit);
+					}
+					if (offset != null) {
+						basicQuery.skip(offset);
+					}
+					if (orderBy.size() > 0) {
+						basicQuery.setSortObject(createSort(orderBy));
+					}
+
+					queryCustomizer.accept(basicQuery);
+
+					return basicQuery;
+				});
+	}
+
+	protected Mono<Predicate> createReactiveFilter(QueryMetadata metadata) {
+
+		if (!metadata.getJoins().isEmpty()) {
+
+			return createReactiveJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it))
+					.switchIfEmpty(Mono.justOrEmpty(metadata.getWhere()));
+		}
+
+		return Mono.justOrEmpty(metadata.getWhere());
+	}
+
+	/**
+	 * Creates a Join filter by querying {@link com.mongodb.DBRef references}.
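+	 * <p>
+	 * Conceptually (a sketch, not the literal execution plan): a predicate on a referenced property is resolved in two
+	 * steps, first fetching the ids of the matching target documents and then restricting the reference's {@code $id}
+	 * field to those ids.
+	 * <pre class="code">
+	 * // person.coworker.name.eq("Dave") - with coworker being a reference - roughly becomes:
+	 * // 1. ids = distinct _id of coworker documents matching { name : "Dave" }
+	 * // 2. outer filter: { "coworker.$id" : { $in : ids } }
+	 * </pre>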
+ * + * @param metadata + * @return + */ + @SuppressWarnings("unchecked") + protected Mono createReactiveJoinFilter(QueryMetadata metadata) { + + MultiValueMap, Mono> predicates = new LinkedMultiValueMap<>(); + List joins = metadata.getJoins(); + + for (int i = joins.size() - 1; i >= 0; i--) { + + JoinExpression join = joins.get(i); + Path source = (Path) ((Operation) join.getTarget()).getArg(0); + Path target = (Path) ((Operation) join.getTarget()).getArg(1); + Collection> extraFilters = predicates.get(target.getRoot()); + + Mono filter = allOf(extraFilters).map(it -> ExpressionUtils.allOf(join.getCondition(), it)) + .switchIfEmpty(Mono.justOrEmpty(join.getCondition())); + + Mono predicate = getIds(target.getType(), filter) // + .collectList() // + .handle((it, sink) -> { + + if (it.isEmpty()) { + sink.error(new NoMatchException(source)); + return; + } + + Path path = ExpressionUtils.path(String.class, source, "$id"); + sink.next(ExpressionUtils.in((Path) path, it)); + }); + + predicates.add(source.getRoot(), predicate); + } + + Path source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0); + return allOf(predicates.get(source.getRoot())).onErrorResume(NoMatchException.class, + e -> Mono.just(ExpressionUtils.predicate(MongodbOps.NO_MATCH, e.source))); + } + + private Mono allOf(@Nullable Collection> predicates) { + return predicates != null ? Flux.concat(predicates).collectList().map(ExpressionUtils::allOf) : Mono.empty(); + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. + */ + protected Flux getIds(Class targetType, Mono condition) { + + return condition.flatMapMany(it -> getJoinIds(targetType, it)) + .switchIfEmpty(Flux.defer(() -> getJoinIds(targetType, null))); + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. + */ + protected Flux getJoinIds(Class targetType, @Nullable Predicate condition) { + + return createQuery(Mono.justOrEmpty(condition), null, QueryModifiers.EMPTY, Collections.emptyList()) + .flatMapMany(query -> mongoOperations.findDistinct(query, FieldName.ID.name(), targetType, Object.class)); + } + + @Override + protected List getIds(Class aClass, Predicate predicate) { + throw new UnsupportedOperationException( + "Use create Flux getIds(Class targetType, Mono condition)"); + } + + /** + * Marker exception to indicate no matches for a query using reference Id's. + */ + static class NoMatchException extends RuntimeException { + + final Path source; + + NoMatchException(Path source) { + this.source = source; + } + + @Override + public synchronized Throwable fillInStackTrace() { + return null; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java index 21af693ba6..2f4c30ee7a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2016 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,368 +17,496 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; -import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Set; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.util.SliceUtils; +import org.springframework.data.support.PageableExecutionUtils; +import org.springframework.data.util.StreamUtils; +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import com.mongodb.ReadPreference; +import com.mongodb.client.result.DeleteResult; + /** * Repository base implementation for Mongo. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont * @author Mark Paluch + * @author Mehran Behnam + * @author Jens Schauder + * @author Kirill Egorov */ -public class SimpleMongoRepository implements MongoRepository { +public class SimpleMongoRepository implements MongoRepository { - private final MongoOperations mongoOperations; + private @Nullable CrudMethodMetadata crudMethodMetadata; private final MongoEntityInformation entityInformation; + private final MongoOperations mongoOperations; /** * Creates a new {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}. - * + * * @param metadata must not be {@literal null}. * @param mongoOperations must not be {@literal null}. 
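	 * <p>
	 * A construction sketch (illustrative; {@code metadata} and {@code mongoOperations} are assumed to be available and
	 * are typically provided by the repository factory):
	 * <pre class="code">
	 * SimpleMongoRepository&lt;Person, String&gt; repository = new SimpleMongoRepository&lt;&gt;(metadata, mongoOperations);
	 * </pre>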
*/ public SimpleMongoRepository(MongoEntityInformation metadata, MongoOperations mongoOperations) { - Assert.notNull(mongoOperations); - Assert.notNull(metadata); + Assert.notNull(metadata, "MongoEntityInformation must not be null"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.entityInformation = metadata; this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#save(java.lang.Object) - */ + // ------------------------------------------------------------------------- + // Methods from CrudRepository + // ------------------------------------------------------------------------- + + @Override public S save(S entity) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entity, "Entity must not be null"); if (entityInformation.isNew(entity)) { - mongoOperations.insert(entity, entityInformation.getCollectionName()); - } else { - mongoOperations.save(entity, entityInformation.getCollectionName()); + return mongoOperations.insert(entity, entityInformation.getCollectionName()); } - return entity; + return mongoOperations.save(entity, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#save(java.lang.Iterable) - */ - public List save(Iterable entities) { - - Assert.notNull(entities, "The given Iterable of entities not be null!"); + @Override + public List saveAll(Iterable entities) { - List result = convertIterableToList(entities); - boolean allNew = true; + Assert.notNull(entities, "The given Iterable of entities not be null"); - for (S entity : entities) { - if (allNew && !entityInformation.isNew(entity)) { - allNew = false; - } - } + Streamable source = Streamable.of(entities); + boolean allNew = source.stream().allMatch(entityInformation::isNew); if (allNew) { - mongoOperations.insertAll(result); - } else { - for (S entity : result) { - save(entity); - } + List result = source.stream().collect(Collectors.toList()); + return new ArrayList<>(mongoOperations.insert(result, entityInformation.getCollectionName())); } - return result; + return source.stream().map(this::save).collect(Collectors.toList()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findOne(java.io.Serializable) - */ - public T findOne(ID id) { - Assert.notNull(id, "The given id must not be null!"); - return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()); + @Override + public Optional findById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + + return Optional.ofNullable( + mongoOperations.findOne(query, entityInformation.getJavaType(), entityInformation.getCollectionName())); } - private Query getIdQuery(Object id) { - return new Query(getIdCriteria(id)); + @Override + public boolean existsById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.exists(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - private Criteria getIdCriteria(Object id) { - return where(entityInformation.getIdAttribute()).is(id); + @Override + public List findAll() { + return findAll(new Query()); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.CrudRepository#exists(java.io.Serializable) - */ - public boolean exists(ID id) { + @Override + public List findAllById(Iterable ids) { + + Assert.notNull(ids, "The given Ids of entities not be null"); - Assert.notNull(id, "The given id must not be null!"); - return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(), - entityInformation.getCollectionName()); + return findAll(getIdQuery(ids)); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#count() - */ + @Override public long count() { - return mongoOperations.getCollection(entityInformation.getCollectionName()).count(); + + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.count(query, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.io.Serializable) - */ - public void delete(ID id) { - Assert.notNull(id, "The given id must not be null!"); - mongoOperations.remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName()); + @Override + public void deleteById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.lang.Object) - */ + @Override public void delete(T entity) { - Assert.notNull(entity, "The given entity must not be null!"); - delete(entityInformation.getId(entity)); - } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.lang.Iterable) - */ - public void delete(Iterable entities) { + Assert.notNull(entity, "The given entity must not be null"); - Assert.notNull(entities, "The given Iterable of entities not be null!"); + DeleteResult deleteResult = mongoOperations.remove(entity, entityInformation.getCollectionName()); - for (T entity : entities) { - delete(entity); + if (entityInformation.isVersioned() && deleteResult.wasAcknowledged() && deleteResult.getDeletedCount() == 0) { + throw new OptimisticLockingFailureException(String.format( + "The entity with id %s with version %s in %s cannot be deleted; Was it modified or deleted in the meantime", + entityInformation.getId(entity), entityInformation.getVersion(entity), + entityInformation.getCollectionName())); } } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#deleteAll() - */ - public void deleteAll() { - mongoOperations.remove(new Query(), entityInformation.getCollectionName()); + @Override + public void deleteAllById(Iterable ids) { + + Assert.notNull(ids, "The given Iterable of ids must not be null"); + + Query query = getIdQuery(ids); + getReadPreference().ifPresent(query::withReadPreference); + mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAll() - */ - public List findAll() { - return findAll(new Query()); + @Override + public void deleteAll(Iterable entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null"); + + entities.forEach(this::delete); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.CrudRepository#findAll(java.lang.Iterable) - */ - public Iterable findAll(Iterable ids) { + @Override + public void deleteAll() { - Set parameters = new HashSet(tryDetermineRealSizeOrReturn(ids, 10)); - for (ID id : ids) { - parameters.add(id); - } + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); - return findAll(new Query(new Criteria(entityInformation.getIdAttribute()).in(parameters))); + mongoOperations.remove(query, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Pageable) - */ - public Page findAll(final Pageable pageable) { + // ------------------------------------------------------------------------- + // Methods from PagingAndSortingRepository + // ------------------------------------------------------------------------- - Long count = count(); + @Override + public Page findAll(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + long count = count(); List list = findAll(new Query().with(pageable)); - return new PageImpl(list, pageable, count); + return new PageImpl<>(list, pageable, count); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort) - */ + @Override public List findAll(Sort sort) { - return findAll(new Query().with(sort)); + + Assert.notNull(sort, "Sort must not be null"); + + Query query = new Query().with(sort); + return findAll(query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Object) - */ + // ------------------------------------------------------------------------- + // Methods from MongoRepository + // ------------------------------------------------------------------------- + @Override public S insert(S entity) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entity, "Entity must not be null"); - mongoOperations.insert(entity, entityInformation.getCollectionName()); - return entity; + return mongoOperations.insert(entity, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Iterable) - */ @Override public List insert(Iterable entities) { - Assert.notNull(entities, "The given Iterable of entities not be null!"); + Assert.notNull(entities, "The given Iterable of entities not be null"); - List list = convertIterableToList(entities); + Collection list = toCollection(entities); if (list.isEmpty()) { - return list; + return Collections.emptyList(); } - mongoOperations.insertAll(list); - return list; + return new ArrayList<>(mongoOperations.insertAll(list)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Pageable) - */ - @Override - public Page findAll(Example example, Pageable pageable) { + // ------------------------------------------------------------------------- + // Methods from QueryByExampleExecutor + // ------------------------------------------------------------------------- - Assert.notNull(example, "Sample must not be null!"); + @Override + public Optional findOne(Example example) { - Query q = new Query(new Criteria().alike(example)).with(pageable); + Assert.notNull(example, "Sample must not be null"); - long count = 
mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName()); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); - if (count == 0) { - return new PageImpl(Collections. emptyList()); - } + return Optional + .ofNullable(mongoOperations.findOne(query, example.getProbeType(), entityInformation.getCollectionName())); + } - return new PageImpl(mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()), - pageable, count); + @Override + public List findAll(Example example) { + return findAll(example, Sort.unsorted()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) - */ @Override public List findAll(Example example, Sort sort) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(sort, "Sort must not be null"); - Query q = new Query(new Criteria().alike(example)); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()) // + .with(sort); + getReadPreference().ifPresent(query::withReadPreference); - if (sort != null) { - q.with(sort); - } + return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public Page findAll(Example example, Pageable pageable) { - return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()); + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()).with(pageable); // + getReadPreference().ifPresent(query::withReadPreference); + + List list = mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()); + + return PageableExecutionUtils.getPage(list, pageable, () -> mongoOperations + .count(Query.of(query).limit(-1).skip(-1), example.getProbeType(), entityInformation.getCollectionName())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example) - */ @Override - public List findAll(Example example) { - return findAll(example, (Sort) null); + public long count(Example example) { + + Assert.notNull(example, "Sample must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.count(query, example.getProbeType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#findOne(org.springframework.data.domain.Example) - */ @Override - public S findOne(Example example) { + public boolean exists(Example example) { + + Assert.notNull(example, "Sample must not be null"); - Assert.notNull(example, "Sample must not be null!"); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName()); + 
return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#count(org.springframework.data.domain.Example) - */ @Override - public long count(Example example) { + public R findBy(Example example, + Function, R> queryFunction) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName()); + return queryFunction.apply(new FluentQueryByExample<>(example, example.getProbeType())); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#exists(org.springframework.data.domain.Example) + // ------------------------------------------------------------------------- + // Utility methods + // ------------------------------------------------------------------------- + + /** + * Configures a custom {@link CrudMethodMetadata} to be used to detect {@link ReadPreference}s and query hints to be + * applied to queries. + * + * @param crudMethodMetadata + * @since 4.2 */ - @Override - public boolean exists(Example example) { + void setRepositoryMethodMetadata(CrudMethodMetadata crudMethodMetadata) { + this.crudMethodMetadata = crudMethodMetadata; + } + + private Optional getReadPreference() { - Assert.notNull(example, "Sample must not be null!"); + if (crudMethodMetadata == null) { + return Optional.empty(); + } + + return crudMethodMetadata.getReadPreference(); + } + + private Query getIdQuery(Object id) { + return new Query(getIdCriteria(id)); + } + + private Criteria getIdCriteria(Object id) { + return where(entityInformation.getIdAttribute()).is(id); + } + + private Query getIdQuery(Iterable ids) { + + Query query = new Query(new Criteria(entityInformation.getIdAttribute()).in(toCollection(ids))); + getReadPreference().ifPresent(query::withReadPreference); + return query; + } - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName()); + private static Collection toCollection(Iterable ids) { + return ids instanceof Collection collection ? collection + : StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList()); } - private List findAll(Query query) { + private List findAll(@Nullable Query query) { if (query == null) { return Collections.emptyList(); } + getReadPreference().ifPresent(query::withReadPreference); return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - private static List convertIterableToList(Iterable entities) { + /** + * {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} using {@link Example}. 
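+	 * <p>
+	 * A usage sketch (illustrative only; {@code probe} is an assumed probe object):
+	 * <pre class="code">
+	 * repository.findBy(Example.of(probe), query -> query.sortBy(Sort.by("lastname")).firstValue());
+	 * </pre>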
+ * + * @author Mark Paluch + * @since 3.3 + */ + class FluentQueryByExample extends FetchableFluentQuerySupport, T> { - if (entities instanceof List) { - return (List) entities; + FluentQueryByExample(Example example, Class resultType) { + this(example, Sort.unsorted(), 0, resultType, Collections.emptyList()); } - int capacity = tryDetermineRealSizeOrReturn(entities, 10); + FluentQueryByExample(Example example, Sort sort, int limit, Class resultType, List fieldsToInclude) { + super(example, sort, limit, resultType, fieldsToInclude); + } - if (capacity == 0 || entities == null) { - return Collections. emptyList(); + @Override + protected FluentQueryByExample create(Example predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + return new FluentQueryByExample<>(predicate, sort, limit, resultType, fieldsToInclude); } - List list = new ArrayList(capacity); - for (T entity : entities) { - list.add(entity); + @Override + public T oneValue() { + return createQuery().oneValue(); } - return list; - } + @Override + public T firstValue() { + return createQuery().firstValue(); + } - private static int tryDetermineRealSizeOrReturn(Iterable iterable, int defaultSize) { - return iterable == null ? 0 : (iterable instanceof Collection) ? ((Collection) iterable).size() : defaultSize; - } + @Override + public List all() { + return createQuery().all(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Page page(Pageable pageable) { + Assert.notNull(pageable, "Pageable must not be null"); + + List list = createQuery(q -> q.with(pageable)).all(); + + return PageableExecutionUtils.getPage(list, pageable, this::count); + } + + @Override + public Slice slice(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + List resultList = createQuery(q -> SliceUtils.limitResult(q, pageable).with(pageable.getSort())).all(); + + return SliceUtils.sliceResult(resultList, pageable); + } + + @Override + public Stream stream() { + return createQuery().stream(); + } + + @Override + public long count() { + return createQuery().count(); + } + + @Override + public boolean exists() { + return createQuery().exists(); + } + + private ExecutableFindOperation.TerminatingFind createQuery() { + return createQuery(UnaryOperator.identity()); + } + + private ExecutableFindOperation.TerminatingFind createQuery(UnaryOperator queryCustomizer) { + + Query query = new Query(new Criteria().alike(getPredicate())) // + .collation(entityInformation.getCollation()); + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + + if (!getFieldsToInclude().isEmpty()) { + query.fields().include(getFieldsToInclude()); + } + + getReadPreference().ifPresent(query::withReadPreference); + + query = queryCustomizer.apply(query); + + return mongoOperations.query(getPredicate().getProbeType()).inCollection(entityInformation.getCollectionName()) + .as(getResultType()).matching(query); + } + + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java new file mode 100644 index 0000000000..1c1df2c9a1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java @@ -0,0 +1,642 @@ +/* + * Copyright 2016-2025 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.springframework.data.mongodb.repository.util.SliceUtils; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.UnaryOperator; + +import org.reactivestreams.Publisher; + +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveFindOperation; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ReadPreference; +import com.mongodb.client.result.DeleteResult; + +/** + * Reactive repository base implementation for Mongo. + * + * @author Mark Paluch + * @author Oliver Gierke + * @author Christoph Strobl + * @author Ruben J Garcia + * @author Jens Schauder + * @author Clément Petit + * @author Kirill Egorov + * @since 2.0 + */ +public class SimpleReactiveMongoRepository implements ReactiveMongoRepository { + + private @Nullable CrudMethodMetadata crudMethodMetadata; + private final MongoEntityInformation entityInformation; + private final ReactiveMongoOperations mongoOperations; + + /** + * Creates a new {@link SimpleReactiveMongoRepository} for the given {@link MongoEntityInformation} and + * {@link MongoTemplate}. + * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. 
+ */ + public SimpleReactiveMongoRepository(MongoEntityInformation entityInformation, + ReactiveMongoOperations mongoOperations) { + + Assert.notNull(entityInformation, "EntityInformation must not be null"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + + this.entityInformation = entityInformation; + this.mongoOperations = mongoOperations; + } + + // ------------------------------------------------------------------------- + // Methods from ReactiveCrudRepository + // ------------------------------------------------------------------------- + + @Override + public Mono save(S entity) { + + Assert.notNull(entity, "Entity must not be null"); + + if (entityInformation.isNew(entity)) { + return mongoOperations.insert(entity, entityInformation.getCollectionName()); + } + + return mongoOperations.save(entity, entityInformation.getCollectionName()); + } + + @Override + public Flux saveAll(Iterable entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null"); + + List source = toList(entities); + return source.stream().allMatch(entityInformation::isNew) ? // + insert(source) : concatMapSequentially(source, this::save); + } + + @Override + public Flux saveAll(Publisher publisher) { + + Assert.notNull(publisher, "The given Publisher of entities must not be null"); + + return concatMapSequentially(publisher, this::save); + } + + @Override + public Mono findById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.findOne(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + @Override + public Mono findById(Publisher publisher) { + + Assert.notNull(publisher, "The given id must not be null"); + Optional readPreference = getReadPreference(); + + return Mono.from(publisher).flatMap(id -> { + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.findOne(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }); + } + + @Override + public Mono existsById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.exists(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + @Override + public Mono existsById(Publisher publisher) { + + Assert.notNull(publisher, "The given id must not be null"); + Optional readPreference = getReadPreference(); + + return Mono.from(publisher).flatMap(id -> { + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.exists(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }); + } + + @Override + public Flux findAll() { + return findAll(new Query()); + } + + @Override + public Flux findAllById(Iterable ids) { + + Assert.notNull(ids, "The given Iterable of Id's must not be null"); + + return findAll(getIdQuery(ids)); + } + + @Override + public Flux findAllById(Publisher ids) { + + Assert.notNull(ids, "The given Publisher of Id's must not be null"); + + Optional readPreference = getReadPreference(); + return Flux.from(ids).buffer().flatMapSequential(listOfIds -> { + Query query = getIdQuery(listOfIds); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.find(query, entityInformation.getJavaType(), 
entityInformation.getCollectionName()); + }); + } + + @Override + public Mono count() { + + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.count(query, entityInformation.getCollectionName()); + } + + @Override + public Mono deleteById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + return deleteById(id, getReadPreference()); + } + + private Mono deleteById(ID id, Optional readPreference) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()).then(); + } + + @Override + public Mono deleteById(Publisher publisher) { + + Assert.notNull(publisher, "Id must not be null"); + + Optional readPreference = getReadPreference(); + + return Mono.from(publisher).flatMap(id -> { + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }).then(); + } + + @Override + public Mono delete(T entity) { + + Assert.notNull(entity, "The given entity must not be null"); + + Mono remove = mongoOperations.remove(entity, entityInformation.getCollectionName()); + + if (entityInformation.isVersioned()) { + + remove = remove.handle((deleteResult, sink) -> { + + if (deleteResult.wasAcknowledged() && deleteResult.getDeletedCount() == 0) { + sink.error(new OptimisticLockingFailureException(String.format( + "The entity with id %s with version %s in %s cannot be deleted; Was it modified or deleted in the meantime", + entityInformation.getId(entity), entityInformation.getVersion(entity), + entityInformation.getCollectionName()))); + } else { + sink.next(deleteResult); + } + }); + } + + return remove.then(); + } + + @Override + public Mono deleteAllById(Iterable ids) { + + Assert.notNull(ids, "The given Iterable of Id's must not be null"); + + return deleteAllById(ids, getReadPreference()); + } + + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private Mono deleteAllById(Iterable ids, Optional readPreference) { + + Query query = getIdQuery(ids); + readPreference.ifPresent(query::withReadPreference); + + return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()).then(); + } + + @Override + public Mono deleteAll(Iterable entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null"); + + Optional readPreference = getReadPreference(); + return Flux.fromIterable(entities).map(entityInformation::getRequiredId).collectList() + .flatMap(ids -> deleteAllById(ids, readPreference)); + } + + @Override + public Mono deleteAll(Publisher entityStream) { + + Assert.notNull(entityStream, "The given Publisher of entities must not be null"); + + Optional readPreference = getReadPreference(); + return Flux.from(entityStream)// + .map(entityInformation::getRequiredId)// + .concatMap(id -> deleteById(id, readPreference))// + .then(); + } + + @Override + public Mono deleteAll() { + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.remove(query, entityInformation.getCollectionName()).then(Mono.empty()); + } + + // ------------------------------------------------------------------------- + // Methods from ReactiveSortingRepository + // 
------------------------------------------------------------------------- + + @Override + public Flux findAll(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); + + return findAll(new Query().with(sort)); + } + + // ------------------------------------------------------------------------- + // Methods from ReactiveMongoRepository + // ------------------------------------------------------------------------- + + @Override + public Mono insert(S entity) { + + Assert.notNull(entity, "Entity must not be null"); + + return mongoOperations.insert(entity, entityInformation.getCollectionName()); + } + + @Override + public Flux insert(Iterable entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null"); + + return insert(toCollection(entities)); + } + + private Flux insert(Collection entities) { + return entities.isEmpty() ? Flux.empty() : mongoOperations.insert(entities, entityInformation.getCollectionName()); + } + + @Override + public Flux insert(Publisher entities) { + + Assert.notNull(entities, "The given Publisher of entities must not be null"); + + return Flux.from(entities).concatMap(this::insert); + } + + // ------------------------------------------------------------------------- + // Methods from ReactiveMongoRepository + // ------------------------------------------------------------------------- + + @Override + public Mono findOne(Example example) { + + Assert.notNull(example, "Sample must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()) // + .limit(2); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()).buffer(2) + .map(vals -> { + + if (vals.size() > 1) { + throw new IncorrectResultSizeDataAccessException(1); + } + return vals.iterator().next(); + }).next(); + } + + @Override + public Flux findAll(Example example) { + + Assert.notNull(example, "Example must not be null"); + + return findAll(example, Sort.unsorted()); + } + + @Override + public Flux findAll(Example example, Sort sort) { + + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(sort, "Sort must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()) // + .with(sort); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public Mono count(Example example) { + + Assert.notNull(example, "Sample must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.count(query, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public Mono exists(Example example) { + + Assert.notNull(example, "Sample must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public > P findBy(Example example, + Function, P> queryFunction) { + + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + return 
queryFunction + .apply(new ReactiveFluentQueryByExample<>(example, example.getProbeType(), getReadPreference())); + } + + /** + * Configures a custom {@link CrudMethodMetadata} to be used to detect {@link ReadPreference}s and query hints to be + * applied to queries. + * + * @param crudMethodMetadata the {@link CrudMethodMetadata} to apply. + * @since 4.2 + */ + void setRepositoryMethodMetadata(CrudMethodMetadata crudMethodMetadata) { + this.crudMethodMetadata = crudMethodMetadata; + } + + private Flux<T> findAll(Query query) { + + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + private Optional<ReadPreference> getReadPreference() { + + if (crudMethodMetadata == null) { + return Optional.empty(); + } + + return crudMethodMetadata.getReadPreference(); + } + + private Query getIdQuery(Object id) { + return new Query(getIdCriteria(id)); + } + + private Criteria getIdCriteria(Object id) { + return where(entityInformation.getIdAttribute()).is(id); + } + + private Query getIdQuery(Iterable<? extends ID> ids) { + return new Query(where(entityInformation.getIdAttribute()).in(toCollection(ids))); + } + + /** + * Transform the elements emitted by this Flux into Publishers, then flatten these inner publishers into a single + * Flux. The operation does not allow interleaving: the mapping operation for the first source element is guaranteed + * to complete before the following inner publishers are subscribed to; those are then subscribed to eagerly, + * emitting elements in the order of their source. + * + * <pre class="code">
                    +	 * Flux.just(first-element).flatMap(...)
                    +	 *     .concatWith(Flux.fromIterable(remaining-elements).flatMapSequential(...))
+	 * </pre>
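+	 * For example, a hypothetical invocation routing saves through the repository's template (entity names
+	 * illustrative, not part of the API):
+	 * <pre class="code">
+	 * concatMapSequentially(List.of(first, second, third), entity -> mongoOperations.save(entity));
+	 * </pre>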
                    + * + * @param source the collection of elements to transform. + * @param mapper the transformation {@link Function}. Must not be {@literal null}. + * @return never {@literal null}. + * @param source type + */ + static Flux concatMapSequentially(List source, + Function> mapper) { + + return switch (source.size()) { + case 0 -> Flux.empty(); + case 1 -> Flux.just(source.get(0)).flatMap(mapper); + case 2 -> Flux.fromIterable(source).concatMap(mapper); + default -> { + + Flux first = Flux.just(source.get(0)).flatMap(mapper); + Flux theRest = Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(mapper); + yield first.concatWith(theRest); + } + }; + } + + static Flux concatMapSequentially(Publisher publisher, + Function> mapper) { + + return Flux.from(publisher).switchOnFirst((signal, source) -> { + + if (!signal.hasValue()) { + return source.concatMap(mapper); + } + + Mono firstCall = Mono.from(mapper.apply(signal.get())); + return firstCall.concatWith(source.skip(1).flatMapSequential(mapper)); + }); + } + + private static List toList(Iterable source) { + + Collection collection = toCollection(source); + + if (collection instanceof List list) { + return list; + } + + return new ArrayList<>(collection); + } + + private static Collection toCollection(Iterable source) { + + if (source instanceof Collection collection) { + return collection; + } + + List list = new ArrayList<>(); + source.forEach(list::add); + + return list; + } + + /** + * {@link org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery} using {@link Example}. + * + * @author Mark Paluch + * @since 3.3 + */ + class ReactiveFluentQueryByExample extends ReactiveFluentQuerySupport, T> { + + private final Optional readPreference; + + ReactiveFluentQueryByExample(Example example, Class resultType, Optional readPreference) { + this(example, Sort.unsorted(), 0, resultType, Collections.emptyList(), readPreference); + } + + ReactiveFluentQueryByExample(Example example, Sort sort, int limit, Class resultType, + List fieldsToInclude, Optional readPreference) { + super(example, sort, limit, resultType, fieldsToInclude); + this.readPreference = readPreference; + } + + @Override + protected ReactiveFluentQueryByExample create(Example predicate, Sort sort, int limit, + Class resultType, List fieldsToInclude) { + return new ReactiveFluentQueryByExample<>(predicate, sort, limit, resultType, fieldsToInclude, readPreference); + } + + @Override + public Mono one() { + return createQuery().one(); + } + + @Override + public Mono first() { + return createQuery().first(); + } + + @Override + public Flux all() { + return createQuery().all(); + } + + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Mono> page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + Mono> items = createQuery(q -> q.with(pageable)).all().collectList(); + + return items.flatMap(content -> ReactivePageableExecutionUtils.getPage(content, pageable, this.count())); + } + + @Override + public Mono> slice(Pageable pageable) { + + return createQuery(q -> SliceUtils.limitResult(q, pageable).with(pageable.getSort())).all().collectList() + .map(it -> SliceUtils.sliceResult(it, pageable)); + } + + @Override + public Mono count() { + return createQuery().count(); + } + + @Override + public Mono exists() { + return createQuery().exists(); + } + + private ReactiveFindOperation.TerminatingFind createQuery() { + return 
createQuery(UnaryOperator.identity()); + } + + private ReactiveFindOperation.TerminatingFind createQuery(UnaryOperator queryCustomizer) { + + Query query = new Query(new Criteria().alike(getPredicate())) // + .collation(entityInformation.getCollation()); + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + + if (!getFieldsToInclude().isEmpty()) { + query.fields().include(getFieldsToInclude()); + } + + readPreference.ifPresent(query::withReadPreference); + + query = queryCustomizer.apply(query); + + return mongoOperations.query(getPredicate().getProbeType()).inCollection(entityInformation.getCollectionName()) + .as(getResultType()).matching(query); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java index 68e155685c..0ef6c38744 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2015 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,57 +15,277 @@ */ package org.springframework.data.mongodb.repository.support; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.function.Consumer; +import java.util.stream.Stream; + +import org.bson.Document; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.util.SliceUtils; +import org.springframework.data.support.PageableExecutionUtils; +import org.springframework.lang.Nullable; -import com.google.common.base.Function; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.querydsl.mongodb.AbstractMongodbQuery; +import com.mysema.commons.lang.CloseableIterator; +import com.mysema.commons.lang.EmptyCloseableIterator; +import com.querydsl.core.Fetchable; +import com.querydsl.core.QueryMetadata; +import com.querydsl.core.QueryModifiers; +import com.querydsl.core.QueryResults; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Predicate; /** - * Spring Data specific {@link MongodbQuery} implementation. 
- * + * Spring Data specific simple {@link com.querydsl.core.Fetchable} {@link com.querydsl.core.SimpleQuery Query} + * implementation. + * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ -class SpringDataMongodbQuery extends AbstractMongodbQuery> { +public class SpringDataMongodbQuery extends SpringDataMongodbQuerySupport> + implements Fetchable { - private final MongoOperations operations; + private final MongoOperations mongoOperations; + private final Consumer queryCustomizer; + private final ExecutableFindOperation.FindWithQuery find; /** * Creates a new {@link SpringDataMongodbQuery}. - * + * * @param operations must not be {@literal null}. * @param type must not be {@literal null}. */ - public SpringDataMongodbQuery(final MongoOperations operations, final Class type) { + public SpringDataMongodbQuery(MongoOperations operations, Class type) { this(operations, type, operations.getCollectionName(type)); } /** * Creates a new {@link SpringDataMongodbQuery} to query the given collection. - * + * * @param operations must not be {@literal null}. * @param type must not be {@literal null}. * @param collectionName must not be {@literal null} or empty. */ - public SpringDataMongodbQuery(final MongoOperations operations, final Class type, - String collectionName) { + public SpringDataMongodbQuery(MongoOperations operations, Class type, String collectionName) { + this(operations, type, type, collectionName, it -> {}); + } + + /** + * Creates a new {@link SpringDataMongodbQuery}. + * + * @param operations must not be {@literal null}. + * @param domainType must not be {@literal null}. + * @param resultType must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @since 3.3 + */ + SpringDataMongodbQuery(MongoOperations operations, Class domainType, Class resultType, + String collectionName, Consumer queryCustomizer) { + super(new SpringDataMongodbSerializer(operations.getConverter())); + + Class resultType1 = (Class) resultType; + this.mongoOperations = operations; + this.queryCustomizer = queryCustomizer; + this.find = mongoOperations.query(domainType).inCollection(collectionName).as(resultType1); + } + + @Override + public CloseableIterator iterate() { + + try { + Stream stream = stream(); + Iterator iterator = stream.iterator(); + + return new CloseableIterator() { + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public T next() { + return iterator.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("Cannot remove from iterator while streaming data."); + } + + @Override + public void close() { + stream.close(); + } + }; + } catch (RuntimeException e) { + return handleException(e, new EmptyCloseableIterator<>()); + } + } + + public Window scroll(ScrollPosition scrollPosition) { + + try { + return find.matching(createQuery()).scroll(scrollPosition); + } catch (RuntimeException e) { + return handleException(e, Window.from(Collections.emptyList(), value -> { + throw new UnsupportedOperationException(); + })); + } + } + + @Override + public Stream stream() { + + try { + return find.matching(createQuery()).stream(); + } catch (RuntimeException e) { + return handleException(e, Stream.empty()); + } + } + + @Override + public List fetch() { + try { + return find.matching(createQuery()).all(); + } catch (RuntimeException e) { + return handleException(e, Collections.emptyList()); + } + } + + /** + * Fetch a {@link Page}. 
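+ * For example, a hypothetical paged lookup; the {@code query} instance and the {@code person} path are illustrative: + * <pre class="code"> + * Page<Person> page = query.where(person.lastname.eq("Matthews")).fetchPage(PageRequest.of(0, 20)); + * </pre>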
+ * + * @param pageable + * @return + */ + public Page fetchPage(Pageable pageable) { - super(operations.getCollection(collectionName), new Function() { - public T apply(DBObject input) { - return operations.getConverter().read(type, input); - } - }, new SpringDataMongodbSerializer(operations.getConverter())); + try { - this.operations = operations; + List content = find.matching(createQuery().with(pageable)).all(); + + return PageableExecutionUtils.getPage(content, pageable, this::fetchCount); + } catch (RuntimeException e) { + return handleException(e, new PageImpl<>(Collections.emptyList(), pageable, 0)); + } } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.AbstractMongodbQuery#getCollection(java.lang.Class) + /** + * Fetch a {@link Slice}. + * + * @param pageable defines range and sort of requested slice + * @return new instance of {@link Slice} containing matching results within range. + * @since 4.5 */ + public Slice fetchSlice(Pageable pageable) { + + List content = find.matching(SliceUtils.limitResult(createQuery(), pageable).with(pageable.getSort())).all(); + + return SliceUtils.sliceResult(content, pageable); + } + + @Override + public T fetchFirst() { + try { + return find.matching(createQuery()).firstValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + @Override + public T fetchOne() { + try { + return find.matching(createQuery()).oneValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + @Override + public QueryResults fetchResults() { + + long total = fetchCount(); + return total > 0L ? new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total) + : QueryResults.emptyResults(); + } + @Override - protected DBCollection getCollection(Class type) { - return operations.getCollection(operations.getCollectionName(type)); + public long fetchCount() { + try { + return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count(); + } catch (RuntimeException e) { + return handleException(e, 0L); + } + } + + protected org.springframework.data.mongodb.core.query.Query createQuery() { + + QueryMetadata metadata = getQueryMixin().getMetadata(); + + return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + metadata.getOrderBy()); } + + protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter, + @Nullable Expression projection, QueryModifiers modifiers, List> orderBy) { + + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(createQuery(filter), fields == null ? new Document() : fields); + + Integer limit = modifiers.getLimitAsInteger(); + Integer offset = modifiers.getOffsetAsInteger(); + + if (limit != null) { + basicQuery.limit(limit); + } + if (offset != null) { + basicQuery.skip(offset); + } + if (orderBy.size() > 0) { + basicQuery.setSortObject(createSort(orderBy)); + } + + queryCustomizer.accept(basicQuery); + + return basicQuery; + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. 
+ */ + protected List getIds(Class targetType, Predicate condition) { + + Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList()); + return mongoOperations.findDistinct(query, FieldName.ID.name(), targetType, Object.class); + } + + private static T handleException(RuntimeException e, T defaultValue) { + + if (e.getClass().getName().endsWith("$NoResults")) { + return defaultValue; + } + + throw e; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java new file mode 100644 index 0000000000..a64f666f3f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java @@ -0,0 +1,139 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.json.JsonMode; +import org.bson.json.JsonWriterSettings; + +import com.mongodb.MongoClientSettings; +import com.querydsl.core.support.QueryMixin; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.mongodb.document.AbstractMongodbQuery; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; + +/** + * Support query type to augment Spring Data-specific {@link #toString} representations and + * {@link org.springframework.data.domain.Sort} creation. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class SpringDataMongodbQuerySupport> + extends AbstractMongodbQuery { + + private final QueryMixin superQueryMixin; + + private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL) + .build(); + + private final MongodbDocumentSerializer serializer; + + @SuppressWarnings("unchecked") + SpringDataMongodbQuerySupport(MongodbDocumentSerializer serializer) { + + super(serializer); + this.serializer = serializer; + this.superQueryMixin = super.getQueryMixin(); + } + + /** + * Returns the {@literal Mongo Shell} representation of the query.
+ * The following query + * + * <pre class="code">
                    +	 *
                    +	 * where(p.lastname.eq("Matthews")).orderBy(p.firstname.asc()).offset(1).limit(5);
+	 * </pre>
+ * + * results in + * + * <pre class="code">
                    +	 *
                    +	 * find({"lastname" : "Matthews"}).sort({"firstname" : 1}).skip(1).limit(5)
+	 * </pre>
+ * + * Note that encoding to {@link String} may fail when using data types that cannot be encoded or DBRefs without an + * identifier. + * + * @return never {@literal null}. + */ + @Override + public String toString() { + + Document projection = createProjection(getQueryMixin().getMetadata().getProjection()); + Document sort = createSort(getQueryMixin().getMetadata().getOrderBy()); + DocumentCodec codec = new DocumentCodec(MongoClientSettings.getDefaultCodecRegistry()); + + StringBuilder sb = new StringBuilder("find(" + asDocument().toJson(JSON_WRITER_SETTINGS, codec)); + if (projection != null && !projection.isEmpty()) { + sb.append(", ").append(projection.toJson(JSON_WRITER_SETTINGS, codec)); + } + sb.append(")"); + if (!sort.isEmpty()) { + sb.append(".sort(").append(sort.toJson(JSON_WRITER_SETTINGS, codec)).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getOffset() != null) { + sb.append(".skip(").append(getQueryMixin().getMetadata().getModifiers().getOffset()).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getLimit() != null) { + sb.append(".limit(").append(getQueryMixin().getMetadata().getModifiers().getLimit()).append(")"); + } + return sb.toString(); + } + + /** + * Get the where definition as a {@link Document} instance. + * + * @return the {@literal where} part of the query as {@link Document}. + */ + public Document asDocument() { + return createQuery(getQueryMixin().getMetadata().getWhere()); + } + + /** + * Obtain the {@literal Mongo Shell} json query representation. + * + * @return never {@literal null}. + */ + public String toJson() { + return toJson(JSON_WRITER_SETTINGS); + } + + /** + * Obtain the json query representation applying the given {@link JsonWriterSettings settings}. + * + * @param settings must not be {@literal null}. + * @return never {@literal null}. + */ + public String toJson(JsonWriterSettings settings) { + return asDocument().toJson(settings); + } + + /** + * Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}. + * + * @param orderSpecifiers can be {@literal null}. + * @return an empty {@link Document} if predicate is {@literal null}. + * @see MongodbDocumentSerializer#toSort(List) + */ + protected Document createSort(List<OrderSpecifier<?>> orderSpecifiers) { + return serializer.toSort(orderSpecifiers); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java index 6ee178bbbb..d9a550a0f7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,67 +15,72 @@ */ package org.springframework.data.mongodb.repository.support; -import java.util.Collections; -import java.util.HashSet; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; +import org.bson.Document; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; -import com.mongodb.DBObject; import com.mongodb.DBRef; +import com.querydsl.core.types.Constant; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.Operation; import com.querydsl.core.types.Path; import com.querydsl.core.types.PathMetadata; import com.querydsl.core.types.PathType; import com.querydsl.mongodb.MongodbSerializer; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints. - * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ -class SpringDataMongodbSerializer extends MongodbSerializer { - - private static final String ID_KEY = "_id"; - private static final Set PATH_TYPES; - - static { - - Set pathTypes = new HashSet(); - pathTypes.add(PathType.VARIABLE); - pathTypes.add(PathType.PROPERTY); +class SpringDataMongodbSerializer extends MongodbDocumentSerializer { - PATH_TYPES = Collections.unmodifiableSet(pathTypes); - } + private static final String ID_KEY = FieldName.ID.name(); + private static final Set PATH_TYPES = Set.of(PathType.VARIABLE, PathType.PROPERTY); private final MongoConverter converter; private final MappingContext, MongoPersistentProperty> mappingContext; private final QueryMapper mapper; /** - * Creates a new {@link SpringDataMongodbSerializer} for the given {@link MappingContext}. - * - * @param mappingContext must not be {@literal null}. + * Creates a new {@link SpringDataMongodbSerializer} for the given {@link MongoConverter}. + * + * @param converter must not be {@literal null}. 
*/ public SpringDataMongodbSerializer(MongoConverter converter) { - Assert.notNull(converter, "MongoConverter must not be null!"); + Assert.notNull(converter, "MongoConverter must not be null"); this.mappingContext = converter.getMappingContext(); this.converter = converter; this.mapper = new QueryMapper(converter); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#getKeyForPath(com.querydsl.core.types.Path, com.querydsl.core.types.PathMetadata) - */ + @Override + public Object visit(Constant expr, Void context) { + + if (!ClassUtils.isAssignable(Enum.class, expr.getType())) { + return super.visit(expr, context); + } + + return converter.convertToMongoType(expr.getConstant()); + } + @Override protected String getKeyForPath(Path expr, PathMetadata metadata) { @@ -84,46 +89,109 @@ protected String getKeyForPath(Path expr, PathMetadata metadata) { } Path parent = metadata.getParent(); - MongoPersistentEntity entity = mappingContext.getPersistentEntity(parent.getType()); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(parent.getType()); MongoPersistentProperty property = entity.getPersistentProperty(metadata.getName()); return property == null ? super.getKeyForPath(expr, metadata) : property.getFieldName(); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object) - */ @Override - protected DBObject asDBObject(String key, Object value) { + protected Document asDocument(@Nullable String key, @Nullable Object value) { - if (ID_KEY.equals(key)) { - return mapper.getMappedObject(super.asDBObject(key, value), null); - } + value = value instanceof Optional optional ? optional.orElse(null) : value; - return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value)); + return super.asDocument(key, value instanceof Pattern ? value : converter.convertToMongoType(value)); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#isReference(com.querydsl.core.types.Path) - */ @Override - protected boolean isReference(Path path) { + protected boolean isReference(@Nullable Path path) { + + MongoPersistentProperty property = getPropertyForPotentialDbRef(path); + return property != null && property.isAssociation(); + } + + @Override + protected DBRef asReference(@Nullable Object constant) { + return asReference(constant, null); + } + + protected DBRef asReference(Object constant, @Nullable Path path) { + return converter.toDBRef(constant, getPropertyForPotentialDbRef(path)); + } + + @Override + protected String asDBKey(@Nullable Operation expr, int index) { + + Expression arg = expr.getArg(index); + String key = super.asDBKey(expr, index); + + if (!(arg instanceof Path path)) { + return key; + } + + if (!isReference(path)) { + return key; + } MongoPersistentProperty property = getPropertyFor(path); - return property == null ? false : property.isAssociation(); + + return property != null && property.getOwner().isIdProperty(property) ? key.replaceAll("." + ID_KEY + "$", "") + : key; } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#asReference(java.lang.Object) - */ @Override - protected DBRef asReference(Object constant) { - return converter.toDBRef(constant, null); + protected boolean isId(Path arg) { + MongoPersistentProperty propertyFor = getPropertyFor(arg); + return propertyFor == null ? 
super.isId(arg) : propertyFor.getOwner().isIdProperty(propertyFor); } + @Override + @Nullable + protected Object convert(@Nullable Path path, @Nullable Constant constant) { + + if (constant == null) { + return null; + } + + if (!isReference(path)) { + + MongoPersistentProperty property = getPropertyFor(path); + if (property == null) { + return super.convert(path, constant); + } + + if (property.getOwner().isIdProperty(property)) { + return mapper.convertId(constant.getConstant(), property.getFieldType()); + } + + if (property.hasExplicitWriteTarget()) { + return converter.convertToMongoType(constant.getConstant(), TypeInformation.of(property.getFieldType())); + } + + return converter.convertToMongoType(constant.getConstant()); + } + + MongoPersistentProperty property = getPropertyFor(path); + + if (property != null) { + if (property.isDocumentReference()) { + return converter.toDocumentPointer(constant.getConstant(), property).getPointer(); + } + + if (property.getOwner().isIdProperty(property)) { + + MongoPersistentProperty propertyForPotentialDbRef = getPropertyForPotentialDbRef(path); + if (propertyForPotentialDbRef != null && propertyForPotentialDbRef.isDocumentReference()) { + return converter.toDocumentPointer(constant.getConstant(), propertyForPotentialDbRef).getPointer(); + } + return asReference(constant.getConstant(), path.getMetadata().getParent()); + } + } + + return asReference(constant.getConstant(), path); + } + + @Nullable private MongoPersistentProperty getPropertyFor(Path path) { Path parent = path.getMetadata().getParent(); @@ -135,4 +203,30 @@ private MongoPersistentProperty getPropertyFor(Path path) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(parent.getType()); return entity != null ? entity.getPersistentProperty(path.getMetadata().getName()) : null; } + + /** + * Checks the given {@literal path} for referencing the {@literal id} property of a {@link DBRef} referenced object. + * If so it returns the referenced {@link MongoPersistentProperty} of the {@link DBRef} instead of the {@literal id} + * property. + * + * @param path + * @return + */ + @Nullable + private MongoPersistentProperty getPropertyForPotentialDbRef(@Nullable Path path) { + + if (path == null) { + return null; + } + + MongoPersistentProperty property = getPropertyFor(path); + PathMetadata metadata = path.getMetadata(); + + if (property != null && property.getOwner().isIdProperty(property) && metadata != null + && metadata.getParent() != null) { + return getPropertyFor(metadata.getParent()); + } + + return property; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/package-info.java index 9520a08334..1d0b8beeba 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/package-info.java @@ -1,5 +1,6 @@ /** * Support infrastructure for query derivation of MongoDB specific repositories. 
*/ +@org.springframework.lang.NonNullApi package org.springframework.data.mongodb.repository.support; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/util/SliceUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/util/SliceUtils.java new file mode 100644 index 0000000000..b570687cb5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/util/SliceUtils.java @@ -0,0 +1,74 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.util; + +import java.util.List; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Utility methods for {@link Slice} handling. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.5 + */ +public class SliceUtils { + + /** + * Creates a {@link Slice} given {@link Pageable} and {@link List} of results. + * + * @param the element type. + * @param resultList the source list holding the result of the request. If the result list contains more elements + * (indicating a next slice is available) it is trimmed to the {@link Pageable#getPageSize() page size}. + * @param pageable the source pageable. + * @return new instance of {@link Slice}. + */ + public static Slice sliceResult(List resultList, Pageable pageable) { + + boolean hasNext = resultList.size() > pageable.getPageSize(); + + if (hasNext) { + resultList = resultList.subList(0, pageable.getPageSize()); + } + + return new SliceImpl<>(resultList, pageable, hasNext); + } + + /** + * Customize query for {@link #sliceResult sliced result} retrieval. If {@link Pageable#isPaged() paged} the + * {@link Query#limit(int) limit} is set to {@code pagesize + 1} in order to determine if more data is available. + * + * @param query the source query + * @param pageable paging to apply. + * @return new instance of {@link Query} if either {@link Pageable#isPaged() paged}, the source query otherwise. + */ + public static Query limitResult(Query query, Pageable pageable) { + + if (pageable.isUnpaged()) { + return query; + } + + Query target = Query.of(query); + target.skip(pageable.getOffset()); + target.limit(pageable.getPageSize() + 1); + + return target; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java new file mode 100644 index 0000000000..cbbd4a37a9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -0,0 +1,960 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.StringJoiner; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.StreamSupport; + +import org.bson.*; +import org.bson.codecs.Codec; +import org.bson.codecs.DocumentCodec; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.bson.json.JsonParseException; +import org.bson.types.Binary; +import org.bson.types.Decimal128; +import org.bson.types.ObjectId; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; +import com.mongodb.DBRef; +import com.mongodb.MongoClientSettings; + +/** + * Internal API for operations on {@link Bson} elements that can be either {@link Document} or {@link DBObject}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ +public class BsonUtils { + + /** + * The empty document (immutable). This document is serializable. + * + * @since 3.2.5 + */ + public static final Document EMPTY_DOCUMENT = new EmptyDocument(); + + @SuppressWarnings("unchecked") + @Nullable + public static T get(Bson bson, String key) { + return (T) asMap(bson).get(key); + } + + /** + * Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted + * version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + */ + public static Map asMap(Bson bson) { + return asMap(bson, MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted + * version of {@code bson} or a converted (detached from the original value) using the given {@link CodecRegistry} to + * obtain {@link org.bson.codecs.Codec codecs} that might be required for conversion. + * + * @param bson can be {@literal null}. + * @param codecRegistry must not be {@literal null}. + * @return never {@literal null}. Returns an empty {@link Map} if input {@link Bson} is {@literal null}. 
+ * @since 4.0 + */ + public static Map asMap(@Nullable Bson bson, CodecRegistry codecRegistry) { + + if (bson == null) { + return Collections.emptyMap(); + } + + if (bson instanceof Document document) { + return document; + } + if (bson instanceof BasicDBObject dbo) { + return dbo; + } + if (bson instanceof DBObject dbo) { + return dbo.toMap(); + } + + return new Document(bson.toBsonDocument(Document.class, codecRegistry)); + } + + /** + * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a + * casted version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + * @since 3.2.5 + */ + public static Document asDocument(Bson bson) { + return asDocument(bson, MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a + * casted version of {@code bson} or a converted (detached from the original value) using the given + * {@link CodecRegistry} to obtain {@link org.bson.codecs.Codec codecs} that might be required for conversion. + * + * @param bson + * @param codecRegistry must not be {@literal null}. + * @return never {@literal null}. + * @since 4.0 + */ + public static Document asDocument(Bson bson, CodecRegistry codecRegistry) { + + Map map = asMap(bson, codecRegistry); + + if (map instanceof Document document) { + return document; + } + + return new Document(map); + } + + /** + * Return the {@link Bson} object as mutable {@link Document} containing all entries from {@link Bson}. + * + * @param bson + * @return a mutable {@link Document} containing all entries from {@link Bson}. + * @since 3.2.5 + */ + public static Document asMutableDocument(Bson bson) { + + if (bson instanceof EmptyDocument) { + bson = new Document(asDocument(bson)); + } + + if (bson instanceof Document document) { + return document; + } + + Map map = asMap(bson); + + if (map instanceof Document document) { + return document; + } + + return new Document(map); + } + + public static void addToMap(Bson bson, String key, @Nullable Object value) { + + if (bson instanceof Document document) { + + document.put(key, value); + return; + } + if (bson instanceof BSONObject bsonObject) { + + bsonObject.put(key, value); + return; + } + + throw new IllegalArgumentException(String.format( + "Cannot add key/value pair to %s; as map given Bson must be a Document or BSONObject", bson.getClass())); + } + + /** + * Add all entries from the given {@literal source} {@link Map} to the {@literal target}. + * + * @param target must not be {@literal null}. + * @param source must not be {@literal null}. + * @since 3.2 + */ + public static void addAllToMap(Bson target, Map source) { + + if (target instanceof Document document) { + + document.putAll(source); + return; + } + + if (target instanceof BSONObject bsonObject) { + + bsonObject.putAll(source); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot add all to %s; Given Bson must be a Document or BSONObject.", target.getClass())); + } + + /** + * Check if a given entry (key/value pair) is present in the given {@link Bson}. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @param value can be {@literal null}. + * @return {@literal true} if (key/value pair) is present. 
+ * @since 3.2 + */ + public static boolean contains(Bson bson, String key, @Nullable Object value) { + + if (bson instanceof Document document) { + return document.containsKey(key) && ObjectUtils.nullSafeEquals(document.get(key), value); + } + if (bson instanceof BSONObject bsonObject) { + return bsonObject.containsField(key) && ObjectUtils.nullSafeEquals(bsonObject.get(key), value); + } + + Map map = asMap(bson); + return map.containsKey(key) && ObjectUtils.nullSafeEquals(map.get(key), value); + } + + /** + * Remove {@code _id : null} from the given {@link Bson} if present. + * + * @param bson must not be {@literal null}. + * @since 3.2 + */ + public static boolean removeNullId(Bson bson) { + + if (!contains(bson, FieldName.ID.name(), null)) { + return false; + } + + removeFrom(bson, FieldName.ID.name()); + return true; + } + + /** + * Remove the given {@literal key} from the {@link Bson} value. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @since 3.2 + */ + static void removeFrom(Bson bson, String key) { + + if (bson instanceof Document document) { + + document.remove(key); + return; + } + + if (bson instanceof BSONObject bsonObject) { + + bsonObject.removeField(key); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot remove from %s given Bson must be a Document or BSONObject.", bson.getClass())); + } + + /** + * Extract the corresponding plain value from {@link BsonValue}. Eg. plain {@link String} from + * {@link org.bson.BsonString}. + * + * @param value must not be {@literal null}. + * @return + * @since 2.1 + */ + public static Object toJavaType(BsonValue value) { + + return switch (value.getBsonType()) { + case INT32 -> value.asInt32().getValue(); + case INT64 -> value.asInt64().getValue(); + case STRING -> value.asString().getValue(); + case DECIMAL128 -> value.asDecimal128().doubleValue(); + case DOUBLE -> value.asDouble().getValue(); + case BOOLEAN -> value.asBoolean().getValue(); + case OBJECT_ID -> value.asObjectId().getValue(); + case DB_POINTER -> new DBRef(value.asDBPointer().getNamespace(), value.asDBPointer().getId()); + case BINARY -> { + + BsonBinary binary = value.asBinary(); + if(binary.getType() != BsonBinarySubType.VECTOR.getValue()) { + yield binary.getData(); + } + yield value.asBinary().asVector(); + } + case DATE_TIME -> new Date(value.asDateTime().getValue()); + case SYMBOL -> value.asSymbol().getSymbol(); + case ARRAY -> value.asArray().toArray(); + case DOCUMENT -> Document.parse(value.asDocument().toJson()); + + default -> value; + }; + } + + /** + * Convert a given simple value (eg. {@link String}, {@link Long}) to its corresponding {@link BsonValue}. + * + * @param source must not be {@literal null}. + * @return the corresponding {@link BsonValue} representation. + * @throws IllegalArgumentException if {@literal source} does not correspond to a {@link BsonValue} type. + * @since 3.0 + */ + public static BsonValue simpleToBsonValue(Object source) { + return simpleToBsonValue(source, MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Convert a given simple value (eg. {@link String}, {@link Long}) to its corresponding {@link BsonValue}. + * + * @param source must not be {@literal null}. + * @param codecRegistry The {@link CodecRegistry} used as a fallback to convert types using native {@link Codec}. Must + * not be {@literal null}. + * @return the corresponding {@link BsonValue} representation. 
+ * @throws IllegalArgumentException if {@literal source} does not correspond to a {@link BsonValue} type. + * @since 4.2 + */ + @SuppressWarnings("unchecked") + public static BsonValue simpleToBsonValue(Object source, CodecRegistry codecRegistry) { + + if (source instanceof BsonValue bsonValue) { + return bsonValue; + } + + if (source instanceof ObjectId objectId) { + return new BsonObjectId(objectId); + } + + if (source instanceof String stringValue) { + return new BsonString(stringValue); + } + + if (source instanceof Double doubleValue) { + return new BsonDouble(doubleValue); + } + + if (source instanceof Integer integerValue) { + return new BsonInt32(integerValue); + } + + if (source instanceof Long longValue) { + return new BsonInt64(longValue); + } + + if (source instanceof byte[] byteArray) { + return new BsonBinary(byteArray); + } + + if (source instanceof Boolean booleanValue) { + return new BsonBoolean(booleanValue); + } + + if (source instanceof Float floatValue) { + return new BsonDouble(floatValue); + } + + if (source instanceof Binary binary) { + return new BsonBinary(binary.getType(), binary.getData()); + } + + if (source instanceof Date date) { + return new BsonDateTime(date.getTime()); + } + + try { + + Object value = source; + if (ClassUtils.isPrimitiveArray(source.getClass())) { + value = CollectionUtils.arrayToList(source); + } + + Codec codec = codecRegistry.get(value.getClass()); + BsonCapturingWriter writer = new BsonCapturingWriter(value.getClass()); + codec.encode(writer, value, + ObjectUtils.isArray(value) || value instanceof Collection ? EncoderContext.builder().build() : null); + return writer.getCapturedValue(); + } catch (CodecConfigurationException e) { + throw new IllegalArgumentException( + String.format("Unable to convert %s to BsonValue.", source != null ? source.getClass().getName() : "null")); + } + } + + /** + * Merge the given {@link Document documents} into one in the given order. Keys contained within multiple documents + * are overwritten by their follow-ups. + * + * @param documents must not be {@literal null}. Can be empty. + * @return the document containing all key value pairs. + * @since 2.2 + */ + public static Document merge(Document... documents) { + + if (ObjectUtils.isEmpty(documents)) { + return new Document(); + } + + if (documents.length == 1) { + return documents[0]; + } + + Document target = new Document(); + Arrays.asList(documents).forEach(target::putAll); + return target; + } + + /** + * Obtain a {@link Document} from the given source, parsing it as JSON if it starts with {@code '{'} and applying the + * given fallback otherwise. + * + * @param source the {@link String} to inspect. Must not be {@literal null}. + * @param orElse the fallback {@link Function} applied if {@literal source} is no JSON object. + * @return never {@literal null}. + * @since 2.2 + */ + public static Document toDocumentOrElse(String source, Function<String, Document> orElse) { + + if (source.stripLeading().startsWith("{")) { + return Document.parse(source); + } + + return orElse.apply(source); + } + + /** + * Serialize the given {@link Document} as Json applying default codecs if necessary. + * + * @param source can be {@literal null}. + * @return the Json representation, or {@literal null} if the source is {@literal null}. + * @since 2.2.1 + */ + @Nullable + public static String toJson(@Nullable Document source) { + + if (source == null) { + return null; + } + + try { + return source.toJson(); + } catch (Exception e) { + return toJson((Object) source); + } + } + + /** + * Check if a given String looks like {@link Document#parse(String) parsable} json. + * + * @param value can be {@literal null}. + * @return {@literal true} if the given value looks like a json document.
+ * @since 3.0 + */ + public static boolean isJsonDocument(@Nullable String value) { + + if (!StringUtils.hasText(value)) { + return false; + } + + String potentialJson = value.trim(); + return potentialJson.startsWith("{") && potentialJson.endsWith("}"); + } + + /** + * Check if a given String looks like {@link org.bson.BsonArray#parse(String) parsable} json array. + * + * @param value can be {@literal null}. + * @return {@literal true} if the given value looks like a json array. + * @since 3.0 + */ + public static boolean isJsonArray(@Nullable String value) { + return StringUtils.hasText(value) && (value.startsWith("[") && value.endsWith("]")); + } + + /** + * Parse the given {@literal json} to {@link Document} applying transformations as specified by a potentially given + * {@link org.bson.codecs.Codec}. + * + * @param json must not be {@literal null}. + * @param codecRegistryProvider can be {@literal null}. In that case the default {@link DocumentCodec} is used. + * @return never {@literal null}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + * @since 3.0 + */ + public static Document parse(String json, @Nullable CodecRegistryProvider codecRegistryProvider) { + + Assert.notNull(json, "Json must not be null"); + + if (codecRegistryProvider == null) { + return Document.parse(json); + } + + return Document.parse(json, codecRegistryProvider.getCodecFor(Document.class) + .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); + } + + /** + * Resolve the value for a given key. If the given {@link Bson} value contains the key the value is immediately + * returned. If not and the key contains a path using the dot ({@code .}) notation it will try to resolve the path by + * inspecting the individual parts. If one of the intermediate ones is {@literal null} or cannot be inspected further + * (wrong) type, {@literal null} is returned. + * + * @param bson the source to inspect. Must not be {@literal null}. + * @param key the key to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 3.0.8 + */ + @Nullable + public static Object resolveValue(Bson bson, String key) { + return resolveValue(asMap(bson), key); + } + + /** + * Resolve the value for a given {@link FieldName field name}. If the given name is a {@link Type#KEY} the value is + * obtained from the target {@link Bson} immediately. If the given fieldName is a {@link Type#PATH} maybe using the + * dot ({@code .}) notation it will try to resolve the path by inspecting the individual parts. If one of the + * intermediate ones is {@literal null} or cannot be inspected further (wrong) type, {@literal null} is returned. + * + * @param bson the source to inspect. Must not be {@literal null}. + * @param fieldName the name to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 4.2 + */ + public static Object resolveValue(Bson bson, FieldName fieldName) { + return resolveValue(asMap(bson), fieldName); + } + + /** + * Resolve the value for a given {@link FieldName field name}. If the given name is a {@link Type#KEY} the value is + * obtained from the target {@link Bson} immediately. If the given fieldName is a {@link Type#PATH} maybe using the + * dot ({@code .}) notation it will try to resolve the path by inspecting the individual parts. If one of the + * intermediate ones is {@literal null} or cannot be inspected further (wrong) type, {@literal null} is returned. + * + * @param source the source to inspect. 
Must not be {@literal null}. + * @param fieldName the key to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 4.2 + */ + @Nullable + public static Object resolveValue(Map source, FieldName fieldName) { + + if (fieldName.isKey()) { + return source.get(fieldName.name()); + } + + String[] parts = fieldName.parts(); + + for (int i = 1; i < parts.length; i++) { + + Object result = source.get(parts[i - 1]); + + if (!(result instanceof Bson resultBson)) { + return null; + } + + source = asMap(resultBson); + } + + return source.get(parts[parts.length - 1]); + } + + /** + * Resolve the value for a given key. If the given {@link Map} value contains the key the value is immediately + * returned. If not and the key contains a path using the dot ({@code .}) notation it will try to resolve the path by + * inspecting the individual parts. If one of the intermediate ones is {@literal null} or cannot be inspected further + * (wrong) type, {@literal null} is returned. + * + * @param source the source to inspect. Must not be {@literal null}. + * @param key the key to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 4.1 + */ + @Nullable + public static Object resolveValue(Map source, String key) { + + if (source.containsKey(key)) { + return source.get(key); + } + + return resolveValue(source, FieldName.path(key)); + } + + public static boolean hasValue(Bson bson, FieldName fieldName) { + + Map source = asMap(bson); + if (fieldName.isKey()) { + return source.containsKey(fieldName.name()); + } + + String[] parts = fieldName.parts(); + Object result; + + for (int i = 1; i < parts.length; i++) { + + result = source.get(parts[i - 1]); + source = getAsMap(result); + + if (source == null) { + return false; + } + } + + return source.containsKey(parts[parts.length - 1]); + + } + + /** + * Returns whether the underlying {@link Bson bson} has a value ({@literal null} or non-{@literal null}) for the given + * {@code key}. + * + * @param bson the source to inspect. Must not be {@literal null}. + * @param key the key to lookup. Must not be {@literal null}. + * @return {@literal true} if no non {@literal null} value present. + * @since 3.0.8 + */ + public static boolean hasValue(Bson bson, String key) { + return hasValue(bson, FieldName.path(key)); + } + + /** + * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise. + * + * @param source can be {@literal null}. + * @return can be {@literal null}. + */ + @Nullable + @SuppressWarnings("unchecked") + private static Map getAsMap(Object source) { + + if (source instanceof Document document) { + return document; + } + + if (source instanceof BasicDBObject basicDBObject) { + return basicDBObject; + } + + if (source instanceof DBObject dbObject) { + return dbObject.toMap(); + } + + if (source instanceof Map) { + return (Map) source; + } + + return null; + } + + /** + * Returns the given source object as {@link Bson}, i.e. {@link Document}s and maps as is or throw + * {@link IllegalArgumentException}. + * + * @param source + * @return the converted/casted source object. + * @throws IllegalArgumentException if {@code source} cannot be converted/cast to {@link Bson}. 
+ * @since 3.2.3 + * @see #supportsBson(Object) + */ + @SuppressWarnings("unchecked") + public static Bson asBson(Object source) { + + if (source instanceof Document document) { + return document; + } + + if (source instanceof BasicDBObject basicDBObject) { + return basicDBObject; + } + + if (source instanceof DBObject dbObject) { + return new Document(dbObject.toMap()); + } + + if (source instanceof Map) { + return new Document((Map) source); + } + + throw new IllegalArgumentException(String.format("Cannot convert %s to Bson", source)); + } + + /** + * Returns the given source can be used/converted as {@link Bson}. + * + * @param source + * @return {@literal true} if the given source can be converted to {@link Bson}. + * @since 3.2.3 + */ + public static boolean supportsBson(Object source) { + return source instanceof DBObject || source instanceof Map; + } + + /** + * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a + * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element + * collection for everything else. + * + * @param source must not be {@literal null}. + * @return never {@literal null}. + * @since 3.2 + */ + public static Collection asCollection(Object source) { + + if (source instanceof Collection collection) { + return collection; + } + + return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); + } + + public static Document mapValues(Document source, BiFunction valueMapper) { + return mapEntries(source, Entry::getKey, entry -> valueMapper.apply(entry.getKey(), entry.getValue())); + } + + public static Document mapEntries(Document source, Function, String> keyMapper, + Function, Object> valueMapper) { + + if (source.isEmpty()) { + return source; + } + + Map target = new LinkedHashMap<>(source.size(), 1f); + for (Entry entry : source.entrySet()) { + target.put(keyMapper.apply(entry), valueMapper.apply(entry)); + } + return new Document(target); + } + + @Nullable + private static String toJson(@Nullable Object value) { + + if (value == null) { + return null; + } + + try { + return value instanceof Document document + ? document.toJson(MongoClientSettings.getDefaultCodecRegistry().get(Document.class)) + : serializeValue(value); + + } catch (Exception e) { + + if (value instanceof Collection collection) { + return toString(collection); + } else if (value instanceof Map map) { + return toString(map); + } else if (ObjectUtils.isArray(value)) { + return toString(Arrays.asList(ObjectUtils.toObjectArray(value))); + } + + throw e instanceof JsonParseException jsonParseException ? 
+	@Nullable
+	private static String toJson(@Nullable Object value) {
+
+		if (value == null) {
+			return null;
+		}
+
+		try {
+			return value instanceof Document document
+					? document.toJson(MongoClientSettings.getDefaultCodecRegistry().get(Document.class))
+					: serializeValue(value);
+
+		} catch (Exception e) {
+
+			if (value instanceof Collection<?> collection) {
+				return toString(collection);
+			} else if (value instanceof Map<?, ?> map) {
+				return toString(map);
+			} else if (ObjectUtils.isArray(value)) {
+				return toString(Arrays.asList(ObjectUtils.toObjectArray(value)));
+			}
+
+			throw e instanceof JsonParseException jsonParseException ? jsonParseException : new JsonParseException(e);
+		}
+	}
+
+	private static String serializeValue(@Nullable Object value) {
+
+		if (value == null) {
+			return "null";
+		}
+
+		String documentJson = new Document("toBeEncoded", value).toJson();
+		return documentJson.substring(documentJson.indexOf(':') + 1, documentJson.length() - 1).trim();
+	}
+
+	private static String toString(Map<?, ?> source) {
+
+		// Avoid String.format for performance
+		return iterableToDelimitedString(source.entrySet(), "{ ", " }",
+				entry -> "\"" + entry.getKey() + "\" : " + toJson(entry.getValue()));
+	}
+
+	private static String toString(Collection<?> source) {
+		return iterableToDelimitedString(source, "[ ", " ]", BsonUtils::toJson);
+	}
+
+	private static <T> String iterableToDelimitedString(Iterable<T> source, String prefix, String suffix,
+			Converter<? super T, String> transformer) {
+
+		StringJoiner joiner = new StringJoiner(", ", prefix, suffix);
+
+		StreamSupport.stream(source.spliterator(), false).map(transformer::convert).forEach(joiner::add);
+
+		return joiner.toString();
+	}
+
+	static class BsonCapturingWriter extends AbstractBsonWriter {
+
+		private final List<BsonValue> values = new ArrayList<>(0);
+
+		public BsonCapturingWriter(Class<?> type) {
+			super(new BsonWriterSettings());
+
+			if (ClassUtils.isAssignable(Map.class, type)) {
+				setContext(new Context(null, BsonContextType.DOCUMENT));
+			} else if (ClassUtils.isAssignable(List.class, type) || type.isArray()) {
+				setContext(new Context(null, BsonContextType.ARRAY));
+			} else {
+				setContext(new Context(null, BsonContextType.DOCUMENT));
+			}
+		}
+
+		@Nullable
+		BsonValue getCapturedValue() {
+
+			if (values.isEmpty()) {
+				return null;
+			}
+			if (!getContext().getContextType().equals(BsonContextType.ARRAY)) {
+				return values.get(0);
+			}
+
+			return new BsonArray(values);
+		}
+
+		@Override
+		protected void doWriteStartDocument() {
+
+		}
+
+		@Override
+		protected void doWriteEndDocument() {
+
+		}
+
+		@Override
+		public void writeStartArray() {
+			setState(State.VALUE);
+		}
+
+		@Override
+		public void writeEndArray() {
+			setState(State.NAME);
+		}
+
+		@Override
+		protected void doWriteStartArray() {
+
+		}
+
+		@Override
+		protected void doWriteEndArray() {
+
+		}
+
+		@Override
+		protected void doWriteBinaryData(BsonBinary value) {
+			values.add(value);
+		}
+
+		@Override
+		protected void doWriteBoolean(boolean value) {
+			values.add(BsonBoolean.valueOf(value));
+		}
+
+		@Override
+		protected void doWriteDateTime(long value) {
+			values.add(new BsonDateTime(value));
+		}
+
+		@Override
+		protected void doWriteDBPointer(BsonDbPointer value) {
+			values.add(value);
+		}
+
+		@Override
+		protected void doWriteDouble(double value) {
+			values.add(new BsonDouble(value));
+		}
+
+		@Override
+		protected void doWriteInt32(int value) {
+			values.add(new BsonInt32(value));
+		}
+
+		@Override
+		protected void doWriteInt64(long value) {
+			values.add(new BsonInt64(value));
+		}
+
+		@Override
+		protected void doWriteDecimal128(Decimal128 value) {
+			values.add(new BsonDecimal128(value));
+		}
+
+		@Override
+		protected void doWriteJavaScript(String value) {
+			values.add(new BsonJavaScript(value));
+		}
+
+		@Override
+		protected void doWriteJavaScriptWithScope(String value) {
+			throw new UnsupportedOperationException("Cannot capture JavaScriptWithScope");
+		}
+
+		@Override
+		protected void doWriteMaxKey() {}
+
+		@Override
+		protected void doWriteMinKey() {}
+
+		@Override
+		protected void doWriteNull() {
+			values.add(new BsonNull());
+		}
+
+		@Override
+		protected void doWriteObjectId(ObjectId value) {
+			values.add(new BsonObjectId(value));
+		}
+
+
@Override + protected void doWriteRegularExpression(BsonRegularExpression value) { + values.add(value); + } + + @Override + protected void doWriteString(String value) { + values.add(new BsonString(value)); + } + + @Override + protected void doWriteSymbol(String value) { + values.add(new BsonSymbol(value)); + } + + @Override + protected void doWriteTimestamp(BsonTimestamp value) { + values.add(value); + } + + @Override + protected void doWriteUndefined() { + values.add(new BsonUndefined()); + } + + @Override + public void flush() { + values.clear(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java index 1e334782bf..48f2e9b84f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,21 @@ */ package org.springframework.data.mongodb.util; +import java.util.Arrays; + import com.mongodb.BasicDBList; /** * @author Thomas Darimont + * @deprecated since 4.2.0 */ +@Deprecated(since = "4.2.0", forRemoval = true) public class DBObjectUtils { public static BasicDBList dbList(Object... items) { BasicDBList list = new BasicDBList(); - for (Object item : items) { - list.add(item); - } + list.addAll(Arrays.asList(items)); return list; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DotPath.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DotPath.java new file mode 100644 index 0000000000..191c7d24d3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DotPath.java @@ -0,0 +1,89 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Value object representing a dot path. + * + * @author Mark Paluch + * @since 3.2 + */ +public class DotPath { + + private static final DotPath EMPTY = new DotPath(""); + + private final String path; + + private DotPath(String path) { + this.path = path; + } + + /** + * Creates a new {@link DotPath} from {@code dotPath}. + * + * @param dotPath the dot path, can be empty or {@literal null}. 
+	 * @return the {@link DotPath} representing {@code dotPath}.
+	 */
+	public static DotPath from(@Nullable String dotPath) {
+
+		if (StringUtils.hasLength(dotPath)) {
+			return new DotPath(dotPath);
+		}
+
+		return EMPTY;
+	}
+
+	/**
+	 * Returns an empty dot path.
+	 *
+	 * @return an empty dot path.
+	 */
+	public static DotPath empty() {
+		return EMPTY;
+	}
+
+	/**
+	 * Append a segment to the dot path. If the dot path is not empty, segments are separated with a dot.
+	 *
+	 * @param segment the segment to append.
+	 * @return a new {@link DotPath} with the given segment appended.
+	 */
+	public DotPath append(String segment) {
+
+		if (isEmpty()) {
+			return new DotPath(segment);
+		}
+
+		return new DotPath(path + "." + segment);
+	}
+
+	/**
+	 * Returns whether this dot path is empty.
+	 *
+	 * @return whether this dot path is empty.
+	 */
+	public boolean isEmpty() {
+		return !StringUtils.hasLength(path);
+	}
+
+	@Override
+	public String toString() {
+		return path;
+	}
+}
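A quick usage sketch of the `DotPath` value object above (hypothetical segment names, shown for illustration only):

    DotPath path = DotPath.empty().append("address").append("city");
    path.toString();              // "address.city"
    DotPath.from("").isEmpty();   // true: from(...) normalizes empty input to the shared EMPTY instance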
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DurationUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DurationUtil.java
new file mode 100644
index 0000000000..67255b878a
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DurationUtil.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2024-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util;
+
+import java.time.Duration;
+import java.util.function.Supplier;
+
+import org.springframework.core.env.Environment;
+import org.springframework.data.expression.ValueEvaluationContext;
+import org.springframework.data.expression.ValueExpression;
+import org.springframework.data.expression.ValueExpressionParser;
+import org.springframework.expression.EvaluationContext;
+import org.springframework.expression.spel.standard.SpelExpressionParser;
+import org.springframework.format.datetime.standard.DurationFormatterUtils;
+import org.springframework.lang.Nullable;
+
+/**
+ * Helper to evaluate a {@link Duration} from expressions.
+ *
+ * @author Christoph Strobl
+ * @since 4.4
+ */
+public class DurationUtil {
+
+	private static final ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new);
+
+	/**
+	 * Evaluates and potentially parses the given string representation into a {@link Duration} value.
+	 *
+	 * @param value the {@link String} representation of the duration to evaluate.
+	 * @param evaluationContext context supplier for property and expression language evaluation.
+	 * @return the evaluated duration.
+	 */
+	public static Duration evaluate(String value, ValueEvaluationContext evaluationContext) {
+
+		ValueExpression expression = PARSER.parse(value);
+		Object evaluatedTimeout = expression.evaluate(evaluationContext);
+
+		if (evaluatedTimeout == null) {
+			return Duration.ZERO;
+		}
+
+		if (evaluatedTimeout instanceof Duration duration) {
+			return duration;
+		}
+
+		return parse(evaluatedTimeout.toString());
+	}
+
+	/**
+	 * Evaluates and potentially parses the given string representation into a {@link Duration} value.
+	 *
+	 * @param value the {@link String} representation of the duration to evaluate.
+	 * @param evaluationContext context supplier for expression language evaluation.
+	 * @return the evaluated duration.
+	 */
+	public static Duration evaluate(String value, Supplier<EvaluationContext> evaluationContext) {
+
+		return evaluate(value, new ValueEvaluationContext() {
+			@Nullable
+			@Override
+			public Environment getEnvironment() {
+				return null;
+			}
+
+			@Nullable
+			@Override
+			public EvaluationContext getEvaluationContext() {
+				return evaluationContext.get();
+			}
+		});
+	}
+
+	/**
+	 * Parses the given duration string into a {@link Duration}, detecting the duration format.
+	 *
+	 * @param duration duration string to parse.
+	 * @return parsed {@link Duration}.
+	 * @see DurationFormatterUtils
+	 */
+	public static Duration parse(String duration) {
+		return DurationFormatterUtils.detectAndParse(duration);
+	}
+}
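For illustration, a minimal sketch of how `DurationUtil` behaves (the SpEL expression and evaluation context are hypothetical; `evaluate` falls back to `Duration.ZERO` for a `null` expression result and hands non-`Duration` results to `parse`, which detects the format via `DurationFormatterUtils`):

    Duration simple = DurationUtil.parse("90s");      // PT1M30S (simple style)
    Duration iso = DurationUtil.parse("PT1M30S");     // PT1M30S (ISO-8601 style)
    Duration spel = DurationUtil.evaluate("#{3 + 2 + 's'}", StandardEvaluationContext::new); // PT5S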
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java
new file mode 100644
index 0000000000..ffc97402fe
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2021-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.BiFunction;
+
+import org.bson.Document;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Empty variant of {@link Document}.
+ *
+ * @author Mark Paluch
+ */
+class EmptyDocument extends Document {
+
+	@Override
+	public Document append(String key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public Object put(String key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public Object remove(Object key) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public void putAll(Map<? extends String, ? extends Object> map) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public void replaceAll(BiFunction<? super String, ? super Object, ? extends Object> function) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public boolean remove(Object key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public boolean replace(String key, Object oldValue, Object newValue) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Nullable
+	@Override
+	public Object replace(String key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public Set<Map.Entry<String, Object>> entrySet() {
+		return Collections.emptySet();
+	}
+
+	@Override
+	public Collection<Object> values() {
+		return Collections.emptyList();
+	}
+
+	@Override
+	public Set<String> keySet() {
+		return Collections.emptySet();
+	}
+
+	@Override
+	public void clear() {
+		throw new UnsupportedOperationException();
+	}
+
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java
index d230574911..8fc4b108ff 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2015 the original author or authors.
+ * Copyright 2015-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,33 +15,104 @@
 */
 package org.springframework.data.mongodb.util;
 
+import java.lang.reflect.Field;
+
+import org.springframework.data.util.Version;
+import org.springframework.lang.Nullable;
 import org.springframework.util.ClassUtils;
+import org.springframework.util.ReflectionUtils;
+
+import com.mongodb.internal.build.MongoDriverVersion;
 
 /**
 * {@link MongoClientVersion} holds information about the used mongo-java client and is used to distinguish between
 * different versions.
- * + * * @author Christoph Strobl + * @author Mark Paluch * @since 1.7 */ public class MongoClientVersion { - private static final boolean IS_MONGO_30 = ClassUtils.isPresent("com.mongodb.binding.SingleServerBinding", - MongoClientVersion.class.getClassLoader()); - private static final boolean IS_ASYNC_CLIENT = ClassUtils.isPresent("com.mongodb.async.client.MongoClient", + private static final boolean SYNC_CLIENT_PRESENT = ClassUtils.isPresent("com.mongodb.MongoClient", + MongoClientVersion.class.getClassLoader()) + || ClassUtils.isPresent("com.mongodb.client.MongoClient", MongoClientVersion.class.getClassLoader()); + + private static final boolean ASYNC_CLIENT_PRESENT = ClassUtils.isPresent("com.mongodb.async.client.MongoClient", MongoClientVersion.class.getClassLoader()); + private static final boolean REACTIVE_CLIENT_PRESENT = ClassUtils + .isPresent("com.mongodb.reactivestreams.client.MongoClient", MongoClientVersion.class.getClassLoader()); + + private static final boolean IS_VERSION_5_OR_NEWER; + + private static final Version CLIENT_VERSION; + + static { + + ClassLoader classLoader = MongoClientVersion.class.getClassLoader(); + Version version = getMongoDbDriverVersion(classLoader); + + CLIENT_VERSION = version; + IS_VERSION_5_OR_NEWER = CLIENT_VERSION.isGreaterThanOrEqualTo(Version.parse("5.0")); + } + /** - * @return |literal true} if MongoDB Java driver version 3.0 or later is on classpath. + * @return {@literal true} if the async MongoDB Java driver is on classpath. */ - public static boolean isMongo3Driver() { - return IS_MONGO_30; + public static boolean isAsyncClient() { + return ASYNC_CLIENT_PRESENT; } /** - * @return {lliteral true} if MongoDB Java driver is on classpath. + * @return {@literal true} if the sync MongoDB Java driver is on classpath. + * @since 2.1 */ - public static boolean isAsyncClient() { - return IS_ASYNC_CLIENT; + public static boolean isSyncClientPresent() { + return SYNC_CLIENT_PRESENT; + } + + /** + * @return {@literal true} if the reactive MongoDB Java driver is on classpath. + * @since 2.1 + */ + public static boolean isReactiveClientPresent() { + return REACTIVE_CLIENT_PRESENT; + } + + /** + * @return {@literal true} if the MongoDB Java driver version is 5 or newer. + * @since 4.3 + */ + public static boolean isVersion5orNewer() { + return IS_VERSION_5_OR_NEWER; + } + + private static Version getMongoDbDriverVersion(ClassLoader classLoader) { + + Version version = getVersionFromPackage(classLoader); + return version == null ? guessDriverVersionFromClassPath(classLoader) : version; + } + + @Nullable + private static Version getVersionFromPackage(ClassLoader classLoader) { + + if (ClassUtils.isPresent("com.mongodb.internal.build.MongoDriverVersion", classLoader)) { + try { + Field field = ReflectionUtils.findField(MongoDriverVersion.class, "VERSION"); + return field != null ? Version.parse("" + field.get(null)) : null; + } catch (ReflectiveOperationException | IllegalArgumentException exception) { + // well not much we can do, right? 
+ } + } + return null; + } + + private static Version guessDriverVersionFromClassPath(ClassLoader classLoader) { + + if (ClassUtils.isPresent("com.mongodb.internal.connection.StreamFactoryFactory", classLoader)) { + return Version.parse("5"); + } + return Version.parse("4.11"); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java new file mode 100644 index 0000000000..8bd422c493 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java @@ -0,0 +1,417 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.lang.reflect.Method; +import java.net.InetSocketAddress; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.reactivestreams.Publisher; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.ServerAddress; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoIterable; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.vault.RangeOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * Compatibility adapter to bridge functionality across different MongoDB driver versions. + *
* <p>
+ * This class is for internal use within the framework and should not be used by applications.
+ *
+ * @author Christoph Strobl
+ * @author Ross Lawley
+ * @since 4.3
+ */
+public class MongoCompatibilityAdapter {
+
+	private static final String NO_LONGER_SUPPORTED = "%s is no longer supported on Mongo Client 5 or newer";
+	private static final String NOT_SUPPORTED_ON_4 = "%s is not supported on Mongo Client 4";
+
+	private static final @Nullable Method getStreamFactoryFactory = ReflectionUtils.findMethod(MongoClientSettings.class,
+			"getStreamFactoryFactory");
+
+	private static final @Nullable Method setBucketSize = ReflectionUtils.findMethod(IndexOptions.class, "bucketSize",
+			Double.class);
+
+	private static final @Nullable Method setTrimFactor;
+
+	static {
+
+		// method name changed in between driver versions
+		Method trimFactor = ReflectionUtils.findMethod(RangeOptions.class, "setTrimFactor", Integer.class);
+		if (trimFactor != null) {
+			setTrimFactor = trimFactor;
+		} else {
+			setTrimFactor = ReflectionUtils.findMethod(RangeOptions.class, "trimFactor", Integer.class);
+		}
+	}
+
+	/**
+	 * Return a compatibility adapter for {@link MongoClientSettings.Builder}.
+	 *
+	 * @param builder
+	 * @return
+	 */
+	public static ClientSettingsBuilderAdapter clientSettingsBuilderAdapter(MongoClientSettings.Builder builder) {
+		return new MongoStreamFactoryFactorySettingsConfigurer(builder)::setStreamFactory;
+	}
+
+	/**
+	 * Return a compatibility adapter for {@link MongoClientSettings}.
+	 *
+	 * @param clientSettings
+	 * @return
+	 */
+	public static ClientSettingsAdapter clientSettingsAdapter(MongoClientSettings clientSettings) {
+		return new ClientSettingsAdapter() {
+			@Override
+			public <T> T getStreamFactoryFactory() {
+
+				if (MongoClientVersion.isVersion5orNewer() || getStreamFactoryFactory == null) {
+					return null;
+				}
+
+				return (T) ReflectionUtils.invokeMethod(getStreamFactoryFactory, clientSettings);
+			}
+		};
+	}
+
+	/**
+	 * Return a compatibility adapter for {@link IndexOptions}.
+	 *
+	 * @param options
+	 * @return
+	 */
+	public static IndexOptionsAdapter indexOptionsAdapter(IndexOptions options) {
+		return bucketSize -> {
+
+			if (MongoClientVersion.isVersion5orNewer() || setBucketSize == null) {
+				throw new UnsupportedOperationException(NO_LONGER_SUPPORTED.formatted("IndexOptions.bucketSize"));
+			}
+
+			ReflectionUtils.invokeMethod(setBucketSize, options, bucketSize);
+		};
+	}
+
+	/**
+	 * Return a compatibility adapter for {@code MapReduceIterable}.
+	 *
+	 * @param iterable
+	 * @return
+	 */
+	@SuppressWarnings("deprecation")
+	public static MapReduceIterableAdapter mapReduceIterableAdapter(Object iterable) {
+		return sharded -> {
+
+			if (MongoClientVersion.isVersion5orNewer()) {
+				throw new UnsupportedOperationException(NO_LONGER_SUPPORTED.formatted("sharded"));
+			}
+
+			// Use MapReduceIterable to avoid package-protected access violations to
+			// com.mongodb.client.internal.MapReduceIterableImpl
+			Method shardedMethod = ReflectionUtils.findMethod(MapReduceIterable.class, "sharded", boolean.class);
+			ReflectionUtils.invokeMethod(shardedMethod, iterable, sharded);
+		};
+	}
+
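// Illustrative sketch (editor's note, not part of the patch): callers use the
// adapters instead of touching version-specific driver API directly. The options
// instance below is hypothetical.
//
//   IndexOptions options = new IndexOptions();
//   MongoCompatibilityAdapter.indexOptionsAdapter(options).setBucketSize(2.0d);
//   // driver 4.x: delegates to IndexOptions.bucketSize(Double) via reflection
//   // driver 5.x: throws UnsupportedOperationException (bucket size was removed)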
+	/**
+	 * Return a compatibility adapter for {@link RangeOptions}.
+	 *
+	 * @param options
+	 * @return
+	 */
+	public static RangeOptionsAdapter rangeOptionsAdapter(RangeOptions options) {
+		return trimFactor -> {
+
+			if (!MongoClientVersion.isVersion5orNewer() || setTrimFactor == null) {
+				throw new UnsupportedOperationException(NOT_SUPPORTED_ON_4.formatted("RangeOptions.trimFactor"));
+			}
+
+			ReflectionUtils.invokeMethod(setTrimFactor, options, trimFactor);
+		};
+	}
+
+	/**
+	 * Return a compatibility adapter for {@code MapReducePublisher}.
+	 *
+	 * @param publisher
+	 * @return
+	 */
+	@SuppressWarnings("deprecation")
+	public static MapReducePublisherAdapter mapReducePublisherAdapter(Object publisher) {
+		return sharded -> {
+
+			if (MongoClientVersion.isVersion5orNewer()) {
+				throw new UnsupportedOperationException(NO_LONGER_SUPPORTED.formatted("sharded"));
+			}
+
+			// Use MapReducePublisher to avoid package-protected access violations to MapReducePublisherImpl
+			Method shardedMethod = ReflectionUtils.findMethod(MapReducePublisher.class, "sharded", boolean.class);
+			ReflectionUtils.invokeMethod(shardedMethod, publisher, sharded);
+		};
+	}
+
+	/**
+	 * Return a compatibility adapter for {@link ServerAddress}.
+	 *
+	 * @param serverAddress
+	 * @return
+	 */
+	public static ServerAddressAdapter serverAddressAdapter(ServerAddress serverAddress) {
+		return () -> {
+
+			if (MongoClientVersion.isVersion5orNewer()) {
+				return null;
+			}
+
+			Method serverAddressMethod = ReflectionUtils.findMethod(ServerAddress.class, "getSocketAddress");
+			Object value = ReflectionUtils.invokeMethod(serverAddressMethod, serverAddress);
+			return value != null ? InetSocketAddress.class.cast(value) : null;
+		};
+	}
+
+	public static MongoDatabaseAdapterBuilder mongoDatabaseAdapter() {
+		return MongoDatabaseAdapter::new;
+	}
+
+	public static ReactiveMongoDatabaseAdapterBuilder reactiveMongoDatabaseAdapter() {
+		return ReactiveMongoDatabaseAdapter::new;
+	}
+
+	public interface IndexOptionsAdapter {
+		void setBucketSize(double bucketSize);
+	}
+
+	public interface ClientSettingsAdapter {
+		@Nullable
+		<T> T getStreamFactoryFactory();
+	}
+
+	public interface ClientSettingsBuilderAdapter {
+		<T> void setStreamFactoryFactory(T streamFactory);
+	}
+
+	public interface MapReduceIterableAdapter {
+		void sharded(boolean sharded);
+	}
+
+	public interface MapReducePublisherAdapter {
+		void sharded(boolean sharded);
+	}
+
+	public interface ServerAddressAdapter {
+		@Nullable
+		InetSocketAddress getSocketAddress();
+	}
+
+	public interface MongoDatabaseAdapterBuilder {
+		MongoDatabaseAdapter forDb(com.mongodb.client.MongoDatabase db);
+	}
+
+	public interface RangeOptionsAdapter {
+		void trimFactor(Integer trimFactor);
+	}
+
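// Illustrative sketch (editor's note, not part of the patch): the database
// adapters below let callers obtain collection names without referencing the
// driver-version-specific return type (ListCollectionNamesIterable on 5.x,
// plain MongoIterable<String> on 4.x). The client instance is hypothetical.
//
//   MongoIterable<String> names = MongoCompatibilityAdapter.mongoDatabaseAdapter()
//       .forDb(client.getDatabase("test"))
//       .listCollectionNames();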
+	@SuppressWarnings({ "unchecked", "DataFlowIssue" })
+	public static class MongoDatabaseAdapter {
+
+		@Nullable //
+		private static final Method LIST_COLLECTION_NAMES_METHOD;
+
+		@Nullable //
+		private static final Method LIST_COLLECTION_NAMES_METHOD_SESSION;
+
+		private static final Class<?> collectionNamesReturnType;
+
+		private final MongoDatabase db;
+
+		static {
+
+			if (MongoClientVersion.isSyncClientPresent()) {
+
+				LIST_COLLECTION_NAMES_METHOD = ReflectionUtils.findMethod(MongoDatabase.class, "listCollectionNames");
+				LIST_COLLECTION_NAMES_METHOD_SESSION = ReflectionUtils.findMethod(MongoDatabase.class, "listCollectionNames",
+						ClientSession.class);
+
+				if (MongoClientVersion.isVersion5orNewer()) {
+					try {
+						collectionNamesReturnType = ClassUtils.forName("com.mongodb.client.ListCollectionNamesIterable",
+								MongoDatabaseAdapter.class.getClassLoader());
+					} catch (ClassNotFoundException e) {
+						throw new IllegalStateException("Unable to load com.mongodb.client.ListCollectionNamesIterable", e);
+					}
+				} else {
+					try {
+						collectionNamesReturnType = ClassUtils.forName("com.mongodb.client.MongoIterable",
+								MongoDatabaseAdapter.class.getClassLoader());
+					} catch (ClassNotFoundException e) {
+						throw new IllegalStateException("Unable to load com.mongodb.client.MongoIterable", e);
+					}
+				}
+			} else {
+				LIST_COLLECTION_NAMES_METHOD = null;
+				LIST_COLLECTION_NAMES_METHOD_SESSION = null;
+				collectionNamesReturnType = Object.class;
+			}
+		}
+
+		public MongoDatabaseAdapter(MongoDatabase db) {
+			this.db = db;
+		}
+
+		public Class<MongoIterable<String>> collectionNameIterableType() {
+			return (Class<MongoIterable<String>>) collectionNamesReturnType;
+		}
+
+		public MongoIterable<String> listCollectionNames() {
+
+			Assert.state(LIST_COLLECTION_NAMES_METHOD != null, "No method listCollectionNames present for %s".formatted(db));
+			return (MongoIterable<String>) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD, db);
+		}
+
+		public MongoIterable<String> listCollectionNames(ClientSession clientSession) {
+			Assert.state(LIST_COLLECTION_NAMES_METHOD != null,
+					"No method listCollectionNames(ClientSession) present for %s".formatted(db));
+			return (MongoIterable<String>) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD_SESSION, db,
+					clientSession);
+		}
+	}
+
+	public interface ReactiveMongoDatabaseAdapterBuilder {
+		ReactiveMongoDatabaseAdapter forDb(com.mongodb.reactivestreams.client.MongoDatabase db);
+	}
+
+	@SuppressWarnings({ "unchecked", "DataFlowIssue" })
+	public static class ReactiveMongoDatabaseAdapter {
+
+		@Nullable //
+		private static final Method LIST_COLLECTION_NAMES_METHOD;
+
+		@Nullable //
+		private static final Method LIST_COLLECTION_NAMES_METHOD_SESSION;
+
+		private static final Class<?> collectionNamesReturnType;
+
+		private final com.mongodb.reactivestreams.client.MongoDatabase db;
+
+		static {
+
+			if (MongoClientVersion.isReactiveClientPresent()) {
+
+				LIST_COLLECTION_NAMES_METHOD = ReflectionUtils
+						.findMethod(com.mongodb.reactivestreams.client.MongoDatabase.class, "listCollectionNames");
+				LIST_COLLECTION_NAMES_METHOD_SESSION = ReflectionUtils.findMethod(
+						com.mongodb.reactivestreams.client.MongoDatabase.class, "listCollectionNames",
+						com.mongodb.reactivestreams.client.ClientSession.class);
+
+				if (MongoClientVersion.isVersion5orNewer()) {
+					try {
+						collectionNamesReturnType = ClassUtils.forName(
+								"com.mongodb.reactivestreams.client.ListCollectionNamesPublisher",
+								ReactiveMongoDatabaseAdapter.class.getClassLoader());
+					} catch (ClassNotFoundException e) {
+						throw new IllegalStateException(
+								"Unable to load com.mongodb.reactivestreams.client.ListCollectionNamesPublisher", e);
+					}
+				} else {
+					try {
+						collectionNamesReturnType = ClassUtils.forName("org.reactivestreams.Publisher",
+								ReactiveMongoDatabaseAdapter.class.getClassLoader());
+					} catch (ClassNotFoundException e) {
+						throw new IllegalStateException("Unable to load org.reactivestreams.Publisher", e);
+					}
+				}
+			} else {
+				LIST_COLLECTION_NAMES_METHOD = null;
+				LIST_COLLECTION_NAMES_METHOD_SESSION = null;
+				collectionNamesReturnType = Object.class;
+			}
+		}
+
+		ReactiveMongoDatabaseAdapter(com.mongodb.reactivestreams.client.MongoDatabase db) {
+			this.db = db;
+		}
+
+		public Class<Publisher<String>> collectionNamePublisherType() {
+			return (Class<Publisher<String>>) collectionNamesReturnType;
+		}
+
+		public Publisher<String> listCollectionNames() {
+			Assert.state(LIST_COLLECTION_NAMES_METHOD != null, "No method listCollectionNames present for %s".formatted(db));
+			return (Publisher<String>)
ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD, db); + } + + public Publisher listCollectionNames(com.mongodb.reactivestreams.client.ClientSession clientSession) { + Assert.state(LIST_COLLECTION_NAMES_METHOD != null, + "No method listCollectionNames(ClientSession) present for %s".formatted(db)); + return (Publisher) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD_SESSION, db, clientSession); + } + } + + static class MongoStreamFactoryFactorySettingsConfigurer { + + private static final Log logger = LogFactory.getLog(MongoStreamFactoryFactorySettingsConfigurer.class); + + private static final String STREAM_FACTORY_NAME = "com.mongodb.connection.StreamFactoryFactory"; + private static final boolean STREAM_FACTORY_PRESENT = ClassUtils.isPresent(STREAM_FACTORY_NAME, + MongoCompatibilityAdapter.class.getClassLoader()); + private final MongoClientSettings.Builder settingsBuilder; + + static boolean isStreamFactoryPresent() { + return STREAM_FACTORY_PRESENT; + } + + public MongoStreamFactoryFactorySettingsConfigurer(Builder settingsBuilder) { + this.settingsBuilder = settingsBuilder; + } + + void setStreamFactory(Object streamFactory) { + + if (MongoClientVersion.isVersion5orNewer() && isStreamFactoryPresent()) { + logger.warn("StreamFactoryFactory is no longer available. Use TransportSettings instead."); + return; + } + + try { + Class streamFactoryType = ClassUtils.forName(STREAM_FACTORY_NAME, streamFactory.getClass().getClassLoader()); + + if (!ClassUtils.isAssignable(streamFactoryType, streamFactory.getClass())) { + throw new IllegalArgumentException("Expected %s but found %s".formatted(streamFactoryType, streamFactory)); + } + + Method setter = ReflectionUtils.findMethod(settingsBuilder.getClass(), "streamFactoryFactory", + streamFactoryType); + if (setter != null) { + ReflectionUtils.invokeMethod(setter, settingsBuilder, streamFactoryType.cast(streamFactory)); + } + } catch (ReflectiveOperationException e) { + throw new IllegalArgumentException("Cannot set StreamFactoryFactory for %s".formatted(settingsBuilder), e); + } + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java index ff846dd0bb..326a5c1e88 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,10 +17,16 @@ import java.util.HashMap; +import org.springframework.lang.Nullable; + +import com.mongodb.MongoException; + /** - * {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.err}. - * + * {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.yml}. 
+ * * @author Christoph Strobl + * @author Mark Paluch + * @author SangHyuk Lee * @since 1.8 */ public final class MongoDbErrorCodes { @@ -28,14 +34,16 @@ public final class MongoDbErrorCodes { static HashMap dataAccessResourceFailureCodes; static HashMap dataIntegrityViolationCodes; static HashMap duplicateKeyCodes; - static HashMap invalidDataAccessApiUsageExeption; + static HashMap invalidDataAccessApiUsageException; static HashMap permissionDeniedCodes; + static HashMap clientSessionCodes; + static HashMap transactionCodes; static HashMap errorCodes; static { - dataAccessResourceFailureCodes = new HashMap(10); + dataAccessResourceFailureCodes = new HashMap<>(12, 1f); dataAccessResourceFailureCodes.put(6, "HostUnreachable"); dataAccessResourceFailureCodes.put(7, "HostNotFound"); dataAccessResourceFailureCodes.put(89, "NetworkTimeout"); @@ -49,7 +57,7 @@ public final class MongoDbErrorCodes { dataAccessResourceFailureCodes.put(13441, "BadOffsetInFile"); dataAccessResourceFailureCodes.put(13640, "DataFileHeaderCorrupt"); - dataIntegrityViolationCodes = new HashMap(6); + dataIntegrityViolationCodes = new HashMap<>(6, 1f); dataIntegrityViolationCodes.put(67, "CannotCreateIndex"); dataIntegrityViolationCodes.put(68, "IndexAlreadyExists"); dataIntegrityViolationCodes.put(85, "IndexOptionsConflict"); @@ -57,84 +65,212 @@ public final class MongoDbErrorCodes { dataIntegrityViolationCodes.put(112, "WriteConflict"); dataIntegrityViolationCodes.put(117, "ConflictingOperationInProgress"); - duplicateKeyCodes = new HashMap(3); + duplicateKeyCodes = new HashMap<>(4, 1f); duplicateKeyCodes.put(3, "OBSOLETE_DuplicateKey"); duplicateKeyCodes.put(84, "DuplicateKeyValue"); duplicateKeyCodes.put(11000, "DuplicateKey"); duplicateKeyCodes.put(11001, "DuplicateKey"); - invalidDataAccessApiUsageExeption = new HashMap(); - invalidDataAccessApiUsageExeption.put(5, "GraphContainsCycle"); - invalidDataAccessApiUsageExeption.put(9, "FailedToParse"); - invalidDataAccessApiUsageExeption.put(14, "TypeMismatch"); - invalidDataAccessApiUsageExeption.put(15, "Overflow"); - invalidDataAccessApiUsageExeption.put(16, "InvalidLength"); - invalidDataAccessApiUsageExeption.put(20, "IllegalOperation"); - invalidDataAccessApiUsageExeption.put(21, "EmptyArrayOperation"); - invalidDataAccessApiUsageExeption.put(22, "InvalidBSON"); - invalidDataAccessApiUsageExeption.put(23, "AlreadyInitialized"); - invalidDataAccessApiUsageExeption.put(29, "NonExistentPath"); - invalidDataAccessApiUsageExeption.put(30, "InvalidPath"); - invalidDataAccessApiUsageExeption.put(40, "ConflictingUpdateOperators"); - invalidDataAccessApiUsageExeption.put(45, "UserDataInconsistent"); - invalidDataAccessApiUsageExeption.put(30, "DollarPrefixedFieldName"); - invalidDataAccessApiUsageExeption.put(52, "InvalidPath"); - invalidDataAccessApiUsageExeption.put(53, "InvalidIdField"); - invalidDataAccessApiUsageExeption.put(54, "NotSingleValueField"); - invalidDataAccessApiUsageExeption.put(55, "InvalidDBRef"); - invalidDataAccessApiUsageExeption.put(56, "EmptyFieldName"); - invalidDataAccessApiUsageExeption.put(57, "DottedFieldName"); - invalidDataAccessApiUsageExeption.put(59, "CommandNotFound"); - invalidDataAccessApiUsageExeption.put(60, "DatabaseNotFound"); - invalidDataAccessApiUsageExeption.put(61, "ShardKeyNotFound"); - invalidDataAccessApiUsageExeption.put(62, "OplogOperationUnsupported"); - invalidDataAccessApiUsageExeption.put(66, "ImmutableField"); - invalidDataAccessApiUsageExeption.put(72, "InvalidOptions"); - 
invalidDataAccessApiUsageExeption.put(115, "CommandNotSupported"); - invalidDataAccessApiUsageExeption.put(116, "DocTooLargeForCapped"); - invalidDataAccessApiUsageExeption.put(130, "SymbolNotFound"); - invalidDataAccessApiUsageExeption.put(17280, "KeyTooLong"); - invalidDataAccessApiUsageExeption.put(13334, "ShardKeyTooBig"); - - permissionDeniedCodes = new HashMap(); + invalidDataAccessApiUsageException = new HashMap<>(31, 1f); + invalidDataAccessApiUsageException.put(5, "GraphContainsCycle"); + invalidDataAccessApiUsageException.put(9, "FailedToParse"); + invalidDataAccessApiUsageException.put(14, "TypeMismatch"); + invalidDataAccessApiUsageException.put(15, "Overflow"); + invalidDataAccessApiUsageException.put(16, "InvalidLength"); + invalidDataAccessApiUsageException.put(20, "IllegalOperation"); + invalidDataAccessApiUsageException.put(21, "EmptyArrayOperation"); + invalidDataAccessApiUsageException.put(22, "InvalidBSON"); + invalidDataAccessApiUsageException.put(23, "AlreadyInitialized"); + invalidDataAccessApiUsageException.put(29, "NonExistentPath"); + invalidDataAccessApiUsageException.put(30, "InvalidPath"); + invalidDataAccessApiUsageException.put(40, "ConflictingUpdateOperators"); + invalidDataAccessApiUsageException.put(45, "UserDataInconsistent"); + invalidDataAccessApiUsageException.put(52, "DollarPrefixedFieldName"); + invalidDataAccessApiUsageException.put(53, "InvalidIdField"); + invalidDataAccessApiUsageException.put(54, "NotSingleValueField"); + invalidDataAccessApiUsageException.put(55, "InvalidDBRef"); + invalidDataAccessApiUsageException.put(56, "EmptyFieldName"); + invalidDataAccessApiUsageException.put(57, "DottedFieldName"); + invalidDataAccessApiUsageException.put(59, "CommandNotFound"); + invalidDataAccessApiUsageException.put(60, "DatabaseNotFound"); + invalidDataAccessApiUsageException.put(61, "ShardKeyNotFound"); + invalidDataAccessApiUsageException.put(62, "OplogOperationUnsupported"); + invalidDataAccessApiUsageException.put(66, "ImmutableField"); + invalidDataAccessApiUsageException.put(72, "InvalidOptions"); + invalidDataAccessApiUsageException.put(115, "CommandNotSupported"); + invalidDataAccessApiUsageException.put(116, "DocTooLargeForCapped"); + invalidDataAccessApiUsageException.put(10003, "CannotGrowDocumentInCappedNamespace"); + invalidDataAccessApiUsageException.put(130, "SymbolNotFound"); + invalidDataAccessApiUsageException.put(17280, "KeyTooLong"); + invalidDataAccessApiUsageException.put(13334, "ShardKeyTooBig"); + + permissionDeniedCodes = new HashMap<>(8, 1f); permissionDeniedCodes.put(11, "UserNotFound"); permissionDeniedCodes.put(18, "AuthenticationFailed"); permissionDeniedCodes.put(31, "RoleNotFound"); permissionDeniedCodes.put(32, "RolesNotRelated"); - permissionDeniedCodes.put(33, "PrvilegeNotFound"); + permissionDeniedCodes.put(33, "PrivilegeNotFound"); permissionDeniedCodes.put(15847, "CannotAuthenticate"); permissionDeniedCodes.put(16704, "CannotAuthenticateToAdminDB"); permissionDeniedCodes.put(16705, "CannotAuthenticateToAdminDB"); - errorCodes = new HashMap(); + clientSessionCodes = new HashMap<>(4, 1f); + clientSessionCodes.put(206, "NoSuchSession"); + clientSessionCodes.put(213, "DuplicateSession"); + clientSessionCodes.put(217, "IncompleteTransactionHistory"); + clientSessionCodes.put(225, "TransactionTooOld"); + clientSessionCodes.put(228, "SessionTransferIncomplete"); + clientSessionCodes.put(244, "TransactionAborted"); + clientSessionCodes.put(251, "NoSuchTransaction"); + clientSessionCodes.put(256, "TransactionCommitted"); 
+		clientSessionCodes.put(257, "TransactionTooLarge");
+		clientSessionCodes.put(261, "TooManyLogicalSessions");
+		clientSessionCodes.put(263, "OperationNotSupportedInTransaction");
+		clientSessionCodes.put(264, "TooManyLogicalSessions");
+
+		// transaction related subset of the client session codes above; used by isTransactionFailureCode(…)
+		transactionCodes = new HashMap<>(7, 1f);
+		transactionCodes.put(217, "IncompleteTransactionHistory");
+		transactionCodes.put(225, "TransactionTooOld");
+		transactionCodes.put(244, "TransactionAborted");
+		transactionCodes.put(251, "NoSuchTransaction");
+		transactionCodes.put(256, "TransactionCommitted");
+		transactionCodes.put(257, "TransactionTooLarge");
+		transactionCodes.put(263, "OperationNotSupportedInTransaction");
+
+		errorCodes = new HashMap<>(
+				dataAccessResourceFailureCodes.size() + dataIntegrityViolationCodes.size() + duplicateKeyCodes.size()
+						+ invalidDataAccessApiUsageException.size() + permissionDeniedCodes.size() + clientSessionCodes.size(),
+				1f);
 		errorCodes.putAll(dataAccessResourceFailureCodes);
 		errorCodes.putAll(dataIntegrityViolationCodes);
 		errorCodes.putAll(duplicateKeyCodes);
-		errorCodes.putAll(invalidDataAccessApiUsageExeption);
+		errorCodes.putAll(invalidDataAccessApiUsageException);
 		errorCodes.putAll(permissionDeniedCodes);
+		errorCodes.putAll(clientSessionCodes);
+	}
+
+	@Nullable
+	public static String getErrorDescription(@Nullable Integer errorCode) {
+		return errorCode == null ? null : errorCodes.get(errorCode);
 	}
 
-	public static boolean isDataIntegrityViolationCode(Integer errorCode) {
-		return errorCode == null ? false : dataIntegrityViolationCodes.containsKey(errorCode);
+	public static boolean isDataIntegrityViolationCode(@Nullable Integer errorCode) {
+		return errorCode != null && dataIntegrityViolationCodes.containsKey(errorCode);
 	}
 
-	public static boolean isDataAccessResourceFailureCode(Integer errorCode) {
-		return errorCode == null ? false : dataAccessResourceFailureCodes.containsKey(errorCode);
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isDataIntegrityViolationError(Exception exception) {
+
+		if (exception instanceof MongoException me) {
+			return isDataIntegrityViolationCode(me.getCode());
+		}
+		return false;
 	}
 
-	public static boolean isDuplicateKeyCode(Integer errorCode) {
-		return errorCode == null ? false : duplicateKeyCodes.containsKey(errorCode);
+	public static boolean isDataAccessResourceFailureCode(@Nullable Integer errorCode) {
+		return errorCode != null && dataAccessResourceFailureCodes.containsKey(errorCode);
 	}
 
-	public static boolean isPermissionDeniedCode(Integer errorCode) {
-		return errorCode == null ? false : permissionDeniedCodes.containsKey(errorCode);
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isDataAccessResourceError(Exception exception) {
+
+		if (exception instanceof MongoException me) {
+			return isDataAccessResourceFailureCode(me.getCode());
+		}
+		return false;
 	}
 
-	public static boolean isInvalidDataAccessApiUsageCode(Integer errorCode) {
-		return errorCode == null ? false : invalidDataAccessApiUsageExeption.containsKey(errorCode);
+	public static boolean isDuplicateKeyCode(@Nullable Integer errorCode) {
+		return errorCode != null && duplicateKeyCodes.containsKey(errorCode);
 	}
 
-	public static String getErrorDescription(Integer errorCode) {
-		return errorCode == null ? null : errorCodes.get(errorCode);
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isDuplicateKeyError(Exception exception) {
+
+		if (exception instanceof MongoException me) {
+			return isDuplicateKeyCode(me.getCode());
+		}
+		return false;
+	}
+
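// Illustrative sketch (editor's note, not part of the patch): typical use of these
// helpers when translating a driver exception. MongoException and error code 11000
// ("DuplicateKey") come from the MongoDB driver; the translation target is hypothetical.
//
//   MongoException exception = new MongoException(11000, "E11000 duplicate key error");
//   if (MongoDbErrorCodes.isDuplicateKeyError(exception)) {
//       String description = MongoDbErrorCodes.getErrorDescription(exception.getCode()); // "DuplicateKey"
//       // e.g. map to Spring's DuplicateKeyException here
//   }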
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isDataDuplicateKeyError(Exception exception) {
+		return isDuplicateKeyError(exception);
+	}
+
+	public static boolean isPermissionDeniedCode(@Nullable Integer errorCode) {
+		return errorCode != null && permissionDeniedCodes.containsKey(errorCode);
+	}
+
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isPermissionDeniedError(Exception exception) {
+
+		if (exception instanceof MongoException) {
+			return isPermissionDeniedCode(((MongoException) exception).getCode());
+		}
+		return false;
+	}
+
+	public static boolean isInvalidDataAccessApiUsageCode(@Nullable Integer errorCode) {
+		return errorCode != null && invalidDataAccessApiUsageException.containsKey(errorCode);
+	}
+
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isInvalidDataAccessApiUsageError(Exception exception) {
+
+		if (exception instanceof MongoException me) {
+			return isInvalidDataAccessApiUsageCode(me.getCode());
+		}
+		return false;
+	}
+
+	/**
+	 * Check if the given error code matches a known session related error.
+	 *
+	 * @param errorCode the error code to check.
+	 * @return {@literal true} if the error matches.
+	 * @since 2.1
+	 */
+	public static boolean isClientSessionFailureCode(@Nullable Integer errorCode) {
+		return errorCode != null && clientSessionCodes.containsKey(errorCode);
+	}
+
+	/**
+	 * Check if the given error code matches a known transaction related error.
+	 *
+	 * @param errorCode the error code to check.
+	 * @return {@literal true} if the error matches.
+	 * @since 2.1
+	 */
+	public static boolean isTransactionFailureCode(@Nullable Integer errorCode) {
+		return errorCode != null && transactionCodes.containsKey(errorCode);
+	}
+
+	/**
+	 * @param exception can be {@literal null}.
+	 * @return
+	 * @since 4.4
+	 */
+	public static boolean isClientSessionFailure(Exception exception) {
+
+		if (exception instanceof MongoException me) {
+			return isClientSessionFailureCode(me.getCode());
+		}
+		return false;
+	}
-}
\ No newline at end of file
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java
new file mode 100644
index 0000000000..23c96f9e46
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2021-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util;
+
+import java.util.regex.Pattern;
+
+import org.springframework.lang.Nullable;
+
+/**
+ * Utility to translate {@link Pattern#flags() regex flags} to MongoDB regex options and vice versa.
+ * + * @author Mark Paluch + * @since 3.3 + */ +public abstract class RegexFlags { + + private static final int[] FLAG_LOOKUP = new int[Character.MAX_VALUE]; + + static { + FLAG_LOOKUP['g'] = 256; + FLAG_LOOKUP['i'] = Pattern.CASE_INSENSITIVE; + FLAG_LOOKUP['m'] = Pattern.MULTILINE; + FLAG_LOOKUP['s'] = Pattern.DOTALL; + FLAG_LOOKUP['c'] = Pattern.CANON_EQ; + FLAG_LOOKUP['x'] = Pattern.COMMENTS; + FLAG_LOOKUP['d'] = Pattern.UNIX_LINES; + FLAG_LOOKUP['t'] = Pattern.LITERAL; + FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; + } + + private RegexFlags() { + + } + + /** + * Lookup the MongoDB specific options from given {@link Pattern#flags() flags}. + * + * @param flags the Regex flags to look up. + * @return the options string. May be empty. + */ + public static String toRegexOptions(int flags) { + + if (flags == 0) { + return ""; + } + + StringBuilder buf = new StringBuilder(); + + for (int i = 'a'; i < 'z'; i++) { + + if (FLAG_LOOKUP[i] == 0) { + continue; + } + + if ((flags & FLAG_LOOKUP[i]) > 0) { + buf.append((char) i); + } + } + + return buf.toString(); + } + + /** + * Lookup the MongoDB specific flags for a given regex option string. + * + * @param s the Regex option/flag to look up. Can be {@literal null}. + * @return zero if given {@link String} is {@literal null} or empty. + * @since 2.2 + */ + public static int toRegexFlags(@Nullable String s) { + + int flags = 0; + + if (s == null) { + return flags; + } + + for (char f : s.toLowerCase().toCharArray()) { + flags |= toRegexFlag(f); + } + + return flags; + } + + /** + * Lookup the MongoDB specific flags for a given character. + * + * @param c the Regex option/flag to look up. + * @return + * @throws IllegalArgumentException for unknown flags + * @since 2.2 + */ + public static int toRegexFlag(char c) { + + int flag = FLAG_LOOKUP[c]; + + if (flag == 0) { + throw new IllegalArgumentException(String.format("Unrecognized flag [%c]", c)); + } + + return flag; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java new file mode 100644 index 0000000000..344244717e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java @@ -0,0 +1,86 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.aggregation; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.Field; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class TestAggregationContext implements AggregationOperationContext { + + private final AggregationOperationContext delegate; + + private TestAggregationContext(AggregationOperationContext delegate) { + this.delegate = delegate; + } + + public static AggregationOperationContext contextFor(@Nullable Class type) { + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return contextFor(type, mongoConverter); + } + + public static AggregationOperationContext contextFor(@Nullable Class type, MongoConverter mongoConverter) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + return new TestAggregationContext(new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference()); + } + + @Override + public Document getMappedObject(Document document) { + return delegate.getMappedObject(document); + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return delegate.getMappedObject(document, type); + } + + @Override + public FieldReference getReference(Field field) { + return delegate.getReference(field); + } + + @Override + public FieldReference getReference(String name) { + return delegate.getReference(name); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java new file mode 100644 index 0000000000..9dd3f1d8fb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.encryption; + +import java.util.Base64; +import java.util.UUID; +import java.util.function.Supplier; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.types.Binary; +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Internal utility class for dealing with encryption related matters. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class EncryptionUtils { + + /** + * Resolve a given plain {@link String} value into the store native {@literal keyId} format, considering potential + * {@link Expression expressions}.
                    + * The potential keyId is probed against an {@link UUID#fromString(String) UUID value} or decoded from the + * {@literal base64} representation prior to conversion into its {@link Binary} format. + * + * @param value the source value to resolve the keyId for. Must not be {@literal null}. + * @param evaluationContext a {@link Supplier} used to provide the {@link EvaluationContext} in case an + * {@link Expression} is {@link ExpressionUtils#detectExpression(String) detected}. + * @return can be {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. + */ + @Nullable + public static Object resolveKeyId(String value, Supplier evaluationContext) { + + Assert.notNull(value, "Value must not be null"); + + Object potentialKeyId = value; + Expression expression = ExpressionUtils.detectExpression(value); + if (expression != null) { + potentialKeyId = expression.getValue(evaluationContext.get()); + if (!(potentialKeyId instanceof String)) { + return potentialKeyId; + } + } + + try { + return new Binary(BsonBinarySubType.UUID_STANDARD, + new BsonBinary(UUID.fromString(potentialKeyId.toString())).getData()); + } catch (IllegalArgumentException e) { + + return new Binary(BsonBinarySubType.UUID_STANDARD, Base64.getDecoder().decode(potentialKeyId.toString())); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/DateTimeFormatter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/DateTimeFormatter.java new file mode 100644 index 0000000000..b5c26755cf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/DateTimeFormatter.java @@ -0,0 +1,56 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static java.time.format.DateTimeFormatter.*; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; + +/** + * DateTimeFormatter implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
                    + * Formatted and modified. + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +class DateTimeFormatter { + + private static final int DATE_STRING_LENGTH = "1970-01-01".length(); + + static long parse(final String dateTimeString) { + // ISO_OFFSET_DATE_TIME will not parse date strings consisting of just year-month-day, so use ISO_LOCAL_DATE for + // those + if (dateTimeString.length() == DATE_STRING_LENGTH) { + return LocalDate.parse(dateTimeString, ISO_LOCAL_DATE).atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli(); + } else { + return ISO_OFFSET_DATE_TIME.parse(dateTimeString, Instant::from).toEpochMilli(); + } + } + + static String format(final long dateTime) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateTime), ZoneId.of("Z")).format(ISO_OFFSET_DATE_TIME); + } + + private DateTimeFormatter() { + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/EvaluationContextExpressionEvaluator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/EvaluationContextExpressionEvaluator.java new file mode 100644 index 0000000000..6c31a9721f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/EvaluationContextExpressionEvaluator.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.data.mongodb.util.json;
+
+import java.util.Collections;
+import java.util.Map;
+
+import org.springframework.data.mapping.model.ValueExpressionEvaluator;
+import org.springframework.expression.EvaluationContext;
+import org.springframework.expression.Expression;
+import org.springframework.expression.ExpressionParser;
+import org.springframework.expression.spel.support.StandardEvaluationContext;
+import org.springframework.lang.Nullable;
+
+/**
+ * @author Christoph Strobl
+ * @since 3.3.5
+ */
+class EvaluationContextExpressionEvaluator implements ValueExpressionEvaluator {
+
+	final ValueProvider valueProvider;
+	final ExpressionParser expressionParser;
+
+	EvaluationContextExpressionEvaluator(ValueProvider valueProvider, ExpressionParser expressionParser) {
+
+		this.valueProvider = valueProvider;
+		this.expressionParser = expressionParser;
+	}
+
+	@Nullable
+	@Override
+	public <T> T evaluate(String expression) {
+		return evaluateExpression(expression, Collections.emptyMap());
+	}
+
+	EvaluationContext getEvaluationContext(String expressionString) {
+		return new StandardEvaluationContext();
+	}
+
+	Expression getParsedExpression(String expressionString) {
+		return expressionParser.parseExpression(expressionString);
+	}
+
+	@SuppressWarnings("unchecked")
+	<T> T evaluateExpression(String expressionString, Map<String, Object> variables) {
+
+		Expression expression = getParsedExpression(expressionString);
+		EvaluationContext ctx = getEvaluationContext(expressionString);
+		variables.forEach(ctx::setVariable);
+
+		Object result = expression.getValue(ctx, Object.class);
+		return (T) result;
+	}
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonBuffer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonBuffer.java
new file mode 100644
index 0000000000..4b4b497dae
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonBuffer.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2008-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util.json;
+
+import org.bson.json.JsonParseException;
+
+/**
+ * JsonBuffer implementation borrowed from MongoDB
+ * Inc. licensed under the Apache License, Version 2.0.
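+ * <p>
+ * A minimal usage sketch (illustrative input): {@code read()} returns {@code -1} once the end of the source is
+ * reached, and {@code unread(int)} steps the cursor back so the last character can be consumed again:
+ *
+ * <pre>
+ * JsonBuffer buffer = new JsonBuffer("{}");
+ * int c = buffer.read(); // '{'
+ * buffer.unread(c); // cursor back to position 0
+ * c = buffer.read(); // '{' again
+ * </pre>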
                    + * Formatted and modified. + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +class JsonBuffer { + + private final String buffer; + private int position; + private boolean eof; + + JsonBuffer(final String buffer) { + this.buffer = buffer; + } + + public int getPosition() { + return position; + } + + public void setPosition(final int position) { + this.position = position; + } + + public int read() { + if (eof) { + throw new JsonParseException("Trying to read past EOF."); + } else if (position >= buffer.length()) { + eof = true; + return -1; + } else { + return buffer.charAt(position++); + } + } + + public void unread(final int c) { + eof = false; + if (c != -1 && buffer.charAt(position - 1) == c) { + position--; + } + } + + public String substring(final int beginIndex) { + return buffer.substring(beginIndex); + } + + public String substring(final int beginIndex, final int endIndex) { + return buffer.substring(beginIndex, endIndex); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonScanner.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonScanner.java new file mode 100644 index 0000000000..ca4fbddd60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonScanner.java @@ -0,0 +1,623 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import org.bson.BsonRegularExpression; +import org.bson.json.JsonParseException; + +/** + * Parses the string representation of a JSON object into a set of {@link JsonToken}-derived objects.
                    + * JsonScanner implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
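+ * <p>
+ * An illustrative sketch of the resulting token stream (hypothetical input):
+ *
+ * <pre>
+ * JsonScanner scanner = new JsonScanner("{ 'name' : ?0 }");
+ * scanner.nextToken(); // BEGIN_OBJECT
+ * scanner.nextToken(); // STRING "name"
+ * scanner.nextToken(); // COLON
+ * scanner.nextToken(); // UNQUOTED_STRING "?0" (parameter placeholder, read via scanBindString)
+ * scanner.nextToken(); // END_OBJECT
+ * </pre>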
                    + * Formatted and modified to allow reading Spring Data specific placeholder values. + * + * @author Jeff Yemin + * @author Trisha Gee + * @author Robert Guo + * @author Ross Lawley + * @author Christoph Strobl + * @since 2.2 + */ +class JsonScanner { + + private final JsonBuffer buffer; + + JsonScanner(final String json) { + this(new JsonBuffer(json)); + } + + JsonScanner(final JsonBuffer buffer) { + this.buffer = buffer; + } + + /** + * @param newPosition the new position of the cursor position in the buffer + */ + public void setBufferPosition(final int newPosition) { + buffer.setPosition(newPosition); + } + + /** + * @return the current location of the cursor in the buffer + */ + public int getBufferPosition() { + return buffer.getPosition(); + } + + /** + * Finds and returns the next complete token from this scanner. If scanner reached the end of the source, it will + * return a token with {@code JSONTokenType.END_OF_FILE} type. + * + * @return The next token. + * @throws JsonParseException if source is invalid. + */ + public JsonToken nextToken() { + + int c = buffer.read(); + while (c != -1 && Character.isWhitespace(c)) { + c = buffer.read(); + } + if (c == -1) { + return new JsonToken(JsonTokenType.END_OF_FILE, ""); + } + + switch (c) { + case '{': + return new JsonToken(JsonTokenType.BEGIN_OBJECT, "{"); + case '}': + return new JsonToken(JsonTokenType.END_OBJECT, "}"); + case '[': + return new JsonToken(JsonTokenType.BEGIN_ARRAY, "["); + case ']': + return new JsonToken(JsonTokenType.END_ARRAY, "]"); + case '(': + return new JsonToken(JsonTokenType.LEFT_PAREN, "("); + case ')': + return new JsonToken(JsonTokenType.RIGHT_PAREN, ")"); + case ':': + + c = buffer.read(); + buffer.unread(c); + + if (c == '#') { // for binding the SQL style ':#{#firstname}"' + return scanBindString(); + } + + return new JsonToken(JsonTokenType.COLON, ":"); + case ',': + return new JsonToken(JsonTokenType.COMMA, ","); + case '\'': + case '"': + return scanString((char) c); + case '/': + return scanRegularExpression(); + default: + if (c == '-' || Character.isDigit(c)) { + return scanNumber((char) c); + } else if (c == '$' || c == '_' || Character.isLetter(c)) { + return scanUnquotedString(); + } else if (c == '?') { // for binding parameters. Both simple and SpEL ones. + return scanBindString(); + } else { + int position = buffer.getPosition(); + buffer.unread(c); + throw new JsonParseException("Invalid JSON input; Position: %d; Character: '%c'.", position, c); + } + } + } + + /** + * Reads {@code RegularExpressionToken} from source. The following variants of lexemes are possible: + * + *

+	 * <pre>
+	 *  /pattern/
+	 *  /\(pattern\)/
+	 *  /pattern/ims
+	 * </pre>
                    + * + * Options can include 'i','m','x','s' + * + * @return The regular expression token. + * @throws JsonParseException if regular expression representation is not valid. + */ + private JsonToken scanRegularExpression() { + + int start = buffer.getPosition() - 1; + int options = -1; + + RegularExpressionState state = RegularExpressionState.IN_PATTERN; + while (true) { + int c = buffer.read(); + switch (state) { + case IN_PATTERN: + switch (c) { + case -1: + state = RegularExpressionState.INVALID; + break; + case '/': + state = RegularExpressionState.IN_OPTIONS; + options = buffer.getPosition(); + break; + case '\\': + state = RegularExpressionState.IN_ESCAPE_SEQUENCE; + break; + default: + state = RegularExpressionState.IN_PATTERN; + break; + } + break; + case IN_ESCAPE_SEQUENCE: + state = RegularExpressionState.IN_PATTERN; + break; + case IN_OPTIONS: + switch (c) { + case 'i': + case 'm': + case 'x': + case 's': + state = RegularExpressionState.IN_OPTIONS; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = RegularExpressionState.DONE; + break; + default: + if (Character.isWhitespace(c)) { + state = RegularExpressionState.DONE; + } else { + state = RegularExpressionState.INVALID; + } + break; + } + break; + default: + break; + } + + switch (state) { + case DONE: + buffer.unread(c); + int end = buffer.getPosition(); + BsonRegularExpression regex = new BsonRegularExpression(buffer.substring(start + 1, options - 1), + buffer.substring(options, end)); + return new JsonToken(JsonTokenType.REGULAR_EXPRESSION, regex); + case INVALID: + throw new JsonParseException("Invalid JSON regular expression; Position: %d.", buffer.getPosition()); + default: + } + } + } + + /** + * Reads {@code StringToken} from source. + * + * @return The string token. + */ + private JsonToken scanBindString() { + + int start = buffer.getPosition() - 1; + int c = buffer.read(); + + int charCount = 0; + boolean isExpression = false; + int parenthesisCount = 0; + + while (c == '$' || c == '_' || Character.isLetterOrDigit(c) || c == '#' || c == '{' || c == '[' + || (isExpression && isExpressionAllowedChar(c))) { + + if (charCount == 0 && (c == '#' || c == '$')) { + isExpression = true; + } else if (isExpression) { + if (c == '{') { + parenthesisCount++; + } else if (c == '}') { + + parenthesisCount--; + if (parenthesisCount == 0) { + c = buffer.read(); + break; + } + } + } + charCount++; + c = buffer.read(); + } + buffer.unread(c); + String lexeme = buffer.substring(start, buffer.getPosition()); + + return new JsonToken(JsonTokenType.UNQUOTED_STRING, lexeme); + } + + private static boolean isExpressionAllowedChar(int c) { + + return (c == '+' || // + c == '-' || // + c == ':' || // + c == '.' || // + c == ',' || // + c == '*' || // + c == '/' || // + c == '%' || // + c == '(' || // + c == ')' || // + c == '[' || // + c == ']' || // + c == '#' || // + c == '{' || // + c == '}' || // + c == '@' || // + c == '^' || // + c == '!' || // + c == '=' || // + c == '&' || // + c == '|' || // + c == '?' || // + c == '$' || // + c == '>' || // + c == '<' || // + c == '"' || // + c == '\'' || // + c == ' '); + } + + /** + * Reads {@code StringToken} from source. + * + * @return The string token. 
+ */ + private JsonToken scanUnquotedString() { + int start = buffer.getPosition() - 1; + int c = buffer.read(); + while (c == '$' || c == '_' || Character.isLetterOrDigit(c)) { + c = buffer.read(); + } + buffer.unread(c); + String lexeme = buffer.substring(start, buffer.getPosition()); + return new JsonToken(JsonTokenType.UNQUOTED_STRING, lexeme); + } + + /** + * Reads number token from source. The following variants of lexemes are possible: + * + *
+	 * <pre>
+	 *  12
                    +	 *  123
                    +	 *  -0
                    +	 *  -345
                    +	 *  -0.0
                    +	 *  0e1
                    +	 *  0e-1
                    +	 *  -0e-1
                    +	 *  1e12
                    +	 *  -Infinity
+	 * </pre>
                    + * + * @return The number token. + * @throws JsonParseException if number representation is invalid. + */ + // CHECKSTYLE:OFF + private JsonToken scanNumber(final char firstChar) { + + int c = firstChar; + + int start = buffer.getPosition() - 1; + + NumberState state; + + switch (c) { + case '-': + state = NumberState.SAW_LEADING_MINUS; + break; + case '0': + state = NumberState.SAW_LEADING_ZERO; + break; + default: + state = NumberState.SAW_INTEGER_DIGITS; + break; + } + + JsonTokenType type = JsonTokenType.INT64; + + while (true) { + c = buffer.read(); + switch (state) { + case SAW_LEADING_MINUS: + switch (c) { + case '0': + state = NumberState.SAW_LEADING_ZERO; + break; + case 'I': + state = NumberState.SAW_MINUS_I; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_INTEGER_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_LEADING_ZERO: + switch (c) { + case '.': + state = NumberState.SAW_DECIMAL_POINT; + break; + case 'e': + case 'E': + state = NumberState.SAW_EXPONENT_LETTER; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_INTEGER_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_INTEGER_DIGITS: + switch (c) { + case '.': + state = NumberState.SAW_DECIMAL_POINT; + break; + case 'e': + case 'E': + state = NumberState.SAW_EXPONENT_LETTER; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_INTEGER_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_DECIMAL_POINT: + type = JsonTokenType.DOUBLE; + if (Character.isDigit(c)) { + state = NumberState.SAW_FRACTION_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + case SAW_FRACTION_DIGITS: + switch (c) { + case 'e': + case 'E': + state = NumberState.SAW_EXPONENT_LETTER; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_FRACTION_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_EXPONENT_LETTER: + type = JsonTokenType.DOUBLE; + switch (c) { + case '+': + case '-': + state = NumberState.SAW_EXPONENT_SIGN; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_EXPONENT_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_EXPONENT_SIGN: + if (Character.isDigit(c)) { + state = NumberState.SAW_EXPONENT_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + case SAW_EXPONENT_DIGITS: + switch (c) { + case ',': + case '}': + case ']': + case ')': + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_EXPONENT_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_MINUS_I: + boolean sawMinusInfinity = true; + char[] nfinity = new char[] { 'n', 'f', 'i', 'n', 'i', 't', 'y' }; + for (int i = 0; i < nfinity.length; i++) { + if (c != nfinity[i]) { + sawMinusInfinity = false; + 
break; + } + c = buffer.read(); + } + if (sawMinusInfinity) { + type = JsonTokenType.DOUBLE; + switch (c) { + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + } else { + state = NumberState.INVALID; + } + break; + default: + } + + switch (state) { + case INVALID: + throw new JsonParseException("Invalid JSON number"); + case DONE: + buffer.unread(c); + String lexeme = buffer.substring(start, buffer.getPosition()); + if (type == JsonTokenType.DOUBLE) { + return new JsonToken(JsonTokenType.DOUBLE, Double.parseDouble(lexeme)); + } else { + long value = Long.parseLong(lexeme); + if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) { + return new JsonToken(JsonTokenType.INT64, value); + } else { + return new JsonToken(JsonTokenType.INT32, (int) value); + } + } + default: + } + } + + } + // CHECKSTYLE:ON + + /** + * Reads {@code StringToken} from source. + * + * @return The string token. + */ + // CHECKSTYLE:OFF + private JsonToken scanString(final char quoteCharacter) { + + StringBuilder sb = new StringBuilder(); + + while (true) { + int c = buffer.read(); + switch (c) { + case '\\': + c = buffer.read(); + switch (c) { + case '\'': + sb.append('\''); + break; + case '"': + sb.append('"'); + break; + case '\\': + sb.append('\\'); + break; + case '/': + sb.append('/'); + break; + case 'b': + sb.append('\b'); + break; + case 'f': + sb.append('\f'); + break; + case 'n': + sb.append('\n'); + break; + case 'r': + sb.append('\r'); + break; + case 't': + sb.append('\t'); + break; + case 'u': + int u1 = buffer.read(); + int u2 = buffer.read(); + int u3 = buffer.read(); + int u4 = buffer.read(); + if (u4 != -1) { + String hex = new String(new char[] { (char) u1, (char) u2, (char) u3, (char) u4 }); + sb.append((char) Integer.parseInt(hex, 16)); + } + break; + default: + throw new JsonParseException("Invalid escape sequence in JSON string '\\%c'.", c); + } + break; + + default: + if (c == quoteCharacter) { + return new JsonToken(JsonTokenType.STRING, sb.toString()); + } + if (c != -1) { + sb.append((char) c); + } + } + if (c == -1) { + throw new JsonParseException("End of file in JSON string."); + } + } + } + + private enum NumberState { + SAW_LEADING_MINUS, SAW_LEADING_ZERO, SAW_INTEGER_DIGITS, SAW_DECIMAL_POINT, SAW_FRACTION_DIGITS, SAW_EXPONENT_LETTER, SAW_EXPONENT_SIGN, SAW_EXPONENT_DIGITS, SAW_MINUS_I, DONE, INVALID + } + + private enum RegularExpressionState { + IN_PATTERN, IN_ESCAPE_SEQUENCE, IN_OPTIONS, DONE, INVALID + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonToken.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonToken.java new file mode 100644 index 0000000000..293736123e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonToken.java @@ -0,0 +1,86 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static java.lang.String.*; + +import org.bson.BsonDouble; +import org.bson.json.JsonParseException; +import org.bson.types.Decimal128; + +/** + * JsonToken implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
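+ * <p>
+ * A usage sketch (illustrative values): {@code getValue(Class)} applies lenient numeric conversion before
+ * falling back to a plain cast:
+ *
+ * <pre>
+ * JsonToken token = new JsonToken(JsonTokenType.INT32, 42);
+ * Long asLong = token.getValue(Long.class); // 42L, widened from Integer
+ * Decimal128 asDecimal = token.getValue(Decimal128.class); // Decimal128 created from the numeric value
+ * </pre>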
                    + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +class JsonToken { + + private final Object value; + private final JsonTokenType type; + + JsonToken(final JsonTokenType type, final Object value) { + + this.value = value; + this.type = type; + } + + Object getValue() { + return value; + } + + T getValue(final Class clazz) { + + try { + if (Long.class == clazz) { + if (value instanceof Integer integerValue) { + return clazz.cast(integerValue.longValue()); + } else if (value instanceof String stringValue) { + return clazz.cast(Long.valueOf(stringValue)); + } + } else if (Integer.class == clazz) { + if (value instanceof String stringValue) { + return clazz.cast(Integer.valueOf(stringValue)); + } + } else if (Double.class == clazz) { + if (value instanceof String stringValue) { + return clazz.cast(Double.valueOf(stringValue)); + } + } else if (Decimal128.class == clazz) { + if (value instanceof Integer integerValue) { + return clazz.cast(new Decimal128(integerValue)); + } else if (value instanceof Long longValue) { + return clazz.cast(new Decimal128(longValue)); + } else if (value instanceof Double doubleValue) { + return clazz.cast(new BsonDouble(doubleValue).decimal128Value()); + } else if (value instanceof String stringValue) { + return clazz.cast(Decimal128.parse(stringValue)); + } + } + + return clazz.cast(value); + } catch (Exception e) { + throw new JsonParseException(format("Exception converting value '%s' to type %s", value, clazz.getName()), e); + } + } + + public JsonTokenType getType() { + return type; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonTokenType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonTokenType.java new file mode 100644 index 0000000000..bbdfbc4ae8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonTokenType.java @@ -0,0 +1,107 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +/** + * JsonTokenType implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
                    + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +enum JsonTokenType { + /** + * An invalid token. + */ + INVALID, + + /** + * A begin array token (a '['). + */ + BEGIN_ARRAY, + + /** + * A begin object token (a '{'). + */ + BEGIN_OBJECT, + + /** + * An end array token (a ']'). + */ + END_ARRAY, + + /** + * A left parenthesis (a '('). + */ + LEFT_PAREN, + + /** + * A right parenthesis (a ')'). + */ + RIGHT_PAREN, + + /** + * An end object token (a '}'). + */ + END_OBJECT, + + /** + * A colon token (a ':'). + */ + COLON, + + /** + * A comma token (a ','). + */ + COMMA, + + /** + * A Double token. + */ + DOUBLE, + + /** + * An Int32 token. + */ + INT32, + + /** + * And Int64 token. + */ + INT64, + + /** + * A regular expression token. + */ + REGULAR_EXPRESSION, + + /** + * A string token. + */ + STRING, + + /** + * An unquoted string token. + */ + UNQUOTED_STRING, + + /** + * An end of file token. + */ + END_OF_FILE +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingContext.java new file mode 100644 index 0000000000..b4fd13b3af --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingContext.java @@ -0,0 +1,178 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.ParseException; +import org.springframework.expression.ParserContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; + +/** + * Reusable context for binding parameters to a placeholder or a SpEL expression within a JSON structure.
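+ * <p>
+ * A usage sketch (hypothetical values), binding positional parameters while decoding a JSON filter:
+ *
+ * <pre>
+ * Object[] args = new Object[] { "luke" };
+ * ParameterBindingContext context = new ParameterBindingContext(index -> args[index],
+ * 		new SpelExpressionParser(), StandardEvaluationContext::new);
+ * Document filter = new ParameterBindingDocumentCodec().decode("{ 'name' : ?0 }", context);
+ * // filter: { "name" : "luke" }
+ * </pre>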
                    + * To be used along with {@link ParameterBindingDocumentCodec#decode(String, ParameterBindingContext)}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +public class ParameterBindingContext { + + private final ValueProvider valueProvider; + private final ValueExpressionEvaluator expressionEvaluator; + + /** + * @param valueProvider + * @param expressionParser + * @param evaluationContext + * @deprecated since 4.4.0, use {@link #ParameterBindingContext(ValueProvider, ExpressionParser, Supplier)} instead. + */ + @Deprecated(since = "4.4.0") + public ParameterBindingContext(ValueProvider valueProvider, SpelExpressionParser expressionParser, + EvaluationContext evaluationContext) { + this(valueProvider, expressionParser, () -> evaluationContext); + } + + /** + * @param valueProvider + * @param expressionEvaluator + * @since 3.1 + * @deprecated since 4.4.0, use {@link #ParameterBindingContext(ValueProvider, ValueExpressionEvaluator)} instead. + */ + @Deprecated(since = "4.4.0") + public ParameterBindingContext(ValueProvider valueProvider, SpELExpressionEvaluator expressionEvaluator) { + this(valueProvider, (ValueExpressionEvaluator) expressionEvaluator); + } + + /** + * @param valueProvider + * @param expressionParser + * @param evaluationContext a {@link Supplier} for {@link Lazy} context retrieval. + * @since 2.2.3 + */ + public ParameterBindingContext(ValueProvider valueProvider, ExpressionParser expressionParser, + Supplier evaluationContext) { + this(valueProvider, new EvaluationContextExpressionEvaluator(valueProvider, unwrap(expressionParser)) { + @Override + public EvaluationContext getEvaluationContext(String expressionString) { + return evaluationContext.get(); + } + }); + } + + private static ExpressionParser unwrap(ExpressionParser expressionParser) { + return new ExpressionParser() { + @Override + public Expression parseExpression(String expressionString) throws ParseException { + return expressionParser.parseExpression(unwrap(expressionString)); + } + + @Override + public Expression parseExpression(String expressionString, ParserContext context) throws ParseException { + return expressionParser.parseExpression(unwrap(expressionString), context); + } + }; + } + + private static String unwrap(String expressionString) { + return expressionString.startsWith("#{") && expressionString.endsWith("}") + ? expressionString.substring(2, expressionString.length() - 1).trim() + : expressionString; + } + + /** + * @param valueProvider + * @param expressionEvaluator + * @since 4.4.0 + */ + public ParameterBindingContext(ValueProvider valueProvider, ValueExpressionEvaluator expressionEvaluator) { + this.valueProvider = valueProvider; + this.expressionEvaluator = expressionEvaluator; + } + + /** + * Create a new {@link ParameterBindingContext} that is capable of expression parsing and can provide a + * {@link EvaluationContext} based on {@link ExpressionDependencies}. 
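+	 * <p>
+	 * An illustrative sketch (hypothetical {@code args} array and {@code contextProvider}; the provider is assumed
+	 * to expose a dependency-aware {@code getEvaluationContext} method):
+	 *
+	 * <pre>
+	 * ParameterBindingContext context = ParameterBindingContext.forExpressions(index -> args[index],
+	 * 		new SpelExpressionParser(), dependencies -> contextProvider.getEvaluationContext(args, dependencies));
+	 * </pre>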
+ * + * @param valueProvider + * @param expressionParser + * @param contextFunction + * @return + * @since 3.1 + */ + public static ParameterBindingContext forExpressions(ValueProvider valueProvider, ExpressionParser expressionParser, + Function contextFunction) { + + return new ParameterBindingContext(valueProvider, + new EvaluationContextExpressionEvaluator(valueProvider, expressionParser) { + + @Override + public EvaluationContext getEvaluationContext(String expressionString) { + + Expression expression = getParsedExpression(expressionString); + ExpressionDependencies dependencies = ExpressionDependencies.discover(expression); + return contextFunction.apply(dependencies); + } + }); + } + + /** + * Create a new {@link ParameterBindingContext} that is capable of expression parsing. + * + * @param valueProvider + * @param expressionEvaluator + * @return + * @since 4.4.0 + */ + public static ParameterBindingContext forExpressions(ValueProvider valueProvider, + ValueExpressionEvaluator expressionEvaluator) { + + return new ParameterBindingContext(valueProvider, expressionEvaluator); + } + + @Nullable + public Object bindableValueForIndex(int index) { + return valueProvider.getBindableValue(index); + } + + @Nullable + public Object evaluateExpression(String expressionString) { + return expressionEvaluator.evaluate(expressionString); + } + + @Nullable + public Object evaluateExpression(String expressionString, Map variables) { + + if (expressionEvaluator instanceof EvaluationContextExpressionEvaluator expressionEvaluator) { + return expressionEvaluator.evaluateExpression(expressionString, variables); + } + return expressionEvaluator.evaluate(expressionString); + } + + public ValueProvider getValueProvider() { + return valueProvider; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingDocumentCodec.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingDocumentCodec.java new file mode 100644 index 0000000000..ffa226ab69 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingDocumentCodec.java @@ -0,0 +1,412 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.json; + +import static java.util.Arrays.*; +import static org.bson.assertions.Assertions.*; +import static org.bson.codecs.configuration.CodecRegistries.*; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; + +import org.bson.AbstractBsonReader.State; +import org.bson.BsonBinarySubType; +import org.bson.BsonDocument; +import org.bson.BsonDocumentWriter; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonValue; +import org.bson.BsonWriter; +import org.bson.Document; +import org.bson.Transformer; +import org.bson.codecs.*; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.json.JsonParseException; + +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.NumberUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * A {@link Codec} implementation that allows binding parameters to placeholders or SpEL expressions when decoding a + * JSON String.
                    + * Modified version of MongoDB + * Inc. DocumentCodec licensed under the Apache License, Version 2.0.
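+ * <p>
+ * A usage sketch (illustrative values):
+ *
+ * <pre>
+ * ParameterBindingDocumentCodec codec = new ParameterBindingDocumentCodec();
+ * Document query = codec.decode("{ 'age' : { '$gt' : ?0 } }", new Object[] { 21 });
+ * // query: { "age" : { "$gt" : 21 } }
+ * </pre>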
                    + * + * @author Jeff Yemin + * @author Ross Lawley + * @author Ralph Schaer + * @author Christoph Strobl + * @author Rocco Lagrotteria + * @since 2.2 + */ +public class ParameterBindingDocumentCodec implements CollectibleCodec { + + private static final String ID_FIELD_NAME = FieldName.ID.name(); + private static final CodecRegistry DEFAULT_REGISTRY = fromProviders( + asList(new ValueCodecProvider(), new BsonValueCodecProvider(), new DocumentCodecProvider())); + private static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(); + + private final BsonTypeCodecMap bsonTypeCodecMap; + private final CodecRegistry registry; + private final IdGenerator idGenerator; + private final Transformer valueTransformer; + + /** + * Construct a new instance with a default {@code CodecRegistry}. + */ + public ParameterBindingDocumentCodec() { + this(DEFAULT_REGISTRY); + } + + /** + * Construct a new instance with the given registry. + * + * @param registry the registry + */ + public ParameterBindingDocumentCodec(final CodecRegistry registry) { + this(registry, DEFAULT_BSON_TYPE_CLASS_MAP); + } + + /** + * Construct a new instance with the given registry and BSON type class map. + * + * @param registry the registry + * @param bsonTypeClassMap the BSON type class map + */ + public ParameterBindingDocumentCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap) { + this(registry, bsonTypeClassMap, null); + } + + /** + * Construct a new instance with the given registry and BSON type class map. The transformer is applied as a last step + * when decoding values, which allows users of this codec to control the decoding process. For example, a user of this + * class could substitute a value decoded as a Document with an instance of a special purpose class (e.g., one + * representing a DBRef in MongoDB). + * + * @param registry the registry + * @param bsonTypeClassMap the BSON type class map + * @param valueTransformer the value transformer to use as a final step when decoding the value of any field in the + * document + */ + public ParameterBindingDocumentCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, + final Transformer valueTransformer) { + this.registry = notNull("registry", registry); + this.bsonTypeCodecMap = new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry); + this.idGenerator = new ObjectIdGenerator(); + this.valueTransformer = valueTransformer != null ? 
valueTransformer : new Transformer() { + @Override + public Object transform(final Object value) { + return value; + } + }; + } + + @Override + public boolean documentHasId(final Document document) { + return document.containsKey(ID_FIELD_NAME); + } + + @Override + public BsonValue getDocumentId(final Document document) { + if (!documentHasId(document)) { + throw new IllegalStateException("The document does not contain an _id"); + } + + Object id = document.get(ID_FIELD_NAME); + if (id instanceof BsonValue bsonValue) { + return bsonValue; + } + + BsonDocument idHoldingDocument = new BsonDocument(); + BsonWriter writer = new BsonDocumentWriter(idHoldingDocument); + writer.writeStartDocument(); + writer.writeName(ID_FIELD_NAME); + writeValue(writer, EncoderContext.builder().build(), id); + writer.writeEndDocument(); + return idHoldingDocument.get(ID_FIELD_NAME); + } + + @Override + public Document generateIdIfAbsentFromDocument(final Document document) { + if (!documentHasId(document)) { + document.put(ID_FIELD_NAME, idGenerator.generate()); + } + return document; + } + + @Override + public void encode(final BsonWriter writer, final Document document, final EncoderContext encoderContext) { + writeMap(writer, document, encoderContext); + } + + // Spring Data Customization START + public Document decode(@Nullable String json, Object[] values) { + + return decode(json, new ParameterBindingContext((index) -> values[index], new SpelExpressionParser(), + EvaluationContextProvider.DEFAULT.getEvaluationContext(values))); + } + + public Document decode(@Nullable String json, ParameterBindingContext bindingContext) { + + if (!StringUtils.hasText(json)) { + return new Document(); + } + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, bindingContext); + return this.decode(reader, DecoderContext.builder().build()); + } + + /** + * Determine {@link ExpressionDependencies} from Expressions that are nested in the {@code json} content. Returns + * {@link Optional#empty()} if {@code json} is empty or of it does not contain any SpEL expressions. + * + * @param json + * @param expressionParser + * @return merged {@link ExpressionDependencies} object if expressions were found, otherwise + * {@link ExpressionDependencies#none()}. + * @since 3.1 + */ + public ExpressionDependencies captureExpressionDependencies(@Nullable String json, ValueProvider valueProvider, + ValueExpressionParser expressionParser) { + + if (!StringUtils.hasText(json)) { + return ExpressionDependencies.none(); + } + + DependencyCapturingExpressionEvaluator expressionEvaluator = new DependencyCapturingExpressionEvaluator( + expressionParser); + this.decode(new ParameterBindingJsonReader(json, new ParameterBindingContext(valueProvider, expressionEvaluator)), + DecoderContext.builder().build()); + + return expressionEvaluator.getCapturedDependencies(); + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + @Override + public Document decode(final BsonReader reader, final DecoderContext decoderContext) { + + if (reader instanceof ParameterBindingJsonReader bindingReader) { + + // check if the reader has actually found something to replace on top level and did so. 
+ // binds just placeholder queries like: `@Query(?0)` + if (bindingReader.currentValue instanceof org.bson.Document document) { + return document; + } else if (bindingReader.currentValue instanceof String stringValue) { + try { + return decode(stringValue, new Object[0]); + } catch (JsonParseException jsonParseException) { + throw new IllegalArgumentException("Expression result is not a valid json document", jsonParseException); + } + } else if (bindingReader.currentValue instanceof Map) { + return new Document((Map) bindingReader.currentValue); + } + } + + Document document = new Document(); + + try { + + reader.readStartDocument(); + + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + String fieldName = reader.readName(); + Object value = readValue(reader, decoderContext); + document.put(fieldName, value); + } + + reader.readEndDocument(); + + } catch (JsonParseException | BsonInvalidOperationException e) { + try { + + Object value = readValue(reader, decoderContext); + if (value instanceof Map map) { + if (!map.isEmpty()) { + return new Document((Map) value); + } + } + } catch (Exception ex) { + e.addSuppressed(ex); + throw e; + } + } + + return document; + } + + // Spring Data Customization END + + @Override + public Class getEncoderClass() { + return Document.class; + } + + private void beforeFields(final BsonWriter bsonWriter, final EncoderContext encoderContext, + final Map document) { + if (encoderContext.isEncodingCollectibleDocument() && document.containsKey(ID_FIELD_NAME)) { + bsonWriter.writeName(ID_FIELD_NAME); + writeValue(bsonWriter, encoderContext, document.get(ID_FIELD_NAME)); + } + } + + private boolean skipField(final EncoderContext encoderContext, final String key) { + return encoderContext.isEncodingCollectibleDocument() && key.equals(ID_FIELD_NAME); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void writeValue(final BsonWriter writer, final EncoderContext encoderContext, final Object value) { + if (value == null) { + writer.writeNull(); + } else if (value instanceof Iterable) { + writeIterable(writer, (Iterable) value, encoderContext.getChildContext()); + } else if (value instanceof Map) { + writeMap(writer, (Map) value, encoderContext.getChildContext()); + } else { + Codec codec = registry.get(value.getClass()); + encoderContext.encodeWithChildContext(codec, writer, value); + } + } + + private void writeMap(final BsonWriter writer, final Map map, final EncoderContext encoderContext) { + writer.writeStartDocument(); + + beforeFields(writer, encoderContext, map); + + for (final Map.Entry entry : map.entrySet()) { + if (skipField(encoderContext, entry.getKey())) { + continue; + } + writer.writeName(entry.getKey()); + writeValue(writer, encoderContext, entry.getValue()); + } + writer.writeEndDocument(); + } + + private void writeIterable(final BsonWriter writer, final Iterable list, + final EncoderContext encoderContext) { + writer.writeStartArray(); + for (final Object value : list) { + writeValue(writer, encoderContext, value); + } + writer.writeEndArray(); + } + + private Object readValue(final BsonReader reader, final DecoderContext decoderContext) { + + // Spring Data Customization START + if (reader instanceof ParameterBindingJsonReader bindingReader) { + + // check if the reader has actually found something to replace and did so. 
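+			// e.g. (illustrative values): decoding "{ 'name' : ?0 }" bound with values = { "luke" } arrives here
+			// with currentValue already resolved to "luke" by the reader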
+ // resets the reader state to move on after the actual value + // returns the replacement value + if (bindingReader.currentValue != null) { + + Object value = bindingReader.currentValue; + + if (ObjectUtils.nullSafeEquals(BsonType.DATE_TIME, bindingReader.getCurrentBsonType()) + && !(value instanceof Date)) { + + if (value instanceof Number numberValue) { + value = new Date(NumberUtils.convertNumberToTargetClass(numberValue, Long.class)); + } else if (value instanceof String stringValue) { + value = new Date(DateTimeFormatter.parse(stringValue)); + } + } + + bindingReader.setState(State.TYPE); + bindingReader.currentValue = null; + return value; + } + } + + // Spring Data Customization END + + BsonType bsonType = reader.getCurrentBsonType(); + if (bsonType == BsonType.NULL) { + reader.readNull(); + return null; + } else if (bsonType == BsonType.ARRAY) { + return readList(reader, decoderContext); + } else if (bsonType == BsonType.BINARY && BsonBinarySubType.isUuid(reader.peekBinarySubType()) + && reader.peekBinarySize() == 16) { + return registry.get(UUID.class).decode(reader, decoderContext); + } + + // Spring Data Customization START + // By default the registry uses DocumentCodec for parsing. + // We need to reroute that to our very own implementation or we'll end up only mapping half the placeholders. + Codec codecToUse = bsonTypeCodecMap.get(bsonType); + if (codecToUse instanceof org.bson.codecs.DocumentCodec) { + codecToUse = this; + } + + return valueTransformer.transform(codecToUse.decode(reader, decoderContext)); + // Spring Data Customization END + } + + private List readList(final BsonReader reader, final DecoderContext decoderContext) { + reader.readStartArray(); + List list = new ArrayList<>(); + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + list.add(readValue(reader, decoderContext)); + } + reader.readEndArray(); + return list; + } + + /** + * @author Christoph Strobl + * @since 3.1 + */ + static class DependencyCapturingExpressionEvaluator implements ValueExpressionEvaluator { + + private static final Object PLACEHOLDER = new Object(); + + private final ValueExpressionParser expressionParser; + private final List dependencies = new ArrayList<>(); + + DependencyCapturingExpressionEvaluator(ValueExpressionParser expressionParser) { + this.expressionParser = expressionParser; + } + + @Nullable + @Override + public T evaluate(String expression) { + + dependencies.add(expressionParser.parse(expression).getExpressionDependencies()); + return (T) PLACEHOLDER; + } + + ExpressionDependencies getCapturedDependencies() { + return ExpressionDependencies.merged(dependencies); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReader.java new file mode 100644 index 0000000000..8dd42e2427 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReader.java @@ -0,0 +1,1722 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util.json;
+
+import static java.lang.String.*;
+
+import java.text.DateFormat;
+import java.text.ParsePosition;
+import java.text.SimpleDateFormat;
+import java.time.format.DateTimeParseException;
+import java.util.Base64;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.function.Supplier;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.bson.*;
+import org.bson.json.JsonParseException;
+import org.bson.types.Decimal128;
+import org.bson.types.MaxKey;
+import org.bson.types.MinKey;
+import org.bson.types.ObjectId;
+import org.springframework.data.spel.EvaluationContextProvider;
+import org.springframework.expression.EvaluationContext;
+import org.springframework.expression.spel.standard.SpelExpressionParser;
+import org.springframework.lang.Nullable;
+import org.springframework.util.ClassUtils;
+import org.springframework.util.NumberUtils;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Reads a JSON document and evaluates placeholders and SpEL expressions. Modified version of MongoDB Inc.
+ * JsonReader licensed under the Apache License, Version 2.0.
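+ * <p>
+ * A usage sketch (illustrative values):
+ *
+ * <pre>
+ * ParameterBindingJsonReader reader = new ParameterBindingJsonReader("{ 'name' : ?0 }", new Object[] { "luke" });
+ * Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build());
+ * // target: { "name" : "luke" }
+ * </pre>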
                    + * + * @author Jeff Yemin + * @author Ross Lawley + * @author Trisha Gee + * @author Robert Guo + * @author Florian Buecklers + * @author Brendon Puntin + * @author Christoph Strobl + * @author Rocco Lagrotteria + * @since 2.2 + */ +public class ParameterBindingJsonReader extends AbstractBsonReader { + + private static final Pattern ENTIRE_QUERY_BINDING_PATTERN = Pattern.compile("^\\?(\\d+)$|^[\\?:][#$]\\{.*\\}$"); + private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)"); + private static final Pattern EXPRESSION_BINDING_PATTERN = Pattern.compile("[\\?:][#$]\\{.*\\}"); + private static final Pattern SPEL_PARAMETER_BINDING_PATTERN = Pattern.compile("('\\?(\\d+)'|\\?(\\d+))"); + + private final ParameterBindingContext bindingContext; + + private final JsonScanner scanner; + private JsonToken pushedToken; + Object currentValue; + + /** + * Constructs a new instance with the given JSON string. + * + * @param json A string representation of a JSON. + */ + public ParameterBindingJsonReader(final String json) { + this(json, new Object[] {}); + } + + // Spring Data Customization START + + /** + * Constructs a new instance with the given JSON string. + * + * @param json A string representation of a JSON. + */ + public ParameterBindingJsonReader(String json, Object[] values) { + + this(json, (index) -> values[index], new SpelExpressionParser(), + EvaluationContextProvider.DEFAULT.getEvaluationContext(values)); + } + + public ParameterBindingJsonReader(String json, ValueProvider accessor, SpelExpressionParser spelExpressionParser, + EvaluationContext evaluationContext) { + + this(json, accessor, spelExpressionParser, () -> evaluationContext); + } + + /** + * @since 2.2.3 + */ + public ParameterBindingJsonReader(String json, ValueProvider accessor, SpelExpressionParser spelExpressionParser, + Supplier evaluationContext) { + + this(json, new ParameterBindingContext(accessor, spelExpressionParser, evaluationContext)); + + } + + public ParameterBindingJsonReader(String json, ParameterBindingContext bindingContext) { + + this.scanner = new JsonScanner(json); + setContext(new Context(null, BsonContextType.TOP_LEVEL)); + + this.bindingContext = bindingContext; + + Matcher matcher = ENTIRE_QUERY_BINDING_PATTERN.matcher(json); + if (matcher.find()) { + BindableValue bindingResult = bindableValueFor(new JsonToken(JsonTokenType.UNQUOTED_STRING, json)); + currentValue = bindingResult.getValue(); + } + } + + // Spring Data Customization END + + @Override + protected BsonBinary doReadBinaryData() { + return (BsonBinary) currentValue; + } + + @Override + protected byte doPeekBinarySubType() { + return doReadBinaryData().getType(); + } + + @Override + protected int doPeekBinarySize() { + return doReadBinaryData().getData().length; + } + + @Override + protected boolean doReadBoolean() { + return (Boolean) currentValue; + } + + // CHECKSTYLE:OFF + @Override + public BsonType readBsonType() { + + if (isClosed()) { + throw new IllegalStateException("This instance has been closed"); + } + if (getState() == State.INITIAL || getState() == State.DONE || getState() == State.SCOPE_DOCUMENT) { + // in JSON the top level value can be of any type so fall through + setState(State.TYPE); + } + if (getState() != State.TYPE) { + throwInvalidState("readBSONType", State.TYPE); + } + + if (getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken nameToken = popToken(); + switch (nameToken.getType()) { + case STRING: + case UNQUOTED_STRING: + + // Spring Data 
Customization START + + setCurrentName(bindableValueFor(nameToken).getValue().toString()); + + // Spring Data Customization END + break; + case END_OBJECT: + setState(State.END_OF_DOCUMENT); + return BsonType.END_OF_DOCUMENT; + default: + throw new JsonParseException("JSON reader was expecting a name but found '%s'.", nameToken.getValue()); + } + + JsonToken colonToken = popToken(); + if (colonToken.getType() != JsonTokenType.COLON) { + throw new JsonParseException("JSON reader was expecting ':' but found '%s'.", colonToken.getValue()); + } + } + + JsonToken token = popToken(); + if (getContext().getContextType() == BsonContextType.ARRAY && token.getType() == JsonTokenType.END_ARRAY) { + setState(State.END_OF_ARRAY); + return BsonType.END_OF_DOCUMENT; + } + + // Spring Data Customization START + + boolean noValueFound = false; + BindableValue bindableValue = null; + + switch (token.getType()) { + case BEGIN_ARRAY: + setCurrentBsonType(BsonType.ARRAY); + break; + case BEGIN_OBJECT: + visitExtendedJSON(); + break; + case DOUBLE: + setCurrentBsonType(BsonType.DOUBLE); + currentValue = token.getValue(); + break; + case END_OF_FILE: + setCurrentBsonType(BsonType.END_OF_DOCUMENT); + break; + case INT32: + setCurrentBsonType(BsonType.INT32); + currentValue = token.getValue(); + break; + case INT64: + setCurrentBsonType(BsonType.INT64); + currentValue = token.getValue(); + break; + case REGULAR_EXPRESSION: + + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + currentValue = bindableValueFor(token).getValue(); + break; + case STRING: + + setCurrentBsonType(BsonType.STRING); + currentValue = bindableValueFor(token).getValue().toString(); + break; + case UNQUOTED_STRING: + + String value = token.getValue(String.class); + + if ("false".equals(value) || "true".equals(value)) { + setCurrentBsonType(BsonType.BOOLEAN); + currentValue = Boolean.parseBoolean(value); + } else if ("Infinity".equals(value)) { + setCurrentBsonType(BsonType.DOUBLE); + currentValue = Double.POSITIVE_INFINITY; + } else if ("NaN".equals(value)) { + setCurrentBsonType(BsonType.DOUBLE); + currentValue = Double.NaN; + } else if ("null".equals(value)) { + setCurrentBsonType(BsonType.NULL); + } else if ("undefined".equals(value)) { + setCurrentBsonType(BsonType.UNDEFINED); + } else if ("MinKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MIN_KEY); + currentValue = new MinKey(); + } else if ("MaxKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MAX_KEY); + currentValue = new MaxKey(); + } else if ("BinData".equals(value)) { + setCurrentBsonType(BsonType.BINARY); + currentValue = visitBinDataConstructor(); + } else if ("Date".equals(value)) { + currentValue = visitDateTimeConstructorWithOutNew(); + setCurrentBsonType(BsonType.STRING); + } else if ("HexData".equals(value)) { + setCurrentBsonType(BsonType.BINARY); + currentValue = visitHexDataConstructor(); + } else if ("ISODate".equals(value)) { + setCurrentBsonType(BsonType.DATE_TIME); + currentValue = visitISODateTimeConstructor(); + } else if ("NumberInt".equals(value)) { + setCurrentBsonType(BsonType.INT32); + currentValue = visitNumberIntConstructor(); + } else if ("NumberLong".equals(value)) { + setCurrentBsonType(BsonType.INT64); + currentValue = visitNumberLongConstructor(); + } else if ("NumberDecimal".equals(value)) { + setCurrentBsonType(BsonType.DECIMAL128); + currentValue = visitNumberDecimalConstructor(); + } else if ("ObjectId".equals(value)) { + setCurrentBsonType(BsonType.OBJECT_ID); + currentValue = 
visitObjectIdConstructor(); + } else if ("Timestamp".equals(value)) { + setCurrentBsonType(BsonType.TIMESTAMP); + currentValue = visitTimestampConstructor(); + } else if ("RegExp".equals(value)) { + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + currentValue = visitRegularExpressionConstructor(); + } else if ("DBPointer".equals(value)) { + setCurrentBsonType(BsonType.DB_POINTER); + currentValue = visitDBPointerConstructor(); + } else if ("UUID".equals(value)) { + setCurrentBsonType(BsonType.BINARY); + currentValue = visitUUIDConstructor(); + } else if ("new".equals(value)) { + visitNew(); + } else { + + bindableValue = bindableValueFor(token); + if (bindableValue != null) { + + if (bindableValue.getIndex() != -1) { + setCurrentBsonType(bindableValue.getType()); + } else { + setCurrentBsonType(BsonType.STRING); + } + + currentValue = bindableValue.getValue(); + } else { + noValueFound = true; + } + } + break; + default: + noValueFound = true; + break; + } + + // Spring Data Customization END + + if (noValueFound) { + throw new JsonParseException("JSON reader was expecting a value but found '%s'.", token.getValue()); + } + + if (getContext().getContextType() == BsonContextType.ARRAY + || getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken commaToken = popToken(); + if (commaToken.getType() != JsonTokenType.COMMA) { + pushToken(commaToken); + } + } + + switch (getContext().getContextType()) { + case DOCUMENT: + case SCOPE_DOCUMENT: + default: + setState(State.NAME); + break; + case ARRAY: + case JAVASCRIPT_WITH_SCOPE: + case TOP_LEVEL: + setState(State.VALUE); + break; + } + return getCurrentBsonType(); + } + + // Spring Data Customization START + + @Override + public void setState(State newState) { + super.setState(newState); + } + + private BindableValue bindableValueFor(JsonToken token) { + + if (!JsonTokenType.STRING.equals(token.getType()) && !JsonTokenType.UNQUOTED_STRING.equals(token.getType()) + && !JsonTokenType.REGULAR_EXPRESSION.equals(token.getType())) { + return null; + } + + boolean isRegularExpression = token.getType().equals(JsonTokenType.REGULAR_EXPRESSION); + + BindableValue bindableValue = new BindableValue(); + String tokenValue = isRegularExpression ? 
token.getValue(BsonRegularExpression.class).getPattern() + : String.class.cast(token.getValue()); + Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(tokenValue); + + if (token.getType().equals(JsonTokenType.UNQUOTED_STRING)) { + + Matcher regexMatcher = EXPRESSION_BINDING_PATTERN.matcher(tokenValue); + if (regexMatcher.find()) { + + String binding = regexMatcher.group(); + String expression = binding.substring(3, binding.length() - 1); + String expressionString = binding.substring(1); + + Matcher inSpelMatcher = SPEL_PARAMETER_BINDING_PATTERN.matcher(expression); // matches plain ?0 as well as quoted '?0' parameter references + Map<String, Object> innerSpelVariables = new HashMap<>(); + + while (inSpelMatcher.find()) { + + String group = inSpelMatcher.group(); + int index = computeParameterIndex(group); + Object value = getBindableValueForIndex(index); + String varName = "__QVar" + innerSpelVariables.size(); + expression = expression.replace(group, "#" + varName); + expressionString = expressionString.replace(group, "#" + varName); + if (group.startsWith("'")) { // retain the string semantic + innerSpelVariables.put(varName, nullSafeToString(value)); + } else { + innerSpelVariables.put(varName, value); + } + } + + Object value = evaluateExpression(expressionString, innerSpelVariables); + bindableValue.setValue(value); + bindableValue.setType(bsonTypeForValue(value)); + return bindableValue; + } + + if (matcher.find()) { + + int index = computeParameterIndex(matcher.group()); + Object bindableValueForIndex = getBindableValueForIndex(index); + bindableValue.setValue(bindableValueForIndex); + bindableValue.setType(bsonTypeForValue(bindableValueForIndex)); + return bindableValue; + } + + bindableValue.setValue(tokenValue); + bindableValue.setType(BsonType.STRING); + return bindableValue; + + } + + String computedValue = tokenValue; + + Matcher regexMatcher = EXPRESSION_BINDING_PATTERN.matcher(computedValue); + + while (regexMatcher.find()) { + + String binding = regexMatcher.group(); + String expression = binding.substring(3, binding.length() - 1); + String expressionString = binding.substring(1); + + Matcher inSpelMatcher = SPEL_PARAMETER_BINDING_PATTERN.matcher(expression); + Map<String, Object> innerSpelVariables = new HashMap<>(); + + while (inSpelMatcher.find()) { + + String group = inSpelMatcher.group(); + int index = computeParameterIndex(group); + Object value = getBindableValueForIndex(index); + String varName = "__QVar" + innerSpelVariables.size(); + expression = expression.replace(group, "#" + varName); + expressionString = expressionString.replace(group, "#" + varName); + if (group.startsWith("'")) { // retain the string semantic + innerSpelVariables.put(varName, nullSafeToString(value)); + } else { + innerSpelVariables.put(varName, value); + } + } + + computedValue = computedValue.replace(binding, + nullSafeToString(evaluateExpression(expressionString, innerSpelVariables))); + + bindableValue.setValue(computedValue); + bindableValue.setType(BsonType.STRING); + + return bindableValue; + } + + while (matcher.find()) { + + String group = matcher.group(); + int index = computeParameterIndex(group); + computedValue = computedValue.replace(group, nullSafeToString(getBindableValueForIndex(index))); + } + + if (isRegularExpression) { + + BsonRegularExpression originalExpression = token.getValue(BsonRegularExpression.class); + + bindableValue.setValue(new BsonRegularExpression(computedValue, originalExpression.getOptions())); + bindableValue.setType(BsonType.REGULAR_EXPRESSION); + } else { + + bindableValue.setValue(computedValue); +
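// Illustration (not part of the patch): given the binding "?#{ ?0 == 'luke' }",
// the block above extracts the template "#{ ?0 == 'luke' }" and the inner matcher
// rewrites each parameter reference into a synthetic SpEL variable before
// evaluation, roughly:
//
//   ?#{ ?0 == 'luke' }  ->  evaluate "#{ #__QVar0 == 'luke' }" with #__QVar0 = <parameter 0>
//
// Quoted references such as '?0' are bound via nullSafeToString(...) so that they
// keep their string semantics inside the expression.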
bindableValue.setType(BsonType.STRING); + } + return bindableValue; + } + + private static String nullSafeToString(@Nullable Object value) { + + if (value instanceof Date date) { + return DateTimeFormatter.format(date.getTime()); + } + + return ObjectUtils.nullSafeToString(value); + } + + private static int computeParameterIndex(String parameter) { + return NumberUtils.parseNumber(parameter.replace("?", "").replace("'", ""), Integer.class); + } + + private Object getBindableValueForIndex(int index) { + return bindingContext.bindableValueForIndex(index); + } + + private BsonType bsonTypeForValue(Object value) { + + if (value == null) { + return BsonType.NULL; + } + + Class<?> type = value.getClass(); + + if (ClassUtils.isAssignable(String.class, type)) { + + if (((String) value).startsWith("{")) { + return BsonType.DOCUMENT; + } + return BsonType.STRING; + } + if (ClassUtils.isAssignable(Boolean.class, type)) { + return BsonType.BOOLEAN; + } + if (ClassUtils.isAssignable(Document.class, type)) { + return BsonType.DOCUMENT; + } + if (ClassUtils.isAssignable(Double.class, type)) { + return BsonType.DOUBLE; + } + if (ClassUtils.isAssignable(Long.class, type)) { + return BsonType.INT64; + } + if (ClassUtils.isAssignable(Integer.class, type)) { + return BsonType.INT32; + } + if (ClassUtils.isAssignable(Pattern.class, type)) { + return BsonType.REGULAR_EXPRESSION; + } + if (ClassUtils.isAssignable(Iterable.class, type)) { + return BsonType.ARRAY; + } + if (ClassUtils.isAssignable(Map.class, type)) { + return BsonType.DOCUMENT; + } + + return BsonType.UNDEFINED; + } + + @Nullable + private Object evaluateExpression(String expressionString) { + return bindingContext.evaluateExpression(expressionString, Collections.emptyMap()); + } + + @Nullable + private Object evaluateExpression(String expressionString, Map<String, Object> variables) { + return bindingContext.evaluateExpression(expressionString, variables); + } + + // Spring Data Customization END + // CHECKSTYLE:ON + + @Override + public Decimal128 doReadDecimal128() { + return (Decimal128) currentValue; + } + + @Override + protected long doReadDateTime() { + return (Long) currentValue; + } + + @Override + protected double doReadDouble() { + return (Double) currentValue; + } + + @Override + protected void doReadEndArray() { + setContext(getContext().getParentContext()); + + if (getContext().getContextType() == BsonContextType.ARRAY + || getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken commaToken = popToken(); + if (commaToken.getType() != JsonTokenType.COMMA) { + pushToken(commaToken); + } + } + } + + @Override + protected void doReadEndDocument() { + setContext(getContext().getParentContext()); + if (getContext() != null && getContext().getContextType() == BsonContextType.SCOPE_DOCUMENT) { + setContext(getContext().getParentContext()); // JavaScriptWithScope + verifyToken(JsonTokenType.END_OBJECT); // outermost closing bracket for JavaScriptWithScope + } + + if (getContext() == null) { + throw new JsonParseException("Unexpected end of document."); + } + + if (getContext().getContextType() == BsonContextType.ARRAY + || getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken commaToken = popToken(); + if (commaToken.getType() != JsonTokenType.COMMA) { + pushToken(commaToken); + } + } + } + + @Override + protected int doReadInt32() { + return (Integer) currentValue; + } + + @Override + protected long doReadInt64() { + return (Long) currentValue; + } + + @Override + protected String doReadJavaScript() { + return (String)
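// Illustration (not part of the patch): bsonTypeForValue(...) above decides how a
// bound parameter value is surfaced to the caller, e.g.:
//
//   bsonTypeForValue(42)           -> BsonType.INT32
//   bsonTypeForValue(42L)          -> BsonType.INT64
//   bsonTypeForValue("{ a: 1 }")   -> BsonType.DOCUMENT (string starting with '{')
//   bsonTypeForValue(listOf(1, 2)) -> BsonType.ARRAY
//   bsonTypeForValue(null)         -> BsonType.NULL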
currentValue; + } + + @Override + protected String doReadJavaScriptWithScope() { + return (String) currentValue; + } + + @Override + protected void doReadMaxKey() {} + + @Override + protected void doReadMinKey() {} + + @Override + protected void doReadNull() {} + + @Override + protected ObjectId doReadObjectId() { + return (ObjectId) currentValue; + } + + @Override + protected BsonRegularExpression doReadRegularExpression() { + return (BsonRegularExpression) currentValue; + } + + @Override + protected BsonDbPointer doReadDBPointer() { + return (BsonDbPointer) currentValue; + } + + @Override + protected void doReadStartArray() { + setContext(new Context(getContext(), BsonContextType.ARRAY)); + } + + @Override + protected void doReadStartDocument() { + setContext(new Context(getContext(), BsonContextType.DOCUMENT)); + } + + @Override + protected String doReadString() { + return (String) currentValue; + } + + @Override + protected String doReadSymbol() { + return (String) currentValue; + } + + @Override + protected BsonTimestamp doReadTimestamp() { + return (BsonTimestamp) currentValue; + } + + @Override + protected void doReadUndefined() {} + + @Override + protected void doSkipName() {} + + @Override + protected void doSkipValue() { + switch (getCurrentBsonType()) { + case ARRAY: + readStartArray(); + while (readBsonType() != BsonType.END_OF_DOCUMENT) { + skipValue(); + } + readEndArray(); + break; + case BINARY: + readBinaryData(); + break; + case BOOLEAN: + readBoolean(); + break; + case DATE_TIME: + readDateTime(); + break; + case DOCUMENT: + readStartDocument(); + while (readBsonType() != BsonType.END_OF_DOCUMENT) { + skipName(); + skipValue(); + } + readEndDocument(); + break; + case DOUBLE: + readDouble(); + break; + case INT32: + readInt32(); + break; + case INT64: + readInt64(); + break; + case DECIMAL128: + readDecimal128(); + break; + case JAVASCRIPT: + readJavaScript(); + break; + case JAVASCRIPT_WITH_SCOPE: + readJavaScriptWithScope(); + readStartDocument(); + while (readBsonType() != BsonType.END_OF_DOCUMENT) { + skipName(); + skipValue(); + } + readEndDocument(); + break; + case MAX_KEY: + readMaxKey(); + break; + case MIN_KEY: + readMinKey(); + break; + case NULL: + readNull(); + break; + case OBJECT_ID: + readObjectId(); + break; + case REGULAR_EXPRESSION: + readRegularExpression(); + break; + case STRING: + readString(); + break; + case SYMBOL: + readSymbol(); + break; + case TIMESTAMP: + readTimestamp(); + break; + case UNDEFINED: + readUndefined(); + break; + default: + } + } + + private JsonToken popToken() { + if (pushedToken != null) { + JsonToken token = pushedToken; + pushedToken = null; + return token; + } else { + return scanner.nextToken(); + } + } + + private void pushToken(final JsonToken token) { + if (pushedToken == null) { + pushedToken = token; + } else { + throw new BsonInvalidOperationException("There is already a pending token."); + } + } + + private void verifyToken(final JsonTokenType expectedType) { + JsonToken token = popToken(); + if (expectedType != token.getType()) { + throw new JsonParseException("JSON reader expected token type '%s' but found '%s'.", expectedType, + token.getValue()); + } + } + + private void verifyToken(final JsonTokenType expectedType, final Object expectedValue) { + JsonToken token = popToken(); + if (expectedType != token.getType()) { + throw new JsonParseException("JSON reader expected token type '%s' but found '%s'.", expectedType, + token.getValue()); + } + if (!expectedValue.equals(token.getValue())) { + throw new 
JsonParseException("JSON reader expected '%s' but found '%s'.", expectedValue, token.getValue()); + } + } + + private void verifyString(final String expected) { + if (expected == null) { + throw new IllegalArgumentException("Can't be null"); + } + + JsonToken token = popToken(); + JsonTokenType type = token.getType(); + + if ((type != JsonTokenType.STRING && type != JsonTokenType.UNQUOTED_STRING) || !expected.equals(token.getValue())) { + throw new JsonParseException("JSON reader expected '%s' but found '%s'.", expected, token.getValue()); + } + } + + private void visitNew() { + JsonToken typeToken = popToken(); + if (typeToken.getType() != JsonTokenType.UNQUOTED_STRING) { + throw new JsonParseException("JSON reader expected a type name but found '%s'.", typeToken.getValue()); + } + + String value = typeToken.getValue(String.class); + + if ("MinKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MIN_KEY); + currentValue = new MinKey(); + } else if ("MaxKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MAX_KEY); + currentValue = new MaxKey(); + } else if ("BinData".equals(value)) { + currentValue = visitBinDataConstructor(); + setCurrentBsonType(BsonType.BINARY); + } else if ("Date".equals(value)) { + currentValue = visitDateTimeConstructor(); + setCurrentBsonType(BsonType.DATE_TIME); + } else if ("HexData".equals(value)) { + currentValue = visitHexDataConstructor(); + setCurrentBsonType(BsonType.BINARY); + } else if ("ISODate".equals(value)) { + currentValue = visitISODateTimeConstructor(); + setCurrentBsonType(BsonType.DATE_TIME); + } else if ("NumberInt".equals(value)) { + currentValue = visitNumberIntConstructor(); + setCurrentBsonType(BsonType.INT32); + } else if ("NumberLong".equals(value)) { + currentValue = visitNumberLongConstructor(); + setCurrentBsonType(BsonType.INT64); + } else if ("NumberDecimal".equals(value)) { + currentValue = visitNumberDecimalConstructor(); + setCurrentBsonType(BsonType.DECIMAL128); + } else if ("ObjectId".equals(value)) { + currentValue = visitObjectIdConstructor(); + setCurrentBsonType(BsonType.OBJECT_ID); + } else if ("RegExp".equals(value)) { + currentValue = visitRegularExpressionConstructor(); + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + } else if ("DBPointer".equals(value)) { + currentValue = visitDBPointerConstructor(); + setCurrentBsonType(BsonType.DB_POINTER); + } else if ("UUID".equals(value)) { + currentValue = visitUUIDConstructor(); + setCurrentBsonType(BsonType.BINARY); + } else { + throw new JsonParseException("JSON reader expected a type name but found '%s'.", value); + } + } + + private void visitExtendedJSON() { + JsonToken nameToken = popToken(); + String value = nameToken.getValue(String.class); + JsonTokenType type = nameToken.getType(); + + if (type == JsonTokenType.STRING || type == JsonTokenType.UNQUOTED_STRING) { + + if ("$binary".equals(value) || "$type".equals(value)) { + currentValue = visitBinDataExtendedJson(value); + if (currentValue != null) { + setCurrentBsonType(BsonType.BINARY); + return; + } + } + if ("$uuid".equals(value)) { + currentValue = visitUuidExtendedJson(); + setCurrentBsonType(BsonType.BINARY); + return; + } + else if ("$regex".equals(value) || "$options".equals(value)) { + currentValue = visitRegularExpressionExtendedJson(value); + if (currentValue != null) { + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + return; + } + } else if ("$code".equals(value)) { + visitJavaScriptExtendedJson(); + return; + } else if ("$date".equals(value)) { + 
currentValue = visitDateTimeExtendedJson(); + setCurrentBsonType(BsonType.DATE_TIME); + return; + } else if ("$maxKey".equals(value)) { + currentValue = visitMaxKeyExtendedJson(); + setCurrentBsonType(BsonType.MAX_KEY); + return; + } else if ("$minKey".equals(value)) { + currentValue = visitMinKeyExtendedJson(); + setCurrentBsonType(BsonType.MIN_KEY); + return; + } else if ("$oid".equals(value)) { + currentValue = visitObjectIdExtendedJson(); + setCurrentBsonType(BsonType.OBJECT_ID); + return; + } else if ("$regularExpression".equals(value)) { + currentValue = visitNewRegularExpressionExtendedJson(); + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + return; + } else if ("$symbol".equals(value)) { + currentValue = visitSymbolExtendedJson(); + setCurrentBsonType(BsonType.SYMBOL); + return; + } else if ("$timestamp".equals(value)) { + currentValue = visitTimestampExtendedJson(); + setCurrentBsonType(BsonType.TIMESTAMP); + return; + } else if ("$undefined".equals(value)) { + currentValue = visitUndefinedExtendedJson(); + setCurrentBsonType(BsonType.UNDEFINED); + return; + } else if ("$numberLong".equals(value)) { + currentValue = visitNumberLongExtendedJson(); + setCurrentBsonType(BsonType.INT64); + return; + } else if ("$numberInt".equals(value)) { + currentValue = visitNumberIntExtendedJson(); + setCurrentBsonType(BsonType.INT32); + return; + } else if ("$numberDouble".equals(value)) { + currentValue = visitNumberDoubleExtendedJson(); + setCurrentBsonType(BsonType.DOUBLE); + return; + } else if ("$numberDecimal".equals(value)) { + currentValue = visitNumberDecimalExtendedJson(); + setCurrentBsonType(BsonType.DECIMAL128); + return; + } else if ("$dbPointer".equals(value)) { + currentValue = visitDbPointerExtendedJson(); + setCurrentBsonType(BsonType.DB_POINTER); + return; + } + } + + pushToken(nameToken); + setCurrentBsonType(BsonType.DOCUMENT); + } + + private void visitEmptyConstructor() { + JsonToken nextToken = popToken(); + if (nextToken.getType() == JsonTokenType.LEFT_PAREN) { + verifyToken(JsonTokenType.RIGHT_PAREN); + } else { + pushToken(nextToken); + } + } + + private BsonBinary visitBinDataConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken subTypeToken = popToken(); + if (subTypeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected a binary subtype but found '%s'.", subTypeToken.getValue()); + } + verifyToken(JsonTokenType.COMMA); + JsonToken bytesToken = popToken(); + if (bytesToken.getType() != JsonTokenType.UNQUOTED_STRING && bytesToken.getType() != JsonTokenType.STRING) { + throw new JsonParseException("JSON reader expected a string but found '%s'.", bytesToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + + byte[] bytes = Base64.getDecoder().decode(bytesToken.getValue(String.class)); + return new BsonBinary(subTypeToken.getValue(Integer.class).byteValue(), bytes); + } + + private BsonBinary visitUUIDConstructor() { + this.verifyToken(JsonTokenType.LEFT_PAREN); + String hexString = this.readStringFromExtendedJson().replace("-", ""); + + this.verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString)); + } + + private BsonRegularExpression visitRegularExpressionConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + String pattern = readStringFromExtendedJson(); + String options = ""; + JsonToken commaToken = popToken(); + if (commaToken.getType() == JsonTokenType.COMMA) { + options = readStringFromExtendedJson(); + } else { + 
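// Illustration (not part of the patch): visitExtendedJSON() above peeks at the
// first key of a nested object and, for the recognized $-prefixed names, collapses
// the object into a single BSON value instead of a document:
//
//   { "$oid": "5f1d0b7e2d8b4a0001a2b3c4" }               -> BsonType.OBJECT_ID
//   { "$date": { "$numberLong": "1595846400000" } }      -> BsonType.DATE_TIME
//   { "$binary": { "base64": "...", "subType": "00" } }  -> BsonType.BINARY
//
// Any other first key is pushed back and the object is read as a plain document.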
pushToken(commaToken); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonRegularExpression(pattern, options); + } + + private ObjectId visitObjectIdConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + ObjectId objectId = new ObjectId(readStringFromExtendedJson()); + verifyToken(JsonTokenType.RIGHT_PAREN); + return objectId; + } + + private BsonTimestamp visitTimestampConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken timeToken = popToken(); + int time; + if (timeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", timeToken.getValue()); + } else { + time = timeToken.getValue(Integer.class); + } + verifyToken(JsonTokenType.COMMA); + JsonToken incrementToken = popToken(); + int increment; + if (incrementToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", incrementToken.getValue()); + } else { + increment = incrementToken.getValue(Integer.class); + } + + verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonTimestamp(time, increment); + } + + private BsonDbPointer visitDBPointerConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + String namespace = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + ObjectId id = new ObjectId(readStringFromExtendedJson()); + verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonDbPointer(namespace, id); + } + + private int visitNumberIntConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken valueToken = popToken(); + int value; + if (valueToken.getType() == JsonTokenType.INT32) { + value = valueToken.getValue(Integer.class); + } else if (valueToken.getType() == JsonTokenType.STRING) { + value = Integer.parseInt(valueToken.getValue(String.class)); + } else { + throw new JsonParseException("JSON reader expected an integer or a string but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return value; + } + + private long visitNumberLongConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken valueToken = popToken(); + long value; + if (valueToken.getType() == JsonTokenType.INT32 || valueToken.getType() == JsonTokenType.INT64) { + value = valueToken.getValue(Long.class); + } else if (valueToken.getType() == JsonTokenType.STRING) { + value = Long.parseLong(valueToken.getValue(String.class)); + } else { + throw new JsonParseException("JSON reader expected an integer or a string but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return value; + } + + private Decimal128 visitNumberDecimalConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken valueToken = popToken(); + Decimal128 value; + if (valueToken.getType() == JsonTokenType.INT32 || valueToken.getType() == JsonTokenType.INT64 + || valueToken.getType() == JsonTokenType.DOUBLE) { + value = valueToken.getValue(Decimal128.class); + } else if (valueToken.getType() == JsonTokenType.STRING) { + value = Decimal128.parse(valueToken.getValue(String.class)); + } else { + throw new JsonParseException("JSON reader expected a number or a string but found '%s'.", valueToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return value; + } + + private long visitISODateTimeConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + + JsonToken token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + return new Date().getTime(); + } else if (token.getType() != JsonTokenType.STRING)
{ + throw new JsonParseException("JSON reader expected a string but found '%s'.", token.getValue()); + } + + verifyToken(JsonTokenType.RIGHT_PAREN); + + String dateTimeString = token.getValue(String.class); + + try { + return DateTimeFormatter.parse(dateTimeString); + } catch (DateTimeParseException e) { + throw new JsonParseException("Failed to parse string as a date: " + dateTimeString, e); + } + } + + private BsonBinary visitHexDataConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken subTypeToken = popToken(); + if (subTypeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected a binary subtype but found '%s'.", subTypeToken.getValue()); + } + verifyToken(JsonTokenType.COMMA); + String hex = readStringFromExtendedJson(); + verifyToken(JsonTokenType.RIGHT_PAREN); + + if ((hex.length() & 1) != 0) { + hex = "0" + hex; + } + + for (final BsonBinarySubType subType : BsonBinarySubType.values()) { + if (subType.getValue() == subTypeToken.getValue(Integer.class)) { + return new BsonBinary(subType, decodeHex(hex)); + } + } + return new BsonBinary(decodeHex(hex)); + } + + private long visitDateTimeConstructor() { + DateFormat format = new SimpleDateFormat("EEE MMM dd yyyy HH:mm:ss z", Locale.ENGLISH); + + verifyToken(JsonTokenType.LEFT_PAREN); + + JsonToken token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + return new Date().getTime(); + } else if (token.getType() == JsonTokenType.STRING) { + verifyToken(JsonTokenType.RIGHT_PAREN); + String s = token.getValue(String.class); + ParsePosition pos = new ParsePosition(0); + Date dateTime = format.parse(s, pos); + if (dateTime != null && pos.getIndex() == s.length()) { + return dateTime.getTime(); + } else { + throw new JsonParseException( + "JSON reader expected a date in 'EEE MMM dd yyyy HH:mm:ss z' format but found '%s'.", s); + } + + } else if (token.getType() == JsonTokenType.INT32 || token.getType() == JsonTokenType.INT64) { + long[] values = new long[7]; + int pos = 0; + while (true) { + if (pos < values.length) { + values[pos++] = token.getValue(Long.class); + } + token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + break; + } + if (token.getType() != JsonTokenType.COMMA) { + throw new JsonParseException("JSON reader expected a ',' or a ')' but found '%s'.", token.getValue()); + } + token = popToken(); + if (token.getType() != JsonTokenType.INT32 && token.getType() != JsonTokenType.INT64) { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", token.getValue()); + } + } + if (pos == 1) { + return values[0]; + } else if (pos < 3 || pos > 7) { + throw new JsonParseException("JSON reader expected 1 or 3-7 integers but found %d.", pos); + } + + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + calendar.set(Calendar.YEAR, (int) values[0]); + calendar.set(Calendar.MONTH, (int) values[1]); + calendar.set(Calendar.DAY_OF_MONTH, (int) values[2]); + calendar.set(Calendar.HOUR_OF_DAY, (int) values[3]); + calendar.set(Calendar.MINUTE, (int) values[4]); + calendar.set(Calendar.SECOND, (int) values[5]); + calendar.set(Calendar.MILLISECOND, (int) values[6]); + return calendar.getTimeInMillis(); + } else { + throw new JsonParseException("JSON reader expected an integer or a string but found '%s'.", token.getValue()); + } + } + + private String visitDateTimeConstructorWithOutNew() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken token = popToken(); + if (token.getType() != JsonTokenType.RIGHT_PAREN) { + 
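// Illustration (not part of the patch): visitDateTimeConstructor() above mirrors
// the mongo shell and accepts several shapes:
//
//   new Date()                                -> current time as epoch millis
//   new Date(1595846400000)                   -> the given epoch millis
//   new Date(2020, 6, 27)                     -> 3 to 7 UTC calendar fields
//   new Date("Mon Jul 27 2020 00:00:00 UTC")  -> parsed as 'EEE MMM dd yyyy HH:mm:ss z'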
while (token.getType() != JsonTokenType.END_OF_FILE) { + token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + break; + } + } + if (token.getType() != JsonTokenType.RIGHT_PAREN) { + throw new JsonParseException("JSON reader expected a ')' but found '%s'.", token.getValue()); + } + } + + DateFormat df = new SimpleDateFormat("EEE MMM dd yyyy HH:mm:ss z", Locale.ENGLISH); + return df.format(new Date()); + } + + private BsonBinary visitBinDataExtendedJson(final String firstKey) { + + Mark mark = new Mark(); + + verifyToken(JsonTokenType.COLON); + + if (firstKey.equals("$binary")) { + JsonToken nextToken = popToken(); + if (nextToken.getType() == JsonTokenType.BEGIN_OBJECT) { + JsonToken nameToken = popToken(); + String firstNestedKey = nameToken.getValue(String.class); + byte[] data; + byte type; + if (firstNestedKey.equals("base64")) { + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + verifyToken(JsonTokenType.COMMA); + verifyString("subType"); + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + } else if (firstNestedKey.equals("subType")) { + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("base64"); + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + } else { + throw new JsonParseException("Unexpected key for $binary: " + firstNestedKey); + } + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonBinary(type, data); + } else { + mark.reset(); + return visitLegacyBinaryExtendedJson(firstKey); + } + } else { + mark.reset(); + return visitLegacyBinaryExtendedJson(firstKey); + } + } + + private BsonBinary visitLegacyBinaryExtendedJson(final String firstKey) { + + Mark mark = new Mark(); + + try { + verifyToken(JsonTokenType.COLON); + + byte[] data; + byte type; + + if (firstKey.equals("$binary")) { + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + verifyToken(JsonTokenType.COMMA); + verifyString("$type"); + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + } else { + type = readBinarySubtypeFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$binary"); + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + } + verifyToken(JsonTokenType.END_OBJECT); + + return new BsonBinary(type, data); + } catch (JsonParseException e) { + mark.reset(); + return null; + } catch (NumberFormatException e) { + mark.reset(); + return null; + } + } + + private byte readBinarySubtypeFromExtendedJson() { + JsonToken subTypeToken = popToken(); + if (subTypeToken.getType() != JsonTokenType.STRING && subTypeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected a string or number but found '%s'.", subTypeToken.getValue()); + } + + if (subTypeToken.getType() == JsonTokenType.STRING) { + return (byte) Integer.parseInt(subTypeToken.getValue(String.class), 16); + } else { + return subTypeToken.getValue(Integer.class).byteValue(); + } + } + + private long visitDateTimeExtendedJson() { + long value; + verifyToken(JsonTokenType.COLON); + JsonToken valueToken = popToken(); + if (valueToken.getType() == JsonTokenType.BEGIN_OBJECT) { + JsonToken nameToken = popToken(); + String name = nameToken.getValue(String.class); + if (!name.equals("$numberLong")) { + throw new JsonParseException( + 
String.format("JSON reader expected $numberLong within $date, but found %s", name)); + } + value = visitNumberLongExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + } else { + if (valueToken.getType() == JsonTokenType.INT32 || valueToken.getType() == JsonTokenType.INT64) { + value = valueToken.getValue(Long.class); + } else if (valueToken.getType() == JsonTokenType.STRING + || valueToken.getType() == JsonTokenType.UNQUOTED_STRING) { + + // Spring Data Customization START + + Object dt = bindableValueFor(valueToken).getValue(); + if (dt instanceof Date date) { + value = date.getTime(); + } else if (dt instanceof Number numberValue) { + value = NumberUtils.convertNumberToTargetClass(numberValue, Long.class); + } else { + try { + value = DateTimeFormatter.parse(dt.toString()); + } catch (IllegalArgumentException e) { + throw new JsonParseException(String.format("Failed to parse string '%s' as a date", dt), e); + } + } + + // Spring Data Customization END + } else { + throw new JsonParseException("JSON reader expected an integer or string but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.END_OBJECT); + } + return value; + } + + private MaxKey visitMaxKeyExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.INT32, 1); + verifyToken(JsonTokenType.END_OBJECT); + return new MaxKey(); + } + + private MinKey visitMinKeyExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.INT32, 1); + verifyToken(JsonTokenType.END_OBJECT); + return new MinKey(); + } + + private ObjectId visitObjectIdExtendedJson() { + verifyToken(JsonTokenType.COLON); + ObjectId objectId = new ObjectId(readStringFromExtendedJson()); + verifyToken(JsonTokenType.END_OBJECT); + return objectId; + } + + private BsonRegularExpression visitNewRegularExpressionExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + + String pattern; + String options = ""; + + String firstKey = readStringFromExtendedJson(); + if (firstKey.equals("pattern")) { + verifyToken(JsonTokenType.COLON); + pattern = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("options"); + verifyToken(JsonTokenType.COLON); + options = readStringFromExtendedJson(); + } else if (firstKey.equals("options")) { + verifyToken(JsonTokenType.COLON); + options = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("pattern"); + verifyToken(JsonTokenType.COLON); + pattern = readStringFromExtendedJson(); + } else { + throw new JsonParseException("Expected 't' and 'i' fields in $timestamp document but found " + firstKey); + } + + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonRegularExpression(pattern, options); + } + + private BsonRegularExpression visitRegularExpressionExtendedJson(final String firstKey) { + Mark extendedJsonMark = new Mark(); + + try { + verifyToken(JsonTokenType.COLON); + + String pattern; + String options = ""; + if (firstKey.equals("$regex")) { + pattern = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$options"); + verifyToken(JsonTokenType.COLON); + options = readStringFromExtendedJson(); + } else { + options = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$regex"); + verifyToken(JsonTokenType.COLON); + pattern = readStringFromExtendedJson(); + } + verifyToken(JsonTokenType.END_OBJECT); + return new BsonRegularExpression(pattern, options); + } catch 
(JsonParseException e) { + extendedJsonMark.reset(); + return null; + } + } + + private String readStringFromExtendedJson() { + JsonToken patternToken = popToken(); + + // Spring Data Customization START + + if (patternToken.getType() == JsonTokenType.STRING || patternToken.getType() == JsonTokenType.UNQUOTED_STRING) { + Object value = bindableValueFor(patternToken).getValue(); + return value != null ? value.toString() : null; + } + + throw new JsonParseException("JSON reader expected a string but found '%s'.", patternToken.getValue()); + + // Spring Data Customization END + } + + private String visitSymbolExtendedJson() { + verifyToken(JsonTokenType.COLON); + String symbol = readStringFromExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + return symbol; + } + + private BsonTimestamp visitTimestampExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + + int time; + int increment; + + String firstKey = readStringFromExtendedJson(); + if (firstKey.equals("t")) { + verifyToken(JsonTokenType.COLON); + time = readIntFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("i"); + verifyToken(JsonTokenType.COLON); + increment = readIntFromExtendedJson(); + } else if (firstKey.equals("i")) { + verifyToken(JsonTokenType.COLON); + increment = readIntFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("t"); + verifyToken(JsonTokenType.COLON); + time = readIntFromExtendedJson(); + } else { + throw new JsonParseException("Expected 't' and 'i' fields in $timestamp document but found " + firstKey); + } + + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonTimestamp(time, increment); + } + + private int readIntFromExtendedJson() { + JsonToken nextToken = popToken(); + int value; + if (nextToken.getType() == JsonTokenType.INT32) { + value = nextToken.getValue(Integer.class); + } else if (nextToken.getType() == JsonTokenType.INT64) { + value = nextToken.getValue(Long.class).intValue(); + } else { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", nextToken.getValue()); + } + return value; + } + + private BsonBinary visitUuidExtendedJson() { + verifyToken(JsonTokenType.COLON); + String hexString = this.readStringFromExtendedJson().replace("-", ""); + verifyToken(JsonTokenType.END_OBJECT); + try { + return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString)); + } catch (IllegalArgumentException e) { + throw new JsonParseException(e); + } + } + + private void visitJavaScriptExtendedJson() { + verifyToken(JsonTokenType.COLON); + String code = readStringFromExtendedJson(); + JsonToken nextToken = popToken(); + switch (nextToken.getType()) { + case COMMA: + verifyString("$scope"); + verifyToken(JsonTokenType.COLON); + setState(State.VALUE); + currentValue = code; + setCurrentBsonType(BsonType.JAVASCRIPT_WITH_SCOPE); + setContext(new Context(getContext(), BsonContextType.SCOPE_DOCUMENT)); + break; + case END_OBJECT: + currentValue = code; + setCurrentBsonType(BsonType.JAVASCRIPT); + break; + default: + throw new JsonParseException("JSON reader expected ',' or '}' but found '%s'.", nextToken); + } + } + + private BsonUndefined visitUndefinedExtendedJson() { + verifyToken(JsonTokenType.COLON); + JsonToken valueToken = popToken(); + if (!valueToken.getValue(String.class).equals("true")) { + throw new JsonParseException("JSON reader requires $undefined to have the value of true but found '%s'.", + valueToken.getValue()); + } + 
verifyToken(JsonTokenType.END_OBJECT); + return new BsonUndefined(); + } + + private Long visitNumberLongExtendedJson() { + verifyToken(JsonTokenType.COLON); + Long value; + String longAsString = readStringFromExtendedJson(); + try { + value = Long.valueOf(longAsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", longAsString, Long.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private Integer visitNumberIntExtendedJson() { + verifyToken(JsonTokenType.COLON); + Integer value; + String intAsString = readStringFromExtendedJson(); + try { + value = Integer.valueOf(intAsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", intAsString, Integer.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private Double visitNumberDoubleExtendedJson() { + verifyToken(JsonTokenType.COLON); + Double value; + String doubleAsString = readStringFromExtendedJson(); + try { + value = Double.valueOf(doubleAsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", doubleAsString, Double.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private Decimal128 visitNumberDecimalExtendedJson() { + verifyToken(JsonTokenType.COLON); + Decimal128 value; + String decimal128AsString = readStringFromExtendedJson(); + try { + value = Decimal128.parse(decimal128AsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", decimal128AsString, Decimal128.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private BsonDbPointer visitDbPointerExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + + String ref; + ObjectId oid; + + String firstKey = readStringFromExtendedJson(); + if (firstKey.equals("$ref")) { + verifyToken(JsonTokenType.COLON); + ref = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$id"); + oid = readDbPointerIdFromExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + } else if (firstKey.equals("$id")) { + oid = readDbPointerIdFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$ref"); + verifyToken(JsonTokenType.COLON); + ref = readStringFromExtendedJson(); + + } else { + throw new JsonParseException("Expected $ref and $id fields in $dbPointer document but found " + firstKey); + } + verifyToken(JsonTokenType.END_OBJECT); + return new BsonDbPointer(ref, oid); + } + + private ObjectId readDbPointerIdFromExtendedJson() { + ObjectId oid; + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + verifyToken(JsonTokenType.STRING, "$oid"); + oid = visitObjectIdExtendedJson(); + return oid; + } + + @Override + public BsonReaderMark getMark() { + return new Mark(); + } + + @Override + protected Context getContext() { + return (Context) super.getContext(); + } + + protected class Mark extends AbstractBsonReader.Mark { + private final JsonToken pushedToken; + private final Object currentValue; + private final int position; + + protected Mark() { + super(); + pushedToken = ParameterBindingJsonReader.this.pushedToken; + currentValue = ParameterBindingJsonReader.this.currentValue; + position = 
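// Illustration (not part of the patch): Mark captures the pushed-back token, the
// current value and the scanner position, which is what lets the $binary handling
// above probe the canonical { base64, subType } shape first and fall back to the
// legacy { $binary, $type } shape after reset():
//
//   val mark = getMark()   // remember pushedToken, currentValue, buffer position
//   // ... try one layout; on JsonParseException:
//   mark.reset()           // rewind and re-parse the same tokens differently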
ParameterBindingJsonReader.this.scanner.getBufferPosition(); + } + + public void reset() { + super.reset(); + ParameterBindingJsonReader.this.pushedToken = pushedToken; + ParameterBindingJsonReader.this.currentValue = currentValue; + ParameterBindingJsonReader.this.scanner.setBufferPosition(position); + ParameterBindingJsonReader.this.setContext(new Context(getParentContext(), getContextType())); + } + } + + protected class Context extends AbstractBsonReader.Context { + protected Context(final AbstractBsonReader.Context parentContext, final BsonContextType contextType) { + super(parentContext, contextType); + } + + protected Context getParentContext() { + return (Context) super.getParentContext(); + } + + protected BsonContextType getContextType() { + return super.getContextType(); + } + } + + private static byte[] decodeHex(final String hex) { + if (hex.length() % 2 != 0) { + throw new IllegalArgumentException("A hex string must contain an even number of characters: " + hex); + } + + byte[] out = new byte[hex.length() / 2]; + + for (int i = 0; i < hex.length(); i += 2) { + int high = Character.digit(hex.charAt(i), 16); + int low = Character.digit(hex.charAt(i + 1), 16); + if (high == -1 || low == -1) { + throw new IllegalArgumentException("A hex string can only contain the characters 0-9, A-F, a-f: " + hex); + } + + out[i / 2] = (byte) (high * 16 + low); + } + + return out; + } + + // Spring Data Customization START + + static class BindableValue { + + private BsonType type; + private Object value; + private int index; + + BindableValue() {} + + BsonType getType() { + return type; + } + + void setType(BsonType type) { + this.type = type; + } + + Object getValue() { + return value; + } + + void setValue(Object value) { + this.value = value; + } + + int getIndex() { + return index; + } + + void setIndex(int index) { + this.index = index; + } + } + + // Spring Data Customization END +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ValueProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ValueProvider.java new file mode 100644 index 0000000000..8f1d23885d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ValueProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import org.springframework.lang.Nullable; + +/** + * A value provider to retrieve bindable values by their parameter index. + * + * @author Christoph Strobl + * @since 2.2 + */ +@FunctionalInterface +public interface ValueProvider { + + /** + * @param index parameter index to use. + * @return can be {@literal null}. + * @throws RuntimeException if the requested element does not exist. 
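// Illustration (not part of the patch): ValueProvider is a functional interface,
// so the repository infrastructure can expose its parameter accessor as a lambda
// (Kotlin sketch; `args` is hypothetical):
//
//   val args = arrayOf<Any?>("luke", 42)
//   val valueProvider = ValueProvider { index -> args[index] }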
+ */ + @Nullable + Object getBindableValue(int index); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/package-info.java new file mode 100644 index 0000000000..8a86b3522b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/package-info.java @@ -0,0 +1,5 @@ +/** + * MongoDB driver-specific utility classes for Json conversion. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.util.json; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/package-info.java index bfee8e6608..7caec410f5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/package-info.java @@ -1,20 +1,6 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ /** - * - * @author Thomas Darimont + * MongoDB driver-specific utility classes for {@link org.bson.conversions.Bson} and {@link com.mongodb.DBObject} + * interaction. */ -package org.springframework.data.mongodb.util; \ No newline at end of file +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.util; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java new file mode 100644 index 0000000000..9fa66b3b2b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java @@ -0,0 +1,66 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.spel; + +import java.util.function.Supplier; + +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.expression.ParserContext; +import org.springframework.expression.common.LiteralExpression; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Internal utility class for dealing with {@link Expression} and potential ones. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class ExpressionUtils { + + private static final SpelExpressionParser PARSER = new SpelExpressionParser(); + + /** + * Returns a SpEL {@link Expression} if the given {@link String} is actually an expression that does not evaluate to a + * {@link LiteralExpression} (indicating that no subsequent evaluation is necessary). + * + * @param potentialExpression can be {@literal null} + * @return can be {@literal null}. + */ + @Nullable + public static Expression detectExpression(@Nullable String potentialExpression) { + + if (!StringUtils.hasText(potentialExpression)) { + return null; + } + + Expression expression = PARSER.parseExpression(potentialExpression, ParserContext.TEMPLATE_EXPRESSION); + return expression instanceof LiteralExpression ? null : expression; + } + + @Nullable + public static Object evaluate(String value, Supplier<EvaluationContext> evaluationContext) { + + Expression expression = detectExpression(value); + if (expression == null) { + return value; + } + + return expression.getValue(evaluationContext.get(), Object.class); + } +} diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/BulkOperationsExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/BulkOperationsExtensions.kt new file mode 100644 index 0000000000..4c8545ee8c --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/BulkOperationsExtensions.kt @@ -0,0 +1,52 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.Update +import org.springframework.data.mongodb.core.query.UpdateDefinition +import org.springframework.data.util.Pair.of + +/** + * Extension for [BulkOperations.updateMulti] that converts a list of [kotlin.Pair] to list of [org.springframework.data.util.Pair]. + * + * @author 2tsumo-hitori + * @since 4.5 + */ +fun BulkOperations.updateMulti(kotlinPairs: List<Pair<Query, UpdateDefinition>>): BulkOperations = + updateMulti(kotlinPairs.toSpringPairs()) + +/** + * Extension for [BulkOperations.upsert] that converts a list of [kotlin.Pair] to list of [org.springframework.data.util.Pair].
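// Illustration (not part of the patch): with the extension above, bulk updates can
// be expressed with Kotlin pairs directly (MongoTemplate `template` and a Person
// document class are hypothetical):
//
//   template.bulkOps(BulkMode.UNORDERED, Person::class.java)
//       .updateMulti(listOf(
//           Query(where("lastname").`is`("skywalker")) to Update().set("jedi", true)))
//       .execute()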
+ * + * @author 2tsumo-hitori + * @since 4.5 + */ +fun BulkOperations.upsert(kotlinPairs: List<Pair<Query, Update>>): BulkOperations = + upsert(kotlinPairs.toSpringPairs()) + +/** + * Extension for [BulkOperations.updateOne] that converts a [kotlin.Pair] to [org.springframework.data.util.Pair]. + * + * @author 2tsumo-hitori + * @since 4.5 + */ +fun BulkOperations.updateOne(kotlinPairs: List<Pair<Query, UpdateDefinition>>): BulkOperations = + updateOne(kotlinPairs.toSpringPairs()) + +private fun <T : UpdateDefinition> List<Pair<Query, T>>.toSpringPairs(): List<org.springframework.data.util.Pair<Query, T>> { + return map { (first, second) -> of(first, second) } +} diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt new file mode 100644 index 0000000000..c995624f38 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt @@ -0,0 +1,26 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +/** + * Extension for [ExecutableAggregationOperation.aggregateAndReturn] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @author Mark Paluch + * @since 2.0 + */ +inline fun <reified T : Any> ExecutableAggregationOperation.aggregateAndReturn(): ExecutableAggregationOperation.ExecutableAggregation<T> = + aggregateAndReturn(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt new file mode 100644 index 0000000000..cc0d3c7486 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt @@ -0,0 +1,68 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import org.springframework.data.mapping.toDotPath +import kotlin.reflect.KProperty +import kotlin.reflect.KProperty1 + +/** + * Extension for [ExecutableFindOperation.query] leveraging reified type parameters.
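// Illustration (not part of the patch): the reified variant above removes the
// explicit Class parameter (`template` and a SalesByRegion type are hypothetical):
//
//   val results = template.aggregateAndReturn<SalesByRegion>()
//       .by(newAggregation(group("region").sum("amount").`as`("total")))
//       .all()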
+ * + * @author Sebastien Deleuze + * @author Mark Paluch + * @since 2.0 + */ +inline fun <reified T : Any> ExecutableFindOperation.query(): ExecutableFindOperation.ExecutableFind<T> = + query(T::class.java) + +/** + * Extension for [ExecutableFindOperation.query] for a type-safe projection of distinct values. + * + * @author Mark Paluch + * @since 3.0 + */ +inline fun <reified T : Any> ExecutableFindOperation.distinct(field: KProperty1<T, *>): ExecutableFindOperation.TerminatingDistinct<Any> = + query(T::class.java).distinct(field.name) + +/** + * Extension for [ExecutableFindOperation.FindWithProjection.as] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @author Mark Paluch + * @since 2.0 + */ +inline fun <reified T : Any> ExecutableFindOperation.FindWithProjection<*>.asType(): ExecutableFindOperation.FindWithQuery<T> = + `as`(T::class.java) + +/** + * Extension for [ExecutableFindOperation.DistinctWithProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +inline fun <reified T : Any> ExecutableFindOperation.DistinctWithProjection.asType(): ExecutableFindOperation.TerminatingDistinct<T> = + `as`(T::class.java) + +/** + * Extension for [ExecutableFindOperation.FindDistinct.distinct] leveraging KProperty. + * + * @author Mark Paluch + * @since 3.0 + */ +fun ExecutableFindOperation.FindDistinct.distinct(key: KProperty<*>): ExecutableFindOperation.TerminatingDistinct<Any> = + distinct(key.toDotPath()) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt new file mode 100644 index 0000000000..153cc03381 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt @@ -0,0 +1,26 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +/** + * Extension for [ExecutableInsertOperation.insert] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @author Mark Paluch + * @since 2.0 + */ +inline fun <reified T : Any> ExecutableInsertOperation.insert(): ExecutableInsertOperation.ExecutableInsert<T> = + insert(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensions.kt new file mode 100644 index 0000000000..fb5551c363 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensions.kt @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
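// Illustration (not part of the patch): the KProperty-based distinct above gives a
// type-safe field reference (`template` and Person are hypothetical):
//
//   val lastnames = template.distinct(Person::lastname).all()
//   // equivalent to: template.query(Person::class.java).distinct("lastname").all()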
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +/** + * Extension for [ExecutableMapReduceOperation.mapReduce] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun <reified T : Any> ExecutableMapReduceOperation.mapReduce(): ExecutableMapReduceOperation.MapReduceWithMapFunction<T> = + mapReduce(T::class.java) + +/** + * Extension for [ExecutableMapReduceOperation.MapReduceWithProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun <reified T : Any> ExecutableMapReduceOperation.MapReduceWithProjection<*>.asType(): ExecutableMapReduceOperation.MapReduceWithQuery<T> = + `as`(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt new file mode 100644 index 0000000000..d91edb5150 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt @@ -0,0 +1,26 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +/** + * Extension for [ExecutableRemoveOperation.remove] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @author Mark Paluch + * @since 2.0 + */ +inline fun <reified T : Any> ExecutableRemoveOperation.remove(): ExecutableRemoveOperation.ExecutableRemove<T> = + remove(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt new file mode 100644 index 0000000000..26c9e6b796 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt @@ -0,0 +1,25 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +/** + * Extension for [ExecutableUpdateOperation.update] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.0 + */ +inline fun ExecutableUpdateOperation.update(): ExecutableUpdateOperation.ExecutableUpdate = + update(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt new file mode 100644 index 0000000000..25af7bd18b --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt @@ -0,0 +1,348 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import com.mongodb.client.MongoCollection +import com.mongodb.client.result.DeleteResult +import com.mongodb.client.result.UpdateResult +import org.bson.Document +import org.springframework.data.geo.GeoResults +import org.springframework.data.mongodb.core.BulkOperations.BulkMode +import org.springframework.data.mongodb.core.aggregation.Aggregation +import org.springframework.data.mongodb.core.aggregation.AggregationResults +import org.springframework.data.mongodb.core.index.IndexOperations +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions +import org.springframework.data.mongodb.core.mapreduce.MapReduceResults +import org.springframework.data.mongodb.core.query.NearQuery +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.UpdateDefinition +import java.util.stream.Stream + +/** + * Extension for [MongoOperations.getCollectionName] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.getCollectionName(): String = + getCollectionName(T::class.java) + +/** + * Extension for [MongoOperations.execute] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.execute(action: CollectionCallback): T? = + execute(T::class.java, action) + +/** + * Extension for [MongoOperations.stream] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.stream(query: Query): Stream = + stream(query, T::class.java) + +/** + * Extension for [MongoOperations.stream] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.stream( + query: Query, + collectionName: String? = null +): Stream = + if (collectionName != null) stream(query, T::class.java, collectionName) + else stream(query, T::class.java) + +/** + * Extension for [MongoOperations.createCollection] leveraging reified type parameters. 
+ * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.createCollection( + collectionOptions: CollectionOptions? = null): MongoCollection = + if (collectionOptions != null) createCollection(T::class.java, collectionOptions) + else createCollection(T::class.java) + +/** + * Extension for [MongoOperations.collectionExists] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.collectionExists(): Boolean = + collectionExists(T::class.java) + +/** + * Extension for [MongoOperations.dropCollection] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.dropCollection() { + dropCollection(T::class.java) +} + +/** + * Extension for [MongoOperations.indexOps] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.indexOps(): IndexOperations = + indexOps(T::class.java) + +/** + * Extension for [MongoOperations.bulkOps] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.bulkOps(bulkMode: BulkMode, collectionName: String? = null): BulkOperations = + if (collectionName != null) bulkOps(bulkMode, T::class.java, collectionName) + else bulkOps(bulkMode, T::class.java) + +/** + * Extension for [MongoOperations.findAll] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.findAll(collectionName: String? = null): List = + if (collectionName != null) findAll(T::class.java, collectionName) else findAll(T::class.java) + +/** + * Extension for [MongoOperations.aggregate] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 3.2 + */ +inline fun MongoOperations.aggregate(aggregation: Aggregation): AggregationResults = + aggregate(aggregation, I::class.java, O::class.java) + +/** + * Extension for [MongoOperations.aggregate] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.aggregate( + aggregation: Aggregation, + collectionName: String +): AggregationResults = + aggregate(aggregation, collectionName, O::class.java) + +/** + * Extension for [MongoOperations.aggregateStream] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 3.2 + */ +inline fun MongoOperations.aggregateStream(aggregation: Aggregation): Stream = + aggregateStream(aggregation, I::class.java, O::class.java) + +/** + * Extension for [MongoOperations.aggregateStream] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.aggregateStream( + aggregation: Aggregation, + collectionName: String +): Stream = + aggregateStream(aggregation, collectionName, O::class.java) + +/** + * Extension for [MongoOperations.mapReduce] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.mapReduce(collectionName: String, mapFunction: String, reduceFunction: String, options: MapReduceOptions? = null): MapReduceResults = + if (options != null) mapReduce(collectionName, mapFunction, reduceFunction, options, T::class.java) + else mapReduce(collectionName, mapFunction, reduceFunction, T::class.java) + +/** + * Extension for [MongoOperations.mapReduce] leveraging reified type parameters. 
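A recurring design choice in `MongoOperationsExtensions.kt` is that one Kotlin extension with a `collectionName: String? = null` default stands in for two Java overloads. A sketch of both call shapes, with a made-up collection name and the hypothetical `Person` entity:

```kotlin
import org.springframework.data.mongodb.core.MongoOperations
import org.springframework.data.mongodb.core.findAll

fun loadEveryone(ops: MongoOperations): List<Person> {
    val fromMappedCollection = ops.findAll<Person>()                  // findAll(Person::class.java)
    val fromLegacyCollection = ops.findAll<Person>("legacy_people")   // findAll(Person::class.java, "legacy_people")
    return fromMappedCollection + fromLegacyCollection
}
```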
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+inline fun MongoOperations.mapReduce(query: Query, collectionName: String, mapFunction: String, reduceFunction: String, options: MapReduceOptions? = null): MapReduceResults =
+	if (options != null) mapReduce(query, collectionName, mapFunction, reduceFunction, options, T::class.java)
+	else mapReduce(query, collectionName, mapFunction, reduceFunction, T::class.java)
+
+/**
+ * Extension for [MongoOperations.geoNear] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+@Suppress("DEPRECATION")
+@Deprecated("Since 2.2, the `geoNear` command has been removed in MongoDB Server 4.2.0. Use Aggregations with `Aggregation.geoNear(NearQuery, String)` instead.", replaceWith = ReplaceWith("aggregate()"))
+inline fun MongoOperations.geoNear(near: NearQuery, collectionName: String? = null): GeoResults =
+	if (collectionName != null) geoNear(near, T::class.java, collectionName)
+	else geoNear(near, T::class.java)
+
+/**
+ * Extension for [MongoOperations.findOne] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+inline fun MongoOperations.findOne(query: Query, collectionName: String? = null): T? =
+	if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java)
+
+/**
+ * Extension for [MongoOperations.exists] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+@Suppress("EXTENSION_SHADOWED_BY_MEMBER")
+inline fun MongoOperations.exists(query: Query, collectionName: String? = null): Boolean =
+	if (collectionName != null) exists(query, T::class.java, collectionName)
+	else exists(query, T::class.java)
+
+/**
+ * Extension for [MongoOperations.find] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+inline fun MongoOperations.find(query: Query, collectionName: String? = null): List =
+	if (collectionName != null) find(query, T::class.java, collectionName)
+	else find(query, T::class.java)
+
+/**
+ * Extension for [MongoOperations.findById] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+inline fun MongoOperations.findById(id: Any, collectionName: String? = null): T? =
+	if (collectionName != null) findById(id, T::class.java, collectionName)
+	else findById(id, T::class.java)
+
+/**
+ * Extension for [MongoOperations.findDistinct] leveraging reified type parameters.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 2.1
+ */
+inline fun MongoOperations.findDistinct(query: Query, field: String, collectionName: String? = null): List =
+	if (collectionName != null) findDistinct(query, field, collectionName, E::class.java, T::class.java)
+	else findDistinct(query, field, E::class.java, T::class.java)
+
+/**
+ * Extension for [MongoOperations.findAndModify] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+inline fun MongoOperations.findAndModify(query: Query, update: UpdateDefinition, options: FindAndModifyOptions, collectionName: String? = null): T? =
+	if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName)
+	else findAndModify(query, update, options, T::class.java)
+
+/**
+ * Extension for [MongoOperations.findAndRemove] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.0
+ */
+inline fun MongoOperations.findAndRemove(query: Query, collectionName: String?
= + if (collectionName != null) findAndRemove(query, T::class.java, collectionName) + else findAndRemove(query, T::class.java) + +/** + * Extension for [MongoOperations.count] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.count(query: Query = Query(), collectionName: String? = null): Long = + if (collectionName != null) count(query, T::class.java, collectionName) else count(query, T::class.java) + +/** + * Extension for [MongoOperations.insert] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 2.2 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.insert(batchToSave: Collection): Collection = insert(batchToSave, T::class.java) + +/** + * Extension for [MongoOperations.upsert] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.upsert(query: Query, update: UpdateDefinition, collectionName: String? = null): UpdateResult = + if (collectionName != null) upsert(query, update, T::class.java, collectionName) + else upsert(query, update, T::class.java) + +/** + * Extension for [MongoOperations.updateFirst] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.updateFirst(query: Query, update: UpdateDefinition, collectionName: String? = null): UpdateResult = + if (collectionName != null) updateFirst(query, update, T::class.java, collectionName) + else updateFirst(query, update, T::class.java) + +/** + * Extension for [MongoOperations.updateMulti] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.updateMulti(query: Query, update: UpdateDefinition, collectionName: String? = null): UpdateResult = + if (collectionName != null) updateMulti(query, update, T::class.java, collectionName) + else updateMulti(query, update, T::class.java) + +/** + * Extension for [MongoOperations.remove] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.remove(query: Query, collectionName: String? = null): DeleteResult = + if (collectionName != null) remove(query, T::class.java, collectionName) + else remove(query, T::class.java) + +/** + * Extension for [MongoOperations.findAllAndRemove] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun MongoOperations.findAllAndRemove(query: Query): List = + findAllAndRemove(query, T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt new file mode 100644 index 0000000000..e59c5786bc --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt @@ -0,0 +1,37 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
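The shadowed update extensions above combine naturally with the `isEqualTo` criteria alias added later in this change. A hedged sketch, assuming the hypothetical `Person` entity:

```kotlin
import com.mongodb.client.result.UpdateResult
import org.springframework.data.mongodb.core.MongoOperations
import org.springframework.data.mongodb.core.updateFirst
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query
import org.springframework.data.mongodb.core.query.Update
import org.springframework.data.mongodb.core.query.isEqualTo

fun renameFirstMatch(ops: MongoOperations): UpdateResult =
    ops.updateFirst<Person>(
        query(where("name").isEqualTo("Bob")),  // isEqualTo is this change's Kotlin alias for `is`
        Update().set("name", "Robert")
    )
```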
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core
+
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.reactive.asFlow
+
+/**
+ * Extension for [ReactiveAggregationOperation.aggregateAndReturn] leveraging reified type parameters.
+ *
+ * @author Mark Paluch
+ * @since 2.0
+ */
+inline fun ReactiveAggregationOperation.aggregateAndReturn(): ReactiveAggregationOperation.ReactiveAggregation =
+	aggregateAndReturn(T::class.java)
+
+/**
+ * Coroutines [Flow] variant of [ReactiveAggregationOperation.TerminatingAggregationOperation.all].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+fun ReactiveAggregationOperation.TerminatingAggregationOperation.flow(): Flow =
+	all().asFlow()
diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensions.kt
new file mode 100644
index 0000000000..d589c32285
--- /dev/null
+++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensions.kt
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core
+
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.reactive.asFlow
+
+/**
+ * Extension for [ReactiveChangeStreamOperation.changeStream] leveraging reified type parameters.
+ *
+ * @author Christoph Strobl
+ * @since 2.2
+ */
+inline fun ReactiveChangeStreamOperation.changeStream(): ReactiveChangeStreamOperation.ReactiveChangeStream =
+	changeStream(T::class.java)
+
+/**
+ * Extension for [ReactiveChangeStreamOperation.ChangeStreamWithFilterAndProjection.as] leveraging reified type parameters.
+ *
+ * @author Christoph Strobl
+ * @since 2.2
+ */
+inline fun ReactiveChangeStreamOperation.ChangeStreamWithFilterAndProjection<*>.asType(): ReactiveChangeStreamOperation.ChangeStreamWithFilterAndProjection =
+	`as`(T::class.java)
+
+/**
+ * Coroutines [Flow] variant of [ReactiveChangeStreamOperation.TerminatingChangeStream.listen].
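For the change stream extension above, a sketch of how the reified variant and the `Flow` conversion fit together; the `person` collection name, the entity, and the template variable are assumptions for illustration:

```kotlin
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.reactive.asFlow
import org.springframework.data.mongodb.core.ReactiveMongoTemplate
import org.springframework.data.mongodb.core.changeStream
import org.springframework.data.mongodb.core.messaging.ChangeStreamEvent

fun watchPeople(template: ReactiveMongoTemplate): Flow<ChangeStreamEvent<Person>> =
    template.changeStream<Person>()     // instead of changeStream(Person::class.java)
        .watchCollection("person")      // hypothetical collection name
        .listen()                       // Flux<ChangeStreamEvent<Person>>
        .asFlow()                       // what the flow() extension below does internally
```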
+ * + * @author Christoph Strobl + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveChangeStreamOperation.TerminatingChangeStream.flow(): Flow> = + listen().asFlow() + diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt new file mode 100644 index 0000000000..da1cb7d333 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt @@ -0,0 +1,157 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow +import kotlinx.coroutines.reactive.awaitFirstOrNull +import kotlinx.coroutines.reactive.awaitSingle +import org.springframework.data.geo.GeoResult +import org.springframework.data.mapping.toDotPath +import kotlin.reflect.KProperty +import kotlin.reflect.KProperty1 + +/** + * Extension for [ReactiveFindOperation.query] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 2.0 + */ +inline fun ReactiveFindOperation.query(): ReactiveFindOperation.ReactiveFind = + query(T::class.java) + +/** + * Extension for [ReactiveFindOperation.query] for a type-safe projection of distinct values. + * + * @author Mark Paluch + * @since 3.0 + */ +inline fun ReactiveFindOperation.distinct(field : KProperty1): ReactiveFindOperation.TerminatingDistinct = + query(T::class.java).distinct(field.name) + +/** + * Extension for [ReactiveFindOperation.FindWithProjection.as] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 2.0 + */ +inline fun ReactiveFindOperation.FindWithProjection<*>.asType(): ReactiveFindOperation.FindWithQuery = + `as`(T::class.java) + +/** + * Extension for [ReactiveFindOperation.DistinctWithProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ReactiveFindOperation.DistinctWithProjection.asType(): ReactiveFindOperation.TerminatingDistinct = + `as`(T::class.java) + +/** + * Extension for [ReactiveFindOperation.FindDistinct.distinct] leveraging KProperty. + * + * @author Mark Paluch + * @since 3.0 + */ +fun ReactiveFindOperation.FindDistinct.distinct(key: KProperty<*>): ReactiveFindOperation.TerminatingDistinct = + distinct(key.toDotPath()) + +/** + * Non-nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.one]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitOne(): T = + one().awaitSingle() + +/** + * Nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.one]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitOneOrNull(): T? 
= + one().awaitFirstOrNull() + +/** + * Non-nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.first]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitFirst(): T = + first().awaitSingle() + +/** + * Nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.first]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitFirstOrNull(): T? = + first().awaitFirstOrNull() + +/** + * Coroutines variant of [ReactiveFindOperation.TerminatingFind.count]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveFindOperation.TerminatingFind.awaitCount(): Long = + count().awaitSingle() + +/** + * Coroutines variant of [ReactiveFindOperation.TerminatingFind.exists]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveFindOperation.TerminatingFind.awaitExists(): Boolean = + exists().awaitSingle() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingFind.all]. + * + * @author Sebastien Deleuze + */ +fun ReactiveFindOperation.TerminatingFind.flow(): Flow = + all().asFlow() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingFind.tail]. + * + * @author Sebastien Deleuze + */ +fun ReactiveFindOperation.TerminatingFind.tailAsFlow(): Flow = + tail().asFlow() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingFindNear.all]. + * + * @author Sebastien Deleuze + */ +fun ReactiveFindOperation.TerminatingFindNear.flow(): Flow> = + all().asFlow() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingDistinct.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveFindOperation.TerminatingDistinct.flow(): Flow = + all().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt new file mode 100644 index 0000000000..2ed40aa074 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt @@ -0,0 +1,48 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow +import kotlinx.coroutines.reactive.awaitSingle + +/** + * Extension for [ReactiveInsertOperation.insert] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 2.0 + */ +inline fun ReactiveInsertOperation.insert(): ReactiveInsertOperation.ReactiveInsert = + insert(T::class.java) + +/** + * Coroutines variant of [ReactiveInsertOperation.TerminatingInsert.one]. 
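The `await*` terminators above let callers stay inside a `suspend` function instead of handling `Mono` directly. A minimal sketch with the hypothetical `Person` entity and a `ReactiveMongoTemplate`:

```kotlin
import org.springframework.data.mongodb.core.ReactiveMongoTemplate
import org.springframework.data.mongodb.core.awaitOneOrNull
import org.springframework.data.mongodb.core.query
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query
import org.springframework.data.mongodb.core.query.isEqualTo

suspend fun findPerson(template: ReactiveMongoTemplate, name: String): Person? =
    template.query<Person>()
        .matching(query(where("name").isEqualTo(name)))
        .awaitOneOrNull()               // suspends; returns null instead of an empty Mono
```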
+ * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveInsertOperation.TerminatingInsert.oneAndAwait(o: T): T = + one(o).awaitSingle() + + +/** + * Coroutines [Flow] variant of [ReactiveInsertOperation.TerminatingInsert.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveInsertOperation.TerminatingInsert.flow(objects: Collection): Flow = + all(objects).asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensions.kt new file mode 100644 index 0000000000..7fc4678100 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensions.kt @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow + +/** + * Extension for [ReactiveMapReduceOperation.mapReduce] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ReactiveMapReduceOperation.mapReduce(): ReactiveMapReduceOperation.MapReduceWithMapFunction = + mapReduce(T::class.java) + +/** + * Extension for [ReactiveMapReduceOperation.MapReduceWithProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ReactiveMapReduceOperation.MapReduceWithProjection<*>.asType(): ReactiveMapReduceOperation.MapReduceWithQuery = + `as`(T::class.java) + + +/** + * Coroutines [Flow] variant of [ReactiveMapReduceOperation.TerminatingMapReduce.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveMapReduceOperation.TerminatingMapReduce.flow(): Flow = + all().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt new file mode 100644 index 0000000000..fda959be64 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt @@ -0,0 +1,291 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
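A sketch of the insert-and-await combination from the extensions above, again assuming the hypothetical `Person` entity:

```kotlin
import org.springframework.data.mongodb.core.ReactiveMongoTemplate
import org.springframework.data.mongodb.core.insert
import org.springframework.data.mongodb.core.oneAndAwait

suspend fun addPerson(template: ReactiveMongoTemplate): Person =
    template.insert<Person>().oneAndAwait(Person("Ada", 36))  // suspends until the insert completes
```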
+ */ +package org.springframework.data.mongodb.core + +import com.mongodb.client.result.DeleteResult +import com.mongodb.client.result.UpdateResult +import com.mongodb.reactivestreams.client.MongoCollection +import org.bson.Document +import org.springframework.data.geo.GeoResult +import org.springframework.data.mongodb.core.aggregation.Aggregation +import org.springframework.data.mongodb.core.aggregation.TypedAggregation +import org.springframework.data.mongodb.core.index.ReactiveIndexOperations +import org.springframework.data.mongodb.core.query.NearQuery +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.UpdateDefinition +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono + +/** + * Extension for [ReactiveMongoOperations.indexOps] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.indexOps(): ReactiveIndexOperations = + indexOps(T::class.java) + +/** + * Extension for [ReactiveMongoOperations.execute] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.execute(action: ReactiveCollectionCallback): Flux = + execute(T::class.java, action) + +/** + * Extension for [ReactiveMongoOperations.createCollection] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.createCollection(collectionOptions: CollectionOptions? = null): Mono> = + if (collectionOptions != null) createCollection(T::class.java, collectionOptions) else createCollection(T::class.java) + +/** + * Extension for [ReactiveMongoOperations.collectionExists] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.collectionExists(): Mono = + collectionExists(T::class.java) + +/** + * Extension for [ReactiveMongoOperations.dropCollection] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.dropCollection(): Mono = + dropCollection(T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findAll] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.findAll(collectionName: String? = null): Flux = + if (collectionName != null) findAll(T::class.java, collectionName) else findAll(T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findOne] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.findOne(query: Query, collectionName: String? = null): Mono = + if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.exists] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.exists(query: Query, collectionName: String? = null): Mono = + if (collectionName != null) exists(query, T::class.java, collectionName) else exists(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.find] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.find(query: Query, collectionName: String? 
= null): Flux = + if (collectionName != null) find(query, T::class.java, collectionName) else find(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findById] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.findById(id: Any, collectionName: String? = null): Mono = + if (collectionName != null) findById(id, T::class.java, collectionName) else findById(id, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findDistinct] leveraging reified type parameters. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +inline fun ReactiveMongoOperations.findDistinct(query: Query, field: String, collectionName: String? = null): Flux = + if (collectionName != null) findDistinct(query, field, collectionName, E::class.java, T::class.java) + else findDistinct(query, field, E::class.java, T::class.java) + + +/** + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. + * + * @author Wonwoo Lee + * @since 3.1.4 + */ +inline fun ReactiveMongoOperations.aggregate( + aggregation: TypedAggregation<*>, + collectionName: String +): Flux = + this.aggregate(aggregation, collectionName, O::class.java) + +/** + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. + * + * @author Wonwoo Lee + * @since 3.1.4 + */ +inline fun ReactiveMongoOperations.aggregate(aggregation: TypedAggregation<*>): Flux = + this.aggregate(aggregation, O::class.java) + +/** + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. + * + * @author Wonwoo Lee + * @author Mark Paluch + * @since 3.1.4 + */ +inline fun ReactiveMongoOperations.aggregate( + aggregation: Aggregation +): Flux = + this.aggregate(aggregation, I::class.java, O::class.java) + +/** + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. + * + * @author Wonwoo Lee + * @since 3.1.4 + */ +inline fun ReactiveMongoOperations.aggregate( + aggregation: Aggregation, + collectionName: String +): Flux = + this.aggregate(aggregation, collectionName, O::class.java) + +/** + * Extension for [ReactiveMongoOperations.geoNear] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("DEPRECATION") +@Deprecated("Since 2.2, the `geoNear` command has been removed in MongoDB Server 4.2.0. Use Aggregations with `Aggregation.geoNear(NearQuery, String)` instead.", replaceWith = ReplaceWith("aggregate()")) +inline fun ReactiveMongoOperations.geoNear(near: NearQuery, collectionName: String? = null): Flux> = + if (collectionName != null) geoNear(near, T::class.java, collectionName) else geoNear(near, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findAndModify] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.findAndModify(query: Query, update: UpdateDefinition, options: FindAndModifyOptions, collectionName: String? = null): Mono = + if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName) else findAndModify(query, update, options, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findAndRemove] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.findAndRemove(query: Query, collectionName: String? 
= null): Mono = + if (collectionName != null) findAndRemove(query, T::class.java, collectionName) + else findAndRemove(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.count] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.count(query: Query = Query(), collectionName: String? = null): Mono = + if (collectionName != null) count(query, T::class.java, collectionName) + else count(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.insert] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 2.2 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.insert(batchToSave: Collection): Flux = insert(batchToSave, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.upsert] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.upsert(query: Query, update: UpdateDefinition, collectionName: String? = null): Mono = + if (collectionName != null) upsert(query, update, T::class.java, collectionName) + else upsert(query, update, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.updateFirst] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.updateFirst(query: Query, update: UpdateDefinition, collectionName: String? = null): Mono = + if (collectionName != null) updateFirst(query, update, T::class.java, collectionName) + else updateFirst(query, update, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.updateMulti] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.updateMulti(query: Query, update: UpdateDefinition, collectionName: String? = null): Mono = + if (collectionName != null) updateMulti(query, update, T::class.java, collectionName) + else updateMulti(query, update, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.remove] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.remove(query: Query, collectionName: String? = null): Mono = + if (collectionName != null) remove(query, T::class.java, collectionName) + else remove(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.findAllAndRemove] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.findAllAndRemove(query: Query): Flux = + findAllAndRemove(query, T::class.java) + +/** + * Extension for [ReactiveMongoOperations.tail] leveraging reified type parameters. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +inline fun ReactiveMongoOperations.tail(query: Query, collectionName: String? 
= null): Flux = + if (collectionName != null) tail(query, T::class.java, collectionName) else tail(query, T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt new file mode 100644 index 0000000000..5cf7ccf514 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt @@ -0,0 +1,49 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import com.mongodb.client.result.DeleteResult +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow +import kotlinx.coroutines.reactive.awaitSingle + +/** + * Extension for [ReactiveRemoveOperation.remove] leveraging reified type parameters. + * + * @author Mark Paluch + * @since 2.0 + */ +inline fun ReactiveRemoveOperation.remove(): ReactiveRemoveOperation.ReactiveRemove = + remove(T::class.java) + +/** + * Coroutines variant of [ReactiveRemoveOperation.TerminatingRemove.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveRemoveOperation.TerminatingRemove.allAndAwait(): DeleteResult = + all().awaitSingle() + + +/** + * Coroutines [Flow] variant of [ReactiveRemoveOperation.TerminatingRemove.findAndRemove]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveRemoveOperation.TerminatingRemove.findAndRemoveAsFlow(): Flow = + findAndRemove().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt new file mode 100644 index 0000000000..0c603dcaed --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt @@ -0,0 +1,100 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import com.mongodb.client.result.UpdateResult +import kotlinx.coroutines.reactive.awaitFirstOrNull +import kotlinx.coroutines.reactive.awaitSingle + +/** + * Extension for [ReactiveUpdateOperation.update] leveraging reified type parameters. 
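A sketch combining the reactive reified `remove<T>()` with `allAndAwait()`; the entity and criteria are illustrative:

```kotlin
import org.springframework.data.mongodb.core.ReactiveMongoTemplate
import org.springframework.data.mongodb.core.allAndAwait
import org.springframework.data.mongodb.core.remove
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query

suspend fun purgeMinors(template: ReactiveMongoTemplate): Long =
    template.remove<Person>()
        .matching(query(where("age").lt(18)))
        .allAndAwait()                  // suspends for the DeleteResult
        .deletedCount
```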
+ *
+ * @author Mark Paluch
+ * @since 2.0
+ */
+inline fun ReactiveUpdateOperation.update(): ReactiveUpdateOperation.ReactiveUpdate =
+	update(T::class.java)
+
+/**
+ * Non-nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndModify.findAndModify].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingFindAndModify.findModifyAndAwait(): T =
+	findAndModify().awaitSingle()
+
+/**
+ * Nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndModify.findAndModify].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingFindAndModify.findModifyAndAwaitOrNull(): T? =
+	findAndModify().awaitFirstOrNull()
+
+/**
+ * Non-nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndReplace.findAndReplace].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingFindAndReplace.findReplaceAndAwait(): T =
+	findAndReplace().awaitSingle()
+
+/**
+ * Nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndReplace.findAndReplace].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingFindAndReplace.findReplaceAndAwaitOrNull(): T? =
+	findAndReplace().awaitFirstOrNull()
+
+/**
+ * Coroutines variant of [ReactiveUpdateOperation.TerminatingUpdate.all].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingUpdate.allAndAwait(): UpdateResult =
+	all().awaitSingle()
+
+/**
+ * Coroutines variant of [ReactiveUpdateOperation.TerminatingUpdate.first].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingUpdate.firstAndAwait(): UpdateResult =
+	first().awaitSingle()
+
+/**
+ * Coroutines variant of [ReactiveUpdateOperation.TerminatingUpdate.upsert].
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+suspend fun ReactiveUpdateOperation.TerminatingUpdate.upsertAndAwait(): UpdateResult = upsert().awaitSingle()
+
+/**
+ * Extension for [ReactiveUpdateOperation.FindAndReplaceWithProjection.as] leveraging reified type parameters.
+ *
+ * @author Sebastien Deleuze
+ * @since 2.2
+ */
+inline fun ReactiveUpdateOperation.FindAndReplaceWithProjection<*>.asType(): ReactiveUpdateOperation.FindAndReplaceWithOptions =
+	`as`(T::class.java)
diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt
new file mode 100644
index 0000000000..f4744d402a
--- /dev/null
+++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
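A sketch of the find-and-modify coroutine variants above, assuming the hypothetical `Person` entity; `findModifyAndAwaitOrNull` yields null when no document matches:

```kotlin
import org.springframework.data.mongodb.core.ReactiveMongoTemplate
import org.springframework.data.mongodb.core.findModifyAndAwaitOrNull
import org.springframework.data.mongodb.core.update
import org.springframework.data.mongodb.core.query.Criteria.where
import org.springframework.data.mongodb.core.query.Query.query
import org.springframework.data.mongodb.core.query.Update
import org.springframework.data.mongodb.core.query.isEqualTo

suspend fun birthday(template: ReactiveMongoTemplate, name: String): Person? =
    template.update<Person>()
        .matching(query(where("name").isEqualTo(name)))
        .apply(Update().inc("age", 1))
        .findModifyAndAwaitOrNull()     // suspends for the modified document, or null
```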
+ */ +package org.springframework.data.mongodb.core.query + +import org.springframework.data.mapping.toDotPath +import kotlin.reflect.KProperty + +/** + * Extension for [Criteria.is] providing an `isEqualTo` alias since `is` is a reserved keyword in Kotlin. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +fun Criteria.isEqualTo(o: Any?): Criteria = `is`(o) + +/** + * Extension for [Criteria.in] providing an `inValues` alias since `in` is a reserved keyword in Kotlin. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +fun Criteria.inValues(c: Collection): Criteria = `in`(c) + +/** + * Extension for [Criteria.in] providing an `inValues` alias since `in` is a reserved keyword in Kotlin. + * + * @author Sebastien Deleuze + * @since 2.0 + */ +fun Criteria.inValues(vararg o: Any?): Criteria = `in`(*o) + +/** + * Creates a Criteria using a KProperty as key. + * Supports nested field names with [KPropertyPath]. + * @author Tjeu Kayim + * @since 2.2 + */ +fun where(key: KProperty<*>): Criteria = Criteria.where(key.toDotPath()) + +/** + * Add new key to the criteria chain using a KProperty. + * Supports nested field names with [KPropertyPath]. + * @author Tjeu Kayim + * @since 2.2 + */ +infix fun Criteria.and(key: KProperty<*>): Criteria = and(key.toDotPath()) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt new file mode 100644 index 0000000000..43cf02ea4c --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt @@ -0,0 +1,416 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.bson.BsonRegularExpression +import org.springframework.data.geo.Circle +import org.springframework.data.geo.Point +import org.springframework.data.geo.Shape +import org.springframework.data.mapping.toDotPath +import org.springframework.data.mongodb.core.geo.GeoJson +import org.springframework.data.mongodb.core.schema.JsonSchemaObject +import java.util.regex.Pattern +import kotlin.reflect.KProperty + +/** + * Creates a criterion using equality. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.isEqualTo + */ +infix fun KProperty.isEqualTo(value: T) = + Criteria(this.toDotPath()).isEqualTo(value) + +/** + * Creates a criterion using the $ne operator. + * + * See [MongoDB Query operator: $ne](https://docs.mongodb.com/manual/reference/operator/query/ne/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.ne + */ +infix fun KProperty.ne(value: T): Criteria = + Criteria(this.toDotPath()).ne(value) + +/** + * Creates a criterion using the $lt operator. 
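The `where(KProperty)` and `and(KProperty)` helpers above render property references as dot paths. A short sketch with the hypothetical `Person` entity (nested properties would resolve the same way via `KPropertyPath`):

```kotlin
import org.springframework.data.mongodb.core.query.Criteria
import org.springframework.data.mongodb.core.query.and
import org.springframework.data.mongodb.core.query.isEqualTo
import org.springframework.data.mongodb.core.query.where

val criteria: Criteria = where(Person::name).isEqualTo("Ada")
    .and(Person::age).gte(18)           // and(Person::age) re-keys the chain to "age"
```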
+ * + * See [MongoDB Query operator: $lt](https://docs.mongodb.com/manual/reference/operator/query/lt/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.lt + */ +infix fun KProperty.lt(value: Any): Criteria = + Criteria(this.toDotPath()).lt(value) + +/** + * Creates a criterion using the $lte operator. + * + * See [MongoDB Query operator: $lte](https://docs.mongodb.com/manual/reference/operator/query/lte/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.lte + */ +infix fun KProperty.lte(value: Any): Criteria = + Criteria(this.toDotPath()).lte(value) + +/** + * Creates a criterion using the $gt operator. + * + * See [MongoDB Query operator: $gt](https://docs.mongodb.com/manual/reference/operator/query/gt/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.gt + */ +infix fun KProperty.gt(value: Any): Criteria = + Criteria(this.toDotPath()).gt(value) + +/** + * Creates a criterion using the $gte operator. + * + * See [MongoDB Query operator: $gte](https://docs.mongodb.com/manual/reference/operator/query/gte/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.gte + */ +infix fun KProperty.gte(value: Any): Criteria = + Criteria(this.toDotPath()).gte(value) + +/** + * Creates a criterion using the $in operator. + * + * See [MongoDB Query operator: $in](https://docs.mongodb.com/manual/reference/operator/query/in/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.inValues + */ +fun KProperty.inValues(vararg o: Any): Criteria = + Criteria(this.toDotPath()).`in`(*o) + +/** + * Creates a criterion using the $in operator. + * + * See [MongoDB Query operator: $in](https://docs.mongodb.com/manual/reference/operator/query/in/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.inValues + */ +infix fun KProperty.inValues(value: Collection): Criteria = + Criteria(this.toDotPath()).`in`(value) + +/** + * Creates a criterion using the $nin operator. + * + * See [MongoDB Query operator: $nin](https://docs.mongodb.com/manual/reference/operator/query/nin/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.nin + */ +fun KProperty.nin(vararg o: Any): Criteria = + Criteria(this.toDotPath()).nin(*o) + +/** + * Creates a criterion using the $nin operator. + * + * See [MongoDB Query operator: $nin](https://docs.mongodb.com/manual/reference/operator/query/nin/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.nin + */ +infix fun KProperty.nin(value: Collection): Criteria = + Criteria(this.toDotPath()).nin(value) + +/** + * Creates a criterion using the $mod operator. + * + * See [MongoDB Query operator: $mod](https://docs.mongodb.com/manual/reference/operator/query/mod/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.mod + */ +fun KProperty.mod(value: Number, remainder: Number): Criteria = + Criteria(this.toDotPath()).mod(value, remainder) + +/** + * Creates a criterion using the $all operator. + * + * See [MongoDB Query operator: $all](https://docs.mongodb.com/manual/reference/operator/query/all/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.all + */ +fun KProperty<*>.all(vararg o: Any): Criteria = + Criteria(this.toDotPath()).all(*o) + +/** + * Creates a criterion using the $all operator. + * + * See [MongoDB Query operator: $all](https://docs.mongodb.com/manual/reference/operator/query/all/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.all + */ +infix fun KProperty<*>.all(value: Collection<*>): Criteria = + Criteria(this.toDotPath()).all(value) + +/** + * Creates a criterion using the $size operator. 
+ * + * See [MongoDB Query operator: $size](https://docs.mongodb.com/manual/reference/operator/query/size/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.size + */ +infix fun KProperty<*>.size(s: Int): Criteria = + Criteria(this.toDotPath()).size(s) + +/** + * Creates a criterion using the $exists operator. + * + * See [MongoDB Query operator: $exists](https://docs.mongodb.com/manual/reference/operator/query/exists/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.exists + */ +infix fun KProperty<*>.exists(b: Boolean): Criteria = + Criteria(this.toDotPath()).exists(b) + +/** + * Creates a criterion using the $type operator. + * + * See [MongoDB Query operator: $type](https://docs.mongodb.com/manual/reference/operator/query/type/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.type + */ +infix fun KProperty<*>.type(t: Int): Criteria = + Criteria(this.toDotPath()).type(t) + +/** + * Creates a criterion using the $type operator. + * + * See [MongoDB Query operator: $type](https://docs.mongodb.com/manual/reference/operator/query/type/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.type + */ +infix fun KProperty<*>.type(t: Collection): Criteria = + Criteria(this.toDotPath()).type(*t.toTypedArray()) + +/** + * Creates a criterion using the $type operator. + * + * See [MongoDB Query operator: $type](https://docs.mongodb.com/manual/reference/operator/query/type/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.type + */ +fun KProperty<*>.type(vararg t: JsonSchemaObject.Type): Criteria = + Criteria(this.toDotPath()).type(*t) + +/** + * Creates a criterion using the $not meta operator which affects the clause directly following + * + * See [MongoDB Query operator: $not](https://docs.mongodb.com/manual/reference/operator/query/not/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.not + */ +fun KProperty<*>.not(): Criteria = + Criteria(this.toDotPath()).not() + +/** + * Creates a criterion using a $regex operator. + * + * See [MongoDB Query operator: $regex](https://docs.mongodb.com/manual/reference/operator/query/regex/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: String): Criteria = + Criteria(this.toDotPath()).regex(re, null) + +/** + * Creates a criterion using a $regex and $options operator. + * + * See [MongoDB Query operator: $regex](https://docs.mongodb.com/manual/reference/operator/query/regex/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +fun KProperty.regex(re: String, options: String?): Criteria = + Criteria(this.toDotPath()).regex(re, options) + +/** + * Syntactical sugar for [isEqualTo] making obvious that we create a regex predicate. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: Regex): Criteria = + Criteria(this.toDotPath()).regex(re.toPattern()) + +/** + * Syntactical sugar for [isEqualTo] making obvious that we create a regex predicate. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: Pattern): Criteria = + Criteria(this.toDotPath()).regex(re) + +/** + * Syntactical sugar for [isEqualTo] making obvious that we create a regex predicate. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: BsonRegularExpression): Criteria = + Criteria(this.toDotPath()).regex(re) + +/** + * Creates a geospatial criterion using a $geoWithin $centerSphere operation. This is only available for + * Mongo 2.4 and higher. 
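Each infix operator above produces a fresh `Criteria` rooted at the property's dot path, so independent property criteria are combined through `andOperator` (or added to a `Query` separately) rather than chained. A hedged sketch with the hypothetical `Person` entity:

```kotlin
import org.springframework.data.mongodb.core.query.Criteria
import org.springframework.data.mongodb.core.query.Query.query
import org.springframework.data.mongodb.core.query.gte
import org.springframework.data.mongodb.core.query.regex

val adultsNamedA = query(
    Criteria().andOperator(
        Person::age gte 18,             // Criteria("age").gte(18)
        Person::name regex "^A"         // Criteria("name").regex("^A", null)
    )
)
```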
+ * + * See [MongoDB Query operator: + * $geoWithin](https://docs.mongodb.com/manual/reference/operator/query/geoWithin/) + * + * See [MongoDB Query operator: + * $centerSphere](https://docs.mongodb.com/manual/reference/operator/query/centerSphere/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.withinSphere + */ +infix fun KProperty>.withinSphere(circle: Circle): Criteria = + Criteria(this.toDotPath()).withinSphere(circle) + +/** + * Creates a geospatial criterion using a $geoWithin operation. + * + * See [MongoDB Query operator: + * $geoWithin](https://docs.mongodb.com/manual/reference/operator/query/geoWithin/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.within + */ +infix fun KProperty>.within(shape: Shape): Criteria = + Criteria(this.toDotPath()).within(shape) + +/** + * Creates a geospatial criterion using a $near operation. + * + * See [MongoDB Query operator: $near](https://docs.mongodb.com/manual/reference/operator/query/near/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.near + */ +infix fun KProperty>.near(point: Point): Criteria = + Criteria(this.toDotPath()).near(point) + +/** + * Creates a geospatial criterion using a $nearSphere operation. This is only available for Mongo 1.7 and + * higher. + * + * See [MongoDB Query operator: + * $nearSphere](https://docs.mongodb.com/manual/reference/operator/query/nearSphere/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.nearSphere + */ +infix fun KProperty>.nearSphere(point: Point): Criteria = + Criteria(this.toDotPath()).nearSphere(point) + +/** + * Creates criterion using `$geoIntersects` operator which matches intersections of the given `geoJson` + * structure and the documents one. Requires MongoDB 2.4 or better. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.intersects + */ +infix fun KProperty>.intersects(geoJson: GeoJson<*>): Criteria = + Criteria(this.toDotPath()).intersects(geoJson) + +/** + * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near + * + * See [MongoDB Query operator: + * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.maxDistance + */ +infix fun KProperty>.maxDistance(d: Double): Criteria = + Criteria(this.toDotPath()).maxDistance(d) + +/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.minDistance + */ +infix fun KProperty>.minDistance(d: Double): Criteria = + Criteria(this.toDotPath()).minDistance(d) + +/** + * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near + * + * See [MongoDB Query operator: + * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) + * @author Sangyong Choi + * @since 3.2.5 + * @see Criteria.maxDistance + */ +infix fun Criteria.maxDistance(d: Double): Criteria = + this.maxDistance(d) + +/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. 
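A sketch of the geo operators, assuming an entity with a GeoJSON-typed property; note how the `Criteria`-receiver `maxDistance` overload keeps the infix chain readable after `near`. The `Cafe` entity and coordinates are made up:

```kotlin
import org.springframework.data.geo.Point
import org.springframework.data.mongodb.core.geo.GeoJsonPoint
import org.springframework.data.mongodb.core.query.maxDistance
import org.springframework.data.mongodb.core.query.near

// Hypothetical entity with a geo-indexed GeoJSON field.
data class Cafe(val location: GeoJsonPoint)

val nearby = Cafe::location near Point(8.6821, 50.1109) maxDistance 0.005
```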
+/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. + * @author Sangyong Choi + * @since 3.2.5 + * @see Criteria.minDistance + */ +infix fun Criteria.minDistance(d: Double): Criteria = + this.minDistance(d) + +/** + * Creates a criterion using the $elemMatch operator. + * + * See [MongoDB Query operator: + * $elemMatch](https://docs.mongodb.com/manual/reference/operator/query/elemMatch/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.elemMatch + */ +infix fun KProperty<*>.elemMatch(c: Criteria): Criteria = + Criteria(this.toDotPath()).elemMatch(c) + +/** + * Use [Criteria.BitwiseCriteriaOperators] as gateway to create a criterion using one of the + * [bitwise operators](https://docs.mongodb.com/manual/reference/operator/query-bitwise/) like + * `$bitsAllClear`. + * + * Example: + * ``` + * bits { allClear(123) } + * ``` + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.bits + */ +infix fun KProperty<*>.bits(bitwiseCriteria: Criteria.BitwiseCriteriaOperators.() -> Criteria) = + Criteria(this.toDotPath()).bits().let(bitwiseCriteria) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensions.kt new file mode 100644 index 0000000000..d132482f65 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensions.kt @@ -0,0 +1,228 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.springframework.data.mapping.toDotPath +import org.springframework.data.mongodb.core.query.Update.Position +import kotlin.reflect.KProperty + +/** + * Static factory method to create an Update using the provided key. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.update + */ +fun <T : Any> update(key: KProperty<T>, value: T?) = + Update.update(key.toDotPath(), value) + +/** + * Update using the {@literal $set} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.set + */ +fun <T : Any> Update.set(key: KProperty<T>, value: T?) = + set(key.toDotPath(), value)
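
A minimal sketch of the factory method and the `$set` extension; the `User` class is hypothetical:

```kotlin
import org.springframework.data.mongodb.core.query.*

// Hypothetical domain type, only for this sketch.
data class User(val name: String, val age: Int)

// update(...) seeds the Update with a first $set entry, set(...) appends to it:
// { "$set" : { "name" : "Walter", "age" : 52 } }
val update = update(User::name, "Walter")
    .set(User::age, 52)
```
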
+/** + * Update using the {@literal $setOnInsert} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.setOnInsert + */ +fun <T : Any> Update.setOnInsert(key: KProperty<T>, value: T?) = + setOnInsert(key.toDotPath(), value) + +/** + * Update using the {@literal $unset} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.unset + */ +fun Update.unset(key: KProperty<*>) = + unset(key.toDotPath()) + +/** + * Update using the {@literal $inc} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.inc + */ +fun Update.inc(key: KProperty<Number>, inc: Number) = + inc(key.toDotPath(), inc) + +fun Update.inc(key: KProperty<Number>) = + inc(key.toDotPath()) + +/** + * Update using the {@literal $push} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.push + */ +fun <T> Update.push(key: KProperty<Collection<T>>, value: T?) = + push(key.toDotPath(), value)
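
The single-value modifiers chain naturally, since each extension returns the `Update` it was called on (again with a hypothetical `User`):

```kotlin
import org.springframework.data.mongodb.core.query.*

// Hypothetical domain type, only for this sketch.
data class User(val age: Int, val nickname: String?, val tags: List<String>)

val update = Update()
    .inc(User::age, 1)        // { "$inc" : { "age" : 1 } }
    .unset(User::nickname)    // { "$unset" : { "nickname" : 1 } }
    .push(User::tags, "new")  // { "$push" : { "tags" : "new" } }
```
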
+/** + * Update using {@code $push} modifier. <br /> + * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values as well as using + * {@code $position}. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.push + */ +fun Update.push(key: KProperty<*>) = + push(key.toDotPath())
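
The key-only `push` variant hands back the underlying `PushOperatorBuilder`, so `$each` can follow; a sketch with a hypothetical `User`:

```kotlin
import org.springframework.data.mongodb.core.query.*

// Hypothetical domain type, only for this sketch.
data class User(val tags: List<String>)

// { "$push" : { "tags" : { "$each" : [ "kotlin", "mongodb" ] } } }
val update = Update().push(User::tags).each("kotlin", "mongodb")
```
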
+/** + * Update using {@code $addToSet} modifier. <br /> + * Allows creation of {@code $addToSet} command for single or multiple (using {@code $each}) values. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.addToSet + */ +fun Update.addToSet(key: KProperty<*>) = + addToSet(key.toDotPath()) + +/** + * Update using the {@literal $addToSet} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.addToSet + */ +fun <T> Update.addToSet(key: KProperty<Collection<T>>, value: T?) = + addToSet(key.toDotPath(), value) + +/** + * Update using the {@literal $pop} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.pop + */ +fun Update.pop(key: KProperty<*>, pos: Position) = + pop(key.toDotPath(), pos) + +/** + * Update using the {@literal $pull} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.pull + */ +fun Update.pull(key: KProperty<*>, value: Any) = + pull(key.toDotPath(), value) + +/** + * Update using the {@literal $pullAll} update modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.pullAll + */ +fun <T> Update.pullAll(key: KProperty<Collection<T>>, values: Array<T>) = + pullAll(key.toDotPath(), values) + +/** + * Update given key to current date using {@literal $currentDate} modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.currentDate + */ +fun Update.currentDate(key: KProperty<*>) = + currentDate(key.toDotPath()) + +/** + * Update given key to current date using {@literal $currentDate : { $type : "timestamp" }} modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.currentTimestamp + */ +fun Update.currentTimestamp(key: KProperty<*>) = + currentTimestamp(key.toDotPath()) + +/** + * Multiply the value of given key by the given number. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.multiply + */ +fun Update.multiply(key: KProperty<Number>, multiplier: Number) = + multiply(key.toDotPath(), multiplier) + +/** + * Update given key to the {@code value} if the {@code value} is greater than the current value of the field. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.max + */ +fun <T : Any> Update.max(key: KProperty<T>, value: T) = + max(key.toDotPath(), value) + +/** + * Update given key to the {@code value} if the {@code value} is less than the current value of the field. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.min + */ +fun <T : Any> Update.min(key: KProperty<T>, value: T) = + min(key.toDotPath(), value) + +/** + * The operator supports bitwise {@code and}, bitwise {@code or}, and bitwise {@code xor} operations. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.bitwise + */ +fun Update.bitwise(key: KProperty<*>) = + bitwise(key.toDotPath()) + +/** + * Filter elements in an array that match the given criteria for update. {@code expression} is used directly with the + * driver without further type or field mapping. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.filterArray + */ +fun Update.filterArray(identifier: KProperty<*>, expression: Any) = + filterArray(identifier.toDotPath(), expression)
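
A combined sketch of the remaining modifiers (hypothetical `User` with an `updatedAt` timestamp):

```kotlin
import java.time.Instant
import org.springframework.data.mongodb.core.query.*
import org.springframework.data.mongodb.core.query.Update.Position

// Hypothetical domain type, only for this sketch.
data class User(val age: Int, val tags: List<String>, val updatedAt: Instant)

val update = Update()
    .pull(User::tags, "obsolete")    // { "$pull" : { "tags" : "obsolete" } }
    .pop(User::tags, Position.LAST)  // { "$pop" : { "tags" : 1 } }
    .max(User::age, 65)              // { "$max" : { "age" : 65 } }
    .currentDate(User::updatedAt)    // { "$currentDate" : { "updatedAt" : true } }
```
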
+/** + * Determine if a given {@code key} will be touched on execution. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.modifies + */ +fun Update.modifies(key: KProperty<*>) = + modifies(key.toDotPath()) + diff --git a/spring-data-mongodb/src/main/resources/META-INF/services/javax.enterprise.inject.spi.Extension b/spring-data-mongodb/src/main/resources/META-INF/services/jakarta.enterprise.inject.spi.Extension similarity index 100% rename from spring-data-mongodb/src/main/resources/META-INF/services/javax.enterprise.inject.spi.Extension rename to spring-data-mongodb/src/main/resources/META-INF/services/jakarta.enterprise.inject.spi.Extension diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring.factories b/spring-data-mongodb/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000..ab65395178 --- /dev/null +++ b/spring-data-mongodb/src/main/resources/META-INF/spring.factories @@ -0,0 +1,2 @@ +org.springframework.data.web.config.SpringDataJacksonModules=org.springframework.data.mongodb.config.GeoJsonConfiguration +org.springframework.data.repository.core.support.RepositoryFactorySupport=org.springframework.data.mongodb.repository.support.MongoRepositoryFactory diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas index 26a5caba1c..57920f7449 100644 --- a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas +++ b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas @@ -6,4 +6,27 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.4.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-1.5.xsd=org/springframework/data/mongodb/config/spring-mongo-1.5.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-1.7.xsd=org/springframework/data/mongodb/config/spring-mongo-1.7.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-1.8.xsd=org/springframework/data/mongodb/config/spring-mongo-1.8.xsd -http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.8.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-4.0.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd
+https\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.3.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.4.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.5.xsd=org/springframework/data/mongodb/config/spring-mongo-1.5.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.7.xsd=org/springframework/data/mongodb/config/spring-mongo-1.7.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.8.xsd=org/springframework/data/mongodb/config/spring-mongo-1.8.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.2.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-4.0.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring/aot.factories b/spring-data-mongodb/src/main/resources/META-INF/spring/aot.factories new file mode 100644 index 0000000000..0130a33d7c --- /dev/null +++ b/spring-data-mongodb/src/main/resources/META-INF/spring/aot.factories @@ -0,0 +1,6 @@ +org.springframework.aot.hint.RuntimeHintsRegistrar=\ + org.springframework.data.mongodb.aot.MongoRuntimeHints,\ + org.springframework.data.mongodb.repository.aot.RepositoryRuntimeHints + +org.springframework.beans.factory.aot.BeanRegistrationAotProcessor=\ + org.springframework.data.mongodb.aot.MongoManagedTypesBeanRegistrationAotProcessor

[The hunks that follow touch the XML namespace schemas under org/springframework/data/mongodb/config: the existing spring-mongo-1.x XSDs are updated in place (http:// schema locations become https://, missing trailing newlines are added), and new schema revisions such as spring-mongo-1.10.xsd, spring-mongo-1.10.2.xsd, spring-mongo-2.2.xsd, spring-mongo-3.0.xsd, spring-mongo-3.3.xsd and spring-mongo-4.0.xsd are introduced, the 3.x/4.0 variants referring to MongoDatabaseFactory rather than MongoDbFactory. The XML markup of these hunks did not survive text extraction and is elided here.]

diff --git a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java index 4a77bc00e3..ce458132d9 100644 --- a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java +++ b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java @@ -1,11 +1,12 @@ + /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,36 +14,36 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + +import java.util.Collections; +import java.util.Set; + import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; /** * Sample configuration class in default package.
- * - * @see DATAMONGO-877 + * * @author Oliver Gierke */ @Configuration -public class ConfigClassInDefaultPackage extends AbstractMongoConfiguration { +public class ConfigClassInDefaultPackage extends MongoClientClosingTestConfiguration { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#getDatabaseName() - */ @Override protected String getDatabaseName() { return "default"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#mongo() - */ @Override - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return MongoClients.create(); + } + + @Override + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); } } diff --git a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java index f9b9a78cab..7fa6c358f3 100644 --- a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java +++ b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import org.junit.Test; + +import org.junit.jupiter.api.Test; + import org.springframework.context.annotation.AnnotationConfigApplicationContext; /** * Unit test for {@link ConfigClassInDefaultPackage}. - * - * @see DATAMONGO-877 + * * @author Oliver Gierke */ public class ConfigClassInDefaultPackageUnitTests { - /** - * @see DATAMONGO-877 - */ - @Test + @Test // DATAMONGO-877 public void loadsConfigClassFromDefaultPackage() { new AnnotationConfigApplicationContext(ConfigClassInDefaultPackage.class).close(); } } diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/DocumentBacked.java b/spring-data-mongodb/src/test/java/example/first/First.java similarity index 65% rename from spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/DocumentBacked.java rename to spring-data-mongodb/src/test/java/example/first/First.java index db044c4a34..04e50fa206 100644 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/DocumentBacked.java +++ b/spring-data-mongodb/src/test/java/example/first/First.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,10 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.crossstore; +package example.first; -import org.springframework.data.crossstore.ChangeSetBacked; +import org.springframework.data.mongodb.core.mapping.Document; -public interface DocumentBacked extends ChangeSetBacked { +/** + * @author Oliver Gierke + */ +@Document +public class First { } diff --git a/spring-data-mongodb-log4j/src/test/java/org/springframework/data/mongodb/log4j/MongoLog4jAppenderUnitTests.java b/spring-data-mongodb/src/test/java/example/second/Second.java similarity index 58% rename from spring-data-mongodb-log4j/src/test/java/org/springframework/data/mongodb/log4j/MongoLog4jAppenderUnitTests.java rename to spring-data-mongodb/src/test/java/example/second/Second.java index 2be86e958d..446501de87 100644 --- a/spring-data-mongodb-log4j/src/test/java/org/springframework/data/mongodb/log4j/MongoLog4jAppenderUnitTests.java +++ b/spring-data-mongodb/src/test/java/example/second/Second.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,22 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.log4j; +package example.second; -import org.junit.Test; +import org.springframework.data.mongodb.core.mapping.Document; /** - * Unit tests for {@link MongoLog4jAppender}. - * * @author Oliver Gierke */ -public class MongoLog4jAppenderUnitTests { +@Document +public class Second { - /** - * @see DATAMONGO-641 - */ - @Test - public void closesWithoutMongoInstancePresent() { - new MongoLog4jAppender().close(); - } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/CapturingTransactionOptionsResolver.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/CapturingTransactionOptionsResolver.java new file mode 100644 index 0000000000..0448ad936c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/CapturingTransactionOptionsResolver.java @@ -0,0 +1,64 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.assertj.core.api.Assertions; +import org.assertj.core.api.ListAssert; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; + +/** + * @author Christoph Strobl + */ +public class CapturingTransactionOptionsResolver implements MongoTransactionOptionsResolver { + + private final MongoTransactionOptionsResolver delegateResolver; + private final List<MongoTransactionOptions> capturedOptions = new ArrayList<>(10); + + public CapturingTransactionOptionsResolver(MongoTransactionOptionsResolver delegateResolver) { + this.delegateResolver = delegateResolver; + } + + @Nullable + @Override + public String getLabelPrefix() { + return delegateResolver.getLabelPrefix(); + } + + @Override + public MongoTransactionOptions convert(Map<String, String> source) { + + MongoTransactionOptions options = delegateResolver.convert(source); + capturedOptions.add(options); + return options; + } + + public void clear() { + capturedOptions.clear(); + } + + public List<MongoTransactionOptions> getCapturedOptions() { + return capturedOptions; + } + + public MongoTransactionOptions getLastCapturedOption() { + return CollectionUtils.lastElement(capturedOptions); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolverUnitTests.java new file mode 100644 index 0000000000..2724fb1605 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolverUnitTests.java @@ -0,0 +1,134 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Set; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.transaction.interceptor.DefaultTransactionAttribute; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; + +/** + * Unit tests for {@link DefaultMongoTransactionOptionsResolver}.
+ * + * @author Yan Kardziyaka + * @author Christoph Strobl + */ +class DefaultMongoTransactionOptionsResolverUnitTests { + + @ParameterizedTest + @ValueSource(strings = { "mongo:maxCommitTime=-PT5S", "mongo:readConcern=invalidValue", + "mongo:readPreference=invalidValue", "mongo:writeConcern=invalidValue", "mongo:invalidPreference=jedi", + "mongo:readConcern", "mongo:readConcern:local", "mongo:readConcern=" }) + void shouldThrowExceptionOnInvalidAttribute(String label) { + + TransactionAttribute attribute = transactionAttribute(label); + + assertThatThrownBy(() -> DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) // + .isInstanceOf(IllegalArgumentException.class); + } + + @Test // GH-1628 + public void shouldReturnEmptyOptionsIfNotTransactionAttribute() { + + DefaultTransactionDefinition definition = new DefaultTransactionDefinition(); + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(definition)) + .isSameAs(MongoTransactionOptions.NONE); + } + + @Test // GH-1628 + public void shouldReturnEmptyOptionsIfNoLabelsProvided() { + + TransactionAttribute attribute = new DefaultTransactionAttribute(); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .isSameAs(MongoTransactionOptions.NONE); + } + + @Test // GH-1628 + public void shouldIgnoreNonMongoOptions() { + + TransactionAttribute attribute = transactionAttribute("jpa:ignore"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .isSameAs(MongoTransactionOptions.NONE); + } + + @Test // GH-1628 + public void shouldReturnMergedOptionsIfLabelsContainMaxCommitTime() { + + TransactionAttribute attribute = transactionAttribute("mongo:maxCommitTime=PT5S"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(5L, from(options -> options.getMaxCommitTime().toSeconds())) // + .returns(null, from(MongoTransactionOptions::getReadConcern)) // + .returns(null, from(MongoTransactionOptions::getReadPreference)) // + .returns(null, from(MongoTransactionOptions::getWriteConcern)); + } + + @Test // GH-1628 + public void shouldReturnReadConcernWhenPresent() { + + TransactionAttribute attribute = transactionAttribute("mongo:readConcern=majority"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(null, from(TransactionMetadata::getMaxCommitTime)) // + .returns(ReadConcern.MAJORITY, from(MongoTransactionOptions::getReadConcern)) // + .returns(null, from(MongoTransactionOptions::getReadPreference)) // + .returns(null, from(MongoTransactionOptions::getWriteConcern)); + } + + @Test // GH-1628 + public void shouldReturnMergedOptionsIfLabelsContainReadPreference() { + + TransactionAttribute attribute = transactionAttribute("mongo:readPreference=primaryPreferred"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(null, from(TransactionMetadata::getMaxCommitTime)) // + .returns(null, from(MongoTransactionOptions::getReadConcern)) // + .returns(ReadPreference.primaryPreferred(), from(MongoTransactionOptions::getReadPreference)) // + .returns(null, from(MongoTransactionOptions::getWriteConcern)); + } + + @Test // GH-1628 + public void shouldReturnMergedOptionsIfLabelsContainWriteConcern() { + + TransactionAttribute attribute = transactionAttribute("mongo:writeConcern=w3"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(null, from(TransactionMetadata::getMaxCommitTime)) // + .returns(null, 
from(MongoTransactionOptions::getReadConcern)) // + .returns(null, from(MongoTransactionOptions::getReadPreference)) // + .returns(WriteConcern.W3, from(MongoTransactionOptions::getWriteConcern)); + + } + + private static TransactionAttribute transactionAttribute(String... labels) { + + DefaultTransactionAttribute attribute = new DefaultTransactionAttribute(); + attribute.setLabels(Set.of(labels)); + return attribute; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java new file mode 100644 index 0000000000..adcf9eb293 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static de.schauderhaft.degraph.check.JCheck.*; +import static org.hamcrest.MatcherAssert.*; + +import de.schauderhaft.degraph.configuration.NamedPattern; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +/** + * Tests package dependency constraints. + * + * @author Jens Schauder + * @author Oliver Gierke + */ +@Disabled("Needs to be tansitioned to ArchUnit") +class DependencyTests { + + @Test + void noInternalPackageCycles() { + + assertThat(classpath() // + .noJars() // + .including("org.springframework.data.mongodb.**") // + .filterClasspath("*target/classes") // + .printOnFailure("degraph.graphml"), // + violationFree() // + ); + } + + @Test + void onlyConfigMayUseRepository() { + + assertThat(classpath() // + .including("org.springframework.data.**") // + .filterClasspath("*target/classes") // + .printOnFailure("onlyConfigMayUseRepository.graphml") // + .withSlicing("slices", // + "**.(config).**", // + new NamedPattern("**.cdi.**", "config"), // + "**.(repository).**", // + new NamedPattern("**", "other")) + .allow("config", "repository", "other"), // + violationFree() // + ); + } + + @Test + void commonsInternaly() { + + assertThat(classpath() // + .noJars() // + .including("org.springframework.data.**") // + .excluding("org.springframework.data.mongodb.**") // + .filterClasspath("*target/classes") // + .printTo("commons.graphml"), // + violationFree() // + ); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java new file mode 100644 index 0000000000..db1ab68269 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java @@ -0,0 +1,310 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
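The parameterized and happy-path tests above pin down the label grammar the default resolver understands: `mongo:<key>=<value>` with the keys `maxCommitTime` (a positive ISO-8601 duration), `readConcern`, `readPreference` and `writeConcern`. Malformed or unknown `mongo:` entries fail fast with `IllegalArgumentException`, while labels carrying another prefix are simply ignored. In application code this grammar would typically be used through transaction labels, roughly like the hypothetical service below (shown only to illustrate the label format):

[source,java]
----
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
class OrderService {

	// parsed by the Mongo options resolver; a "jpa:..." label would be ignored
	@Transactional(label = { "mongo:readConcern=majority", "mongo:maxCommitTime=PT5S" })
	public void placeOrder() {
		// transactional MongoDB work
	}
}
----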
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import jakarta.transaction.Status; +import jakarta.transaction.UserTransaction; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.jta.JtaTransactionManager; +import org.springframework.transaction.support.TransactionCallbackWithoutResult; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.transaction.support.TransactionTemplate; + +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; +import com.mongodb.session.ServerSession; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class MongoDatabaseUtilsUnitTests { + + @Mock ClientSession session; + @Mock ServerSession serverSession; + @Mock MongoDatabaseFactory dbFactory; + @Mock MongoDatabase db; + + @Mock UserTransaction userTransaction; + + @AfterEach + void verifyTransactionSynchronizationManagerState() { + + assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue(); + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse(); + assertThat(TransactionSynchronizationManager.getCurrentTransactionName()).isNull(); + assertThat(TransactionSynchronizationManager.isCurrentTransactionReadOnly()).isFalse(); + assertThat(TransactionSynchronizationManager.getCurrentTransactionIsolationLevel()).isNull(); + assertThat(TransactionSynchronizationManager.isActualTransactionActive()).isFalse(); + } + + @Test // DATAMONGO-2130 + void isTransactionActiveShouldDetectTxViaFactory() { + + when(dbFactory.isTransactionActive()).thenReturn(true); + + assertThat(MongoDatabaseUtils.isTransactionActive(dbFactory)).isTrue(); + } + + @Test // DATAMONGO-2130 + void isTransactionActiveShouldReturnFalseIfNoTxActive() { + + when(dbFactory.isTransactionActive()).thenReturn(false); + + assertThat(MongoDatabaseUtils.isTransactionActive(dbFactory)).isFalse(); + } + + @Test // DATAMONGO-2130 + void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() { + + when(dbFactory.getSession(any())).thenReturn(session); + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(serverSession.isClosed()).thenReturn(false); + + when(dbFactory.isTransactionActive()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + assertThat(MongoDatabaseUtils.isTransactionActive(dbFactory)).isTrue(); + } + 
}); + } + + @Test // DATAMONGO-1920 + void shouldNotStartSessionWhenNoTransactionOngoing() { + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + + verify(dbFactory, never()).getSession(any()); + verify(dbFactory, never()).withSession(any(ClientSession.class)); + } + + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() throws Exception { + + when(dbFactory.getMongoDatabase()).thenReturn(db); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.NEVER); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + } + }); + + verify(userTransaction).getStatus(); + verifyNoMoreInteractions(userTransaction); + verifyNoInteractions(session); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(serverSession.isClosed()).thenReturn(false); + + when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE, + Status.STATUS_ACTIVE); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ALWAYS); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + } + }); + + verify(userTransaction).begin(); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsAny() throws Exception { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(serverSession.isClosed()).thenReturn(false); + + when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE, + Status.STATUS_ACTIVE); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + 
assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ALWAYS); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(userTransaction).rollback(); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void shouldNotParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsNative() throws Exception { + + when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE, + Status.STATUS_ACTIVE); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(userTransaction).rollback(); + + verify(session, never()).startTransaction(); + verify(session, never()).abortTransaction(); + verify(session, never()).close(); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(serverSession.isClosed()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingMongoTransactionWhenSessionSynchronizationIsAny() { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(serverSession.isClosed()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionTemplate txTemplate = new 
TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ALWAYS); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java new file mode 100644 index 0000000000..db1993e63d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java @@ -0,0 +1,336 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.UnexpectedRollbackException; +import org.springframework.transaction.support.DefaultTransactionDefinition; +import org.springframework.transaction.support.TransactionCallbackWithoutResult; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.transaction.support.TransactionTemplate; + +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; +import com.mongodb.session.ServerSession; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class MongoTransactionManagerUnitTests { + + @Mock ClientSession session; + @Mock ClientSession session2; + @Mock ServerSession serverSession; + @Mock MongoDatabaseFactory dbFactory; + @Mock MongoDatabaseFactory dbFactory2; + @Mock MongoDatabase db; + @Mock MongoDatabase db2; + + @BeforeEach + void setUp() { + + when(dbFactory.getSession(any())).thenReturn(session, session2); + when(dbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + 
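`MongoTransactionManager`, which the following unit tests exercise against mocked sessions, is a regular `PlatformTransactionManager`, so it composes with `TransactionTemplate` exactly like a JDBC or JPA manager. A compact sketch of the programmatic style these tests mimic; `dbFactory` stands in for whatever `MongoDatabaseFactory` your configuration provides:

[source,java]
----
import org.bson.Document;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.transaction.support.TransactionTemplate;

class ProgrammaticTxSketch {

	void run(MongoDatabaseFactory dbFactory) {

		MongoTransactionManager txManager = new MongoTransactionManager(dbFactory);
		MongoTemplate template = new MongoTemplate(dbFactory);

		// starts a session plus transaction, commits on return, aborts on exception
		new TransactionTemplate(txManager).executeWithoutResult(status -> //
				template.insert(new Document("state", "created"), "orders"));
	}
}
----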
when(dbFactory2.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + } + + @AfterEach + void verifyTransactionSynchronizationManager() { + + assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue(); + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse(); + } + + @Test // DATAMONGO-1920 + void triggerCommitCorrectly() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void participateInOnGoingTransactionWithCommit() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + } + }); + + verify(dbFactory, times(2)).withSession(eq(session)); + + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void participateInOnGoingTransactionWithRollbackOnly() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + + status.setRollbackOnly(); + } + }); + + verify(dbFactory, times(2)).withSession(eq(session)); + + assertThatExceptionOfType(UnexpectedRollbackException.class).isThrownBy(() -> txManager.commit(txStatus)); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void triggerRollbackCorrectly() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.rollback(txStatus); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void suspendTransactionWhilePropagationNotSupported() { + + 
MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_NOT_SUPPORTED); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + } + }); + + template.execute(MongoDatabase::listCollections); + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session2, never()).startTransaction(); + + verify(dbFactory, times(2)).withSession(eq(session)); + verify(dbFactory, never()).withSession(eq(session2)); + + verify(db, times(2)).drop(); + verify(db).listCollections(); + + verify(session).close(); + verify(session2, never()).close(); + } + + @Test // DATAMONGO-1920 + void suspendTransactionWhilePropagationRequiresNew() { + + when(dbFactory.withSession(session2)).thenReturn(dbFactory2); + when(dbFactory2.getMongoDatabase()).thenReturn(db2); + when(session2.getServerSession()).thenReturn(serverSession); + when(serverSession.isClosed()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + } + }); + + template.execute(MongoDatabase::listCollections); + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session2).startTransaction(); + + verify(dbFactory, times(2)).withSession(eq(session)); + verify(dbFactory).withSession(eq(session2)); + + verify(db).drop(); + verify(db2).drop(); + verify(db).listCollections(); + + verify(session).close(); + verify(session2).close(); + } + + @Test // DATAMONGO-1920 + void readonlyShouldInitiateASessionStartAndCommitTransaction() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + + DefaultTransactionDefinition readonlyTxDefinition = new DefaultTransactionDefinition(); + readonlyTxDefinition.setReadOnly(true); + + TransactionStatus txStatus = txManager.getTransaction(readonlyTxDefinition); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void readonlyShouldInitiateASessionStartAndRollbackTransaction() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + + DefaultTransactionDefinition readonlyTxDefinition = new DefaultTransactionDefinition(); + readonlyTxDefinition.setReadOnly(true); + + TransactionStatus txStatus = 
txManager.getTransaction(readonlyTxDefinition); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.rollback(txStatus); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionOptionsUnitTests.java new file mode 100644 index 0000000000..44692348a0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionOptionsUnitTests.java @@ -0,0 +1,118 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.TransactionOptions; +import com.mongodb.WriteConcern; + +/** + * Unit tests for {@link MongoTransactionOptions}. 
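Before the tests, a short sketch of the merge semantics they verify: `mergeWith` treats the receiver as the more specific option set and falls back to the argument for anything left `null`. The values here are illustrative only:

[source,java]
----
import java.util.concurrent.TimeUnit;

import org.springframework.data.mongodb.MongoTransactionOptions;

import com.mongodb.ReadConcern;
import com.mongodb.TransactionOptions;
import com.mongodb.WriteConcern;

class OptionsMergeSketch {

	public static void main(String[] args) {

		// defaults, e.g. configured once on the transaction manager
		MongoTransactionOptions defaults = MongoTransactionOptions.of(TransactionOptions.builder()
				.readConcern(ReadConcern.MAJORITY).writeConcern(WriteConcern.MAJORITY).build());

		// a partial set, e.g. resolved from @Transactional labels
		MongoTransactionOptions fromLabels = MongoTransactionOptions
				.of(TransactionOptions.builder().maxCommitTime(5L, TimeUnit.SECONDS).build());

		MongoTransactionOptions merged = fromLabels.mergeWith(defaults);

		System.out.println(merged.getMaxCommitTime()); // PT5S, from the labels
		System.out.println(merged.getReadConcern()); // majority, from the defaults
	}
}
----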
+ * + * @author Christoph Strobl + */ +class MongoTransactionOptionsUnitTests { + + private static final TransactionOptions NATIVE_OPTIONS = TransactionOptions.builder() // + .maxCommitTime(1L, TimeUnit.SECONDS) // + .readConcern(ReadConcern.SNAPSHOT) // + .readPreference(ReadPreference.secondaryPreferred()) // + .writeConcern(WriteConcern.W3) // + .build(); + + @Test // GH-1628 + void wrapsNativeDriverTransactionOptions() { + + assertThat(MongoTransactionOptions.of(NATIVE_OPTIONS)) + .returns(NATIVE_OPTIONS.getMaxCommitTime(TimeUnit.SECONDS), options -> options.getMaxCommitTime().toSeconds()) + .returns(NATIVE_OPTIONS.getReadConcern(), MongoTransactionOptions::getReadConcern) + .returns(NATIVE_OPTIONS.getReadPreference(), MongoTransactionOptions::getReadPreference) + .returns(NATIVE_OPTIONS.getWriteConcern(), MongoTransactionOptions::getWriteConcern) + .returns(NATIVE_OPTIONS, MongoTransactionOptions::toDriverOptions); + } + + @Test // GH-1628 + void mergeNoneWithDefaultsUsesDefaults() { + + assertThat(MongoTransactionOptions.NONE.mergeWith(MongoTransactionOptions.of(NATIVE_OPTIONS))) + .returns(NATIVE_OPTIONS.getMaxCommitTime(TimeUnit.SECONDS), options -> options.getMaxCommitTime().toSeconds()) + .returns(NATIVE_OPTIONS.getReadConcern(), MongoTransactionOptions::getReadConcern) + .returns(NATIVE_OPTIONS.getReadPreference(), MongoTransactionOptions::getReadPreference) + .returns(NATIVE_OPTIONS.getWriteConcern(), MongoTransactionOptions::getWriteConcern) + .returns(NATIVE_OPTIONS, MongoTransactionOptions::toDriverOptions); + } + + @Test // GH-1628 + void mergeExistingOptionsWithNoneUsesOptions() { + + MongoTransactionOptions source = MongoTransactionOptions.of(NATIVE_OPTIONS); + assertThat(source.mergeWith(MongoTransactionOptions.NONE)).isSameAs(source); + } + + @Test // GH-1628 + void mergeExistingOptionsWithUsesFirstNonNullValue() { + + MongoTransactionOptions source = MongoTransactionOptions + .of(TransactionOptions.builder().writeConcern(WriteConcern.UNACKNOWLEDGED).build()); + + assertThat(source.mergeWith(MongoTransactionOptions.of(NATIVE_OPTIONS))) + .returns(NATIVE_OPTIONS.getMaxCommitTime(TimeUnit.SECONDS), options -> options.getMaxCommitTime().toSeconds()) + .returns(NATIVE_OPTIONS.getReadConcern(), MongoTransactionOptions::getReadConcern) + .returns(NATIVE_OPTIONS.getReadPreference(), MongoTransactionOptions::getReadPreference) + .returns(source.getWriteConcern(), MongoTransactionOptions::getWriteConcern); + } + + @Test // GH-1628 + void testEquals() { + + assertThat(MongoTransactionOptions.NONE) // + .isSameAs(MongoTransactionOptions.NONE) // + .isNotEqualTo(new MongoTransactionOptions() { + @Nullable + @Override + public Duration getMaxCommitTime() { + return null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return null; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return null; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return null; + } + }); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java new file mode 100644 index 0000000000..64331704c6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java @@ -0,0 +1,146 @@ +/* + * Copyright 2019-2025 the original author or authors. 
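The remaining tests in this diff are reactive and lean heavily on Reactor's `StepVerifier`. For readers unfamiliar with the `as(StepVerifier::create)` idiom used throughout, a self-contained example:

[source,java]
----
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

class StepVerifierSketch {

	public static void main(String[] args) {

		// StepVerifier subscribes, checks each expectation in order and
		// fails if the sequence or the terminal signal differs
		Mono.just(true) //
				.as(StepVerifier::create) //
				.expectNext(true) //
				.verifyComplete();
	}
}
----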
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.session.ServerSession; + +/** + * Unit tests for {@link ReactiveMongoDatabaseUtils}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +class ReactiveMongoDatabaseUtilsUnitTests { + + @Mock ClientSession session; + @Mock ServerSession serverSession; + @Mock ReactiveMongoDatabaseFactory databaseFactory; + @Mock MongoDatabase db; + + @Test // DATAMONGO-2265 + void isTransactionActiveShouldDetectTxViaFactory() { + + when(databaseFactory.isTransactionActive()).thenReturn(true); + + ReactiveMongoDatabaseUtils.isTransactionActive(databaseFactory) // + .as(StepVerifier::create) // + .expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-2265 + void isTransactionActiveShouldReturnFalseIfNoTxActive() { + + when(databaseFactory.isTransactionActive()).thenReturn(false); + + ReactiveMongoDatabaseUtils.isTransactionActive(databaseFactory) // + .as(StepVerifier::create) // + .expectNext(false).verifyComplete(); + } + + @Test // DATAMONGO-2265 + void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() { + + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(databaseFactory.getSession(any())).thenReturn(Mono.just(session)); + when(databaseFactory.isTransactionActive()).thenReturn(false); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + operator.execute(tx -> { + + return ReactiveMongoDatabaseUtils.isTransactionActive(databaseFactory); + }).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() { + + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + + ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.NEVER) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + 
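The three `SessionSynchronization` modes exercised by these tests differ only in when a `ClientSession` is looked up: `NEVER` skips sessions entirely, `ON_ACTUAL_TRANSACTION` joins Mongo-native transactions only, and `ALWAYS` also participates in externally managed (for example JTA) transactions. A sketch of where the mode is configured, assuming a factory from your own setup:

[source,java]
----
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

class SessionSynchronizationSketch {

	ReactiveMongoTemplate template(ReactiveMongoDatabaseFactory factory) {

		ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory);
		// opt out of session lookup unless a Mongo transaction is actually active
		template.setSessionSynchronization(SessionSynchronization.ON_ACTUAL_TRANSACTION);
		return template;
	}
}
----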
verify(databaseFactory, never()).getSession(any()); + verify(databaseFactory, never()).withSession(any(ClientSession.class)); + } + + @Test // DATAMONGO-2265 + void shouldNotStartSessionWhenNoTransactionOngoing() { + + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + + ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + verify(databaseFactory, never()).getSession(any()); + verify(databaseFactory, never()).withSession(any(ClientSession.class)); + } + + @Test // DATAMONGO-2265 + void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() { + + when(session.getServerSession()).thenReturn(serverSession); + when(databaseFactory.getSession(any())).thenReturn(Mono.just(session)); + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + when(session.abortTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + operator.execute(tx -> { + + return TransactionSynchronizationManager.forCurrentTransaction().doOnNext(synchronizationManager -> { + + assertThat(synchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(tx.isNewTransaction()).isTrue(); + + assertThat(synchronizationManager.hasResource(databaseFactory)).isTrue(); + + }).then(Mono.fromRunnable(tx::setRollbackOnly)); + }).as(StepVerifier::create).verifyComplete(); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoTransactionManagerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoTransactionManagerUnitTests.java new file mode 100644 index 0000000000..9dbb2d550d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoTransactionManagerUnitTests.java @@ -0,0 +1,252 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
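`ReactiveMongoTransactionManager`, covered next, is usually consumed through `TransactionalOperator` rather than driven directly. A minimal sketch of the decoration the following unit tests verify with mocked sessions:

[source,java]
----
import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoTransactionManager;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.transaction.reactive.TransactionalOperator;

class ReactiveTxSketch {

	Mono<Void> dropInTx(ReactiveMongoDatabaseFactory factory) {

		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(factory);
		ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory);

		// nothing happens until subscription: the operator starts the transaction,
		// commits on completion and aborts on error
		return TransactionalOperator.create(txManager) //
				.transactional(template.dropCollection("scratch"));
	}
}
----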
+ */ +package org.springframework.data.mongodb; + +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.session.ServerSession; + +/** + * Unit tests for {@link ReactiveMongoTransactionManager}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +class ReactiveMongoTransactionManagerUnitTests { + + @Mock ClientSession session; + @Mock ClientSession session2; + @Mock ServerSession serverSession; + @Mock ReactiveMongoDatabaseFactory databaseFactory; + @Mock ReactiveMongoDatabaseFactory databaseFactory2; + @Mock MongoDatabase db; + @Mock MongoDatabase db2; + + @BeforeEach + void setUp() { + when(databaseFactory.getSession(any())).thenReturn(Mono.just(session), Mono.just(session2)); + when(databaseFactory.withSession(session)).thenReturn(databaseFactory); + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(session.getServerSession()).thenReturn(serverSession); + } + + @Test // DATAMONGO-2265 + void triggerCommitCorrectly() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + + }).as(operator::transactional) // + .as(StepVerifier::create) // + .verifyComplete(); + + verify(databaseFactory).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + + verify(session).close(); + } + + @Test // DATAMONGO-2265 + void participateInOnGoingTransactionWithCommit() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + }).as(StepVerifier::create).verifyComplete(); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + }).as(operator::transactional) // + .as(StepVerifier::create) // + .verifyComplete(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-2265 + void participateInOnGoingTransactionWithRollbackOnly() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + 
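The rollback-only path being set up here deserves a note: a reactive transaction can be aborted without raising an error by marking the `ReactiveTransaction` handle, which is what the test does inside `template.execute`. A sketch with a hypothetical `writes` publisher:

[source,java]
----
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.transaction.reactive.TransactionalOperator;

class RollbackOnlySketch {

	// run the writes, then force an abort instead of a commit, no exception needed
	Flux<Void> dryRun(TransactionalOperator operator, Mono<Void> writes) {
		return operator.execute(tx -> writes.doOnSuccess(done -> tx.setRollbackOnly()));
	}
}
----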
when(session.abortTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + operator.execute(tx -> { + + return template.execute(db -> { + db.drop(); + tx.setRollbackOnly(); + return Mono.empty(); + }); + }).as(StepVerifier::create).verifyComplete(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-2265 + void suspendTransactionWhilePropagationNotSupported() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator outer = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + DefaultTransactionDefinition definition = new DefaultTransactionDefinition(); + definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_NOT_SUPPORTED); + TransactionalOperator inner = TransactionalOperator.create(txManager, definition); + + outer.execute(tx1 -> { + + return template.execute(db -> { + + db.drop(); + + return inner.execute(tx2 -> { + return template.execute(db2 -> { + db2.drop(); + return Mono.empty(); + }); + }); + }); + }).as(StepVerifier::create).verifyComplete(); + + verify(session).startTransaction(); + verify(session2, never()).startTransaction(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + verify(databaseFactory, never()).withSession(eq(session2)); + + verify(db, times(2)).drop(); + + verify(session2, never()).close(); + } + + @Test // DATAMONGO-2265 + void suspendTransactionWhilePropagationRequiresNew() { + + when(databaseFactory.withSession(session2)).thenReturn(databaseFactory2); + when(databaseFactory2.getMongoDatabase()).thenReturn(Mono.just(db2)); + when(session2.getServerSession()).thenReturn(serverSession); + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + when(session2.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator outer = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + DefaultTransactionDefinition definition = new DefaultTransactionDefinition(); + definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + TransactionalOperator inner = TransactionalOperator.create(txManager, definition); + + outer.execute(tx1 -> { + + return template.execute(db -> { + + db.drop(); + + return inner.execute(tx2 -> { + return template.execute(db2 -> { + db2.drop(); + return Mono.empty(); + }); + }); + }); + }).as(StepVerifier::create).verifyComplete(); + + verify(session).startTransaction(); + verify(session2).startTransaction(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + verify(databaseFactory).withSession(eq(session2)); + + verify(db).drop(); + verify(db2).drop(); + + verify(session).close(); + verify(session2).close(); + } + + @Test // DATAMONGO-2265 + void readonlyShouldInitiateASessionStartAndCommitTransaction() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + 
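As the read-only tests around this point show, MongoDB has no dedicated read-only transaction mode: `setReadOnly(true)` still yields a full session-backed transaction that is started and then committed or aborted. The definition is built like any other, for example:

[source,java]
----
import org.springframework.transaction.ReactiveTransactionManager;
import org.springframework.transaction.reactive.TransactionalOperator;
import org.springframework.transaction.support.DefaultTransactionDefinition;

class ReadOnlyTxSketch {

	TransactionalOperator readOnlyOperator(ReactiveTransactionManager txManager) {

		DefaultTransactionDefinition definition = new DefaultTransactionDefinition();
		definition.setReadOnly(true); // a hint; Mongo still starts and commits the tx
		return TransactionalOperator.create(txManager, definition);
	}
}
----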
when(session.commitTransaction()).thenReturn(Mono.empty()); + + DefaultTransactionDefinition readonlyTxDefinition = new DefaultTransactionDefinition(); + readonlyTxDefinition.setReadOnly(true); + TransactionalOperator operator = TransactionalOperator.create(txManager, readonlyTxDefinition); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + + }).as(operator::transactional) // + .as(StepVerifier::create) // + .verifyComplete(); + + verify(databaseFactory).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionIntegrationTests.java new file mode 100644 index 0000000000..a6135939de --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionIntegrationTests.java @@ -0,0 +1,613 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static java.util.UUID.*; +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; + +import org.bson.types.ObjectId; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledIfSystemProperty; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.SetSystemProperty; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.annotation.EnableTransactionManagement; +import 
org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for reactive transaction management. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Yan Kardziyaka + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +@EnableIfReplicaSetAvailable +@DisabledIfSystemProperty(named = "user.name", matches = "jenkins") +@SetSystemProperty(key = "tx.read.concern", value = "local") +public class ReactiveTransactionIntegrationTests { + + private static final String DATABASE = "rxtx-test"; + + static @Client MongoClient mongoClient; + static GenericApplicationContext context; + + PersonService personService; + ReactiveMongoOperations operations; + ReactiveTransactionOptionsTestService<Person> transactionOptionsTestService; + CapturingTransactionOptionsResolver transactionOptionsResolver; + + @BeforeAll + public static void init() { + context = new AnnotationConfigApplicationContext(TestMongoConfig.class, PersonService.class); + } + + @AfterAll + public static void after() { + context.close(); + } + + @BeforeEach + public void setUp() { + + personService = context.getBean(PersonService.class); + operations = context.getBean(ReactiveMongoOperations.class); + transactionOptionsTestService = context.getBean(ReactiveTransactionOptionsTestService.class); + transactionOptionsResolver = context.getBean(CapturingTransactionOptionsResolver.class); + transactionOptionsResolver.clear(); // clean out left overs from dirty context + + try (MongoClient client = MongoTestUtils.reactiveClient()) { + + Flux.merge( // + MongoTestUtils.createOrReplaceCollection(DATABASE, operations.getCollectionName(Person.class), client), + MongoTestUtils.createOrReplaceCollection(DATABASE, operations.getCollectionName(EventLog.class), client) // + ).then().as(StepVerifier::create).thenAwait(Duration.ofMillis(100)).verifyComplete(); + } + } + + @Test // DATAMONGO-2265 + public void shouldRollbackAfterException() { + + personService.savePersonErrors(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .verifyError(RuntimeException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void shouldRollbackAfterExceptionOfTxAnnotatedMethod() { + + personService.declarativeSavePersonErrors(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .verifyError(RuntimeException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void commitShouldPersistTxEntries() { + + personService.savePerson(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .thenAwait(Duration.ofMillis(100)) + .expectNextCount(1) // + .verifyComplete(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void commitShouldPersistTxEntriesOfTxAnnotatedMethod() { + + personService.declarativeSavePerson(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // 
.expectNextCount(1) // + .verifyComplete(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void commitShouldPersistTxEntriesAcrossCollections() { + + personService.saveWithLogs(new Person(null, "Walter", "White")) // + .then() // + .as(StepVerifier::create) // + .verifyComplete(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + operations.count(new Query(), EventLog.class) // + .as(StepVerifier::create) // + .expectNext(4L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void rollbackShouldAbortAcrossCollections() { + + personService.saveWithErrorLogs(new Person(null, "Walter", "White")) // + .then() // + .as(StepVerifier::create) // + .verifyError(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + + operations.count(new Query(), EventLog.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void countShouldWorkInsideTransaction() { + + personService.countDuringTx(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void emitMultipleElementsDuringTransaction() { + + personService.saveWithLogs(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .expectNextCount(4L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void errorAfterTxShouldNotAffectPreviousStep() { + + personService.savePerson(new Person(null, "Walter", "White")) // + .delayElement(Duration.ofMillis(10)) // + .then(Mono.error(new RuntimeException("my big bad evil error"))).as(StepVerifier::create) // + .expectError() // + .verify(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidMaxCommitTime() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.saveWithInvalidMaxCommitTime(person) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldCommitOnTransactionWithinMaxCommitTime() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.saveWithinMaxCommitTime(person) // + .as(StepVerifier::create) // + .expectNext(person) // + .verifyComplete(); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(Duration.ofMinutes(1), + MongoTransactionOptions::getMaxCommitTime); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowInvalidDataAccessApiUsageExceptionOnTransactionWithAvailableReadConcern() { + transactionOptionsTestService.availableReadConcernFind(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(InvalidDataAccessApiUsageException.class); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidReadConcern() { + 
transactionOptionsTestService.invalidReadConcernFind(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + } + + @Test // GH-1628 + public void shouldReadTransactionOptionFromSystemProperty() { + + transactionOptionsTestService.environmentReadConcernFind(randomUUID().toString()).then().as(StepVerifier::create) + .verifyComplete(); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns( + new ReadConcern(ReadConcernLevel.fromString(System.getProperty("tx.read.concern"))), + MongoTransactionOptions::getReadConcern); + } + + @Test // GH-1628 + public void shouldNotThrowOnTransactionWithMajorityReadConcern() { + transactionOptionsTestService.majorityReadConcernFind(randomUUID().toString()) // + .as(StepVerifier::create) // + .expectNextCount(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowUncategorizedMongoDbExceptionOnTransactionWithPrimaryPreferredReadPreference() { + transactionOptionsTestService.findFromPrimaryPreferredReplica(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(UncategorizedMongoDbException.class); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidReadPreference() { + transactionOptionsTestService.findFromInvalidReplica(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + } + + @Test // GH-1628 + public void shouldNotThrowOnTransactionWithPrimaryReadPreference() { + transactionOptionsTestService.findFromPrimaryReplica(randomUUID().toString()) // + .as(StepVerifier::create) // + .expectNextCount(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithUnacknowledgedWriteConcern() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.unacknowledgedWriteConcernSave(person) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create).expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidWriteConcern() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.invalidWriteConcernSave(person) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldCommitOnTransactionWithAcknowledgedWriteConcern() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.acknowledgedWriteConcernSave(person) // + .as(StepVerifier::create) // + .expectNext(person) // + .verifyComplete(); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(WriteConcern.ACKNOWLEDGED, + MongoTransactionOptions::getWriteConcern); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Configuration + @EnableTransactionManagement + static class TestMongoConfig extends AbstractReactiveMongoConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected String 
getDatabaseName() { + return DATABASE; + } + + @Bean + CapturingTransactionOptionsResolver txOptionsResolver() { + return new CapturingTransactionOptionsResolver(MongoTransactionOptionsResolver.defaultResolver()); + } + + @Bean + public ReactiveMongoTransactionManager txManager(ReactiveMongoDatabaseFactory factory, + MongoTransactionOptionsResolver txOptionsResolver) { + return new ReactiveMongoTransactionManager(factory, txOptionsResolver, MongoTransactionOptions.NONE); + } + + @Bean + public ReactiveTransactionOptionsTestService<Person> transactionOptionsTestService( + ReactiveMongoOperations operations) { + return new ReactiveTransactionOptionsTestService<>(operations, Person.class); + } + + @Override + protected Set<Class<?>> getInitialEntitySet() { + return Collections.singleton(Person.class); + } + } + + static class PersonService { + + final ReactiveMongoOperations operations; + final ReactiveMongoTransactionManager manager; + + PersonService(ReactiveMongoOperations operations, ReactiveMongoTransactionManager manager) { + + this.operations = operations; + this.manager = manager; + } + + public Mono<Person> savePersonErrors(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return operations.save(person) // + .flatMap(it -> Mono.error(new RuntimeException("poof"))) // + .as(transactionalOperator::transactional); + } + + public Mono<Person> savePerson(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return operations.save(person) // + .flatMap(Mono::just) // + .as(transactionalOperator::transactional); + } + + public Mono<Long> countDuringTx(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return operations.save(person) // + .then(operations.count(new Query(), Person.class)) // + .as(transactionalOperator::transactional); + } + + public Flux<EventLog> saveWithLogs(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return Flux.concat(operations.save(new EventLog(new ObjectId(), "beforeConvert")), // + operations.save(new EventLog(new ObjectId(), "afterConvert")), // + operations.save(new EventLog(new ObjectId(), "beforeInsert")), // + operations.save(person), // + operations.save(new EventLog(new ObjectId(), "afterInsert"))) // + .thenMany(operations.query(EventLog.class).all()) // + .as(transactionalOperator::transactional); + } + + public Flux<EventLog> saveWithErrorLogs(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return Flux.concat(operations.save(new EventLog(new ObjectId(), "beforeConvert")), // + operations.save(new EventLog(new ObjectId(), "afterConvert")), // + operations.save(new EventLog(new ObjectId(), "beforeInsert")), // + operations.save(person), // + operations.save(new EventLog(new ObjectId(), "afterInsert"))) // + .flatMap(it -> Mono.error(new RuntimeException("poof"))) // + .as(transactionalOperator::transactional); + } + + @Transactional(transactionManager = "txManager") + public Flux<Person> declarativeSavePerson(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return transactionalOperator.execute(reactiveTransaction -> { + return operations.save(person); + }); + } + + @Transactional(transactionManager = "txManager") + public Flux<Person> declarativeSavePersonErrors(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return transactionalOperator.execute(reactiveTransaction -> { + + return operations.save(person) // + .flatMap(it -> Mono.error(new RuntimeException("poof"))); + }); + } + } + + @Document("person-rx") + static class Person { + + ObjectId id; + String firstname, lastname; + + Person(ObjectId id, String firstname, String lastname) { + this.id = id; + this.firstname = firstname; + this.lastname = lastname; + } + + public ObjectId getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname); + } + + public String toString() { + return "ReactiveTransactionIntegrationTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ")"; + } + } + + static class EventLog { + + ObjectId id; + String action; + + public EventLog(ObjectId id, String action) { + this.id = id; + this.action = action; + } + + public ObjectId getId() { + return this.id; + } + + public String getAction() { + return this.action; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public void setAction(String action) { + this.action = action; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EventLog eventLog = (EventLog) o; + return Objects.equals(id, eventLog.id) && Objects.equals(action, eventLog.action); + } + + @Override + public int hashCode() { + return Objects.hash(id, action); + } + + public String toString() { + return "ReactiveTransactionIntegrationTests.EventLog(id=" + this.getId() + ", action=" + this.getAction() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionOptionsTestService.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionOptionsTestService.java new file mode 100644 index 0000000000..98280b287a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionOptionsTestService.java @@ -0,0 +1,101 @@ +/* + * Copyright 2023-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; + +import java.util.function.Function; + +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.transaction.annotation.Transactional; + +/** + * Helper class for integration tests of {@link Transactional#label()} MongoDB options in a reactive context. + * + * @param <T> root document type + * @author Yan Kardziyaka + * @see org.springframework.data.mongodb.core.TransactionOptionsTestService + */ +public class ReactiveTransactionOptionsTestService<T> { + private final Function<Object, Mono<T>> findByIdFunction; + + private final Function<T, Mono<T>> saveFunction; + + public ReactiveTransactionOptionsTestService(ReactiveMongoOperations operations, Class<T> entityClass) { + this.findByIdFunction = id -> operations.findById(id, entityClass); + this.saveFunction = operations::save; + } + + @Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=-PT6H3M" }) + public Mono<T> saveWithInvalidMaxCommitTime(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=PT1M" }) + public Mono<T> saveWithinMaxCommitTime(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=available" }) + public Mono<T> availableReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=invalid" }) + public Mono<T> invalidReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=${tx.read.concern}" }) + public Mono<T> environmentReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=majority" }) + public Mono<T> majorityReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primaryPreferred" }) + public Mono<T> findFromPrimaryPreferredReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=invalid" }) + public Mono<T> findFromInvalidReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primary" }) + public Mono<T> findFromPrimaryReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=unacknowledged" }) + public Mono<T> unacknowledgedWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=invalid" }) + public Mono<T> invalidWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=acknowledged" }) + public Mono<T> acknowledgedWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SessionAwareMethodInterceptorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SessionAwareMethodInterceptorUnitTests.java new file mode 100644 index 0000000000..0027fd89a4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SessionAwareMethodInterceptorUnitTests.java @@ -0,0 +1,184 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor.MethodCache; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Unit tests for {@link SessionAwareMethodInterceptor}.
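+ * <p>The tests below verify that invocations on the proxied collection and database are redirected to the + * method overloads accepting a {@link ClientSession}, and that the reflective method lookups are cached.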
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +public class SessionAwareMethodInterceptorUnitTests { + + @Mock ClientSession session; + @Mock MongoCollection targetCollection; + @Mock MongoDatabase targetDatabase; + + MongoCollection collection; + MongoDatabase database; + + @BeforeEach + public void setUp() { + + collection = createProxyInstance(session, targetCollection, MongoCollection.class); + database = createProxyInstance(session, targetDatabase, MongoDatabase.class); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnCollectionDelegatesToMethodWithSession() { + + collection.find(); + + verify(targetCollection).find(eq(session)); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnCollectionWithSessionInArgumentListProceedsWithExecution() { + + ClientSession yetAnotherSession = mock(ClientSession.class); + collection.find(yetAnotherSession); + + verify(targetCollection).find(eq(yetAnotherSession)); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnDatabaseDelegatesToMethodWithSession() { + + database.drop(); + + verify(targetDatabase).drop(eq(session)); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnDatabaseWithSessionInArgumentListProceedsWithExecution() { + + ClientSession yetAnotherSession = mock(ClientSession.class); + database.drop(yetAnotherSession); + + verify(targetDatabase).drop(eq(yetAnotherSession)); + } + + @Test // DATAMONGO-1880 + public void justMoveOnIfNoOverloadWithSessionAvailable() { + + collection.getReadPreference(); + + verify(targetCollection).getReadPreference(); + } + + @Test // DATAMONGO-1880 + public void usesCacheForMethodLookup() { + + MethodCache cache = (MethodCache) ReflectionTestUtils.getField(SessionAwareMethodInterceptor.class, "METHOD_CACHE"); + Method countMethod = ClassUtils.getMethod(MongoCollection.class, "countDocuments"); + + assertThat(cache.contains(countMethod, MongoCollection.class)).isFalse(); + + collection.countDocuments(); + + assertThat(cache.contains(countMethod, MongoCollection.class)).isTrue(); + } + + @Test // DATAMONGO-1880 + public void cachesNullForMethodsThatDoNotHaveASessionOverload() { + + MethodCache cache = (MethodCache) ReflectionTestUtils.getField(SessionAwareMethodInterceptor.class, "METHOD_CACHE"); + Method readConcernMethod = ClassUtils.getMethod(MongoCollection.class, "getReadConcern"); + + assertThat(cache.contains(readConcernMethod, MongoCollection.class)).isFalse(); + + collection.getReadConcern(); + + collection.getReadConcern(); + + assertThat(cache.contains(readConcernMethod, MongoCollection.class)).isTrue(); + assertThat(cache.lookup(readConcernMethod, MongoCollection.class, ClientSession.class)).isEmpty(); + } + + @Test // DATAMONGO-1880 + public void proxiesNewDbInstanceReturnedByMethod() { + + MongoDatabase otherDb = mock(MongoDatabase.class); + when(targetDatabase.withCodecRegistry(any())).thenReturn(otherDb); + + MongoDatabase target = database.withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry()); + assertThat(target).isInstanceOf(Proxy.class).isNotSameAs(database).isNotSameAs(targetDatabase); + + target.drop(); + + verify(otherDb).drop(eq(session)); + } + + @Test // DATAMONGO-1880 + public void proxiesNewCollectionInstanceReturnedByMethod() { + + MongoCollection otherCollection = mock(MongoCollection.class); + when(targetCollection.withCodecRegistry(any())).thenReturn(otherCollection); + + MongoCollection target = collection.withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry()); + 
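// The interceptor re-proxies MongoCollection/MongoDatabase instances returned from methods such as
+ // withCodecRegistry, so the drop() call below still delegates to the session-bound overload.
+ 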
assertThat(target).isInstanceOf(Proxy.class).isNotSameAs(collection).isNotSameAs(targetCollection); + + target.drop(); + + verify(otherCollection).drop(eq(session)); + } + + private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, MongoCollection collection) { + return createProxyInstance(session, collection, MongoCollection.class); + } + + private T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class targetType) { + + ProxyFactory factory = new ProxyFactory(); + factory.setTarget(target); + factory.setInterfaces(targetType); + factory.setOpaque(true); + + factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, + this::proxyDatabase, MongoCollection.class, this::proxyCollection)); + + return targetType.cast(factory.getProxy()); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SpringDataMongoDBTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SpringDataMongoDBTests.java new file mode 100644 index 0000000000..09b8a428fa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SpringDataMongoDBTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * @author Christoph Strobl + */ +class SpringDataMongoDBTests { + + @Test // DATAMONGO-2427 + void driverInformationHoldsSpringDataHint() { + assertThat(SpringDataMongoDB.driverInformation().getDriverNames()).contains("spring-data"); + } + + @Test // DATAMONGO-2427 + void versionIsDetectedFromPackage() { + assertThat(SpringDataMongoDB.version()).isNotNull(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessorUnitTests.java new file mode 100644 index 0000000000..c900e20a3b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessorUnitTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.aot.generate.ClassNameGenerator; +import org.springframework.aot.generate.DefaultGenerationContext; +import org.springframework.aot.generate.GenerationContext; +import org.springframework.aot.generate.InMemoryGeneratedFiles; +import org.springframework.aot.hint.predicate.RuntimeHintsPredicates; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.javapoet.ClassName; + +/** + * Unit tests for {@link LazyLoadingProxyAotProcessor}. + * + * @author Christoph Strobl + */ +class LazyLoadingProxyAotProcessorUnitTests { + + @Test // GH-4351 + void registersProxyForLazyDbRefCorrectlyWhenTypeIsCollectionInterface() { + + GenerationContext ctx = new DefaultGenerationContext(new ClassNameGenerator(ClassName.get(this.getClass())), + new InMemoryGeneratedFiles()); + + new LazyLoadingProxyAotProcessor().registerLazyLoadingProxyIfNeeded(A.class, ctx); + + assertThat(ctx.getRuntimeHints()) + .satisfies(RuntimeHintsPredicates.proxies().forInterfaces(java.util.Collection.class, + org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class, java.util.List.class, + org.springframework.aop.SpringProxy.class, org.springframework.aop.framework.Advised.class, + org.springframework.core.DecoratingProxy.class)::test); + } + + static class A { + + String id; + + @DBRef(lazy = true) // + List listRef; + } + + static class B { + String id; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/MongoRuntimeHintsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/MongoRuntimeHintsUnitTests.java new file mode 100644 index 0000000000..0f9ecb911e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/MongoRuntimeHintsUnitTests.java @@ -0,0 +1,129 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.aot.hint.MemberCategory.*; +import static org.springframework.aot.hint.predicate.RuntimeHintsPredicates.*; + +import java.util.function.Predicate; + +import org.junit.jupiter.api.Test; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.test.util.ClassPathExclusions; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.UnixServerAddress; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * Unit Tests for {@link MongoRuntimeHints}. 
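+ * <p>Verifies that reflection hints for driver types are only registered when the corresponding sync or + * reactive MongoDB client is present on the test classpath.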
+ * + * @author Christoph Strobl + */ +@SuppressWarnings("deprecation") +class MongoRuntimeHintsUnitTests { + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.client", "com.mongodb.reactivestreams.client" }) + void shouldRegisterGeneralCompatibilityHints() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(MongoClientSettings.class) + .withMemberCategory(INVOKE_PUBLIC_METHODS) + .and(reflection().onType(MongoClientSettings.Builder.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(IndexOptions.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(ServerAddress.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(UnixServerAddress.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(TypeReference.of("com.mongodb.connection.StreamFactoryFactory")) + .withMemberCategory(INTROSPECT_PUBLIC_METHODS)); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.reactivestreams.client" }) + void shouldRegisterSyncCompatibilityHintsIfPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(MapReduceIterable.class) + .withMemberCategory(INVOKE_PUBLIC_METHODS) + .and(reflection().onType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS)); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.client" }) + void shouldNotRegisterSyncCompatibilityHintsIfClientNotPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(TypeReference.of("com.mongodb.client.MapReduceIterable")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate() + .and(reflection().onType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate()); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.client" }) + void shouldRegisterReactiveCompatibilityHintsIfPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(MapReducePublisher.class) + .withMemberCategory(INVOKE_PUBLIC_METHODS) + .and(reflection().onType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS)); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.reactivestreams.client" }) + void shouldNotRegisterReactiveCompatibilityHintsIfClientNotPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection() + .onType(TypeReference.of("com.mongodb.reactivestreams.client.MapReducePublisher")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate() + 
.and(reflection().onType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate()); + + assertThat(runtimeHints).matches(expected); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/classloading/HidingClassLoader.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/classloading/HidingClassLoader.java new file mode 100644 index 0000000000..d809101f73 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/classloading/HidingClassLoader.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.classloading; + +import java.net.URLClassLoader; +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Collectors; + +import org.springframework.instrument.classloading.ShadowingClassLoader; +import org.springframework.util.Assert; + +/** + * is intended for testing code that depends on the presence/absence of certain classes. Classes can be: + *
+ * <ul> + * <li>shadowed: reloaded by this classloader no matter if they are loaded already by the SystemClassLoader</li> + * <li>hidden: not loaded by this classloader no matter if they are loaded already by the SystemClassLoader. Trying to + * load these classes results in a {@link ClassNotFoundException}</li> + * <li>all other classes get loaded by the SystemClassLoader</li> + * </ul>
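+ * <p> + * Illustrative usage (sketch only; any class whose package should be hidden works the same way): + * <pre> + * ClassLoader loader = HidingClassLoader.hide(com.mongodb.client.MongoClient.class); + * loader.loadClass("com.mongodb.client.MongoClient"); // throws ClassNotFoundException + * </pre>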
                    + * + * @author Jens Schauder + * @author Oliver Gierke + * @author Christoph Strobl + */ +public class HidingClassLoader extends ShadowingClassLoader { + + private final Collection hidden; + + public HidingClassLoader(String... hidden) { + this(Arrays.asList(hidden)); + } + + public HidingClassLoader(Collection hidden) { + + super(URLClassLoader.getSystemClassLoader(), false); + + this.hidden = hidden; + } + + /** + * Creates a new {@link HidingClassLoader} with the packages of the given classes hidden. + * + * @param packages must not be {@literal null}. + * @return + */ + public static HidingClassLoader hide(Class... packages) { + + Assert.notNull(packages, "Packages must not be null"); + + return new HidingClassLoader(Arrays.stream(packages)// + .map(it -> it.getPackage().getName())// + .collect(Collectors.toList())); + } + + public static HidingClassLoader hideTypes(Class... types) { + + Assert.notNull(types, "Types must not be null!"); + + return new HidingClassLoader(Arrays.stream(types)// + .map(it -> it.getName())// + .collect(Collectors.toList())); + } + + @Override + public Class loadClass(String name) throws ClassNotFoundException { + + Class loaded = super.loadClass(name); + checkIfHidden(loaded); + return loaded; + } + + @Override + protected boolean isEligibleForShadowing(String className) { + return isExcluded(className); + } + + @Override + protected Class findClass(String name) throws ClassNotFoundException { + + Class loaded = super.findClass(name); + checkIfHidden(loaded); + return loaded; + } + + private void checkIfHidden(Class type) throws ClassNotFoundException { + + if (hidden.stream().anyMatch(it -> type.getName().startsWith(it))) { + throw new ClassNotFoundException(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java index 69b0297353..b7f945f2a3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,38 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.After; -import org.junit.Before; -import org.junit.runner.RunWith; +import java.util.Collections; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; /** * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration public abstract class AbstractIntegrationTests { @Configuration - static class TestConfig extends AbstractMongoConfiguration { + static class TestConfig extends MongoClientClosingTestConfiguration { @Override protected String getDatabaseName() { @@ -51,24 +54,34 @@ protected String getDatabaseName() { } @Override - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return MongoTestUtils.client(); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Override + protected boolean autoIndexCreation() { + return true; } } @Autowired MongoOperations operations; - @Before - @After + @BeforeEach + @AfterEach public void cleanUp() { for (String collectionName : operations.getCollectionNames()) { if (!collectionName.startsWith("system")) { operations.execute(collectionName, new CollectionCallback() { @Override - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { - collection.remove(new BasicDBObject()); - assertThat(collection.find().hasNext(), is(false)); + public Void doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + collection.deleteMany(new Document()); + assertThat(collection.find().iterator().hasNext()).isFalse(); return null; } }); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java index bf4c0d01aa..b16cb6961a 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,83 +15,77 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import example.first.First; +import example.second.Second; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import org.junit.jupiter.api.Test; + +import org.mockito.Mockito; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.support.AbstractApplicationContext; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoManagedTypes; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.MongoTypeMapper; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** - * Unit tests for {@link AbstractMongoConfiguration}. - * + * Unit tests for {@link AbstractMongoClientConfiguration}. 
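+ * <p>Covers base-package scanning, bean contributions such as {@link MongoDatabaseFactory}, and customization + * of the {@link MappingMongoConverter} via Java config.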
+ * * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch */ public class AbstractMongoConfigurationUnitTests { - @Rule public ExpectedException exception = ExpectedException.none(); - - /** - * @see DATAMONGO-496 - */ - @Test + @Test // DATAMONGO-496 public void usesConfigClassPackageAsBaseMappingPackage() throws ClassNotFoundException { - AbstractMongoConfiguration configuration = new SampleMongoConfiguration(); - assertThat(configuration.getMappingBasePackage(), is(SampleMongoConfiguration.class.getPackage().getName())); - assertThat(configuration.getInitialEntitySet(), hasSize(1)); - assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class)); + AbstractMongoClientConfiguration configuration = new SampleMongoConfiguration(); + assertThat(configuration.getMappingBasePackages()) + .containsExactly(SampleMongoConfiguration.class.getPackage().getName()); + assertThat(configuration.getInitialEntitySet()).hasSize(2); + assertThat(configuration.getInitialEntitySet()).contains(Entity.class); } - /** - * @see DATAMONGO-496 - */ - @Test + @Test // DATAMONGO-496 public void doesNotScanPackageIfMappingPackageIsNull() throws ClassNotFoundException { - assertScanningDisabled(null); - } - /** - * @see DATAMONGO-496 - */ - @Test + @Test // DATAMONGO-496 public void doesNotScanPackageIfMappingPackageIsEmpty() throws ClassNotFoundException { assertScanningDisabled(""); assertScanningDisabled(" "); } - /** - * @see DATAMONGO-569 - */ - @Test + @Test // DATAMONGO-569 public void containsMongoDbFactoryButNoMongoBean() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); - assertThat(context.getBean(MongoDbFactory.class), is(notNullValue())); + assertThat(context.getBean(MongoDatabaseFactory.class)).isNotNull(); + assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() -> context.getBean(MongoClient.class)); - exception.expect(NoSuchBeanDefinitionException.class); - context.getBean(Mongo.class); context.close(); } @@ -99,66 +93,65 @@ public void containsMongoDbFactoryButNoMongoBean() { public void returnsUninitializedMappingContext() throws Exception { SampleMongoConfiguration configuration = new SampleMongoConfiguration(); - MongoMappingContext context = configuration.mongoMappingContext(); + MongoMappingContext context = configuration.mongoMappingContext(configuration.customConversions(), + MongoManagedTypes.from(Entity.class)); - assertThat(context.getPersistentEntities(), is(emptyIterable())); + assertThat(context.getPersistentEntities()).isEmpty(); context.initialize(); - assertThat(context.getPersistentEntities(), is(not(emptyIterable()))); + assertThat(context.getPersistentEntities()).isNotEmpty(); } - /** - * @see DATAMONGO-717 - */ - @Test + @Test // DATAMONGO-717 public void lifecycleCallbacksAreInvokedInAppropriateOrder() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class); - BasicMongoPersistentEntity entity = mappingContext.getPersistentEntity(Entity.class); - StandardEvaluationContext spElContext = (StandardEvaluationContext) ReflectionTestUtils.getField(entity, "context"); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(Entity.class); + EvaluationContextProvider provider = (EvaluationContextProvider) ReflectionTestUtils.getField(entity, + "evaluationContextProvider"); - assertThat(spElContext.getBeanResolver(), 
is(notNullValue())); + assertThat(provider).isInstanceOf(ExtensionAwareEvaluationContextProvider.class); context.close(); } - /** - * @see DATAMONGO-725 - */ - @Test + @Test // DATAMONGO-725 public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class); MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class); - assertThat(mmc, is(notNullValue())); - assertThat(mmc.getTypeMapper(), is(typeMapper)); + assertThat(mmc).isNotNull(); + assertThat(mmc.getTypeMapper()).isEqualTo(typeMapper); context.close(); } - /** - * @see DATAMONGO-789 - */ - @Test - public void authenticationDatabaseShouldDefaultToNull() { - assertThat(new SampleMongoConfiguration().getAuthenticationDatabaseName(), is(nullValue())); + @Test // DATAMONGO-1470 + @SuppressWarnings("unchecked") + public void allowsMultipleEntityBasePackages() throws ClassNotFoundException { + + ConfigurationWithMultipleBasePackages config = new ConfigurationWithMultipleBasePackages(); + Set> entities = config.getInitialEntitySet(); + + assertThat(entities).hasSize(2); + assertThat(entities).contains(First.class, Second.class); } private static void assertScanningDisabled(final String value) throws ClassNotFoundException { - AbstractMongoConfiguration configuration = new SampleMongoConfiguration() { + AbstractMongoClientConfiguration configuration = new SampleMongoConfiguration() { @Override - protected String getMappingBasePackage() { - return value; + protected Collection getMappingBasePackages() { + return Collections.singleton(value); } }; - assertThat(configuration.getMappingBasePackage(), is(value)); - assertThat(configuration.getInitialEntitySet(), hasSize(0)); + assertThat(configuration.getMappingBasePackages()).contains(value); + assertThat(configuration.getInitialEntitySet()).hasSize(0); } @Configuration - static class SampleMongoConfiguration extends AbstractMongoConfiguration { + static class SampleMongoConfiguration extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -166,26 +159,44 @@ protected String getDatabaseName() { } @Override - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return Mockito.mock(MongoClient.class); } - @Bean @Override - public MappingMongoConverter mappingMongoConverter() throws Exception { - MappingMongoConverter mmc = super.mappingMongoConverter(); - mmc.setTypeMapper(typeMapper()); - return mmc; + public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext); + converter.setTypeMapper(typeMapper()); + + return converter; } @Bean public MongoTypeMapper typeMapper() { return new CustomMongoTypeMapper(); } + } - @Document - static class Entity { + static class ConfigurationWithMultipleBasePackages extends AbstractMongoClientConfiguration { + + @Override + protected String getDatabaseName() { + return "test"; + } + @Override + public MongoClient mongoClient() { + return Mockito.mock(MongoClient.class); + } + + @Override + protected Collection getMappingBasePackages() { + return Arrays.asList("example.first", "example.second"); + } } + + @Document + static class Entity {} } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java new file mode 100644 index 0000000000..2fcb44a6e2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java @@ -0,0 +1,68 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.AssertionsForInterfaceTypes.*; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link AbstractReactiveMongoConfiguration}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class AbstractReactiveMongoConfigurationIntegrationTests { + + @Autowired ApplicationContext context; + + @Test // DATAMONGO-1444 + public void contextShouldContainTemplate() { + + assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class)).isNotNull(); + assertThat(context.getBean(ReactiveMongoOperations.class)).isNotNull(); + assertThat(context.getBean(ReactiveMongoTemplate.class)).isNotNull(); + } + + @Configuration + static class ReactiveConfiguration extends AbstractReactiveMongoConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return Mockito.mock(MongoClient.class); + } + + @Override + protected String getDatabaseName() { + return "database"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java new file mode 100644 index 0000000000..6c80842556 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java @@ -0,0 +1,201 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.Assertions.*; + +import example.first.First; +import example.second.Second; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.support.AbstractApplicationContext; +import org.springframework.data.mongodb.MongoManagedTypes; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoTypeMapper; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Unit tests for {@link AbstractReactiveMongoConfiguration}. 
+ * + * @author Mark Paluch + */ +public class AbstractReactiveMongoConfigurationUnitTests { + + @Test // DATAMONGO-1444 + public void usesConfigClassPackageAsBaseMappingPackage() throws ClassNotFoundException { + + AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration(); + assertThat(configuration.getMappingBasePackages()).contains(SampleMongoConfiguration.class.getPackage().getName()); + assertThat(configuration.getInitialEntitySet()).hasSize(2); + assertThat(configuration.getInitialEntitySet()).contains(Entity.class); + } + + @Test // DATAMONGO-1444 + public void doesNotScanPackageIfMappingPackageIsNull() throws ClassNotFoundException { + assertScanningDisabled(null); + } + + @Test // DATAMONGO-1444 + public void doesNotScanPackageIfMappingPackageIsEmpty() throws ClassNotFoundException { + + assertScanningDisabled(""); + assertScanningDisabled(" "); + } + + @Test // DATAMONGO-1444 + public void containsMongoDbFactoryButNoMongoBean() { + + AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); + + assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class)).isNotNull(); + assertThatExceptionOfType(NoSuchBeanDefinitionException.class) + .isThrownBy(() -> context.getBean(com.mongodb.client.MongoClient.class)); + + context.close(); + } + + @Test // DATAMONGO-1444 + public void returnsUninitializedMappingContext() throws Exception { + + SampleMongoConfiguration configuration = new SampleMongoConfiguration(); + MongoMappingContext context = configuration.mongoMappingContext(configuration.customConversions(), + MongoManagedTypes.from(Entity.class)); + + assertThat(context.getPersistentEntities()).isEmpty(); + context.initialize(); + assertThat(context.getPersistentEntities()).isNotEmpty(); + } + + @Test // DATAMONGO-1444 + public void lifecycleCallbacksAreInvokedInAppropriateOrder() { + + AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); + MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(Entity.class); + EvaluationContextProvider provider = (EvaluationContextProvider) ReflectionTestUtils.getField(entity, + "evaluationContextProvider"); + + assertThat(provider).isInstanceOf(ExtensionAwareEvaluationContextProvider.class); + context.close(); + } + + @Test // DATAMONGO-1444 + public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() { + + AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); + MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class); + MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class); + + assertThat(mmc).isNotNull(); + assertThat(mmc.getTypeMapper()).isEqualTo(typeMapper); + context.close(); + } + + @Test // DATAMONGO-1444 + @SuppressWarnings("unchecked") + public void allowsMultipleEntityBasePackages() throws ClassNotFoundException { + + ConfigurationWithMultipleBasePackages config = new ConfigurationWithMultipleBasePackages(); + Set> entities = config.getInitialEntitySet(); + + assertThat(entities).hasSize(2); + assertThat(entities).contains(First.class, Second.class); + } + + private static void assertScanningDisabled(final String value) throws ClassNotFoundException { + + AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration() { + @Override + protected Collection getMappingBasePackages() { + 
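// A null, empty, or blank base package is expected to disable entity scanning, which the assertions that
+ // follow verify via an empty initial entity set.
+ 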
return Collections.singleton(value); + } + }; + + assertThat(configuration.getMappingBasePackages()).contains(value); + assertThat(configuration.getInitialEntitySet()).hasSize(0); + } + + @Configuration + static class SampleMongoConfiguration extends AbstractReactiveMongoConfiguration { + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + public MongoClient reactiveMongoClient() { + return Mockito.mock(MongoClient.class); + } + + @Override + public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + + MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext); + converter.setTypeMapper(typeMapper()); + + return converter; + } + + @Bean + public MongoTypeMapper typeMapper() { + return new CustomMongoTypeMapper(); + } + } + + static class ConfigurationWithMultipleBasePackages extends AbstractReactiveMongoConfiguration { + + @Override + protected String getDatabaseName() { + return "test"; + } + + @Override + public MongoClient reactiveMongoClient() { + return Mockito.mock(MongoClient.class); + } + + @Override + protected Collection getMappingBasePackages() { + return Arrays.asList("example.first", "example.second"); + } + } + + @Document + static class Entity {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java index 47cee10e51..ee411eb7c7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2015 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,30 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.Date; + +import org.junit.jupiter.api.Test; -import org.joda.time.DateTime; -import org.junit.Test; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.data.annotation.CreatedDate; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; /** * Integration test for the auditing support. 
- *
+ *
  * @author Oliver Gierke
+ * @author Mark Paluch
  */
 public class AuditingIntegrationTests {

-	/**
-	 * @see DATAMONGO-577, DATAMONGO-800, DATAMONGO-883
-	 */
-	@Test
+	@Test // DATAMONGO-577, DATAMONGO-800, DATAMONGO-883, DATAMONGO-2261
 	public void enablesAuditingAndSetsPropertiesAccordingly() throws Exception {

 		AbstractApplicationContext context = new ClassPathXmlApplicationContext("auditing.xml", getClass());
@@ -46,31 +46,32 @@ public void enablesAuditingAndSetsPropertiesAccordingly() throws Exception {
 		MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class);
 		mappingContext.getPersistentEntity(Entity.class);

+		EntityCallbacks callbacks = EntityCallbacks.create(context);
+
 		Entity entity = new Entity();
-		BeforeConvertEvent<Entity> event = new BeforeConvertEvent<Entity>(entity, "collection-1");
-		context.publishEvent(event);
+		entity = callbacks.callback(BeforeConvertCallback.class, entity, "collection-1");

-		assertThat(entity.created, is(notNullValue()));
-		assertThat(entity.modified, is(entity.created));
+		assertThat(entity.created).isNotNull();
+		assertThat(entity.modified).isEqualTo(entity.created);

 		Thread.sleep(10);
 		entity.id = 1L;

-		event = new BeforeConvertEvent<Entity>(entity, "collection-1");
-		context.publishEvent(event);
-		assertThat(entity.created, is(notNullValue()));
-		assertThat(entity.modified, is(not(entity.created)));
+		entity = callbacks.callback(BeforeConvertCallback.class, entity, "collection-1");
+
+		assertThat(entity.created).isNotNull();
+		assertThat(entity.modified).isNotEqualTo(entity.created);

 		context.close();
 	}

 	class Entity {

 		@Id Long id;
-		@CreatedDate DateTime created;
-		DateTime modified;
+		@CreatedDate Date created;
+		Date modified;

 		@LastModifiedDate
-		public DateTime getModified() {
+		public Date getModified() {
 			return modified;
 		}
 	}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java
index b6ce44ccbc..c3122d2850 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2013-2014 the original author or authors.
+ * Copyright 2013-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,59 +15,84 @@
  */
 package org.springframework.data.mongodb.config;

-import static org.hamcrest.CoreMatchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;
 import static org.mockito.Mockito.*;

-import java.net.UnknownHostException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.function.Function;

-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan.Filter;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.FilterType;
+import org.springframework.core.ResolvableType;
+import org.springframework.data.annotation.Version;
 import org.springframework.data.domain.AuditorAware;
+import org.springframework.data.mapping.callback.EntityCallback;
 import org.springframework.data.mongodb.core.AuditablePerson;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
+import org.springframework.data.mongodb.core.MongoOperations;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
+import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
+import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
 import org.springframework.data.mongodb.repository.MongoRepository;
 import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
+import org.springframework.data.mongodb.test.util.Client;
+import org.springframework.data.mongodb.test.util.MongoClientExtension;
+import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration;
+import org.springframework.data.mongodb.test.util.MongoTestUtils;
 import org.springframework.stereotype.Repository;
 import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+import org.springframework.test.util.ReflectionTestUtils;

-import com.mongodb.Mongo;
-import com.mongodb.MongoClient;
+import com.mongodb.client.MongoClient;

 /**
  * Integration tests for auditing via Java config.
- *
+ *
  * @author Thomas Darimont
  * @author Oliver Gierke
+ * @author Mark Paluch
  */
-@RunWith(SpringJUnit4ClassRunner.class)
+@ExtendWith({ MongoClientExtension.class, SpringExtension.class })
 @ContextConfiguration
-public class AuditingViaJavaConfigRepositoriesTests {
+class AuditingViaJavaConfigRepositoriesTests {
+
+	static @Client MongoClient mongoClient;

 	@Autowired AuditablePersonRepository auditablePersonRepository;
 	@Autowired AuditorAware<AuditablePerson> auditorAware;
+	@Autowired MongoMappingContext context;
+	@Autowired MongoOperations operations;
+
 	AuditablePerson auditor;

 	@Configuration
 	@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
-	@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true)
-	static class Config extends AbstractMongoConfiguration {
+	@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true,
+			includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE, classes = AuditablePersonRepository.class))
+	static class Config extends AbstractMongoClientConfiguration {

 		@Override
 		protected String getDatabaseName() {
+
 			return "database";
 		}

 		@Override
-		public Mongo mongo() throws Exception {
-			return new MongoClient();
+		public MongoClient mongoClient() {
+			return mongoClient;
 		}

 		@Bean
@@ -75,70 +100,163 @@ public Mongo mongo() throws Exception {
 		public AuditorAware<AuditablePerson> auditorProvider() {
 			return mock(AuditorAware.class);
 		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+			return new HashSet<>(
+					Arrays.asList(AuditablePerson.class, VersionedAuditablePerson.class, SimpleVersionedAuditablePerson.class));
+		}
 	}

-	@Before
-	public void setup() {
+	@BeforeEach
+	void setup() {

 		auditablePersonRepository.deleteAll();
 		this.auditor = auditablePersonRepository.save(new AuditablePerson("auditor"));
 	}

-	/**
-	 * @see DATAMONGO-792, DATAMONGO-883
-	 */
-	@Test
-	public void basicAuditing() {
+	@Test // DATAMONGO-792, DATAMONGO-883
+	void basicAuditing() {

-		doReturn(this.auditor).when(this.auditorAware).getCurrentAuditor();
+		doReturn(Optional.of(this.auditor)).when(this.auditorAware).getCurrentAuditor();

 		AuditablePerson savedUser = auditablePersonRepository.save(new AuditablePerson("user"));
 		AuditablePerson createdBy = savedUser.getCreatedBy();

-		assertThat(createdBy, is(notNullValue()));
-		assertThat(createdBy.getFirstname(), is(this.auditor.getFirstname()));
-		assertThat(savedUser.getCreatedAt(), is(notNullValue()));
+		assertThat(createdBy).isNotNull();
+		assertThat(createdBy.getFirstname()).isEqualTo(this.auditor.getFirstname());
+		assertThat(savedUser.getCreatedAt()).isNotNull();
 	}

-	/**
-	 * @see DATAMONGO-843
-	 */
-	@Test
+	@Test // DATAMONGO-843
 	@SuppressWarnings("resource")
-	public void auditingUsesFallbackMappingContextIfNoneConfiguredWithRepositories() {
+	void auditingUsesFallbackMappingContextIfNoneConfiguredWithRepositories() {
 		new AnnotationConfigApplicationContext(SimpleConfigWithRepositories.class);
 	}

-	/**
-	 * @see DATAMONGO-843
-	 */
-	@Test
+	@Test // DATAMONGO-843
 	@SuppressWarnings("resource")
-	public void auditingUsesFallbackMappingContextIfNoneConfigured() {
+	void auditingUsesFallbackMappingContextIfNoneConfigured() {
 		new AnnotationConfigApplicationContext(SimpleConfig.class);
 	}

+	@Test // DATAMONGO-2139
+	void auditingWorksForVersionedEntityWithWrapperVersion() {
+
+		verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), //
+				it -> it.version, //
+				AuditablePerson::getCreatedAt, //
+				auditablePersonRepository::save, //
+				null, 0L, 1L);
+	}
+
+	@Test // DATAMONGO-2179
+	void auditingWorksForVersionedEntityBatchWithWrapperVersion() {
+
+		verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), //
+				it -> it.version, //
+				AuditablePerson::getCreatedAt, //
+				s -> auditablePersonRepository.saveAll(Collections.singletonList(s)).get(0), //
+				null, 0L, 1L);
+	}
+
+	@Test // DATAMONGO-2139
+	void auditingWorksForVersionedEntityWithSimpleVersion() {
+
+		verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), //
+				it -> it.version, //
+				AuditablePerson::getCreatedAt, //
+				auditablePersonRepository::save, //
+				0L, 1L, 2L);
+	}
+
+	@Test // DATAMONGO-2139
+	void auditingWorksForVersionedEntityWithWrapperVersionOnTemplate() {
+
+		verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), //
+				it -> it.version, //
+				AuditablePerson::getCreatedAt, //
+				operations::save, //
+				null, 0L, 1L);
+	}
+
+	@Test // DATAMONGO-2139
+	void auditingWorksForVersionedEntityWithSimpleVersionOnTemplate() {
+
+		verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), //
+				it -> it.version, //
+				AuditablePerson::getCreatedAt, //
+				operations::save, //
+				0L, 1L, 2L);
+	}
+
+	@Test // DATAMONGO-2586
+	void auditingShouldOnlyRegisterImperativeAuditingCallback() {
+
+		Object callbacks = ReflectionTestUtils.getField(operations, "entityCallbacks");
+		Object callbackDiscoverer = ReflectionTestUtils.getField(callbacks, "callbackDiscoverer");
+		List<EntityCallback<?>> actualCallbacks = ReflectionTestUtils.invokeMethod(callbackDiscoverer, "getEntityCallbacks",
+				AuditablePerson.class, ResolvableType.forClass(EntityCallback.class));
+
+		assertThat(actualCallbacks) //
+				.hasAtLeastOneElementOfType(AuditingEntityCallback.class) //
+				.doesNotHaveAnyElementsOfTypes(ReactiveAuditingEntityCallback.class);
+	}
+
+	private <T extends AuditablePerson> void verifyAuditingViaVersionProperty(T instance,
+			Function<T, Object> versionExtractor, Function<T, Object> createdDateExtractor, Function<T, T> persister,
+			Object... expectedValues) {
+
+		MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(instance.getClass());
+
+		assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[0]);
+		assertThat(createdDateExtractor.apply(instance)).isNull();
+		assertThat(entity.isNew(instance)).isTrue();
+
+		instance = persister.apply(instance);
+
+		assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[1]);
+		assertThat(createdDateExtractor.apply(instance)).isNotNull();
+		assertThat(entity.isNew(instance)).isFalse();
+
+		instance = persister.apply(instance);
+
+		assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[2]);
+		assertThat(entity.isNew(instance)).isFalse();
+	}
+
 	@Repository
-	static interface AuditablePersonRepository extends MongoRepository<AuditablePerson, String> {}
+	interface AuditablePersonRepository extends MongoRepository<AuditablePerson, String> {}

 	@Configuration
 	@EnableMongoRepositories
+	static class SimpleConfigWithRepositories extends SimpleConfig {}
+
+	@Configuration
 	@EnableMongoAuditing
-	static class SimpleConfigWithRepositories {
+	static class SimpleConfig extends MongoClientClosingTestConfiguration {

-		@Bean
-		public MongoTemplate mongoTemplate() throws UnknownHostException {
-			return new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database"));
+		@Override
+		public MongoClient mongoClient() {
+			return MongoTestUtils.client();
 		}
-	}

-	@Configuration
-	@EnableMongoAuditing
-	static class SimpleConfig {
+		@Override
+		protected String getDatabaseName() {
+			return "database";

-		@Bean
-		public MongoTemplate mongoTemplate() throws UnknownHostException {
-			return new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database"));
+		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+			return Collections.emptySet();
 		}
 	}
+
+	static class VersionedAuditablePerson extends AuditablePerson {
+		@Version Long version;
+	}
+
+	static class SimpleVersionedAuditablePerson extends AuditablePerson {
+		@Version long version;
+	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java
index b3d1881570..0d89487955 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java
index 7efa806778..be96469878 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2015 the original author or authors.
+ * Copyright 2015-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,25 +15,24 @@
  */
 package org.springframework.data.mongodb.config;

-import static org.hamcrest.CoreMatchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;

 import org.junit.Test;
 import org.junit.runner.RunWith;
+
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.data.mongodb.core.GeoJsonConfiguration;
 import org.springframework.data.mongodb.core.geo.GeoJsonModule;
 import org.springframework.data.web.config.EnableSpringDataWebSupport;
 import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.junit4.SpringRunner;

 /**
  * Integration tests for {@link GeoJsonConfiguration}.
- *
+ *
  * @author Oliver Gierke
  */
-@RunWith(SpringJUnit4ClassRunner.class)
+@RunWith(SpringRunner.class)
 @ContextConfiguration
 public class GeoJsonConfigurationIntegrationTests {

@@ -43,11 +42,8 @@ static class Config {}

 	@Autowired GeoJsonModule geoJsonModule;

-	/**
-	 * @see DATAMONGO-1181
-	 */
-	@Test
+	@Test // DATAMONGO-1181
 	public void picksUpGeoJsonModuleConfigurationByDefault() {
-		assertThat(geoJsonModule, is(notNullValue()));
+		assertThat(geoJsonModule).isNotNull();
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java
index 5082cc3ae9..11143da832 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2014 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,15 +15,13 @@
  */
 package org.springframework.data.mongodb.config;

-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;

 import java.util.Collections;
 import java.util.Set;

-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
+import org.bson.Document;
+import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.config.BeanDefinition;
 import org.springframework.beans.factory.config.BeanReference;
 import org.springframework.beans.factory.parsing.BeanDefinitionParsingException;
@@ -34,121 +32,108 @@
 import org.springframework.core.convert.converter.Converter;
 import org.springframework.core.convert.converter.GenericConverter;
 import org.springframework.core.io.ClassPathResource;
+import org.springframework.data.convert.CustomConversions;
 import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
-import org.springframework.data.mongodb.core.convert.CustomConversions;
 import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
 import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
 import org.springframework.data.mongodb.core.mapping.Account;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
 import org.springframework.data.mongodb.repository.Person;
 import org.springframework.stereotype.Component;

-import com.mongodb.DBObject;

 /**
  * Integration tests for {@link MappingMongoConverterParser}.
- *
+ *
  * @author Oliver Gierke
  * @author Thomas Darimont
  * @author Christoph Strobl
  * @author Ryan Tenney
+ * @author Tomasz Forys
  */
 public class MappingMongoConverterParserIntegrationTests {

-	@Rule public ExpectedException exception = ExpectedException.none();
+	private DefaultListableBeanFactory factory;

-	DefaultListableBeanFactory factory;
-
-	/**
-	 * @see DATAMONGO-243
-	 */
-	@Test
-	public void allowsDbFactoryRefAttribute() {
+	@Test // DATAMONGO-243
+	void allowsDbFactoryRefAttribute() {

 		loadValidConfiguration();
 		factory.getBeanDefinition("converter");
 		factory.getBean("converter");
 	}

-	/**
-	 * @see DATAMONGO-725
-	 */
-	@Test
-	public void hasCustomTypeMapper() {
+	@Test // GH-4275
+	void defaultsToFalseForAutoIndexCreation() {
+
+		loadValidConfiguration();
+		MongoMappingContext mongoMappingContext = factory.getBean("converter.mongoMappingContext",
+				MongoMappingContext.class);
+		assertThat(mongoMappingContext.isAutoIndexCreation()).isFalse();
+	}
+
+	@Test // GH-4275
+	void allowsToOverrideAutoIndexCreation() {
+
+		loadValidConfiguration();
+		MongoMappingContext mongoMappingContext = factory.getBean("autoIndexCreationConverter.mongoMappingContext",
+				MongoMappingContext.class);
+		assertThat(mongoMappingContext.isAutoIndexCreation()).isTrue();
+	}
+
+	@Test // DATAMONGO-725
+	void hasCustomTypeMapper() {

 		loadValidConfiguration();
 		MappingMongoConverter converter = factory.getBean("converter", MappingMongoConverter.class);
 		MongoTypeMapper customMongoTypeMapper = factory.getBean(CustomMongoTypeMapper.class);

-		assertThat(converter.getTypeMapper(), is(customMongoTypeMapper));
+		assertThat(converter.getTypeMapper()).isEqualTo(customMongoTypeMapper);
 	}

-	/**
-	 * @see DATAMONGO-301
-	 */
-	@Test
-	public void scansForConverterAndSetsUpCustomConversionsAccordingly() {
+	@Test // DATAMONGO-301
+	void scansForConverterAndSetsUpCustomConversionsAccordingly() {

 		loadValidConfiguration();
 		CustomConversions conversions = factory.getBean(CustomConversions.class);

-		assertThat(conversions.hasCustomWriteTarget(Person.class), is(true));
-		assertThat(conversions.hasCustomWriteTarget(Account.class), is(true));
+		assertThat(conversions.hasCustomWriteTarget(Person.class)).isTrue();
+		assertThat(conversions.hasCustomWriteTarget(Account.class)).isTrue();
 	}

-	/**
-	 * @see DATAMONGO-607
-	 */
-	@Test
-	public void activatesAbbreviatingPropertiesCorrectly() {
+	@Test // DATAMONGO-607
+	void activatesAbbreviatingPropertiesCorrectly() {

 		loadValidConfiguration();
 		BeanDefinition definition = factory.getBeanDefinition("abbreviatingConverter.mongoMappingContext");
 		Object value = definition.getPropertyValues().getPropertyValue("fieldNamingStrategy").getValue();

-		assertThat(value, is(instanceOf(BeanDefinition.class)));
+		assertThat(value).isInstanceOf(BeanDefinition.class);
 		BeanDefinition strategy = (BeanDefinition) value;
-		assertThat(strategy.getBeanClassName(), is(CamelCaseAbbreviatingFieldNamingStrategy.class.getName()));
+		assertThat(strategy.getBeanClassName()).isEqualTo(CamelCaseAbbreviatingFieldNamingStrategy.class.getName());
 	}

-	/**
-	 * @see DATAMONGO-866
-	 */
-	@Test
-	public void rejectsInvalidFieldNamingStrategyConfiguration() {
-
-		exception.expect(BeanDefinitionParsingException.class);
-		exception.expectMessage("abbreviation");
-		exception.expectMessage("field-naming-strategy-ref");
+	@Test // DATAMONGO-866
+	void rejectsInvalidFieldNamingStrategyConfiguration() {

 		BeanDefinitionRegistry factory = new DefaultListableBeanFactory();
 		XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
-		reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-invalid.xml"));
-	}
-
-	/**
-	 * @see DATAMONGO-892
-	 */
-	@Test
-	public void shouldThrowBeanDefinitionParsingExceptionIfConverterDefinedAsNestedBean() {

-		exception.expect(BeanDefinitionParsingException.class);
-		exception.expectMessage("Mongo Converter must not be defined as nested bean.");
+		assertThatExceptionOfType(BeanDefinitionParsingException.class)
+				.isThrownBy(() -> reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-invalid.xml")))
+				.withMessageContaining("abbreviation").withMessageContaining("field-naming-strategy-ref");
+	}

-		loadNestedBeanConfiguration();
+	@Test // DATAMONGO-892
+	void shouldThrowBeanDefinitionParsingExceptionIfConverterDefinedAsNestedBean() {
+		assertThatExceptionOfType(BeanDefinitionParsingException.class).isThrownBy(this::loadNestedBeanConfiguration);
 	}

-	/**
-	 * @see DATAMONGO-925, DATAMONGO-928
-	 */
-	@Test
-	public void shouldSupportCustomFieldNamingStrategy() {
+	@Test // DATAMONGO-925, DATAMONGO-928
+	void shouldSupportCustomFieldNamingStrategy() {
 		assertStrategyReferenceSetFor("mappingConverterWithCustomFieldNamingStrategy");
 	}

-	/**
-	 * @see DATAMONGO-925, DATAMONGO-928
-	 */
-	@Test
-	public void shouldNotFailLoadingConfigIfAbbreviationIsDisabledAndStrategySet() {
+	@Test // DATAMONGO-925, DATAMONGO-928
+	void shouldNotFailLoadingConfigIfAbbreviationIsDisabledAndStrategySet() {
 		assertStrategyReferenceSetFor("mappingConverterWithCustomFieldNamingStrategyAndAbbreviationDisabled");
 	}

@@ -162,13 +147,15 @@ private void loadNestedBeanConfiguration() {
 	private void loadConfiguration(String configLocation) {

 		factory = new DefaultListableBeanFactory();
+		factory.setAllowBeanDefinitionOverriding(false);
 		XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
 		reader.loadBeanDefinitions(new ClassPathResource(configLocation));
 	}

 	private static void assertStrategyReferenceSetFor(String beanId) {

-		BeanDefinitionRegistry factory = new DefaultListableBeanFactory();
+		DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
+		factory.setAllowBeanDefinitionOverriding(false);
 		XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
 		reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-fieldnamingstrategy.xml"));

@@ -176,12 +163,12 @@ private static void assertStrategyReferenceSetFor(String beanId) {
 		BeanReference value = (BeanReference) definition.getPropertyValues().getPropertyValue("fieldNamingStrategy")
 				.getValue();

-		assertThat(value.getBeanName(), is("customFieldNamingStrategy"));
+		assertThat(value.getBeanName()).isEqualTo("customFieldNamingStrategy");
 	}

 	@Component
-	public static class SampleConverter implements Converter<Person, DBObject> {
-		public DBObject convert(Person source) {
+	public static class SampleConverter implements Converter<Person, Document> {
+		public Document convert(Person source) {
 			return null;
 		}
 	}

@@ -190,7 +177,7 @@ public DBObject convert(Person source) {
 	public static class SampleConverterFactory implements GenericConverter {

 		public Set<ConvertiblePair> getConvertibleTypes() {
-			return Collections.singleton(new ConvertiblePair(Account.class, DBObject.class));
+			return Collections.singleton(new ConvertiblePair(Account.class, Document.class));
 		}

 		public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java
index d9f34df89f..6cdd99cb3b 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2012-2014 the original author or authors.
+ * Copyright 2012-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,11 +15,11 @@
  */
 package org.springframework.data.mongodb.config;

-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;

 import org.junit.Before;
 import org.junit.Test;
+
 import org.springframework.beans.factory.NoSuchBeanDefinitionException;
 import org.springframework.beans.factory.support.BeanDefinitionReader;
 import org.springframework.beans.factory.support.DefaultListableBeanFactory;
@@ -30,8 +30,7 @@
  * Integration test for creation of instance of
  * {@link org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener} by defining
  * {@code <mongo:mapping-converter/>} in context XML.
- *
- * @see DATAMONGO-36
+ *
  * @author Maciej Walkowiak
 * @author Thomas Darimont
 * @author Oliver Gierke
@@ -47,43 +46,31 @@ public void setUp() {
 		reader = new XmlBeanDefinitionReader(factory);
 	}

-	/**
-	 * @see DATAMONGO-36
-	 */
-	@Test
+	@Test // DATAMONGO-36
 	public void validatingEventListenerCreatedWithDefaultConfig() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-default.xml"));
-		assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME), is(not(nullValue())));
+		assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME)).isNotNull();
 	}

-	/**
-	 * @see DATAMONGO-36
-	 */
-	@Test
+	@Test // DATAMONGO-36
 	public void validatingEventListenerCreatedWhenValidationEnabled() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-validation-enabled.xml"));
-		assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME), is(not(nullValue())));
+		assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME)).isNotNull();
 	}

-	/**
-	 * @see DATAMONGO-36
-	 */
-	@Test(expected = NoSuchBeanDefinitionException.class)
+	@Test(expected = NoSuchBeanDefinitionException.class) // DATAMONGO-36
 	public void validatingEventListenersIsNotCreatedWhenDisabled() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-validation-disabled.xml"));
 		factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME);
 	}

-	/**
-	 * @see DATAMONGO-36
-	 */
-	@Test
+	@Test // DATAMONGO-36
 	public void validatingEventListenerCreatedWithCustomTypeMapperConfig() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-typeMapper.xml"));
-		assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME), is(not(nullValue())));
+		assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME)).isNotNull();
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java
index ad076c90d3..7a711707fd 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,34 +15,36 @@
  */
 package org.springframework.data.mongodb.config;

-import org.junit.Test;
-import org.junit.runner.RunWith;
+import static org.assertj.core.api.Assertions.*;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
+
 import org.springframework.beans.factory.support.BeanDefinitionRegistry;
 import org.springframework.core.type.AnnotationMetadata;

 /**
- * Unit tests for {@link JpaAuditingRegistrar}.
- *
- * @see DATAMONGO-792
+ * Unit tests for {@link MongoAuditingRegistrar}.
+ *
  * @author Oliver Gierke
 */
-@RunWith(MockitoJUnitRunner.class)
-public class MongoAuditingRegistrarUnitTests {
+@ExtendWith(MockitoExtension.class)
+class MongoAuditingRegistrarUnitTests {

-	MongoAuditingRegistrar registrar = new MongoAuditingRegistrar();
+	private MongoAuditingRegistrar registrar = new MongoAuditingRegistrar();

 	@Mock AnnotationMetadata metadata;
 	@Mock BeanDefinitionRegistry registry;

-	@Test(expected = IllegalArgumentException.class)
-	public void rejectsNullAnnotationMetadata() {
-		registrar.registerBeanDefinitions(null, registry);
+	@Test // DATAMONGO-792
+	void rejectsNullAnnotationMetadata() {
+		assertThatIllegalArgumentException().isThrownBy(() -> registrar.registerBeanDefinitions(null, registry));
 	}

-	@Test(expected = IllegalArgumentException.class)
-	public void rejectsNullBeanDefinitionRegistry() {
-		registrar.registerBeanDefinitions(metadata, null);
+	@Test // DATAMONGO-792
+	void rejectsNullBeanDefinitionRegistry() {
+		assertThatIllegalArgumentException().isThrownBy(() -> registrar.registerBeanDefinitions(metadata, null));
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java
new file mode 100644
index 0000000000..f83e0ec76b
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.config;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.springframework.test.util.ReflectionTestUtils.*;
+
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
+
+import org.bson.UuidRepresentation;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.data.mongodb.core.MongoClientFactoryBean;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+import com.mongodb.ConnectionString;
+import com.mongodb.MongoClientSettings;
+import com.mongodb.MongoCredential;
+import com.mongodb.ServerAddress;
+import com.mongodb.ServerApiVersion;
+import com.mongodb.connection.ClusterType;
+
+/**
+ * Integration tests for the MongoDB namespace.
+ *
+ * @author Christoph Strobl
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ContextConfiguration
+public class MongoClientNamespaceTests {
+
+	@Autowired ApplicationContext ctx;
+
+	@Test // DATAMONGO-2384
+	public void clientWithJustHostAndPort() {
+
+		assertThat(ctx.containsBean("client-with-just-host-port")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-just-host-port", MongoClientFactoryBean.class);
+
+		assertThat(getField(factoryBean, "host")).isEqualTo("127.0.0.1");
+		assertThat(getField(factoryBean, "port")).isEqualTo(27017);
+		assertThat(getField(factoryBean, "connectionString")).isNull();
+		assertThat(getField(factoryBean, "credential")).isNull();
+		assertThat(getField(factoryBean, "replicaSet")).isNull();
+		assertThat(getField(factoryBean, "mongoClientSettings")).isNull();
+	}
+
+	@Test // DATAMONGO-2384
+	public void clientWithConnectionString() {
+
+		assertThat(ctx.containsBean("client-with-connection-string")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-connection-string", MongoClientFactoryBean.class);
+
+		assertThat(getField(factoryBean, "host")).isNull();
+		assertThat(getField(factoryBean, "port")).isNull();
+		assertThat(getField(factoryBean, "connectionString"))
+				.isEqualTo(new ConnectionString("mongodb://127.0.0.1:27017/?replicaSet=rs0"));
+		assertThat(getField(factoryBean, "credential")).isNull();
+		assertThat(getField(factoryBean, "replicaSet")).isNull();
+		assertThat(getField(factoryBean, "mongoClientSettings")).isNull();
+	}
+
+	@Test // DATAMONGO-2384
+	public void clientWithReplicaSet() {
+
+		assertThat(ctx.containsBean("client-with-replica-set")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-replica-set", MongoClientFactoryBean.class);
+
+		assertThat(getField(factoryBean, "host")).isNull();
+		assertThat(getField(factoryBean, "port")).isNull();
+		assertThat(getField(factoryBean, "connectionString")).isNull();
+		assertThat(getField(factoryBean, "credential")).isNull();
+		assertThat(getField(factoryBean, "replicaSet")).isEqualTo("rs0");
+		assertThat(getField(factoryBean, "mongoClientSettings")).isNull();
+	}
+
+	@Test // DATAMONGO-2384
+	public void clientWithCredential() {
+
+		assertThat(ctx.containsBean("client-with-auth")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-auth", MongoClientFactoryBean.class);
+
+		assertThat(getField(factoryBean, "host")).isNull();
+		assertThat(getField(factoryBean, "port")).isNull();
+		assertThat(getField(factoryBean, "connectionString")).isNull();
+		assertThat(getField(factoryBean, "credential")).isEqualTo(
+				Collections.singletonList(MongoCredential.createPlainCredential("jon", "snow", "warg".toCharArray())));
+		assertThat(getField(factoryBean, "replicaSet")).isNull();
+		assertThat(getField(factoryBean, "mongoClientSettings")).isNull();
+	}
+
+	@Test // DATAMONGO-2384
+	public void clientWithClusterSettings() {
+
+		assertThat(ctx.containsBean("client-with-cluster-settings")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-cluster-settings", MongoClientFactoryBean.class);
+
+		MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings");
+
+		assertThat(settings.getClusterSettings().getRequiredClusterType()).isEqualTo(ClusterType.REPLICA_SET);
+		assertThat(settings.getClusterSettings().getServerSelectionTimeout(TimeUnit.MILLISECONDS)).isEqualTo(10);
+		assertThat(settings.getClusterSettings().getLocalThreshold(TimeUnit.MILLISECONDS)).isEqualTo(5);
+		assertThat(settings.getClusterSettings().getHosts()).contains(new ServerAddress("localhost", 27018),
+				new ServerAddress("localhost", 27019), new ServerAddress("localhost", 27020));
+	}
+
+	@Test // DATAMONGO-2384
+	public void clientWithConnectionPoolSettings() {
+
+		assertThat(ctx.containsBean("client-with-connection-pool-settings")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-connection-pool-settings",
+				MongoClientFactoryBean.class);
+
+		MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings");
+
+		assertThat(settings.getConnectionPoolSettings().getMaxConnectionLifeTime(TimeUnit.MILLISECONDS)).isEqualTo(10);
+		assertThat(settings.getConnectionPoolSettings().getMinSize()).isEqualTo(10);
+		assertThat(settings.getConnectionPoolSettings().getMaxSize()).isEqualTo(20);
+		assertThat(settings.getConnectionPoolSettings().getMaintenanceFrequency(TimeUnit.MILLISECONDS)).isEqualTo(10);
+		assertThat(settings.getConnectionPoolSettings().getMaintenanceInitialDelay(TimeUnit.MILLISECONDS)).isEqualTo(11);
+		assertThat(settings.getConnectionPoolSettings().getMaxConnectionIdleTime(TimeUnit.MILLISECONDS)).isEqualTo(30);
+		assertThat(settings.getConnectionPoolSettings().getMaxWaitTime(TimeUnit.MILLISECONDS)).isEqualTo(15);
+	}
+
+	@Test // DATAMONGO-2427
+	public void clientWithUUidSettings() {
+
+		assertThat(ctx.containsBean("client-with-uuid-settings")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-uuid-settings", MongoClientFactoryBean.class);
+
+		MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings");
+		assertThat(settings.getUuidRepresentation()).isEqualTo(UuidRepresentation.STANDARD);
+	}
+
+	@Test // GH-3820
+	public void clientWithServerVersion() {
+
+		assertThat(ctx.containsBean("client-with-server-api-settings")).isTrue();
+		MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-server-api-settings", MongoClientFactoryBean.class);
+
+		MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings");
+		assertThat(settings.getServerApi()).isNotNull().satisfies(it -> {
+			assertThat(it.getVersion()).isEqualTo(ServerApiVersion.V1);
+		});
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java
index a2abd7c6e1..4b3bb25a9f 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2015 the original author or authors.
+ * Copyright 2015-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,114 +15,117 @@
  */
 package org.springframework.data.mongodb.config;

-import static org.hamcrest.collection.IsIterableContainingInOrder.*;
-import static org.hamcrest.core.Is.*;
-import static org.hamcrest.core.IsInstanceOf.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;

-import org.junit.Before;
-import org.junit.Test;
+import java.util.concurrent.TimeUnit;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.support.BeanDefinitionReader;
 import org.springframework.beans.factory.support.DefaultListableBeanFactory;
 import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
 import org.springframework.context.support.AbstractApplicationContext;
 import org.springframework.context.support.GenericApplicationContext;
 import org.springframework.core.io.ClassPathResource;
+import org.springframework.test.util.ReflectionTestUtils;

-import com.mongodb.MongoClient;
+import com.mongodb.MongoClientSettings;
 import com.mongodb.MongoCredential;
 import com.mongodb.ReadPreference;
+import com.mongodb.ServerAddress;
 import com.mongodb.WriteConcern;
+import com.mongodb.client.MongoClient;

 /**
  * Integration tests for {@link MongoClientParser}.
- *
+ *
  * @author Christoph Strobl
+ * @author Mark Paluch
  */
 public class MongoClientParserIntegrationTests {

 	DefaultListableBeanFactory factory;
 	BeanDefinitionReader reader;

-	@Before
+	@BeforeEach
 	public void setUp() {

 		this.factory = new DefaultListableBeanFactory();
 		this.reader = new XmlBeanDefinitionReader(factory);
 	}

-	/**
-	 * @see DATAMONGO-1158
-	 */
-	@Test
+	@Test // DATAMONGO-1158
 	public void createsMongoClientCorrectlyWhenGivenHostAndPort() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml"));

-		assertThat(factory.getBean("mongo-client-with-host-and-port"), instanceOf(MongoClient.class));
+		assertThat(factory.getBean("mongo-client-with-host-and-port")).isInstanceOf(MongoClient.class);
 	}

-	/**
-	 * @see DATAMONGO-1158
-	 */
-	@Test
+	@Test // DATAMONGO-1158, DATAMONGO-2199
 	public void createsMongoClientWithOptionsCorrectly() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml"));

-		AbstractApplicationContext context = new GenericApplicationContext(factory);
-		context.refresh();
+		try (AbstractApplicationContext context = new GenericApplicationContext(factory)) {
+			context.refresh();

-		try {
-			MongoClient client = context.getBean("mongo-client-with-options-for-write-concern-and-read-preference",
-					MongoClient.class);
-
-			assertThat(client.getReadPreference(), is(ReadPreference.secondary()));
-			assertThat(client.getWriteConcern(), is(WriteConcern.NORMAL));
-		} finally {
-			context.close();
+			MongoClientSettings settings = extractClientSettingsFromBean(context,
+					"mongo-client-with-options-for-write-concern-and-read-preference");
+			assertThat(settings.getReadPreference()).isEqualTo(ReadPreference.secondary());
+			assertThat(settings.getWriteConcern()).isEqualTo(WriteConcern.UNACKNOWLEDGED);
 		}
 	}

-	/**
-	 * @see DATAMONGO-1158
-	 */
-	@Test
+	@Test // DATAMONGO-1158
 	public void createsMongoClientWithDefaultsCorrectly() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml"));

-		AbstractApplicationContext context = new GenericApplicationContext(factory);
-		context.refresh();
+		try (AbstractApplicationContext context = new GenericApplicationContext(factory)) {

-		try {
-			MongoClient client = context.getBean("mongo", MongoClient.class);
+			context.refresh();

-			assertThat(client.getAddress().getHost(), is("127.0.0.1"));
-			assertThat(client.getAddress().getPort(), is(27017));
-		} finally {
-			context.close();
+			MongoClient client = context.getBean("mongoClient", MongoClient.class);
+			assertThat(client.getClusterDescription().getClusterSettings().getHosts()).containsExactly(new ServerAddress());
 		}
 	}

-	/**
-	 * @see DATAMONGO-1158
-	 */
-	@Test
+	@Test // DATAMONGO-1158
 	public void createsMongoClientWithCredentialsCorrectly() {

 		reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml"));

-		AbstractApplicationContext context = new GenericApplicationContext(factory);
-		context.refresh();
+		try (AbstractApplicationContext context = new GenericApplicationContext(factory)) {
+
+			context.refresh();

-		try {
-			MongoClient client = context.getBean("mongo-client-with-credentials", MongoClient.class);
+			MongoClientSettings settings = extractClientSettingsFromBean(context, "mongo-client-with-credentials");

-			assertThat(client.getCredentialsList(),
-					contains(MongoCredential.createPlainCredential("jon", "snow", "warg".toCharArray())));
-		} finally {
-			context.close();
+			assertThat(settings.getCredential())
+					.isEqualTo(MongoCredential.createPlainCredential("jon", "snow", "warg".toCharArray()));
 		}
 	}
+
+	@Test // DATAMONGO-1620
+	public void createsMongoClientWithServerSelectionTimeoutCorrectly() {
+
+		reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml"));
+
+		try (AbstractApplicationContext context = new GenericApplicationContext(factory)) {
+			context.refresh();
+
+			MongoClientSettings settings = extractClientSettingsFromBean(context,
+					"mongo-client-with-server-selection-timeout");
+			assertThat(settings.getClusterSettings().getServerSelectionTimeout(TimeUnit.MILLISECONDS)).isEqualTo(100);
+		}
+	}
+
+	private MongoClientSettings extractClientSettingsFromBean(AbstractApplicationContext context, String beanName) {
+		return extractClientSettings(context.getBean(beanName, MongoClient.class));
+	}
+
+	private MongoClientSettings extractClientSettings(MongoClient client) {
+		return (MongoClientSettings) ReflectionTestUtils.getField(client, "settings");
+	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java
index ba9257cbe2..92a7e0036d 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2015 the original author or authors.
+ * Copyright 2015-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,10 +15,10 @@
  */
 package org.springframework.data.mongodb.config;

-import static org.hamcrest.collection.IsIterableContainingInOrder.*;
-import static org.hamcrest.core.IsNull.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;

+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
 import java.util.Arrays;
 import java.util.List;

@@ -30,8 +30,9 @@
 /**
  * Unit tests for {@link MongoCredentialPropertyEditor}.
- * + * * @author Christoph Strobl + * @author Stephen Tyler Conrad */ public class MongoCredentialPropertyEditorUnitTests { @@ -46,6 +47,16 @@ public class MongoCredentialPropertyEditorUnitTests { static final String USER_3_NAME = "CN=myName,OU=myOrgUnit,O=myOrg,L=myLocality,ST=myState,C=myCountry"; static final String USER_3_DB = "stark"; + static final String USER_4_PLAIN_NAME = "m0ng0@dmin"; + static final String USER_4_ENCODED_NAME; + static final String USER_4_PLAIN_PWD = "mo_res:bw6},Qsdxx@admin"; + static final String USER_4_ENCODED_PWD; + static final String USER_4_DB = "targaryen"; + + static final String USER_5_NAME = "lyanna"; + static final String USER_5_PWD = "random?password"; + static final String USER_5_DB = "mormont"; + static final String USER_1_AUTH_STRING = USER_1_NAME + ":" + USER_1_PWD + "@" + USER_1_DB; static final String USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM = USER_1_AUTH_STRING + "?uri.authMechanism=PLAIN"; @@ -56,6 +67,15 @@ public class MongoCredentialPropertyEditorUnitTests { static final String USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM = "'" + USER_3_NAME + "@" + USER_3_DB + "?uri.authMechanism=MONGODB-X509'"; + static final String USER_4_AUTH_STRING; + + static final String USER_5_AUTH_STRING = USER_5_NAME + ":" + USER_5_PWD + "@" + USER_5_DB; + static final String USER_5_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM = USER_5_AUTH_STRING + "?uri.authMechanism=PLAIN"; + static final String USER_5_AUTH_STRING_WITH_QUERY_ARGS = USER_5_AUTH_STRING + "?uri.authMechanism=PLAIN&foo=&bar"; + + static final String SCRAM_SHA_256_AUTH_STRING = USER_1_NAME + ":" + USER_1_PWD + "@" + USER_1_DB + + "?uri.authMechanism=SCRAM-SHA-256"; + static final MongoCredential USER_1_CREDENTIALS = MongoCredential.createCredential(USER_1_NAME, USER_1_DB, USER_1_PWD.toCharArray()); static final MongoCredential USER_1_CREDENTIALS_PLAIN_AUTH = MongoCredential.createPlainCredential(USER_1_NAME, @@ -63,188 +83,203 @@ public class MongoCredentialPropertyEditorUnitTests { static final MongoCredential USER_2_CREDENTIALS = MongoCredential.createCredential(USER_2_NAME, USER_2_DB, USER_2_PWD.toCharArray()); - static final MongoCredential USER_2_CREDENTIALS_CR_AUTH = MongoCredential.createMongoCRCredential(USER_2_NAME, - USER_2_DB, USER_2_PWD.toCharArray()); static final MongoCredential USER_3_CREDENTIALS_X509_AUTH = MongoCredential.createMongoX509Credential(USER_3_NAME); + static final MongoCredential USER_4_CREDENTIALS = MongoCredential.createCredential(USER_4_PLAIN_NAME, USER_4_DB, + USER_4_PLAIN_PWD.toCharArray()); + + static final MongoCredential USER_5_CREDENTIALS = MongoCredential.createCredential(USER_5_NAME, USER_5_DB, + USER_5_PWD.toCharArray()); + static final MongoCredential USER_5_CREDENTIALS_PLAIN_AUTH = MongoCredential.createPlainCredential(USER_5_NAME, + USER_5_DB, USER_5_PWD.toCharArray()); + + static final MongoCredential SCRAM_SHA_256_CREDENTIALS = MongoCredential.createScramSha256Credential(USER_1_NAME, + USER_1_DB, USER_1_PWD.toCharArray()); + MongoCredentialPropertyEditor editor; + static { + + String encodedUserName = null; + String encodedUserPassword = null; + try { + encodedUserName = URLEncoder.encode(USER_4_PLAIN_NAME, "UTF-8"); + encodedUserPassword = URLEncoder.encode(USER_4_PLAIN_PWD, "UTF-8"); + } catch (UnsupportedEncodingException e) {} + + USER_4_ENCODED_NAME = encodedUserName; + USER_4_ENCODED_PWD = encodedUserPassword; + USER_4_AUTH_STRING = USER_4_ENCODED_NAME + ":" + USER_4_ENCODED_PWD + "@" + USER_4_DB; + + } + @Before public void setUp() { this.editor = 
new MongoCredentialPropertyEditor(); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 public void shouldReturnNullValueForNullText() { editor.setAsText(null); - assertThat(editor.getValue(), nullValue()); + assertThat(getValue()).isNull(); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 public void shouldReturnNullValueForEmptyText() { editor.setAsText(" "); - assertThat(editor.getValue(), nullValue()); + assertThat(getValue()).isNull(); } - /** - * @see DATAMONGO-1158 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1158 public void shouldThrowExceptionForMalformatedCredentialsString() { - editor.setAsText("tyrion"); + assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText("tyrion")); } - /** - * @see DATAMONGO-1158 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1158 public void shouldThrowExceptionForMalformatedAuthMechanism() { - editor.setAsText(USER_2_AUTH_STRING + "?uri.authMechanism=Targaryen"); + assertThatIllegalArgumentException() + .isThrownBy(() -> editor.setAsText(USER_2_AUTH_STRING + "?uri.authMechanism=Targaryen")); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleUserNamePasswordStringWithDatabaseAndNoOptions() { editor.setAsText(USER_1_AUTH_STRING); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleUserNamePasswordStringWithDatabaseAndAuthOptions() { editor.setAsText(USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS_PLAIN_AUTH)); + assertThat(getValue()).contains(USER_1_CREDENTIALS_PLAIN_AUTH); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleUserNamePasswordStringWithDatabaseAndNoOptions() { editor .setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList(USER_1_AUTH_STRING, USER_2_AUTH_STRING))); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS); } - /** - * @see DATAMONGO-1158 - */ - @Test - @SuppressWarnings("unchecked") - public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleUserNamePasswordStringWithDatabaseAndAuthOptions() { - - editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList( - USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM, USER_2_AUTH_STRING_WITH_MONGODB_CR_AUTH_MECHANISM))); - - assertThat((List) editor.getValue(), - contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS_CR_AUTH)); - } - - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleUserNamePasswordStringWithDatabaseAndMixedOptions() { - editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList( - USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM, USER_2_AUTH_STRING))); + editor.setAsText(StringUtils.collectionToCommaDelimitedString( + Arrays.asList(USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM, USER_2_AUTH_STRING))); - assertThat((List) editor.getValue(), 
contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS); } - /** - * @see DATAMONGO-1257 - */ - @Test + @Test // DATAMONGO-1257 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() { - editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList("'" + USER_1_AUTH_STRING + "'", "'" - + USER_2_AUTH_STRING + "'"))); + editor.setAsText(StringUtils.collectionToCommaDelimitedString( + Arrays.asList("'" + USER_1_AUTH_STRING + "'", "'" + USER_2_AUTH_STRING + "'"))); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS); } - /** - * @see DATAMONGO-1257 - */ - @Test + @Test // DATAMONGO-1257 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() { editor.setAsText("'" + USER_1_AUTH_STRING + "'"); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS); } - /** - * @see DATAMONGO-1257 - */ - @Test + @Test // DATAMONGO-1257 @SuppressWarnings("unchecked") public void shouldReturnX509CredentialsCorrectly() { editor.setAsText(USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM); - assertThat((List) editor.getValue(), contains(USER_3_CREDENTIALS_X509_AUTH)); + assertThat(getValue()).contains(USER_3_CREDENTIALS_X509_AUTH); } - /** - * @see DATAMONGO-1257 - */ - @Test + @Test // DATAMONGO-1257 @SuppressWarnings("unchecked") public void shouldReturnX509CredentialsCorrectlyWhenNoDbSpecified() { editor.setAsText("tyrion?uri.authMechanism=MONGODB-X509"); - assertThat((List) editor.getValue(), contains(MongoCredential.createMongoX509Credential("tyrion"))); + assertThat(getValue()).contains(MongoCredential.createMongoX509Credential("tyrion")); } - /** - * @see DATAMONGO-1257 - */ - @Test(expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) // DATAMONGO-1257 public void shouldThrowExceptionWhenNoDbSpecifiedForMongodbCR() { editor.setAsText("tyrion?uri.authMechanism=MONGODB-CR"); - editor.getValue(); + getValue(); } - /** - * @see DATAMONGO-1257 - */ - @Test(expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) // DATAMONGO-1257 public void shouldThrowExceptionWhenDbIsEmptyForMongodbCR() { editor.setAsText("tyrion@?uri.authMechanism=MONGODB-CR"); - editor.getValue(); + getValue(); + } + + @Test // DATAMONGO-1317 + @SuppressWarnings("unchecked") + public void encodedUserNameAndPasswordShouldBeDecoded() { + + editor.setAsText(USER_4_AUTH_STRING); + + assertThat(getValue()).contains(USER_4_CREDENTIALS); + } + + @Test // DATAMONGO-2016 + @SuppressWarnings("unchecked") + public void passwordWithQuestionMarkShouldNotBeInterpretedAsOptionString() { + + editor.setAsText(USER_5_AUTH_STRING); + + assertThat(getValue()).contains(USER_5_CREDENTIALS); + } + + @Test // DATAMONGO-2016 + @SuppressWarnings("unchecked") + public void passwordWithQuestionMarkShouldNotBreakParsingOfOptionString() { + + editor.setAsText(USER_5_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM); + + assertThat(getValue()).contains(USER_5_CREDENTIALS_PLAIN_AUTH); + } + + @Test // DATAMONGO-2051 + public void shouldReturnScramSha256Credentials() { + + editor.setAsText(SCRAM_SHA_256_AUTH_STRING); + + 
assertThat(getValue()).contains(SCRAM_SHA_256_CREDENTIALS);
+  }
+
+  @Test // DATAMONGO-2016
+  @SuppressWarnings("unchecked")
+  public void failsGracefullyOnEmptyQueryArgument() {
+    assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText(USER_5_AUTH_STRING_WITH_QUERY_ARGS));
+  }
+
+  @SuppressWarnings("unchecked")
+  private List<MongoCredential> getValue() {
+    return (List<MongoCredential>) editor.getValue();
+  }
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java
index 6e0972c5dd..a283666c05 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2014 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,40 +15,38 @@
 */
package org.springframework.data.mongodb.config;

-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;

import org.junit.Test;
import org.junit.runner.RunWith;
+
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessResourceFailureException;
-import org.springframework.data.mongodb.MongoDbFactory;
+import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.junit4.SpringRunner;

/**
- * Integration tests for {@link MongoDbFactory}.
- *
+ * Integration tests for {@link MongoDatabaseFactory}.
+ * * @author Thomas Risberg * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoDbFactoryNoDatabaseRunningTests { @Autowired MongoTemplate mongoTemplate; - /** - * @see DATAMONGO-139 - */ - @Test + @Test // DATAMONGO-139 public void startsUpWithoutADatabaseRunning() { - assertThat(mongoTemplate.getClass().getName(), is("org.springframework.data.mongodb.core.MongoTemplate")); + assertThat(mongoTemplate.getClass().getName()).isEqualTo("org.springframework.data.mongodb.core.MongoTemplate"); } - @Test(expected = DataAccessResourceFailureException.class) + @Test public void failsDataAccessWithoutADatabaseRunning() { - mongoTemplate.getCollectionNames(); + assertThatExceptionOfType(DataAccessResourceFailureException.class) + .isThrownBy(() -> mongoTemplate.getCollectionNames()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java index a98a2c06d0..3ebdf61ae4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,39 +15,32 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.ConstructorArgumentValues; import org.springframework.beans.factory.config.ConstructorArgumentValues.ValueHolder; -import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; import org.springframework.beans.factory.support.BeanDefinitionReader; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.core.io.ClassPathResource; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.ReflectiveMongoOptionsInvokerTestUtil; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import 
org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; +import com.mongodb.ConnectionString; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; /** * Integration tests for {@link MongoDbFactoryParser}. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Viktor Khoroshko @@ -57,62 +50,61 @@ public class MongoDbFactoryParserIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; - @BeforeClass - public static void validateMongoDriver() { - assumeFalse(isMongo3Driver()); - } - - @Before + @BeforeEach public void setUp() { factory = new DefaultListableBeanFactory(); reader = new XmlBeanDefinitionReader(factory); } - @Test + @Test // DATAMONGO-2199 public void testWriteConcern() throws Exception { - SimpleMongoDbFactory dbFactory = new SimpleMongoDbFactory(new MongoClient("localhost"), "database"); - dbFactory.setWriteConcern(WriteConcern.SAFE); - dbFactory.getDb(); + try (MongoClient client = MongoTestUtils.client()) { + SimpleMongoClientDatabaseFactory dbFactory = new SimpleMongoClientDatabaseFactory(client, "database"); + dbFactory.setWriteConcern(WriteConcern.ACKNOWLEDGED); + dbFactory.getMongoDatabase(); - assertThat(ReflectionTestUtils.getField(dbFactory, "writeConcern"), is((Object) WriteConcern.SAFE)); + assertThat(ReflectionTestUtils.getField(dbFactory, "writeConcern")).isEqualTo(WriteConcern.ACKNOWLEDGED); + } } - @Test + @Test // DATAMONGO-2199 public void parsesWriteConcern() { + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("namespace/db-factory-bean.xml"); - assertWriteConcern(ctx, WriteConcern.SAFE); + assertWriteConcern(ctx, WriteConcern.ACKNOWLEDGED); + ctx.close(); } - @Test + @Test // DATAMONGO-2199 public void parsesCustomWriteConcern() { + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext( "namespace/db-factory-bean-custom-write-concern.xml"); assertWriteConcern(ctx, new WriteConcern("rack1")); + ctx.close(); } - /** - * @see DATAMONGO-331 - */ - @Test + @Test // DATAMONGO-331 public void readsReplicasWriteConcernCorrectly() { AbstractApplicationContext ctx = new ClassPathXmlApplicationContext( "namespace/db-factory-bean-custom-write-concern.xml"); - MongoDbFactory factory = ctx.getBean("second", MongoDbFactory.class); - DB db = factory.getDb(); - - assertThat(db.getWriteConcern(), is(WriteConcern.REPLICAS_SAFE)); + MongoDatabaseFactory factory = ctx.getBean("second", MongoDatabaseFactory.class); ctx.close(); + + MongoDatabase db = factory.getMongoDatabase(); + assertThat(db.getWriteConcern()).isEqualTo(WriteConcern.W2); } // This test will fail since equals in WriteConcern uses == for _w and not .equals public void testWriteConcernEquality() { + String s1 = new String("rack1"); String s2 = new String("rack1"); WriteConcern wc1 = new WriteConcern(s1); WriteConcern wc2 = new WriteConcern(s2); - assertThat(wc1, is(wc2)); + assertThat(wc1).isEqualTo(wc2); } @Test @@ -122,143 +114,94 @@ public void createsDbFactoryBean() { factory.getBean("first"); } - /** - * @see DATAMONGO-280 - */ - @Test - @SuppressWarnings("deprecation") - public void parsesMaxAutoConnectRetryTimeCorrectly() { - - reader.loadBeanDefinitions(new ClassPathResource("namespace/db-factory-bean.xml")); - Mongo mongo = factory.getBean(Mongo.class); - 
assertThat(ReflectiveMongoOptionsInvokerTestUtil.getMaxAutoConnectRetryTime(mongo.getMongoOptions()), is(27L)); - } - - /** - * @see DATAMONGO-295 - */ - @Test - public void setsUpMongoDbFactoryUsingAMongoUri() { - - reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-uri.xml")); - BeanDefinition definition = factory.getBeanDefinition("mongoDbFactory"); - ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoURI.class); - assertThat(argument, is(notNullValue())); - } - - /** - * @see DATAMONGO-306 - */ - @Test + @Test // DATAMONGO-306 public void setsUpMongoDbFactoryUsingAMongoUriWithoutCredentials() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-uri-no-credentials.xml")); BeanDefinition definition = factory.getBeanDefinition("mongoDbFactory"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); - MongoDbFactory dbFactory = factory.getBean("mongoDbFactory", MongoDbFactory.class); - DB db = dbFactory.getDb(); - assertThat(db.getName(), is("database")); - } + MongoDatabaseFactory dbFactory = factory.getBean("mongoDbFactory", MongoDatabaseFactory.class); + MongoDatabase db = dbFactory.getMongoDatabase(); + assertThat(db.getName()).isEqualTo("database"); - /** - * @see DATAMONGO-295 - */ - @Test(expected = BeanDefinitionParsingException.class) - public void rejectsUriPlusDetailedConfiguration() { - reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-uri-and-details.xml")); + factory.destroyBean(dbFactory); } - /** - * @see DATAMONGO-1218 - */ - @Test + @Test // DATAMONGO-1218 public void setsUpMongoDbFactoryUsingAMongoClientUri() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri.xml")); BeanDefinition definition = factory.getBeanDefinition("mongoDbFactory"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); } - /** - * @see DATAMONGO-1218 - */ - @Test(expected = BeanDefinitionParsingException.class) - public void rejectsClientUriPlusDetailedConfiguration() { - reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-and-details.xml")); - } - - /** - * @see DATAMONGO-1293 - */ - @Test + @Test // DATAMONGO-1293 public void setsUpClientUriWithId() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-and-id.xml")); BeanDefinition definition = factory.getBeanDefinition("testMongo"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = 
constructorArguments.getArgumentValue(0, MongoClientURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); } - /** - * @see DATAMONGO-1293 - */ - @Test + @Test // DATAMONGO-1293 public void setsUpUriWithId() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-uri-and-id.xml")); BeanDefinition definition = factory.getBeanDefinition("testMongo"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); } - /** - * @see DATAMONGO-1293 - */ - @Test(expected = BeanDefinitionParsingException.class) - public void rejectsClientUriPlusDetailedConfigurationAndWriteConcern() { - reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-write-concern-and-details.xml")); + @Test // DATAMONGO-2384 + public void usesConnectionStringToCreateClientClient() { + + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("namespace/db-factory-bean.xml"); + + MongoDatabaseFactory dbFactory = ctx.getBean("with-connection-string", MongoDatabaseFactory.class); + ctx.close(); + + assertThat(dbFactory).isInstanceOf(SimpleMongoClientDatabaseFactory.class); + assertThat(ReflectionTestUtils.getField(dbFactory, "mongoClient")) + .isInstanceOf(com.mongodb.client.MongoClient.class); } - /** - * @see DATAMONGO-1293 - */ - @Test(expected = BeanDefinitionParsingException.class) - public void rejectsUriPlusDetailedConfigurationAndWriteConcern() { - reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-client-uri-write-concern-and-details.xml")); + @Test // DATAMONGO-2384 + public void usesMongoClientClientRef() { + + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("namespace/db-factory-bean.xml"); + + MongoDatabaseFactory dbFactory = ctx.getBean("with-mongo-client-client-ref", MongoDatabaseFactory.class); + assertThat(dbFactory).isInstanceOf(SimpleMongoClientDatabaseFactory.class); + assertThat(ReflectionTestUtils.getField(dbFactory, "mongoClient")) + .isInstanceOf(com.mongodb.client.MongoClient.class); } private static void assertWriteConcern(ClassPathXmlApplicationContext ctx, WriteConcern expectedWriteConcern) { - SimpleMongoDbFactory dbFactory = ctx.getBean("first", SimpleMongoDbFactory.class); - DB db = dbFactory.getDb(); - assertThat(db.getName(), is("db")); + SimpleMongoClientDatabaseFactory dbFactory = ctx.getBean("first", SimpleMongoClientDatabaseFactory.class); + MongoDatabase db = dbFactory.getMongoDatabase(); + assertThat(db.getName()).isEqualTo("db"); WriteConcern configuredConcern = (WriteConcern) ReflectionTestUtils.getField(dbFactory, "writeConcern"); - MyWriteConcern myDbFactoryWriteConcern = new MyWriteConcern(configuredConcern); - MyWriteConcern myDbWriteConcern = new MyWriteConcern(db.getWriteConcern()); - MyWriteConcern myExpectedWriteConcern = new MyWriteConcern(expectedWriteConcern); - - assertThat(myDbFactoryWriteConcern, is(myExpectedWriteConcern)); - assertThat(myDbWriteConcern, is(myExpectedWriteConcern)); - 
assertThat(myDbWriteConcern, is(myDbFactoryWriteConcern));
+    assertThat(configuredConcern).isEqualTo(expectedWriteConcern);
+    assertThat(db.getWriteConcern()).isEqualTo(expectedWriteConcern);
+    assertThat(db.getWriteConcern()).isEqualTo(configuredConcern);
  }
}
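The write-concern wiring that the rewritten testWriteConcern above exercises reduces to the following minimal sketch. It is assumption-laden: MongoClients.create("mongodb://localhost:27017") stands in for the repository-internal MongoTestUtils.client() helper, and the database name is arbitrary.

import com.mongodb.WriteConcern;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

class WriteConcernWiringSketch {

  public static void main(String[] args) {
    // Assumed local server URI; the tests obtain their client via MongoTestUtils instead.
    try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
      SimpleMongoClientDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, "database");
      factory.setWriteConcern(WriteConcern.ACKNOWLEDGED);
      MongoDatabase db = factory.getMongoDatabase(); // carries WriteConcern.ACKNOWLEDGED
      System.out.println(db.getName());
    }
  }
}

The factory applies the configured WriteConcern to every MongoDatabase it hands out, which is what the reflection-based assertion in the test verifies.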
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java
index f76e768930..d7689c8e2a 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java
@@ -1,106 +1,106 @@
-/*
- * Copyright 2011-2016 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.springframework.data.mongodb.config;
-
-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.ApplicationContext;
-import org.springframework.data.mongodb.core.MongoFactoryBean;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
-import org.springframework.test.util.ReflectionTestUtils;
-
-import com.mongodb.CommandResult;
-import com.mongodb.Mongo;
-import com.mongodb.ServerAddress;
-
-/**
- *
- * @author Mark Pollack
- * @author Oliver Gierke
- * @author Thomas Darimont
- * @author Mark Paluch
- */
-@RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration
-public class MongoNamespaceReplicaSetTests {
-
-  @Autowired private ApplicationContext ctx;
-
-  @Test
-  @SuppressWarnings("unchecked")
-  public void testParsingMongoWithReplicaSets() throws Exception {
-
-    assertTrue(ctx.containsBean("replicaSetMongo"));
-    MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&replicaSetMongo");
-
-    List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
-
-    assertThat(replicaSetSeeds, is(notNullValue()));
-    assertThat(
-        replicaSetSeeds,
-        hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
-            new ServerAddress(InetAddress.getByName("localhost"), 10002)));
-  }
-
-  @Test
-  @SuppressWarnings("unchecked")
-  public void testParsingWithPropertyPlaceHolder() throws Exception {
-
-    assertTrue(ctx.containsBean("manyReplicaSetMongo"));
-    MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&manyReplicaSetMongo");
-
-    List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
-
-    assertThat(replicaSetSeeds, is(notNullValue()));
-    assertThat(replicaSetSeeds, hasSize(3));
-
-    List<Integer> ports = new ArrayList<Integer>();
-    for (ServerAddress replicaSetSeed : replicaSetSeeds) {
-      ports.add(replicaSetSeed.getPort());
-    }
-
-    assertThat(ports, hasItems(27017, 27018, 27019));
-  }
-
-  @Test
-  @Ignore("CI infrastructure does not yet support replica sets")
-  public void testMongoWithReplicaSets() {
-
-    Mongo mongo = ctx.getBean(Mongo.class);
-    assertEquals(2, mongo.getAllAddress().size());
-    List<ServerAddress> servers = mongo.getAllAddress();
-    assertEquals("127.0.0.1", servers.get(0).getHost());
-    assertEquals("localhost", servers.get(1).getHost());
-    assertEquals(10001, servers.get(0).getPort());
-    assertEquals(10002, servers.get(1).getPort());
-
-    MongoTemplate template = new MongoTemplate(mongo, "admin");
-    CommandResult result = template.executeCommand("{replSetGetStatus : 1}");
-    assertEquals("blort", result.getString("set"));
-  }
-}
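In the rewritten file that follows, the seed addresses no longer live in a replicaSetSeeds field on the factory bean; with the 4.x driver they travel inside MongoClientSettings. A minimal sketch of that carrier object, using made-up seed addresses rather than the ones from the XML fixtures:

import java.util.Arrays;
import java.util.List;

import com.mongodb.MongoClientSettings;
import com.mongodb.ServerAddress;

class ReplicaSetSeedsSketch {

  public static void main(String[] args) {
    // Hypothetical seeds; the tests read the real ones from the namespace XML via the factory bean.
    MongoClientSettings settings = MongoClientSettings.builder()
        .applyToClusterSettings(cluster -> cluster.hosts(
            Arrays.asList(new ServerAddress("127.0.0.1", 10001), new ServerAddress("localhost", 10002))))
        .build();

    List<ServerAddress> seeds = settings.getClusterSettings().getHosts();
    System.out.println(seeds); // the same two addresses the rewritten test asserts on
  }
}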
+/*
+ * Copyright 2011-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.data.mongodb.config;
+
+import static org.assertj.core.api.Assertions.*;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.bson.Document;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.data.mongodb.core.MongoClientFactoryBean;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.util.ReflectionTestUtils;
+
+import com.mongodb.MongoClientSettings;
+import com.mongodb.ServerAddress;
+import com.mongodb.client.MongoClient;
+
+/**
+ * @author Mark Pollack
+ * @author Oliver Gierke
+ * @author Thomas Darimont
+ * @author Mark Paluch
+ */
+@RunWith(SpringRunner.class)
+@ContextConfiguration
+public class MongoNamespaceReplicaSetTests {
+
+  @Autowired private ApplicationContext ctx;
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testParsingMongoWithReplicaSets() throws Exception {
+
+    assertThat(ctx.containsBean("replicaSetMongo")).isTrue();
+    MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&replicaSetMongo");
+
+    MongoClientSettings settings = (MongoClientSettings) ReflectionTestUtils.getField(mfb, "mongoClientSettings");
+    List<ServerAddress> replicaSetSeeds = settings.getClusterSettings().getHosts();
+
+    assertThat(replicaSetSeeds).isNotNull();
+    assertThat(replicaSetSeeds).contains(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
+        new ServerAddress(InetAddress.getByName("localhost"), 10002));
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testParsingWithPropertyPlaceHolder() throws Exception {
+
+    assertThat(ctx.containsBean("manyReplicaSetMongo")).isTrue();
+    MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&manyReplicaSetMongo");
+
+    MongoClientSettings settings = (MongoClientSettings) ReflectionTestUtils.getField(mfb, "mongoClientSettings");
+    List<ServerAddress> replicaSetSeeds = settings.getClusterSettings().getHosts();
+
+    assertThat(replicaSetSeeds).isNotNull();
+    assertThat(replicaSetSeeds).hasSize(3);
+
+    List<Integer> ports = new ArrayList<>();
+    for (ServerAddress replicaSetSeed : replicaSetSeeds) {
+      ports.add(replicaSetSeed.getPort());
+    }
+
+    assertThat(ports).contains(27017, 27018, 27019);
+  }
+
+  @Test
+  @Ignore("CI infrastructure does not yet support replica sets")
+  public void testMongoWithReplicaSets() {
+
+    MongoClient mongo = ctx.getBean(MongoClient.class);
+    assertThat(mongo.getClusterDescription().getClusterSettings().getHosts()).hasSize(2);
+    List<ServerAddress> servers = mongo.getClusterDescription().getClusterSettings().getHosts();
+    assertThat(servers.get(0).getHost()).isEqualTo("127.0.0.1");
+    assertThat(servers.get(1).getHost()).isEqualTo("localhost");
+    assertThat(servers.get(0).getPort()).isEqualTo(10001);
+    assertThat(servers.get(1).getPort()).isEqualTo(10002);
+
+    MongoTemplate template = new MongoTemplate(mongo, "admin");
+    Document result = template.executeCommand("{replSetGetStatus : 1}");
+    assertThat(result.get("set").toString()).isEqualTo("blort");
+  }
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java
index f0f97e006a..a3b1a16a85 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java
@@ -1,265 +1,233 @@
-/*
- * Copyright 2010-2015 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -package org.springframework.data.mongodb.config; - -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; -import static org.springframework.test.util.ReflectionTestUtils.*; - -import javax.net.ssl.SSLSocketFactory; - -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoFactoryBean; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.ReflectiveMongoOptionsInvokerTestUtil; -import org.springframework.data.mongodb.core.convert.MongoConverter; -import org.springframework.data.mongodb.gridfs.GridFsOperations; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import com.mongodb.Mongo; -import com.mongodb.MongoOptions; -import com.mongodb.WriteConcern; - -/** - * Integration tests for the MongoDB namespace. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Martin Baumgartner - * @author Thomas Darimont - * @author Christoph Strobl - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class MongoNamespaceTests { - - @Autowired ApplicationContext ctx; - - @BeforeClass - public static void validateMongoDriver() { - assumeFalse(isMongo3Driver()); - } - - @Test - public void testMongoSingleton() throws Exception { - - assertTrue(ctx.containsBean("noAttrMongo")); - MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&noAttrMongo"); - - assertNull(getField(mfb, "host")); - assertNull(getField(mfb, "port")); - } - - @Test - public void testMongoSingletonWithAttributes() throws Exception { - - assertTrue(ctx.containsBean("defaultMongo")); - MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&defaultMongo"); - - String host = (String) getField(mfb, "host"); - Integer port = (Integer) getField(mfb, "port"); - - assertEquals("localhost", host); - assertEquals(new Integer(27017), port); - - MongoOptions options = (MongoOptions) getField(mfb, "mongoOptions"); - assertFalse("By default socketFactory should not be a SSLSocketFactory", - options.getSocketFactory() instanceof SSLSocketFactory); - } - - /** - * @see DATAMONGO-764 - */ - @Test - public void testMongoSingletonWithSslEnabled() throws Exception { - - assertTrue(ctx.containsBean("mongoSsl")); - MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&mongoSsl"); - - MongoOptions options = (MongoOptions) getField(mfb, "mongoOptions"); - assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory); - } - - /** - * @see DATAMONGO-764 - */ - @Test - public void testMongoSingletonWithSslEnabledAndCustomSslSocketFactory() throws Exception { - - assertTrue(ctx.containsBean("mongoSslWithCustomSslFactory")); - MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&mongoSslWithCustomSslFactory"); - - SSLSocketFactory customSslSocketFactory = ctx.getBean("customSslSocketFactory", SSLSocketFactory.class); - MongoOptions options = (MongoOptions) getField(mfb, "mongoOptions"); - - assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory); - 
assertSame(customSslSocketFactory, options.getSocketFactory()); - } - - @Test - public void testSecondMongoDbFactory() { - - assertTrue(ctx.containsBean("secondMongoDbFactory")); - MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("secondMongoDbFactory"); - - Mongo mongo = (Mongo) getField(dbf, "mongo"); - assertEquals("localhost", mongo.getAddress().getHost()); - assertEquals(27017, mongo.getAddress().getPort()); - assertEquals(new UserCredentials("joe", "secret"), getField(dbf, "credentials")); - assertEquals("database", getField(dbf, "databaseName")); - } - - /** - * @see DATAMONGO-789 - */ - @Test - public void testThirdMongoDbFactory() { - - assertTrue(ctx.containsBean("thirdMongoDbFactory")); - - MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("thirdMongoDbFactory"); - Mongo mongo = (Mongo) getField(dbf, "mongo"); - - assertEquals("localhost", mongo.getAddress().getHost()); - assertEquals(27017, mongo.getAddress().getPort()); - assertEquals(new UserCredentials("joe", "secret"), getField(dbf, "credentials")); - assertEquals("database", getField(dbf, "databaseName")); - assertEquals("admin", getField(dbf, "authenticationDatabaseName")); - } - - /** - * @see DATAMONGO-140 - */ - @Test - public void testMongoTemplateFactory() { - - assertTrue(ctx.containsBean("mongoTemplate")); - MongoOperations operations = (MongoOperations) ctx.getBean("mongoTemplate"); - - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - - MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter"); - assertNotNull(converter); - } - - /** - * @see DATAMONGO-140 - */ - @Test - public void testSecondMongoTemplateFactory() { - - assertTrue(ctx.containsBean("anotherMongoTemplate")); - MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate"); - - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - - WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern"); - assertEquals(WriteConcern.SAFE, writeConcern); - } - - /** - * @see DATAMONGO-628 - */ - @Test - public void testGridFsTemplateFactory() { - - assertTrue(ctx.containsBean("gridFsTemplate")); - GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate"); - - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - - MongoConverter converter = (MongoConverter) getField(operations, "converter"); - assertNotNull(converter); - } - - /** - * @see DATAMONGO-628 - */ - @Test - public void testSecondGridFsTemplateFactory() { - - assertTrue(ctx.containsBean("secondGridFsTemplate")); - GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate"); - - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - assertEquals(null, getField(operations, "bucket")); - - MongoConverter converter = (MongoConverter) getField(operations, "converter"); - assertNotNull(converter); - } - - /** - * @see DATAMONGO-823 - */ - @Test - public void testThirdGridFsTemplateFactory() { - - assertTrue(ctx.containsBean("thirdGridFsTemplate")); - GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate"); - - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - 
assertEquals("bucketString", getField(operations, "bucket")); - - MongoConverter converter = (MongoConverter) getField(operations, "converter"); - assertNotNull(converter); - } - - @Test - @SuppressWarnings("deprecation") - public void testMongoSingletonWithPropertyPlaceHolders() throws Exception { - - assertTrue(ctx.containsBean("mongo")); - MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&mongo"); - - String host = (String) getField(mfb, "host"); - Integer port = (Integer) getField(mfb, "port"); - - assertEquals("127.0.0.1", host); - assertEquals(new Integer(27017), port); - - Mongo mongo = mfb.getObject(); - MongoOptions mongoOpts = mongo.getMongoOptions(); - - assertEquals(8, mongoOpts.connectionsPerHost); - assertEquals(1000, mongoOpts.connectTimeout); - assertEquals(1500, mongoOpts.maxWaitTime); - - assertEquals(1500, mongoOpts.socketTimeout); - assertEquals(4, mongoOpts.threadsAllowedToBlockForConnectionMultiplier); - assertEquals(true, mongoOpts.socketKeepAlive); - - assertEquals(1, mongoOpts.getWriteConcern().getW()); - assertEquals(0, mongoOpts.getWriteConcern().getWtimeout()); - assertEquals(true, mongoOpts.getWriteConcern().fsync()); - - assertEquals(true, mongoOpts.fsync); - assertEquals(true, ReflectiveMongoOptionsInvokerTestUtil.getAutoConnectRetry(mongoOpts)); - assertEquals(true, ReflectiveMongoOptionsInvokerTestUtil.getSlaveOk(mongoOpts)); - } -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.test.util.ReflectionTestUtils.*; + +import javax.net.ssl.SSLSocketFactory; + +import java.util.function.Supplier; + +import com.mongodb.client.MongoCollection; +import com.mongodb.client.gridfs.GridFSBucket; +import com.mongodb.client.gridfs.model.GridFSFile; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoClientFactoryBean; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.gridfs.GridFsOperations; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; + +/** + * Integration tests for the MongoDB namespace. 
+ * + * @author Mark Pollack + * @author Oliver Gierke + * @author Martin Baumgartner + * @author Thomas Darimont + * @author Christoph Strobl + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class MongoNamespaceTests { + + @Autowired ApplicationContext ctx; + + @Test + public void testMongoSingleton() throws Exception { + + assertThat(ctx.containsBean("noAttrMongo")).isTrue(); + MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&noAttrMongo"); + + assertThat(getField(mfb, "host")).isNull(); + assertThat(getField(mfb, "port")).isNull(); + } + + @Test + public void testMongoSingletonWithAttributes() throws Exception { + + assertThat(ctx.containsBean("defaultMongo")).isTrue(); + MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&defaultMongo"); + + String host = (String) getField(mfb, "host"); + Integer port = (Integer) getField(mfb, "port"); + + assertThat(host).isEqualTo("localhost"); + assertThat(port).isEqualTo(new Integer(27017)); + + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + assertThat(options).isNull(); + } + + @Test // DATAMONGO-764 + public void testMongoSingletonWithSslEnabled() throws Exception { + + assertThat(ctx.containsBean("mongoSsl")).isTrue(); + MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSsl"); + + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + assertThat(options.getSslSettings().getContext().getSocketFactory() instanceof SSLSocketFactory) + .as("socketFactory should be a SSLSocketFactory").isTrue(); + } + + @Test // DATAMONGO-1490 + public void testMongoClientSingletonWithSslEnabled() { + + assertThat(ctx.containsBean("mongoClientSsl")).isTrue(); + MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClientSsl"); + + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + assertThat(options.getSslSettings().getContext().getSocketFactory() instanceof SSLSocketFactory) + .as("socketFactory should be a SSLSocketFactory").isTrue(); + } + + @Test // DATAMONGO-764 + public void testMongoSingletonWithSslEnabledAndCustomSslSocketFactory() throws Exception { + + assertThat(ctx.containsBean("mongoSslWithCustomSslFactory")).isTrue(); + MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSslWithCustomSslFactory"); + + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + + assertThat(options.getSslSettings().getContext().getSocketFactory() instanceof SSLSocketFactory) + .as("socketFactory should be a SSLSocketFactory").isTrue(); + assertThat(options.getSslSettings().getContext().getProvider().getName()).isEqualTo("SunJSSE"); + } + + @Test + public void testSecondMongoDbFactory() { + + assertThat(ctx.containsBean("secondMongoDbFactory")).isTrue(); + MongoDatabaseFactory dbf = (MongoDatabaseFactory) ctx.getBean("secondMongoDbFactory"); + + MongoClient mongo = (MongoClient) getField(dbf, "mongoClient"); + assertThat(mongo.getClusterDescription().getClusterSettings().getHosts()).containsExactly(new ServerAddress()); + assertThat(getField(dbf, "databaseName")).isEqualTo("database"); + } + + @Test // DATAMONGO-789 + public void testThirdMongoDbFactory() { + + assertThat(ctx.containsBean("thirdMongoDbFactory")).isTrue(); + + MongoDatabaseFactory dbf = (MongoDatabaseFactory) ctx.getBean("thirdMongoDbFactory"); + MongoClient mongo = (MongoClient) getField(dbf, "mongoClient"); + + 
assertThat(mongo.getClusterDescription().getClusterSettings().getHosts()).containsExactly(new ServerAddress());
+    assertThat(getField(dbf, "databaseName")).isEqualTo("database");
+  }
+
+  @Test // DATAMONGO-140
+  public void testMongoTemplateFactory() {
+
+    assertThat(ctx.containsBean("mongoTemplate")).isTrue();
+    MongoOperations operations = (MongoOperations) ctx.getBean("mongoTemplate");
+
+    MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "mongoDbFactory");
+    assertThat(getField(dbf, "databaseName")).isEqualTo("database");
+
+    MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter");
+    assertThat(converter).isNotNull();
+  }
+
+  @Test // DATAMONGO-140
+  public void testSecondMongoTemplateFactory() {
+
+    assertThat(ctx.containsBean("anotherMongoTemplate")).isTrue();
+    MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate");
+
+    MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "mongoDbFactory");
+    assertThat(getField(dbf, "databaseName")).isEqualTo("database");
+
+    WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern");
+    assertThat(writeConcern).isEqualTo(WriteConcern.ACKNOWLEDGED);
+  }
+
+  @Test // DATAMONGO-628
+  public void testGridFsTemplateFactory() {
+
+    assertThat(ctx.containsBean("gridFsTemplate")).isTrue();
+    GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate");
+
+    Supplier<GridFSBucket> gridFSBucketSupplier = (Supplier<GridFSBucket>) getField(operations, "bucketSupplier");
+    GridFSBucket gfsBucket = gridFSBucketSupplier.get();
+    assertThat(gfsBucket.getBucketName()).isEqualTo("fs"); // fs is the default
+
+    MongoCollection<GridFSFile> filesCollection = (MongoCollection<GridFSFile>) getField(gfsBucket, "filesCollection");
+    assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database");
+
+    MongoConverter converter = (MongoConverter) getField(operations, "converter");
+    assertThat(converter).isNotNull();
+  }
+
+  @Test // DATAMONGO-628
+  public void testSecondGridFsTemplateFactory() {
+
+    assertThat(ctx.containsBean("secondGridFsTemplate")).isTrue();
+    GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate");
+
+    Supplier<GridFSBucket> gridFSBucketSupplier = (Supplier<GridFSBucket>) getField(operations, "bucketSupplier");
+    GridFSBucket gfsBucket = gridFSBucketSupplier.get();
+    assertThat(gfsBucket.getBucketName()).isEqualTo("fs"); // fs is the default
+
+    MongoCollection<GridFSFile> filesCollection = (MongoCollection<GridFSFile>) getField(gfsBucket, "filesCollection");
+    assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database");
+
+    MongoConverter converter = (MongoConverter) getField(operations, "converter");
+    assertThat(converter).isNotNull();
+  }
+
+  @Test // DATAMONGO-823
+  public void testThirdGridFsTemplateFactory() {
+
+    assertThat(ctx.containsBean("thirdGridFsTemplate")).isTrue();
+    GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate");
+
+    Supplier<GridFSBucket> gridFSBucketSupplier = (Supplier<GridFSBucket>) getField(operations, "bucketSupplier");
+    GridFSBucket gfsBucket = gridFSBucketSupplier.get();
+    assertThat(gfsBucket.getBucketName()).isEqualTo("bucketString"); // custom bucket name, overriding the "fs" default
+
+    MongoCollection<GridFSFile> filesCollection = (MongoCollection<GridFSFile>) getField(gfsBucket, "filesCollection");
+    assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database");
+
+    MongoConverter converter = (MongoConverter) getField(operations, "converter");
+    assertThat(converter).isNotNull();
+  }
+
+  @Test
+  public void
testMongoSingletonWithPropertyPlaceHolders() { + + assertThat(ctx.containsBean("mongoClient")).isTrue(); + MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClient"); + + String host = (String) getField(mfb, "host"); + Integer port = (Integer) getField(mfb, "port"); + + assertThat(host).isEqualTo("127.0.0.1"); + assertThat(port).isEqualTo(new Integer(27017)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java index 0ca28717e4..2ae67e583b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,14 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.beans.PropertyValue; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionReader; @@ -31,11 +32,11 @@ import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.io.ClassPathResource; -import com.mongodb.Mongo; +import com.mongodb.client.MongoClient; /** - * Integration tests for {@link MongoParser}. - * + * Integration tests for {@link MongoClientParser}. 
+ * * @author Oliver Gierke */ public class MongoParserIntegrationTests { @@ -43,7 +44,7 @@ public class MongoParserIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; - @Before + @BeforeEach public void setUp() { this.factory = new DefaultListableBeanFactory(); @@ -51,21 +52,23 @@ public void setUp() { } @Test + @Disabled public void readsMongoAttributesCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-bean.xml")); - BeanDefinition definition = factory.getBeanDefinition("mongo"); + BeanDefinition definition = factory.getBeanDefinition("mongoClient"); List values = definition.getPropertyValues().getPropertyValueList(); - assertThat(values, hasItem(new PropertyValue("writeConcern", "SAFE"))); - factory.getBean("mongo"); + assertThat(values.get(2).getValue()).isInstanceOf(BeanDefinition.class); + BeanDefinition x = (BeanDefinition) values.get(2).getValue(); + + assertThat(x.getPropertyValues().getPropertyValueList()).contains(new PropertyValue("writeConcern", "SAFE")); + + factory.getBean("mongoClient"); } - /** - * @see DATAMONGO-343 - */ - @Test + @Test // DATAMONGO-343 public void readsServerAddressesCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-bean.xml")); @@ -73,7 +76,7 @@ public void readsServerAddressesCorrectly() { AbstractApplicationContext context = new GenericApplicationContext(factory); context.refresh(); - assertThat(context.getBean("mongo2", Mongo.class), is(notNullValue())); + assertThat(context.getBean("mongo2", MongoClient.class)).isNotNull(); context.close(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MyWriteConcern.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MyWriteConcern.java deleted file mode 100644 index e2bc6c2529..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MyWriteConcern.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.springframework.data.mongodb.config; - -import com.mongodb.WriteConcern; - -public class MyWriteConcern { - - public MyWriteConcern(WriteConcern wc) { - this._w = wc.getWObject(); - this._fsync = wc.getFsync(); - this._j = wc.getJ(); - this._wtimeout = wc.getWtimeout(); - } - - Object _w = 0; - int _wtimeout = 0; - boolean _fsync = false; - boolean _j = false; - boolean _continueOnErrorForInsert = false; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (_continueOnErrorForInsert ? 1231 : 1237); - result = prime * result + (_fsync ? 1231 : 1237); - result = prime * result + (_j ? 1231 : 1237); - result = prime * result + ((_w == null) ? 
0 : _w.hashCode()); - result = prime * result + _wtimeout; - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - MyWriteConcern other = (MyWriteConcern) obj; - if (_continueOnErrorForInsert != other._continueOnErrorForInsert) - return false; - if (_fsync != other._fsync) - return false; - if (_j != other._j) - return false; - if (_w == null) { - if (other._w != null) - return false; - } else if (!_w.equals(other._w)) - return false; - if (_wtimeout != other._wtimeout) - return false; - return true; - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReactiveAuditingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReactiveAuditingTests.java new file mode 100644 index 0000000000..fc47fd0572 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReactiveAuditingTests.java @@ -0,0 +1,209 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.Assertions.*; + +import org.springframework.core.ResolvableType; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.test.util.ReflectionTestUtils; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.ReactiveAuditorAware; +import org.springframework.data.mongodb.core.AuditablePerson; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import 
org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration test for the auditing support via {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate}. + * + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@ContextConfiguration +class ReactiveAuditingTests { + + static @Client MongoClient mongoClient; + + @Autowired ReactiveAuditablePersonRepository auditablePersonRepository; + @Autowired MongoMappingContext context; + @Autowired ReactiveMongoOperations operations; + + @Configuration + @EnableReactiveMongoAuditing + @EnableReactiveMongoRepositories(basePackageClasses = ReactiveAuditingTests.class, considerNestedRepositories = true, + includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ReactiveAuditablePersonRepository.class)) + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected Set> getInitialEntitySet() { + return new HashSet<>( + Arrays.asList(AuditablePerson.class, VersionedAuditablePerson.class, SimpleVersionedAuditablePerson.class)); + } + + @Bean + public ReactiveAuditorAware auditorProvider() { + + AuditablePerson person = new AuditablePerson("some-person"); + person.setId("foo"); + + return () -> Mono.just(person); + } + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithWrapperVersion() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + auditablePersonRepository::save, // + null, 0L, 1L); + } + + @Test // DATAMONGO-2179 + void auditingWorksForVersionedEntityBatchWithWrapperVersion() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + s -> auditablePersonRepository.saveAll(Collections.singletonList(s)).next(), // + null, 0L, 1L); + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithSimpleVersion() { + + verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + auditablePersonRepository::save, // + 0L, 1L, 2L); + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithWrapperVersionOnTemplate() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + operations::save, // + null, 0L, 1L); + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithSimpleVersionOnTemplate() { + verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + operations::save, // + 0L, 1L, 2L); + } + + @Test // DATAMONGO-2586 + void auditingShouldOnlyRegisterReactiveAuditingCallback() { + + Object callbacks = ReflectionTestUtils.getField(operations, "entityCallbacks"); + Object callbackDiscoverer = ReflectionTestUtils.getField(callbacks, "callbackDiscoverer"); + 
List<EntityCallback<?>> actualCallbacks = ReflectionTestUtils.invokeMethod(callbackDiscoverer, "getEntityCallbacks", + AuditablePerson.class, ResolvableType.forClass(EntityCallback.class)); + + assertThat(actualCallbacks) // + .hasAtLeastOneElementOfType(ReactiveAuditingEntityCallback.class) // + .doesNotHaveAnyElementsOfTypes(AuditingEntityCallback.class); + } + + private <T extends AuditablePerson> void verifyAuditingViaVersionProperty(T instance, + Function<T, Object> versionExtractor, Function<T, Object> createdDateExtractor, Function<T, Mono<T>> persister, + Object... expectedValues) { + + // expectedValues holds the expected version: before persisting, after the first and after the second save + AtomicReference<T> instanceHolder = new AtomicReference<>(instance); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(instance.getClass()); + + assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[0]); + assertThat(createdDateExtractor.apply(instance)).isNull(); + assertThat(entity.isNew(instance)).isTrue(); + + persister.apply(instanceHolder.get()) // + .as(StepVerifier::create).consumeNextWith(actual -> { + + instanceHolder.set(actual); + + assertThat(versionExtractor.apply(actual)).isEqualTo(expectedValues[1]); + assertThat(createdDateExtractor.apply(instance)).isNotNull(); + assertThat(entity.isNew(actual)).isFalse(); + }).verifyComplete(); + + persister.apply(instanceHolder.get()) // + .as(StepVerifier::create).consumeNextWith(actual -> { + + instanceHolder.set(actual); + + assertThat(versionExtractor.apply(actual)).isEqualTo(expectedValues[2]); + assertThat(entity.isNew(actual)).isFalse(); + }).verifyComplete(); + } + + interface ReactiveAuditablePersonRepository extends ReactiveMongoRepository<AuditablePerson, String> {} + + static class VersionedAuditablePerson extends AuditablePerson { + @Version Long version; + } + + static class SimpleVersionedAuditablePerson extends AuditablePerson { + @Version long version; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java index 6f8f0c9548..d9280a416d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,64 +15,47 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import com.mongodb.ReadPreference; /** * Unit tests for {@link ReadPreferencePropertyEditor}. 
- * + * * @author Christoph Strobl */ public class ReadPreferencePropertyEditorUnitTests { - @Rule public ExpectedException expectedException = ExpectedException.none(); - ReadPreferencePropertyEditor editor; - @Before + @BeforeEach public void setUp() { editor = new ReadPreferencePropertyEditor(); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 public void shouldThrowExceptionOnUndefinedPreferenceString() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("ReadPreference"); - expectedException.expectMessage("foo"); - - editor.setAsText("foo"); + assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText("foo")).withMessageContaining("foo") + .withMessageContaining("ReadPreference"); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 public void shouldAllowUsageNativePreferenceStrings() { editor.setAsText("secondary"); - assertThat(editor.getValue(), is((Object) ReadPreference.secondary())); + assertThat(editor.getValue()).isEqualTo((Object) ReadPreference.secondary()); } - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 public void shouldAllowUsageOfUppcaseEnumStringsForPreferences() { editor.setAsText("NEAREST"); - assertThat(editor.getValue(), is((Object) ReadPreference.nearest())); + assertThat(editor.getValue()).isEqualTo((Object) ReadPreference.nearest()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java index 19565433c2..38210b3f33 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,44 +15,36 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Arrays; import java.util.Collection; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfSystemProperty; import com.mongodb.ServerAddress; /** * Unit tests for {@link ServerAddressPropertyEditor}. 
- * + * * @author Oliver Gierke * @author Thomas Darimont */ public class ServerAddressPropertyEditorUnitTests { - @Rule public ExpectedException expectedException = ExpectedException.none(); - ServerAddressPropertyEditor editor; - @Before + @BeforeEach public void setUp() { editor = new ServerAddressPropertyEditor(); } - /** - * @see DATAMONGO-454 - * @see DATAMONGO-1062 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-454, DATAMONGO-1062 public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress() { String unknownHost1 = "gugu.nonexistant.example.org"; @@ -60,43 +52,33 @@ public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress assertUnresolveableHostnames(unknownHost1, unknownHost2); - editor.setAsText(unknownHost1 + "," + unknownHost2); + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> editor.setAsText(unknownHost1 + "," + unknownHost2)); } - /** - * @see DATAMONGO-454 - */ - @Test + @Test // DATAMONGO-454 + @EnabledIfSystemProperty(named = "user.name", matches = "jenkins") public void skipsUnparsableAddressIfAtLeastOneIsParsable() throws UnknownHostException { editor.setAsText("foo, localhost"); assertSingleAddressOfLocalhost(editor.getValue()); } - /** - * @see DATAMONGO-454 - */ - @Test + @Test // DATAMONGO-454 public void handlesEmptyAddressAsParseError() throws UnknownHostException { editor.setAsText(", localhost"); assertSingleAddressOfLocalhost(editor.getValue()); } - /** - * @see DATAMONGO-693 - */ - @Test + @Test // DATAMONGO-693 public void interpretEmptyStringAsNull() { editor.setAsText(""); - assertNull(editor.getValue()); + assertThat(editor.getValue()).isNull(); } - /** - * @see DATAMONGO-808 - */ - @Test + @Test // DATAMONGO-808 public void handleIPv6HostaddressLoopbackShort() throws UnknownHostException { String hostAddress = "::1"; @@ -105,10 +87,7 @@ public void handleIPv6HostaddressLoopbackShort() throws UnknownHostException { assertSingleAddressWithPort(hostAddress, null, editor.getValue()); } - /** - * @see DATAMONGO-808 - */ - @Test + @Test // DATAMONGO-808 public void handleIPv6HostaddressLoopbackShortWithPort() throws UnknownHostException { String hostAddress = "::1"; @@ -120,10 +99,8 @@ public void handleIPv6HostaddressLoopbackShortWithPort() throws UnknownHostExcep /** * Here we detect no port since the last segment of the address contains leading zeros. - * - * @see DATAMONGO-808 */ - @Test + @Test // DATAMONGO-808 public void handleIPv6HostaddressLoopbackLong() throws UnknownHostException { String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001"; @@ -132,10 +109,7 @@ public void handleIPv6HostaddressLoopbackLong() throws UnknownHostException { assertSingleAddressWithPort(hostAddress, null, editor.getValue()); } - /** - * @see DATAMONGO-808 - */ - @Test + @Test // DATAMONGO-808 public void handleIPv6HostaddressLoopbackLongWithBrackets() throws UnknownHostException { String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]"; @@ -146,22 +120,16 @@ public void handleIPv6HostaddressLoopbackLongWithBrackets() throws UnknownHostEx /** * We can't tell whether the last part of the hostAddress represents a port or not. 
- * - * @see DATAMONGO-808 - */ - @Test - public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() throws UnknownHostException { - - expectedException.expect(IllegalArgumentException.class); + @Test // DATAMONGO-808 + public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() { String hostAddress = "0000:0000:0000:0000:0000:0000:0000:128"; - editor.setAsText(hostAddress); + + assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText(hostAddress)); } - /** - * @see DATAMONGO-808 - */ - @Test + @Test // DATAMONGO-808 public void handleIPv6HostaddressExampleAddressWithPort() throws UnknownHostException { String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001"; @@ -171,10 +139,7 @@ public void handleIPv6HostaddressExampleAddressWithPort() throws UnknownHostExce assertSingleAddressWithPort(hostAddress, port, editor.getValue()); } - /** - * @see DATAMONGO-808 - */ - @Test + @Test // DATAMONGO-808 public void handleIPv6HostaddressExampleAddressInBracketsWithPort() throws UnknownHostException { String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]"; @@ -191,13 +156,13 @@ private static void assertSingleAddressOfLocalhost(Object result) throws Unknown private static void assertSingleAddressWithPort(String hostAddress, Integer port, Object result) throws UnknownHostException { - assertThat(result, is(instanceOf(ServerAddress[].class))); + assertThat(result).isInstanceOf(ServerAddress[].class); Collection<ServerAddress> addresses = Arrays.asList((ServerAddress[]) result); - assertThat(addresses, hasSize(1)); + assertThat(addresses).hasSize(1); if (port == null) { - assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress)))); + assertThat(addresses).contains(new ServerAddress(InetAddress.getByName(hostAddress))); } else { - assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port))); + assertThat(addresses).contains(new ServerAddress(InetAddress.getByName(hostAddress), port)); } } @@ -205,9 +170,9 @@ private void assertUnresolveableHostnames(String... hostnames) { for (String hostname : hostnames) { try { - InetAddress.getByName(hostname); - Assert.fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved."); - } catch (UnknownHostException expected) { + InetAddress.getByName(hostname).isReachable(1500); + fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved."); + } catch (IOException expected) { // ok } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java index f1aa0d2aaf..7f400c80e0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,29 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; import com.mongodb.WriteConcern; /** * Unit tests for {@link StringToWriteConcernConverter}. - * + * * @author Oliver Gierke + * @author Christoph Strobl */ public class StringToWriteConcernConverterUnitTests { StringToWriteConcernConverter converter = new StringToWriteConcernConverter(); - @Test + @Test // DATAMONGO-2199 public void createsWellKnownConstantsCorrectly() { - assertThat(converter.convert("SAFE"), is(WriteConcern.SAFE)); + assertThat(converter.convert("ACKNOWLEDGED")).isEqualTo(WriteConcern.ACKNOWLEDGED); } @Test public void createsWriteConcernForUnknownValue() { - assertThat(converter.convert("-1"), is(new WriteConcern("-1"))); + assertThat(converter.convert("-1")).isEqualTo(new WriteConcern("-1")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java index e315bbf1f0..2f9732a33a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,39 +15,39 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import com.mongodb.WriteConcern; /** * Unit tests for {@link WriteConcernPropertyEditor}. 
- * + * * @author Oliver Gierke + * @author Christoph Strobl */ public class WriteConcernPropertyEditorUnitTests { WriteConcernPropertyEditor editor; - @Before + @BeforeEach public void setUp() { editor = new WriteConcernPropertyEditor(); } - @Test + @Test // DATAMONGO-2199 public void createsWriteConcernForWellKnownConstants() { - editor.setAsText("SAFE"); - assertThat(editor.getValue(), is((Object) WriteConcern.SAFE)); + editor.setAsText("JOURNALED"); + assertThat(editor.getValue()).isEqualTo(WriteConcern.JOURNALED); } @Test public void createsWriteConcernForUnknownConstants() { editor.setAsText("-1"); - assertThat(editor.getValue(), is((Object) new WriteConcern("-1"))); + assertThat(editor.getValue()).isEqualTo(new WriteConcern("-1")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java index eb07953c36..b7bc73a728 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,7 @@ /** * Domain class for auditing functionality testing. - * + * * @author Thomas Darimont */ public class AuditablePerson { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ChangeStreamOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ChangeStreamOptionsUnitTests.java new file mode 100644 index 0000000000..a4192df40a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ChangeStreamOptionsUnitTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.BsonDocument; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ChangeStreamOptions}. 
+ * + * @author Mark Paluch + */ +public class ChangeStreamOptionsUnitTests { + + @Test // DATAMONGO-2258 + public void shouldReportResumeAfter() { + + ChangeStreamOptions options = ChangeStreamOptions.builder().resumeAfter(new BsonDocument()).build(); + + assertThat(options.isResumeAfter()).isTrue(); + assertThat(options.isStartAfter()).isFalse(); + } + + @Test // DATAMONGO-2258 + public void shouldReportStartAfter() { + + ChangeStreamOptions options = ChangeStreamOptions.builder().startAfter(new BsonDocument()).build(); + + assertThat(options.isResumeAfter()).isFalse(); + assertThat(options.isStartAfter()).isTrue(); + } + + @Test // DATAMONGO-2258 + public void shouldNotReportResumeStartAfter() { + + ChangeStreamOptions options = ChangeStreamOptions.empty(); + + assertThat(options.isResumeAfter()).isFalse(); + assertThat(options.isStartAfter()).isFalse(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ClientSessionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ClientSessionTests.java new file mode 100644 index 0000000000..7fb1cddafb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ClientSessionTests.java @@ -0,0 +1,273 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link ClientSession} through {@link MongoTemplate#withSession(ClientSession)}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class }) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +class ClientSessionTests { + + private static final String DB_NAME = "client-session-tests"; + private static final String COLLECTION_NAME = "test"; + private static final String REF_COLLECTION_NAME = "test-with-ref"; + + private static @ReplSetClient MongoClient mongoClient; + + private MongoTemplate template; + + @BeforeEach + void setUp() { + + MongoTestUtils.createOrReplaceCollection(DB_NAME, COLLECTION_NAME, mongoClient); + + template = new MongoTemplate(mongoClient, DB_NAME); + template.getDb().getCollection(COLLECTION_NAME).insertOne(new Document("_id", "id-1").append("value", "spring")); + } + + @Test // DATAMONGO-1880 + void shouldApplyClientSession() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + Document doc = template.withSession(() -> session) + .execute(action -> action.findOne(new Query(), Document.class, "test")); + + assertThat(doc).isNotNull(); + assertThat(session.getOperationTime()).isNotNull(); + assertThat(session.getServerSession().isClosed()).isFalse(); + + session.close(); + } + + @Test // DATAMONGO-2241 + void shouldReuseConfiguredInfrastructure() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + MappingMongoConverter source = MappingMongoConverter.class.cast(template.getConverter()); + MappingMongoConverter sessionTemplateConverter = MappingMongoConverter.class + .cast(template.withSession(() -> session).execute(MongoOperations::getConverter)); + + assertThat(sessionTemplateConverter.getMappingContext()).isSameAs(source.getMappingContext()); + assertThat(ReflectionTestUtils.getField(sessionTemplateConverter, "conversions")) + .isSameAs(ReflectionTestUtils.getField(source, "conversions")); + assertThat(ReflectionTestUtils.getField(sessionTemplateConverter, "instantiators")) + .isSameAs(ReflectionTestUtils.getField(source, "instantiators")); + } + + @Test // DATAMONGO-1920 + void withCommittedTransaction() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + session.startTransaction(); + + SomeDoc saved = template.withSession(() -> session).execute(action -> { + + SomeDoc doc = new SomeDoc("id-2", "value2"); + action.insert(doc); + return doc; + }); + + session.commitTransaction(); + session.close(); + + assertThat(saved).isNotNull(); + assertThat(session.getOperationTime()).isNotNull(); + + assertThat(template.exists(query(where("id").is(saved.getId())), SomeDoc.class)).isTrue(); + } + + @Test // DATAMONGO-1920 + void withAbortedTransaction() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + session.startTransaction(); + + SomeDoc saved = template.withSession(() -> session).execute(action -> { + + SomeDoc doc = new SomeDoc("id-2", "value2"); + action.insert(doc); + return doc; + }); + + session.abortTransaction(); + session.close(); + + assertThat(saved).isNotNull(); + assertThat(session.getOperationTime()).isNotNull(); + + assertThat(template.exists(query(where("id").is(saved.getId())), SomeDoc.class)).isFalse(); + } + + 
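// A minimal sketch of the pattern the transaction tests above exercise, assuming an open + // ClientSession `session` on a replica set and the `template` configured in setUp() (the + // document values here are illustrative only): + // + // session.startTransaction(); + // template.withSession(() -> session).execute(action -> action.insert(new SomeDoc("id-9", "value9"))); + // session.commitTransaction(); // or session.abortTransaction() to discard the insert + // session.close(); + + 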
@Test // DATAMONGO-2490 + void shouldBeAbleToReadDbRefDuringTransaction() { + + SomeDoc ref = new SomeDoc("ref-1", "da value"); + WithDbRef source = new WithDbRef("source-1", "da source", ref); + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + session.startTransaction(); + + WithDbRef saved = template.withSession(() -> session).execute(action -> { + + template.save(ref); + template.save(source); + + return template.findOne(query(where("id").is(source.id)), WithDbRef.class); + }); + + assertThat(saved.getSomeDocRef()).isEqualTo(ref); + + session.abortTransaction(); + } + + @org.springframework.data.mongodb.core.mapping.Document(COLLECTION_NAME) + static class SomeDoc { + + @Id String id; + String value; + + SomeDoc(String id, String value) { + + this.id = id; + this.value = value; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeDoc someDoc = (SomeDoc) o; + return Objects.equals(id, someDoc.id) && Objects.equals(value, someDoc.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "ClientSessionTests.SomeDoc(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(REF_COLLECTION_NAME) + static class WithDbRef { + + @Id String id; + String value; + @DBRef SomeDoc someDocRef; + + WithDbRef(String id, String value, SomeDoc someDocRef) { + this.id = id; + this.value = value; + this.someDocRef = someDocRef; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public SomeDoc getSomeDocRef() { + return this.someDocRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setSomeDocRef(SomeDoc someDocRef) { + this.someDocRef = someDocRef; + } + + public String toString() { + return "ClientSessionTests.WithDbRef(id=" + this.getId() + ", value=" + this.getValue() + ", someDocRef=" + + this.getSomeDocRef() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java index 1ac451b619..a912b04adc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,69 +15,61 @@ */ package org.springframework.data.mongodb.core; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.core.MongoTemplate.CloseableIterableCursorAdapter; -import org.springframework.data.mongodb.core.MongoTemplate.DbObjectCallback; +import org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback; import org.springframework.data.util.CloseableIterator; -import com.mongodb.Cursor; +import com.mongodb.client.MongoCursor; /** * Unit tests for {@link CloseableIterableCursorAdapter}. - * + * * @author Oliver Gierke - * @see DATAMONGO-1276 */ -@RunWith(MockitoJUnitRunner.class) -public class CloseableIterableCursorAdapterUnitTests { +@ExtendWith(MockitoExtension.class) class CloseableIterableCursorAdapterUnitTests { @Mock PersistenceExceptionTranslator exceptionTranslator; - @Mock DbObjectCallback<Object> callback; - - Cursor cursor; - CloseableIterator<Object> adapter; + @Mock DocumentCallback<Object> callback; - @Before - public void setUp() { + private MongoCursor<Document> cursor; + private CloseableIterator<Object> adapter; - this.cursor = doThrow(IllegalArgumentException.class).when(mock(Cursor.class)); - this.adapter = new CloseableIterableCursorAdapter(cursor, exceptionTranslator, callback); + @BeforeEach + void setUp() { + this.cursor = mock(MongoCursor.class); + this.adapter = new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, callback); } - /** - * @see DATAMONGO-1276 - */ - @Test(expected = IllegalArgumentException.class) - public void propagatesOriginalExceptionFromAdapterDotNext() { + @Test // DATAMONGO-1276 + void propagatesOriginalExceptionFromAdapterDotNext() { - cursor.next(); - adapter.next(); + doThrow(IllegalArgumentException.class).when(cursor).next(); + assertThatIllegalArgumentException().isThrownBy(() -> adapter.next()); } - /** - * @see DATAMONGO-1276 - */ - @Test(expected = IllegalArgumentException.class) - public void propagatesOriginalExceptionFromAdapterDotHasNext() { + @Test // DATAMONGO-1276 + void propagatesOriginalExceptionFromAdapterDotHasNext() { - cursor.hasNext(); - adapter.hasNext(); + doThrow(IllegalArgumentException.class).when(cursor).hasNext(); + assertThatIllegalArgumentException().isThrownBy(() -> adapter.hasNext()); } - /** - * @see DATAMONGO-1276 - */ - @Test(expected = IllegalArgumentException.class) - public void propagatesOriginalExceptionFromAdapterDotClose() { + @Test // DATAMONGO-1276 + void propagatesOriginalExceptionFromAdapterDotClose() { - cursor.close(); - adapter.close(); + doThrow(IllegalArgumentException.class).when(cursor).close(); + assertThatIllegalArgumentException().isThrownBy(() -> adapter.close()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java new file mode 100644 index 0000000000..0d5e22202c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java @@ -0,0 +1,188 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Locale; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Collation.Alternate; +import org.springframework.data.mongodb.core.query.Collation.CaseFirst; +import org.springframework.data.mongodb.core.query.Collation.CollationLocale; +import org.springframework.data.mongodb.core.query.Collation.ComparisonLevel; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +public class CollationUnitTests { + + static final Document BINARY_COMPARISON = new Document().append("locale", "simple"); + static final Document JUST_LOCALE = new Document().append("locale", "en_US"); + static final Document LOCALE_WITH_VARIANT = new Document().append("locale", "de_AT@collation=phonebook"); + static final Document WITH_STRENGTH_PRIMARY = new Document(JUST_LOCALE).append("strength", 1); + static final Document WITH_STRENGTH_PRIMARY_INCLUDE_CASE = new Document(WITH_STRENGTH_PRIMARY).append("caseLevel", + true); + static final Document WITH_NORMALIZATION = new Document(JUST_LOCALE).append("normalization", true); + static final Document WITH_BACKWARDS = new Document(JUST_LOCALE).append("backwards", true); + static final Document WITH_NUMERIC_ORDERING = new Document(JUST_LOCALE).append("numericOrdering", true); + static final Document WITH_CASE_FIRST_UPPER = new Document(JUST_LOCALE).append("strength", 3).append("caseFirst", + "upper"); + static final Document WITH_ALTERNATE_SHIFTED = new Document(JUST_LOCALE).append("alternate", "shifted"); + static final Document WITH_ALTERNATE_SHIFTED_MAX_VARIABLE_PUNCT = new Document(WITH_ALTERNATE_SHIFTED) + .append("maxVariable", "punct"); + static final Document ALL_THE_THINGS = new Document(LOCALE_WITH_VARIANT).append("strength", 1) + .append("caseLevel", true).append("backwards", true).append("numericOrdering", true) + .append("alternate", "shifted").append("maxVariable", "punct").append("normalization", true); + + @Test // DATAMONGO-1518 + public void justLocale() { + assertThat(Collation.of("en_US").toDocument()).isEqualTo(JUST_LOCALE); + } + + @Test // DATAMONGO-1518 + public void justLocaleFromDocument() { + assertThat(Collation.from(JUST_LOCALE).toDocument()).isEqualTo(JUST_LOCALE); + } + + @Test // DATAMONGO-1518 + public void localeWithVariant() { + assertThat(Collation.of(CollationLocale.of("de_AT").variant("phonebook")).toDocument()) + .isEqualTo(LOCALE_WITH_VARIANT); + } + + @Test // DATAMONGO-1518 + public void 
localeWithVariantFromDocument() { + assertThat(Collation.from(LOCALE_WITH_VARIANT).toDocument()).isEqualTo(LOCALE_WITH_VARIANT); + } + + @Test // DATAMONGO-1518 + public void localeFromJavaUtilLocale() { + + assertThat(Collation.of(java.util.Locale.US).toDocument()).isEqualTo(new Document().append("locale", "en_US")); + assertThat(Collation.of(Locale.ENGLISH).toDocument()).isEqualTo(new Document().append("locale", "en")); + } + + @Test // DATAMONGO-1518 + public void withStrenghPrimary() { + assertThat(Collation.of("en_US").strength(ComparisonLevel.primary()).toDocument()).isEqualTo(WITH_STRENGTH_PRIMARY); + } + + @Test // DATAMONGO-1518 + public void withStrenghPrimaryFromDocument() { + assertThat(Collation.from(WITH_STRENGTH_PRIMARY).toDocument()).isEqualTo(WITH_STRENGTH_PRIMARY); + } + + @Test // DATAMONGO-1518 + public void withStrenghPrimaryAndIncludeCase() { + + assertThat(Collation.of("en_US").strength(ComparisonLevel.primary().includeCase()).toDocument()) + .isEqualTo(WITH_STRENGTH_PRIMARY_INCLUDE_CASE); + } + + @Test // DATAMONGO-1518 + public void withStrenghPrimaryAndIncludeCaseFromDocument() { + + assertThat(Collation.from(WITH_STRENGTH_PRIMARY_INCLUDE_CASE).toDocument()) + .isEqualTo(WITH_STRENGTH_PRIMARY_INCLUDE_CASE); + } + + @Test // DATAMONGO-1518 + public void withNormalization() { + assertThat(Collation.of("en_US").normalization(true).toDocument()).isEqualTo(WITH_NORMALIZATION); + } + + @Test // DATAMONGO-1518 + public void withNormalizationFromDocument() { + assertThat(Collation.from(WITH_NORMALIZATION).toDocument()).isEqualTo(WITH_NORMALIZATION); + } + + @Test // DATAMONGO-1518 + public void withBackwards() { + assertThat(Collation.of("en_US").backwards(true).toDocument()).isEqualTo(WITH_BACKWARDS); + } + + @Test // DATAMONGO-1518 + public void withBackwardsFromDocument() { + assertThat(Collation.from(WITH_BACKWARDS).toDocument()).isEqualTo(WITH_BACKWARDS); + } + + @Test // DATAMONGO-1518 + public void withNumericOrdering() { + assertThat(Collation.of("en_US").numericOrdering(true).toDocument()).isEqualTo(WITH_NUMERIC_ORDERING); + } + + @Test // DATAMONGO-1518 + public void withNumericOrderingFromDocument() { + assertThat(Collation.from(WITH_NUMERIC_ORDERING).toDocument()).isEqualTo(WITH_NUMERIC_ORDERING); + } + + @Test // DATAMONGO-1518 + public void withCaseFirst() { + assertThat(Collation.of("en_US").caseFirst(CaseFirst.upper()).toDocument()).isEqualTo(WITH_CASE_FIRST_UPPER); + } + + @Test // DATAMONGO-1518 + public void withCaseFirstFromDocument() { + assertThat(Collation.from(WITH_CASE_FIRST_UPPER).toDocument()).isEqualTo(WITH_CASE_FIRST_UPPER); + } + + @Test // DATAMONGO-1518 + public void withAlternate() { + assertThat(Collation.of("en_US").alternate(Alternate.shifted()).toDocument()).isEqualTo(WITH_ALTERNATE_SHIFTED); + } + + @Test // DATAMONGO-1518 + public void withAlternateFromDocument() { + assertThat(Collation.from(WITH_ALTERNATE_SHIFTED).toDocument()).isEqualTo(WITH_ALTERNATE_SHIFTED); + } + + @Test // DATAMONGO-1518 + public void withAlternateAndMaxVariable() { + + assertThat(Collation.of("en_US").alternate(Alternate.shifted().punct()).toDocument()) + .isEqualTo(WITH_ALTERNATE_SHIFTED_MAX_VARIABLE_PUNCT); + } + + @Test // DATAMONGO-1518 + public void withAlternateAndMaxVariableFromDocument() { + + assertThat(Collation.from(WITH_ALTERNATE_SHIFTED_MAX_VARIABLE_PUNCT).toDocument()) + .isEqualTo(WITH_ALTERNATE_SHIFTED_MAX_VARIABLE_PUNCT); + } + + @Test // DATAMONGO-1518 + public void allTheThings() { + + 
assertThat(Collation.of(CollationLocale.of("de_AT").variant("phonebook")) + .strength(ComparisonLevel.primary().includeCase()).normalizationEnabled().backwardDiacriticSort() + .numericOrderingEnabled().alternate(Alternate.shifted().punct()).toDocument()).isEqualTo(ALL_THE_THINGS); + } + + @Test // DATAMONGO-1518 + public void allTheThingsFromDocument() { + assertThat(Collation.from(ALL_THE_THINGS).toDocument()).isEqualTo(ALL_THE_THINGS); + } + + @Test // DATAMONGO-1518 + public void justTheDefault() { + assertThat(Collation.simple().toDocument()).isEqualTo(BINARY_COMPARISON); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java new file mode 100644 index 0000000000..9de0863cd2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java @@ -0,0 +1,180 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import static org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import static org.springframework.data.mongodb.core.CollectionOptions.emitChangedRevisions; +import static org.springframework.data.mongodb.core.CollectionOptions.empty; +import static org.springframework.data.mongodb.core.CollectionOptions.encryptedCollection; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.int32; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.queryable; + +import java.util.List; + +import org.bson.BsonNull; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; +import org.springframework.data.mongodb.core.validation.Validator; + +/** + * @author Christoph Strobl + */ +class CollectionOptionsUnitTests { + + @Test // GH-4210 + void emptyEquals() { + assertThat(empty()).isEqualTo(empty()); + } + + @Test // GH-4210 + void collectionProperties() { + assertThat(empty().maxDocuments(10).size(1).disableValidation()) + .isEqualTo(empty().maxDocuments(10).size(1).disableValidation()); + } + + @Test // GH-4210 + void changedRevisionsEquals() { + assertThat(emitChangedRevisions()).isNotEqualTo(empty()).isEqualTo(emitChangedRevisions()); + } + + @Test // GH-4210 + void cappedEquals() { + assertThat(empty().capped()).isNotEqualTo(empty()).isEqualTo(empty().capped()); + } + + @Test // GH-4210 + void collationEquals() { + + 
assertThat(empty().collation(Collation.of("en_US"))) // + .isEqualTo(empty().collation(Collation.of("en_US"))) // + .isNotEqualTo(empty()) // + .isNotEqualTo(empty().collation(Collation.of("de_AT"))); + } + + @Test // GH-4210 + void timeSeriesEquals() { + + assertThat(empty().timeSeries(TimeSeriesOptions.timeSeries("tf"))) // + .isEqualTo(empty().timeSeries(TimeSeriesOptions.timeSeries("tf"))) // + .isNotEqualTo(empty()) // + .isNotEqualTo(empty().timeSeries(TimeSeriesOptions.timeSeries("other"))); + } + + @Test // GH-4210 + void validatorEquals() { + + assertThat(empty().validator(Validator.document(new Document("one", "two")))) // + .isEqualTo(empty().validator(Validator.document(new Document("one", "two")))) // + .isNotEqualTo(empty()) // + .isNotEqualTo(empty().validator(Validator.document(new Document("three", "four")))) + .isNotEqualTo(empty().validator(Validator.document(new Document("one", "two"))).moderateValidation()); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionOptionsFromSchemaRenderCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() + .property(JsonSchemaProperty.object("spring") + .properties(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")), List.of()))) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("mongodb")), List.of())).build(); + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(schema); + + assertThat(encryptionOptions.toDocument().get("fields", List.class)).hasSize(2) + .contains(new Document("path", "mongodb").append("bsonType", "long").append("queries", List.of()) + .append("keyId", BsonNull.VALUE)) + .contains(new Document("path", "spring.data").append("bsonType", "int").append("queries", List.of()) + .append("keyId", BsonNull.VALUE)); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverrideByPath() { + + CollectionOptions collectionOptions = encryptedCollection(options -> options // + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("spring"))) + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("data"))) + + // override first with data type long + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverridesPathFromSchema() { + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(MongoJsonSchema.builder() + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("spring")), List.of())) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("data")), List.of())).build()); + + // override spring from schema with data type long + CollectionOptions collectionOptions = CollectionOptions.encryptedCollection( + encryptionOptions.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring") + .append("bsonType", 
"long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } + + @Test // GH-4185 + void encryptionOptionsAreImmutable() { + + EncryptedFieldsOptions source = EncryptedFieldsOptions + .fromProperties(List.of(queryable(int32("spring.data"), List.of(QueryCharacteristics.range().min(1))))); + + assertThat(source.queryable(queryable(int32("mongodb"), List.of(QueryCharacteristics.range().min(1))))) + .isNotSameAs(source).satisfies(it -> { + assertThat(it.toDocument().get("fields", List.class)).hasSize(2); + }); + + assertThat(source.toDocument().get("fields", List.class)).hasSize(1); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverridesNestedPathFromSchema() { + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(MongoJsonSchema.builder() + .property(JsonSchemaProperty.object("spring") + .properties(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")), List.of()))) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("mongodb")), List.of())).build()); + + // override spring from schema with data type long + CollectionOptions collectionOptions = CollectionOptions.encryptedCollection( + encryptionOptions.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring.data")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring.data") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java new file mode 100644 index 0000000000..5be870a295 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java @@ -0,0 +1,193 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link CountQuery}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +class CountQueryUnitTests { + + private QueryMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; + + @BeforeEach + void setUp() { + + this.context = new MongoMappingContext(); + + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.afterPropertiesSet(); + + this.mapper = new QueryMapper(converter); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithoutDistance() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document + .parse("{\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 1.7976931348623157E308]}}}")); + } + + @Test // DATAMONGO-2059 + void nearAndExisting$and() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01)) + .addCriteria(new Criteria().andOperator(where("foo").is("bar"))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse("{\"$and\":[" // + "{\"foo\":\"bar\"}," // + "{\"$nor\":[{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 0.01]}}}]},"// + " {\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 1.7976931348623157E308]}}}"// + "]}")); + } + + @Test // DATAMONGO-2059 + void nearSphereToGeoWithinWithoutDistance() { + + Query source = query(where("location").nearSphere(new Point(-73.99171, 40.738868))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 1.7976931348623157E308]}}}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMaxDistance() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868)).maxDistance(10)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo( + org.bson.Document.parse("{\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}}")); + } + + @Test // DATAMONGO-2059 + void nearSphereToGeoWithinWithMaxDistance() { + + Query source = query(where("location").nearSphere(new Point(-73.99171, 40.738868)).maxDistance(10)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document + .parse("{\"location\": 
{\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 10.0]}}}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMinDistance() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$and\":[{\"$nor\":[{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 0.01]}}}]}," + + " {\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 1.7976931348623157E308]}}}]}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMaxDistanceAndCombinedWithOtherCriteria() { + + Query source = query( + where("name").is("food").and("location").near(new Point(-73.99171, 40.738868)).maxDistance(10)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document + .parse("{\"name\": \"food\", \"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMinDistanceOrCombinedWithOtherCriteria() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"$and\":[{\"$nor\":[{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 0.01]}}}]},{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 1.7976931348623157E308]}}}]} ]}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMaxDistanceOrCombinedWithOtherCriteria() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").near(new Point(-73.99171, 40.738868)).maxDistance(10))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}} ]}")); + } + + @Test // GH-4004 + void nearToGeoWithinWithMaxDistanceUsingGeoJsonSource() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").near(new GeoJsonPoint(-73.99171, 40.738868)).maxDistance(10))); + + org.bson.Document target = postProcessQueryForCount(source); + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}} ]}")); + } + + @Test // GH-4004 + void nearSphereToGeoWithinWithoutMaxDistanceUsingGeoJsonSource() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").nearSphere(new GeoJsonPoint(-73.99171, 40.738868)))); + + org.bson.Document target = postProcessQueryForCount(source); + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 1.7976931348623157E308]}}} ]}")); + } + + @Test // GH-4004 + void nearSphereToGeoWithinWithMaxDistanceUsingGeoJsonSource() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), where("location") + .nearSphere(new GeoJsonPoint(-73.99171, 40.738868)).maxDistance/*in meters for geojson*/(10d))); + + org.bson.Document target = postProcessQueryForCount(source); + 
assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 1.567855942887398E-6]}}} ]}")); + } + + private org.bson.Document postProcessQueryForCount(Query source) { + + org.bson.Document intermediate = mapper.getMappedObject(source.getQueryObject(), (MongoPersistentEntity) null); + return CountQuery.of(intermediate).toQueryDocument(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DBObjectTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DBObjectTestUtils.java deleted file mode 100644 index f35391e626..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DBObjectTestUtils.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; - -import com.mongodb.BasicDBList; -import com.mongodb.DBObject; - -/** - * Helper classes to ease assertions on {@link DBObject}s. - * - * @author Oliver Gierke - */ -public abstract class DBObjectTestUtils { - - private DBObjectTestUtils() { - - } - - /** - * Expects the field with the given key to be not {@literal null} and a {@link DBObject} in turn and returns it. - * - * @param source the {@link DBObject} to lookup the nested one - * @param key the key of the field to lokup the nested {@link DBObject} - * @return - */ - public static DBObject getAsDBObject(DBObject source, String key) { - return getTypedValue(source, key, DBObject.class); - } - - /** - * Expects the field with the given key to be not {@literal null} and a {@link BasicDBList}. - * - * @param source the {@link DBObject} to lookup the {@link BasicDBList} in - * @param key the key of the field to find the {@link BasicDBList} in - * @return - */ - public static BasicDBList getAsDBList(DBObject source, String key) { - return getTypedValue(source, key, BasicDBList.class); - } - - /** - * Expects the list element with the given index to be a non-{@literal null} {@link DBObject} and returns it. 
- * - * @param source the {@link BasicDBList} to look up the {@link DBObject} element in - * @param index the index of the element expected to contain a {@link DBObject} - * @return - */ - public static DBObject getAsDBObject(BasicDBList source, int index) { - - assertThat(source.size(), greaterThanOrEqualTo(index + 1)); - Object value = source.get(index); - assertThat(value, is(instanceOf(DBObject.class))); - return (DBObject) value; - } - - @SuppressWarnings("unchecked") - public static T getTypedValue(DBObject source, String key, Class type) { - - Object value = source.get(key); - assertThat(value, is(notNullValue())); - assertThat(value, is(instanceOf(type))); - - return (T) value; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java index 0f3f0b2043..f0e7eb67b0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,239 +15,280 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.domain.Sort.Direction.DESC; import java.util.ArrayList; import java.util.Arrays; import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import java.util.Optional; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.BulkOperationException; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import 
org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; import org.springframework.data.util.Pair; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import com.mongodb.BasicDBObject; -import com.mongodb.BulkWriteResult; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; +import com.mongodb.MongoBulkWriteException; import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.MongoCollection; /** * Integration tests for {@link DefaultBulkOperations}. - * + * * @author Tobias Trelle * @author Oliver Gierke + * @author Christoph Strobl + * @author Minsu Kim */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class DefaultBulkOperationsIntegrationTests { static final String COLLECTION_NAME = "bulk_ops"; - @Autowired MongoOperations operations; + @Template(initialEntitySet = BaseDoc.class) // + static MongoTestTemplate operations; - DBCollection collection; - - @Before + @BeforeEach public void setUp() { - - this.collection = this.operations.getCollection(COLLECTION_NAME); - this.collection.remove(new BasicDBObject()); + operations.flush(COLLECTION_NAME); } - /** - * @see DATAMONGO-934 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-934 public void rejectsNullMongoOperations() { - new DefaultBulkOperations(null, null, COLLECTION_NAME, null); + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultBulkOperations(null, COLLECTION_NAME, + new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null, null, null))); } - /** - * @see DATAMONGO-934 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-934 public void rejectsNullCollectionName() { - new DefaultBulkOperations(operations, null, null, null); + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultBulkOperations(operations, null, + new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null, null, null))); } - /** - * @see DATAMONGO-934 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-934 public void rejectsEmptyCollectionName() { - new DefaultBulkOperations(operations, null, "", null); + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultBulkOperations(operations, "", + new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null, null, null))); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934 public void insertOrdered() { List documents = Arrays.asList(newDoc("1"), newDoc("2")); - assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount(), is(2)); + assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount()).isEqualTo(2); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934, DATAMONGO-2285 public void insertOrderedFails() { List documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2")); - try { - createBulkOps(BulkMode.ORDERED).insert(documents).execute(); - fail(); - } catch (BulkOperationException e) { - assertThat(e.getResult().getInsertedCount(), is(1)); // fails after first error - assertThat(e.getErrors(), notNullValue()); - 
assertThat(e.getErrors().size(), is(1)); - } + assertThatThrownBy(() -> createBulkOps(BulkMode.ORDERED).insert(documents).execute()) // + .isInstanceOf(BulkOperationException.class) // + .hasCauseInstanceOf(MongoBulkWriteException.class) // + .extracting(Throwable::getCause) // + .satisfies(it -> { + + MongoBulkWriteException ex = (MongoBulkWriteException) it; + assertThat(ex.getWriteResult().getInsertedCount()).isOne(); + assertThat(ex.getWriteErrors()).isNotNull(); + assertThat(ex.getWriteErrors().size()).isOne(); + }); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934 public void insertUnOrdered() { List documents = Arrays.asList(newDoc("1"), newDoc("2")); - assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount(), is(2)); + assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount()).isEqualTo(2); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934, DATAMONGO-2285 public void insertUnOrderedContinuesOnError() { List documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2")); - try { - createBulkOps(BulkMode.UNORDERED).insert(documents).execute(); - fail(); - } catch (BulkOperationException e) { - assertThat(e.getResult().getInsertedCount(), is(2)); // two docs were inserted - assertThat(e.getErrors(), notNullValue()); - assertThat(e.getErrors().size(), is(1)); - } + assertThatThrownBy(() -> createBulkOps(BulkMode.UNORDERED).insert(documents).execute()) // + .isInstanceOf(BulkOperationException.class) // + .hasCauseInstanceOf(MongoBulkWriteException.class) // + .extracting(Throwable::getCause) // + .satisfies(it -> { + + MongoBulkWriteException ex = (MongoBulkWriteException) it; + assertThat(ex.getWriteResult().getInsertedCount()).isEqualTo(2); + assertThat(ex.getWriteErrors()).isNotNull(); + assertThat(ex.getWriteErrors().size()).isOne(); + }); } - /** - * @see DATAMONGO-934 - */ - @Test - public void upsertDoesUpdate() { + @ParameterizedTest // DATAMONGO-934, GH-3872 + @MethodSource("upsertArguments") + void upsertDoesUpdate(UpdateDefinition update) { insertSomeDocuments(); - BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// - upsert(where("value", "value1"), set("value", "value2")).// + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// + upsert(where("value", "value1"), update).// execute(); - assertThat(result, notNullValue()); - assertThat(result.getMatchedCount(), is(2)); - assertThat(result.getModifiedCount(), is(2)); - assertThat(result.getInsertedCount(), is(0)); - assertThat(result.getUpserts(), is(notNullValue())); - assertThat(result.getUpserts().size(), is(0)); + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isEqualTo(2); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getUpserts()).isNotNull(); + assertThat(result.getUpserts().size()).isZero(); } - /** - * @see DATAMONGO-934 - */ - @Test - public void upsertDoesInsert() { + @ParameterizedTest // DATAMONGO-934, GH-3872 + @MethodSource("upsertArguments") + void upsertDoesInsert(UpdateDefinition update) { - BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// - upsert(where("_id", "1"), set("value", "v1")).// + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// + upsert(where("_id", "1"), update).// execute(); - assertThat(result, notNullValue()); - assertThat(result.getMatchedCount(), is(0)); - assertThat(result.getModifiedCount(), is(0)); - 
assertThat(result.getUpserts(), is(notNullValue())); - assertThat(result.getUpserts().size(), is(1)); + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts()).isNotNull(); + assertThat(result.getUpserts().size()).isOne(); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934 public void updateOneOrdered() { testUpdate(BulkMode.ORDERED, false, 2); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // GH-3872 + public void updateOneWithAggregation() { + + insertSomeDocuments(); + + BulkOperations bulkOps = createBulkOps(BulkMode.ORDERED); + bulkOps.updateOne(where("value", "value1"), AggregationUpdate.update().set("value").toValue("value3")); + BulkWriteResult result = bulkOps.execute(); + + assertThat(result.getModifiedCount()).isEqualTo(1); + assertThat(operations.execute(COLLECTION_NAME, collection -> collection.countDocuments(new org.bson.Document("value", "value3")))).isOne(); + } + + @Test // DATAMONGO-934 public void updateMultiOrdered() { testUpdate(BulkMode.ORDERED, true, 4); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // GH-3872 + public void updateMultiWithAggregation() { + + insertSomeDocuments(); + + BulkOperations bulkOps = createBulkOps(BulkMode.ORDERED); + bulkOps.updateMulti(where("value", "value1"), AggregationUpdate.update().set("value").toValue("value3")); + BulkWriteResult result = bulkOps.execute(); + + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(operations.execute(COLLECTION_NAME, collection -> collection.countDocuments(new org.bson.Document("value", "value3")))).isEqualTo(2); + } + + @Test // DATAMONGO-934 public void updateOneUnOrdered() { testUpdate(BulkMode.UNORDERED, false, 2); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934 public void updateMultiUnOrdered() { testUpdate(BulkMode.UNORDERED, true, 4); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934 public void removeOrdered() { testRemove(BulkMode.ORDERED); } - /** - * @see DATAMONGO-934 - */ - @Test + @Test // DATAMONGO-934 public void removeUnordered() { testRemove(BulkMode.UNORDERED); } + @Test // DATAMONGO-2218 + public void replaceOneOrdered() { + testReplaceOne(BulkMode.ORDERED); + } + + @Test // DATAMONGO-2218 + public void replaceOneUnordered() { + testReplaceOne(BulkMode.UNORDERED); + } + + @Test // DATAMONGO-2218 + public void replaceOneDoesReplace() { + + insertSomeDocuments(); + + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2")).// + execute(); + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getInsertedCount()).isZero(); + } + + @Test // DATAMONGO-2218 + public void replaceOneWithUpsert() { + + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2"), FindAndReplaceOptions.options().upsert()).// + execute(); + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts().size()).isOne(); + } + /** * If working on the same set of documents, only an ordered bulk operation will yield predictable results. 
- * - * @see DATAMONGO-934 */ - @Test + @Test // DATAMONGO-934 public void mixedBulkOrdered() { - BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(newDoc("1", "v1")).// + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(newDoc("1", "v1")).// updateOne(where("_id", "1"), set("value", "v2")).// remove(where("value", "v2")).// execute(); - assertThat(result, notNullValue()); - assertThat(result.getInsertedCount(), is(1)); - assertThat(result.getModifiedCount(), is(1)); - assertThat(result.getRemovedCount(), is(1)); + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getDeletedCount()).isOne(); } /** @@ -258,16 +299,65 @@ public void mixedBulkOrdered() { public void mixedBulkOrderedWithList() { List inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2")); - List> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3"))); + List> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3"))); List removes = Arrays.asList(where("_id", "1")); - BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(inserts).updateMulti(updates).remove(removes) - .execute(); + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(inserts) + .updateMulti(updates).remove(removes).execute(); + + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isEqualTo(3); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getDeletedCount()).isOne(); + } + + @Test // DATAMONGO-1534 + public void insertShouldConsiderInheritance() { + + SpecialDoc specialDoc = new SpecialDoc(); + specialDoc.id = "id-special"; + specialDoc.value = "normal-value"; + specialDoc.specialValue = "special-value"; + + createBulkOps(BulkMode.ORDERED, SpecialDoc.class).insert(Arrays.asList(specialDoc)).execute(); + + BaseDoc doc = operations.findOne(where("_id", specialDoc.id), BaseDoc.class, COLLECTION_NAME); + + assertThat(doc).isNotNull(); + assertThat(doc).isInstanceOf(SpecialDoc.class); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + public void updateShouldConsiderSorting() { + + insertSomeDocuments(); + + BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) + .updateOne(new Query().with(Sort.by(DESC, "renamedField")), new Update().set("bsky", "altnps")).execute(); + + assertThat(result.getModifiedCount()).isOne(); + + Document raw = operations.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()); + assertThat(raw).containsEntry("bsky", "altnps"); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + public void replaceShouldConsiderSorting() { + + insertSomeDocuments(); + + BaseDocWithRenamedField target = new BaseDocWithRenamedField(); + target.value = "replacement"; + + BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) + .replaceOne(new Query().with(Sort.by(DESC, "renamedField")), target).execute(); + + assertThat(result.getModifiedCount()).isOne(); - assertThat(result, notNullValue()); - assertThat(result.getInsertedCount(), is(3)); - assertThat(result.getModifiedCount(), is(2)); - assertThat(result.getRemovedCount(), is(1)); + Document raw = operations.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()); + assertThat(raw).containsEntry("value", target.value); 
} private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) { @@ -276,14 +366,14 @@ private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) { insertSomeDocuments(); - List<Pair<Query, Update>> updates = new ArrayList<Pair<Query, Update>>(); + List<Pair<Query, Update>> updates = new ArrayList<>(); updates.add(Pair.of(where("value", "value1"), set("value", "value3"))); updates.add(Pair.of(where("value", "value2"), set("value", "value4"))); int modifiedCount = multi ? bulkOps.updateMulti(updates).execute().getModifiedCount() : bulkOps.updateOne(updates).execute().getModifiedCount(); - assertThat(modifiedCount, is(expectedUpdates)); + assertThat(modifiedCount).isEqualTo(expectedUpdates); } private void testRemove(BulkMode mode) { @@ -292,25 +382,53 @@ private void testRemove(BulkMode mode) { List<Query> removes = Arrays.asList(where("_id", "1"), where("value", "value2")); - assertThat(createBulkOps(mode).remove(removes).execute().getRemovedCount(), is(3)); + assertThat(createBulkOps(mode).remove(removes).execute().getDeletedCount()).isEqualTo(3); + } + + private void testReplaceOne(BulkMode mode) { + + BulkOperations bulkOps = createBulkOps(mode); + + insertSomeDocuments(); + + Query query = where("_id", "1"); + Document document = rawDoc("1", "value2"); + int modifiedCount = bulkOps.replaceOne(query, document).execute().getModifiedCount(); + + assertThat(modifiedCount).isOne(); } private BulkOperations createBulkOps(BulkMode mode) { + return createBulkOps(mode, null); + } + + private BulkOperations createBulkOps(BulkMode mode, Class<?> entityType) { - DefaultBulkOperations operations = new DefaultBulkOperations(this.operations, mode, COLLECTION_NAME, null); - operations.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED); + Optional<? extends MongoPersistentEntity<?>> entity = entityType != null + ? Optional.of(operations.getConverter().getMappingContext().getPersistentEntity(entityType)) + : Optional.empty(); - return operations; + BulkOperationContext bulkOperationContext = new BulkOperationContext(mode, entity, + new QueryMapper(operations.getConverter()), new UpdateMapper(operations.getConverter()), null, null); + + DefaultBulkOperations bulkOps = new DefaultBulkOperations(operations, COLLECTION_NAME, bulkOperationContext); + bulkOps.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED); + + return bulkOps; } private void insertSomeDocuments() { - final DBCollection coll = operations.getCollection(COLLECTION_NAME); + final MongoCollection<Document> coll = operations.getCollection(COLLECTION_NAME); + + coll.insertOne(rawDoc("1", "value1").append("rn_f", "001")); + coll.insertOne(rawDoc("2", "value1").append("rn_f", "002")); + coll.insertOne(rawDoc("3", "value2").append("rn_f", "003")); + coll.insertOne(rawDoc("4", "value2").append("rn_f", "004")); + } - coll.insert(rawDoc("1", "value1")); - coll.insert(rawDoc("2", "value1")); - coll.insert(rawDoc("3", "value2")); - coll.insert(rawDoc("4", "value2")); + private static Stream<Arguments> upsertArguments() { return Stream.of(Arguments.of(set("value", "value2")), Arguments.of(AggregationUpdate.update().set("value").toValue("value2"))); } private static BaseDoc newDoc(String id) { @@ -337,7 +455,13 @@ private static Update set(String field, String value) { return new Update().set(field, value); } - private static DBObject rawDoc(String id, String value) { - return new BasicDBObject("_id", id).append("value", value); + private static Document rawDoc(String id, String value) { + return new Document("_id", id).append("value", value); + } + + static class BaseDocWithRenamedField extends BaseDoc { + + @Field("rn_f") + String renamedField; + } }
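The integration tests above drive DefaultBulkOperations through its internal BulkOperationContext. For orientation, here is a minimal sketch (not part of the patch) of how the same ordered bulk API is reached through the public MongoTemplate entry point; the "people" collection name and the use of raw org.bson.Document values are assumptions for illustration:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.bson.Document;
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkOpsSketch {

	// ORDERED mode applies the queued operations in sequence and stops at the first error,
	// which is why only ordered bulks yield predictable results on overlapping documents.
	static BulkWriteResult orderedRoundTrip(MongoTemplate template) {

		BulkOperations bulk = template.bulkOps(BulkMode.ORDERED, "people"); // collection name assumed

		bulk.insert(new Document("_id", "1").append("value", "v1"));
		bulk.updateOne(query(where("_id", "1")), new Update().set("value", "v2"));
		bulk.remove(query(where("value", "v2")));

		return bulk.execute(); // a single bulkWrite round trip exposing inserted/modified/deleted counts
	}
}
```

BulkMode.UNORDERED, by contrast, lets the server continue past individual failures, which is the behaviour the insertUnOrderedContinuesOnError test above verifies.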
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java new file mode 100644 index 0000000000..6bdcb132f9 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java @@ -0,0 +1,463 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mongodb.BulkOperationException; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.query.BasicQuery; +import 
org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.MongoBulkWriteException; +import com.mongodb.MongoWriteException; +import com.mongodb.ServerAddress; +import com.mongodb.WriteConcern; +import com.mongodb.WriteError; +import com.mongodb.bulk.BulkWriteError; +import com.mongodb.bulk.WriteConcernError; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.WriteModel; + +/** + * Unit tests for {@link DefaultBulkOperations}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Minsu Kim + * @author Jens Schauder + * @author Roman Puchkovskiy + * @author Jacob Botuck + */ +@ExtendWith(MockitoExtension.class) +class DefaultBulkOperationsUnitTests { + + private MongoTemplate template; + @Mock MongoDatabase database; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection<Document> collection; + @Mock MongoDatabaseFactory factory; + @Mock DbRefResolver dbRefResolver; + @Captor ArgumentCaptor<List<WriteModel<Document>>> captor; + private MongoConverter converter; + private MongoMappingContext mappingContext; + + private DefaultBulkOperations ops; + + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(database); + when(factory.getExceptionTranslator()).thenReturn(new NullExceptionTranslator()); + when(database.getCollection(anyString(), eq(Document.class))).thenReturn(collection); + + mappingContext = new MongoMappingContext(); + mappingContext.afterPropertiesSet(); + + converter = new MappingMongoConverter(dbRefResolver, mappingContext); + template = new MongoTemplate(factory, converter); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, + Optional.of(mappingContext.getPersistentEntity(SomeDomainType.class)), new QueryMapper(converter), + new UpdateMapper(converter), null, null)); + } + + @Test // DATAMONGO-1518 + void updateOneShouldUseCollationWhenPresent() { + + ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + assertThat(captor.getValue().get(0)).isInstanceOf(UpdateOneModel.class); + assertThat(((UpdateOneModel<Document>) captor.getValue().get(0)).getOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // DATAMONGO-1518 + void updateManyShouldUseCollationWhenPresent() { + + ops.updateMulti(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + assertThat(captor.getValue().get(0)).isInstanceOf(UpdateManyModel.class); + assertThat(((UpdateManyModel<Document>) captor.getValue().get(0)).getOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // DATAMONGO-1518 + void removeShouldUseCollationWhenPresent() { + + ops.remove(new BasicQuery("{}").collation(Collation.of("de"))).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + 
assertThat(captor.getValue().get(0)).isInstanceOf(DeleteManyModel.class); + assertThat(((DeleteManyModel<Document>) captor.getValue().get(0)).getOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // DATAMONGO-2218 + void replaceOneShouldUseCollationWhenPresent() { + + ops.replaceOne(new BasicQuery("{}").collation(Collation.of("de")), new SomeDomainType()).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + assertThat(captor.getValue().get(0)).isInstanceOf(ReplaceOneModel.class); + assertThat(((ReplaceOneModel<Document>) captor.getValue().get(0)).getReplaceOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // DATAMONGO-1678 + void bulkUpdateShouldMapQueryAndUpdateCorrectly() { + + ops.updateOne(query(where("firstName").is("danerys")), Update.update("firstName", "queen danerys")).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + assertThat(updateModel.getUpdate()).isEqualTo(new Document("$set", new Document("first_name", "queen danerys"))); + } + + @Test // DATAMONGO-1678 + void bulkRemoveShouldMapQueryCorrectly() { + + ops.remove(query(where("firstName").is("danerys"))).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + DeleteManyModel<Document> updateModel = (DeleteManyModel<Document>) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + } + + @Test // DATAMONGO-2218 + void bulkReplaceOneShouldMapQueryCorrectly() { + + SomeDomainType replacement = new SomeDomainType(); + replacement.firstName = "Minsu"; + replacement.lastName = "Kim"; + + ops.replaceOne(query(where("firstName").is("danerys")), replacement).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + ReplaceOneModel<Document> updateModel = (ReplaceOneModel<Document>) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + assertThat(updateModel.getReplacement().getString("first_name")).isEqualTo("Minsu"); + assertThat(updateModel.getReplacement().getString("lastName")).isEqualTo("Kim"); + } + + @Test // DATAMONGO-2261, DATAMONGO-2479 + void bulkInsertInvokesEntityCallbacks() { + + BeforeConvertPersonCallback beforeConvertCallback = spy(new BeforeConvertPersonCallback()); + BeforeSavePersonCallback beforeSaveCallback = spy(new BeforeSavePersonCallback()); + AfterSavePersonCallback afterSaveCallback = spy(new AfterSavePersonCallback()); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), null, + EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback, afterSaveCallback))); + + Person entity = new Person("init"); + ops.insert(entity); + + ArgumentCaptor<Person> personArgumentCaptor = ArgumentCaptor.forClass(Person.class); + verify(beforeConvertCallback).onBeforeConvert(personArgumentCaptor.capture(), eq("collection-1")); + verifyNoInteractions(beforeSaveCallback); + + ops.execute(); + + verify(beforeSaveCallback).onBeforeSave(personArgumentCaptor.capture(), any(), eq("collection-1")); + verify(afterSaveCallback).onAfterSave(personArgumentCaptor.capture(), any(), eq("collection-1")); + 
assertThat(personArgumentCaptor.getAllValues()).extracting("firstName").containsExactly("init", "before-convert", + "before-convert"); + verify(collection).bulkWrite(captor.capture(), any()); + + InsertOneModel updateModel = (InsertOneModel) captor.getValue().get(0); + assertThat(updateModel.getDocument()).containsEntry("firstName", "after-save"); + } + + @Test // DATAMONGO-2290 + void bulkReplaceOneEmitsEventsCorrectly() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null)); + + ops.replaceOne(query(where("firstName").is("danerys")), new SomeDomainType()); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + + ops.execute(); + + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // DATAMONGO-2290 + void bulkInsertEmitsEventsCorrectly() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null)); + + ops.insert(new SomeDomainType()); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + + ops.execute(); + + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // DATAMONGO-2290 + void noAfterSaveEventOnFailure() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoWriteException( + new WriteError(89, "NetworkTimeout", new BsonDocument("hi", new BsonString("there"))), null)); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null)); + + ops.insert(new SomeDomainType()); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + + try { + ops.execute(); + fail("Missing MongoWriteException"); + } catch (MongoWriteException expected) { + + } + + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + } + + @Test // DATAMONGO-2330 + void writeConcernNotAppliedWhenNotSet() { + + ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute(); + + verify(collection, never()).withWriteConcern(any()); + } + + @Test // DATAMONGO-2330 + void writeConcernAppliedCorrectlyWhenSet() { + + ops.setDefaultWriteConcern(WriteConcern.MAJORITY); + + ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute(); + + verify(collection).withWriteConcern(eq(WriteConcern.MAJORITY)); + } + + 
@Test // DATAMONGO-2450 + void appliesArrayFilterWhenPresent() { + + ops.updateOne(new BasicQuery("{}"), new Update().filterArray(Criteria.where("element").gte(100))).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getOptions().getArrayFilters().get(0)) + .isEqualTo(new org.bson.Document("element", new Document("$gte", 100))); + } + + @Test // DATAMONGO-2502 + void shouldRetainNestedArrayPathWithPlaceholdersForNoMatchingPaths() { + + ops.updateOne(new BasicQuery("{}"), new Update().set("items.$.documents.0.fileId", "new-id")).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getUpdate()) + .isEqualTo(new Document("$set", new Document("items.$.documents.0.fileId", "new-id"))); + } + + @Test // DATAMONGO-2502 + void shouldRetainNestedArrayPathWithPlaceholdersForMappedEntity() { + + DefaultBulkOperations ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(OrderTest.class)), + new QueryMapper(converter), new UpdateMapper(converter), null, null)); + + ops.updateOne(new BasicQuery("{}"), Update.update("items.$.documents.0.fileId", "file-id")).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getUpdate()) + .isEqualTo(new Document("$set", new Document("items.$.documents.0.the_file_id", "file-id"))); + } + + @Test // DATAMONGO-2285 + public void translateMongoBulkOperationExceptionWithWriteConcernError() { + + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoBulkWriteException(null, + Collections.emptyList(), + new WriteConcernError(42, "codename", "writeconcern error happened", new BsonDocument()), new ServerAddress(), Collections.emptySet())); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> ops.insert(new SomeDomainType()).execute()); + + } + + @Test // DATAMONGO-2285 + public void translateMongoBulkOperationExceptionWithoutWriteConcernError() { + + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoBulkWriteException(null, + Collections.singletonList(new BulkWriteError(42, "a write error happened", new BsonDocument(), 49)), null, + new ServerAddress(), Collections.emptySet())); + + assertThatExceptionOfType(BulkOperationException.class) + .isThrownBy(() -> ops.insert(new SomeDomainType()).execute()); + } + + static class OrderTest { + + String id; + List items; + } + + static class OrderTestItem { + + private String cartId; + private List documents; + } + + static class OrderTestDocument { + + @Field("the_file_id") + private String fileId; + } + + class SomeDomainType { + + @Id String id; + Gender gender; + @Field("first_name") String firstName; + @Field String lastName; + } + + enum Gender { + M, F + } + + static class BeforeConvertPersonCallback implements BeforeConvertCallback { + + @Override + public Person onBeforeConvert(Person entity, String collection) { + return new Person("before-convert"); + } + } + + static class BeforeSavePersonCallback implements BeforeSaveCallback { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + document.put("firstName", "before-save"); + 
return new Person("before-save"); + } + } + + static class AfterSavePersonCallback implements AfterSaveCallback { + + @Override + public Person onAfterSave(Person entity, Document document, String collection) { + + document.put("firstName", "after-save"); + return new Person("after-save"); + } + } + + static class NullExceptionTranslator implements PersistenceExceptionTranslator { + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return null; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java index 744387de80..78a6e6b496 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,68 +15,204 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.ReflectiveDBCollectionInvoker.*; +import static org.springframework.data.mongodb.core.index.PartialIndexFilter.of; +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.bson.BsonDocument; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Collation.CaseFirst; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; import org.springframework.util.ObjectUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.IndexOptions; /** * Integration tests for {@link DefaultIndexOperations}. 
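The unit tests above pin down the callback and event lifecycle for bulk writes: BeforeConvert fires when an entity is queued, while BeforeSave and AfterSave fire around execute(), and no AfterSave event is published on failure. A minimal sketch of how an application would register such a callback as a bean; the Person type and its mutable firstName field are assumptions for illustration:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;

@Configuration
class BulkCallbackConfig {

	// Runs before each queued entity is converted to its Document representation,
	// for bulk inserts just as for single-document saves.
	@Bean
	BeforeConvertCallback<Person> trimFirstName() {
		return (person, collection) -> {
			person.firstName = person.firstName.trim(); // Person and its field are assumed
			return person;
		};
	}
}
```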
- * + * * @author Christoph Strobl * @author Oliver Gierke + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class DefaultIndexOperationsIntegrationTests { - static final DBObject GEO_SPHERE_2D = new BasicDBObject("loaction", "2dsphere"); + static final String COLLECTION_NAME = "default-index-operations-tests"; + static final org.bson.Document GEO_SPHERE_2D = new org.bson.Document("loaction", "2dsphere"); + + @Template // + static MongoTestTemplate template; - @Autowired MongoTemplate template; - DefaultIndexOperations indexOps; - DBCollection collection; + MongoCollection collection = template.getCollection(COLLECTION_NAME); + IndexOperations indexOps = template.indexOps(COLLECTION_NAME); - @Before + @BeforeEach public void setUp() { + template.dropIndexes(COLLECTION_NAME); + } - String collectionName = this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class); + @Test // DATAMONGO-1008 + public void getIndexInfoShouldBeAbleToRead2dsphereIndex() { - this.collection = this.template.getDb().getCollection(collectionName); - this.collection.dropIndexes(); + template.getCollection(COLLECTION_NAME).createIndex(GEO_SPHERE_2D); - this.indexOps = new DefaultIndexOperations(template, collectionName); + IndexInfo info = findAndReturnIndexInfo(GEO_SPHERE_2D); + assertThat(info.getIndexFields().get(0).isGeo()).isEqualTo(true); } - /** - * @see DATAMONGO-1008 - */ - @Test - public void getIndexInfoShouldBeAbleToRead2dsphereIndex() { + @Test // DATAMONGO-1467, DATAMONGO-2198 + public void shouldApplyPartialFilterCorrectly() { - collection.createIndex(GEO_SPHERE_2D); + IndexDefinition id = new Index().named("partial-with-criteria").on("k3y", Direction.ASC) + .partial(of(where("q-t-y").gte(10))); - IndexInfo info = findAndReturnIndexInfo(GEO_SPHERE_2D); - assertThat(info.getIndexFields().get(0).isGeo(), is(true)); + indexOps.createIndex(id); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-criteria"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"q-t-y\" : { \"$gte\" : 10 } }")); + } + + @Test // DATAMONGO-1467, DATAMONGO-2198 + public void shouldApplyPartialFilterWithMappedPropertyCorrectly() { + + IndexDefinition id = new Index().named("partial-with-mapped-criteria").on("k3y", Direction.ASC) + .partial(of(where("quantity").gte(10))); + + template.indexOps(DefaultIndexOperationsIntegrationTestsSample.class).createIndex(id); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-mapped-criteria"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); + } + + @Test // DATAMONGO-1467, DATAMONGO-2198 + public void shouldApplyPartialDBOFilterCorrectly() { + + IndexDefinition id = new Index().named("partial-with-dbo").on("k3y", Direction.ASC) + .partial(of(new org.bson.Document("qty", new org.bson.Document("$gte", 10)))); + + indexOps.createIndex(id); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-dbo"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); } - private IndexInfo findAndReturnIndexInfo(DBObject keys) { + @Test // DATAMONGO-1467, DATAMONGO-2198 + public void shouldFavorExplicitMappingHintViaClass() { + + IndexDefinition id = new 
Index().named("partial-with-inheritance").on("k3y", Direction.ASC) + .partial(of(where("age").gte(10))); + + indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); + + indexOps.createIndex(id); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-inheritance"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"a_g_e\" : { \"$gte\" : 10 } }")); + } + + @Test // DATAMONGO-2388 + public void shouldReadIndexWithPartialFilterContainingDbRefCorrectly() { + + BsonDocument partialFilter = BsonDocument.parse( + "{ \"the-ref\" : { \"$ref\" : \"other-collection\", \"$id\" : { \"$oid\" : \"59ce08baf264b906810fe8c5\"} } }"); + IndexOptions indexOptions = new IndexOptions(); + indexOptions.name("partial-with-dbref"); + indexOptions.partialFilterExpression(partialFilter); + + collection.createIndex(BsonDocument.parse("{ \"key-1\" : 1, \"key-2\": 1}"), indexOptions); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-dbref"); + assertThat(BsonDocument.parse(info.getPartialFilterExpression())).isEqualTo(partialFilter); + } + + @Test // DATAMONGO-1518 + public void shouldCreateIndexWithCollationCorrectly() { + + IndexDefinition id = new Index().named("with-collation").on("xyz", Direction.ASC) + .collation(Collation.of("de_AT").caseFirst(CaseFirst.off())); + + new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); + + indexOps.createIndex(id); + + Document expected = new Document("locale", "de_AT") // + .append("caseLevel", false) // + .append("caseFirst", "off") // + .append("strength", 3) // + .append("numericOrdering", false) // + .append("alternate", "non-ignorable") // + .append("maxVariable", "punct") // + .append("normalization", false) // + .append("backwards", false); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "with-collation"); + + assertThat(info.getCollation()).isPresent(); + + // version is set by MongoDB server - we remove it to avoid errors when upgrading server version. 
+ Document result = info.getCollation().get(); + result.remove("version"); + + assertThat(result).isEqualTo(expected); + } + + @Test // GH-4348 + void indexShouldNotBeHiddenByDefault() { + + IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC); + + indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); + indexOps.createIndex(index); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index"); + assertThat(info.isHidden()).isFalse(); + } + + @Test // GH-4348 + void shouldCreateHiddenIndex() { + + IndexDefinition index = new Index().named("my-hidden-index").on("a", Direction.ASC).hidden(); + + indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); + indexOps.createIndex(index); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-hidden-index"); + assertThat(info.isHidden()).isTrue(); + } + + @Test // GH-4348 + void alterIndexShouldAllowHiding() { + + collection.createIndex(new Document("a", 1), new IndexOptions().name("my-index")); + + indexOps.alterIndex("my-index", org.springframework.data.mongodb.core.index.IndexOptions.hidden()); + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index"); + assertThat(info.isHidden()).isTrue(); + } + + private IndexInfo findAndReturnIndexInfo(org.bson.Document keys) { return findAndReturnIndexInfo(indexOps.getIndexInfo(), keys); } - private static IndexInfo findAndReturnIndexInfo(Iterable candidates, DBObject keys) { - return findAndReturnIndexInfo(candidates, generateIndexName(keys)); + private static IndexInfo findAndReturnIndexInfo(Iterable candidates, org.bson.Document keys) { + return findAndReturnIndexInfo(candidates, genIndexName(keys)); } private static IndexInfo findAndReturnIndexInfo(Iterable candidates, String name) { @@ -89,5 +225,36 @@ private static IndexInfo findAndReturnIndexInfo(Iterable candidates, throw new AssertionError(String.format("Index with %s was not found", name)); } - static class DefaultIndexOperationsIntegrationTestsSample {} + private static String genIndexName(Document keys) { + + StringBuilder name = new StringBuilder(); + + for (String s : keys.keySet()) { + + if (name.length() > 0) { + name.append('_'); + } + + name.append(s).append('_'); + Object val = keys.get(s); + + if (val instanceof Number || val instanceof String) { + name.append(val.toString().replace(' ', '_')); + } + } + + return name.toString(); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "default-index-operations-tests") + static class DefaultIndexOperationsIntegrationTestsSample { + + @Field("qty") Integer quantity; + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "default-index-operations-tests") + static class MappingToSameCollection extends DefaultIndexOperationsIntegrationTestsSample { + + @Field("a_g_e") Integer age; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsUnitTests.java new file mode 100644 index 0000000000..14550e4c17 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsUnitTests.java @@ -0,0 +1,162 @@ +/* + * Copyright 2019-2025 the original author or authors. 
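The index integration tests above cover partial filters, index collations, and (per GH-4348) hidden indexes. A minimal sketch of how these index definitions look from calling code, assuming a "books" collection with a numeric qty field:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;

class IndexSketch {

	static void defineIndexes(MongoTemplate template) {

		IndexOperations indexOps = template.indexOps("books"); // collection name assumed

		// Partial index: only documents matching the filter expression are indexed.
		indexOps.createIndex(new Index().named("qty-gte-10").on("qty", Direction.ASC)
				.partial(PartialIndexFilter.of(where("qty").gte(10))));

		// Hidden index (MongoDB 4.4+): maintained by the server but invisible to the query planner.
		indexOps.createIndex(new Index().named("hidden-a").on("a", Direction.ASC).hidden());
	}
}
```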
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.index.HashedIndex; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; + +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; + +/** + * Unit tests for {@link DefaultIndexOperations}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class DefaultIndexOperationsUnitTests { + + private MongoTemplate template; + + @Mock MongoDatabaseFactory factory; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(db); + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); + when(db.getCollection(any(), any(Class.class))).thenReturn(collection); + when(collection.createIndex(any(), any(IndexOptions.class))).thenReturn("OK"); + + this.mappingContext = new MongoMappingContext(); + this.converter = spy(new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext)); + this.template = new MongoTemplate(factory, converter); + } + + @Test // DATAMONGO-1183 + void indexOperationsMapFieldNameCorrectly() { + + indexOpsFor(Jedi.class).ensureIndex(new Index("name", Direction.DESC)); + + verify(collection).createIndex(eq(new Document("firstname", -1)), any()); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotSetCollectionIfNoDefaultDefined() { + + indexOpsFor(Jedi.class).ensureIndex(new Index("firstname", Direction.DESC)); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void ensureIndexUsesDefaultCollationIfNoneDefinedInOptions() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC)); + + 
ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotUseDefaultCollationIfExplicitlySpecifiedInTheIndex() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC).collation(Collation.of("en_US"))); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + @Test // DATAMONGO-1183 + void shouldCreateHashedIndexCorrectly() { + + indexOpsFor(Jedi.class).ensureIndex(HashedIndex.hashed("name")); + + verify(collection).createIndex(eq(new Document("firstname", "hashed")), any()); + } + + @Test // GH-4698 + void shouldConsiderGivenCollectionName() { + + DefaultIndexOperations operations = new DefaultIndexOperations(template, "foo", Jedi.class); + + operations.ensureIndex(HashedIndex.hashed("name")); + verify(db).getCollection(eq("foo"), any(Class.class)); + } + + private DefaultIndexOperations indexOpsFor(Class type) { + return new DefaultIndexOperations(template, template.getCollectionName(type), type); + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "DefaultIndexOperationsUnitTests.Jedi(name=" + this.getName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + @Field("firstname") String name; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsTests.java new file mode 100644 index 0000000000..79bf563159 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsTests.java @@ -0,0 +1,395 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
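The unit tests above pin down two mapping behaviours of DefaultIndexOperations: index keys are translated through @Field (e.g. name becomes firstname on Jedi), and a collation declared via @Document serves as the index default unless the Index itself specifies one. A sketch of the entity-side declarations this relies on; the Book type and its field names are hypothetical:

```java
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.query.Collation;

@Document(collation = "de_AT") // default collation for indexes created through this type
class Book {
	@Field("the_title") String title; // stored field name differs from the property name
}

class IndexMappingSketch {

	static void createIndexes(MongoTemplate template) {

		// The key is mapped to "the_title"; the collation defaults to de_AT from @Document.
		template.indexOps(Book.class).createIndex(new Index().named("by-title").on("title", Direction.DESC));

		// An explicit collation on the Index overrides the type-level default.
		template.indexOps(Book.class).createIndex(
				new Index().named("by-title-en").on("title", Direction.DESC).collation(Collation.of("en_US")));
	}
}
```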
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.domain.Sort.Direction.DESC; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.MongoBulkWriteException; +import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; + +/** + * Tests for {@link DefaultReactiveBulkOperations}. 
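+ * <p>
+ * A minimal sketch of the fluent API under test, assembled from the fixtures below; it assumes the
+ * reactive {@code bulkOps} entry point mirrors the blocking {@code MongoOperations#bulkOps} variants,
+ * and uses the {@code newDoc}, {@code where} and {@code set} helpers local to this class:
+ *
+ * <pre class="code">
+ * template.bulkOps(BulkMode.ORDERED, BaseDoc.class, COLLECTION_NAME)
+ * 		.insert(Arrays.asList(newDoc("1"), newDoc("2")))
+ * 		.updateOne(where("value", "value1"), set("value", "value2"))
+ * 		.execute() // emits a single BulkWriteResult
+ * 		.as(StepVerifier::create)
+ * 		.expectNextCount(1)
+ * 		.verifyComplete();
+ * </pre>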
+ *
+ * @author Christoph Strobl
+ */
+@ExtendWith(MongoTemplateExtension.class)
+class DefaultReactiveBulkOperationsTests {
+
+	static final String COLLECTION_NAME = "reactive-bulk-ops";
+
+	@Template(initialEntitySet = BaseDoc.class) static ReactiveMongoTestTemplate template;
+
+	@BeforeEach
+	public void setUp() {
+		template.flush(COLLECTION_NAME).as(StepVerifier::create).verifyComplete();
+	}
+
+	@Test // GH-2821
+	void insertOrdered() {
+
+		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
+
+		createBulkOps(BulkMode.ORDERED).insert(documents) //
+				.execute().as(StepVerifier::create) //
+				.consumeNextWith(result -> {
+					assertThat(result.getInsertedCount()).isEqualTo(2);
+				}).verifyComplete();
+	}
+
+	@Test // GH-2821
+	void insertOrderedFails() {
+
+		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
+
+		createBulkOps(BulkMode.ORDERED).insert(documents) //
+				.execute().as(StepVerifier::create) //
+				.verifyErrorSatisfies(error -> {
+					assertThat(error).isInstanceOf(DuplicateKeyException.class);
+				});
+	}
+
+	@Test // GH-2821
+	public void insertUnOrdered() {
+
+		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
+
+		createBulkOps(BulkMode.UNORDERED).insert(documents) //
+				.execute().as(StepVerifier::create) //
+				.consumeNextWith(result -> {
+					assertThat(result.getInsertedCount()).isEqualTo(2);
+				}).verifyComplete();
+	}
+
+	@Test // GH-2821
+	public void insertUnOrderedContinuesOnError() {
+
+		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
+
+		createBulkOps(BulkMode.UNORDERED).insert(documents) //
+				.execute().as(StepVerifier::create) //
+				.verifyErrorSatisfies(error -> {
+
+					assertThat(error).isInstanceOf(DuplicateKeyException.class);
+					assertThat(error.getCause()).isInstanceOf(MongoBulkWriteException.class);
+
+					MongoBulkWriteException cause = (MongoBulkWriteException) error.getCause();
+					assertThat(cause.getWriteResult().getInsertedCount()).isEqualTo(2);
+					assertThat(cause.getWriteErrors()).isNotNull();
+					assertThat(cause.getWriteErrors().size()).isOne();
+				});
+	}
+
+	@Test // GH-2821
+	void upsertDoesUpdate() {
+
+		insertSomeDocuments();
+
+		createBulkOps(BulkMode.ORDERED).//
+				upsert(where("value", "value1"), set("value", "value2")).//
+				execute().as(StepVerifier::create) //
+				.consumeNextWith(result -> {
+					assertThat(result).isNotNull();
+					assertThat(result.getMatchedCount()).isEqualTo(2);
+					assertThat(result.getModifiedCount()).isEqualTo(2);
+					assertThat(result.getInsertedCount()).isZero();
+					assertThat(result.getUpserts()).isNotNull();
+					assertThat(result.getUpserts().size()).isZero();
+				}) //
+				.verifyComplete();
+	}
+
+	@Test // GH-2821
+	public void upsertDoesInsert() {
+
+		createBulkOps(BulkMode.ORDERED).//
+				upsert(where("_id", "1"), set("value", "v1")).//
+				execute().as(StepVerifier::create) //
+				.consumeNextWith(result -> {
+
+					assertThat(result).isNotNull();
+					assertThat(result.getMatchedCount()).isZero();
+					assertThat(result.getModifiedCount()).isZero();
+					assertThat(result.getUpserts()).isNotNull();
+					assertThat(result.getUpserts().size()).isOne();
+				}) //
+				.verifyComplete();
+	}
+
+	@ParameterizedTest // GH-2821
+	@MethodSource
+	public void testUpdates(BulkMode mode, boolean multi, int expectedUpdateCount) {
+
+		insertSomeDocuments();
+		ReactiveBulkOperations bulkOps = createBulkOps(mode);
+
+		if (multi) {
+			bulkOps.updateMulti(where("value", "value1"), set("value", "value3"));
+			bulkOps.updateMulti(where("value", "value2"), set("value", "value4"));
+		} else {
+			bulkOps.updateOne(where("value", "value1"), set("value", "value3"));
set("value", "value3")); + bulkOps.updateOne(where("value", "value2"), set("value", "value4")); + } + + bulkOps.execute().map(BulkWriteResult::getModifiedCount) // + .as(StepVerifier::create) // + .expectNext(expectedUpdateCount) // + .verifyComplete(); + } + + private static Stream testUpdates() { + return Stream.of(Arguments.of(BulkMode.ORDERED, false, 2), Arguments.of(BulkMode.ORDERED, true, 4), + Arguments.of(BulkMode.UNORDERED, false, 2), Arguments.of(BulkMode.UNORDERED, false, 2)); + } + + @ParameterizedTest // GH-2821 + @EnumSource(BulkMode.class) + void testRemove(BulkMode mode) { + + insertSomeDocuments(); + + List removes = Arrays.asList(where("_id", "1"), where("value", "value2")); + + createBulkOps(mode).remove(removes).execute().map(BulkWriteResult::getDeletedCount).as(StepVerifier::create) + .expectNext(3).verifyComplete(); + } + + @ParameterizedTest // GH-2821 + @EnumSource(BulkMode.class) + void testReplaceOne(BulkMode mode) { + + insertSomeDocuments(); + + Query query = where("_id", "1"); + Document document = rawDoc("1", "value2"); + createBulkOps(mode).replaceOne(query, document).execute().map(BulkWriteResult::getModifiedCount) + .as(StepVerifier::create).expectNext(1).verifyComplete(); + } + + @Test // GH-2821 + public void replaceOneDoesReplace() { + + insertSomeDocuments(); + + createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2")).// + execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getInsertedCount()).isZero(); + }).verifyComplete(); + } + + @Test // GH-2821 + public void replaceOneWithUpsert() { + + createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2"), FindAndReplaceOptions.options().upsert()).// + execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts().size()).isOne(); + }); + } + + @Test // GH-2821 + public void mixedBulkOrdered() { + + createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(newDoc("1", "v1")).// + updateOne(where("_id", "1"), set("value", "v2")).// + remove(where("value", "v2")).// + execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getDeletedCount()).isOne(); + }).verifyComplete(); + } + + @Test // GH-2821 + public void mixedBulkOrderedWithList() { + + List inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2")); + List removes = Arrays.asList(where("_id", "1")); + + createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(inserts).updateMulti(where("value", "v2"), set("value", "v3")) + .remove(removes).execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isEqualTo(3); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getDeletedCount()).isOne(); + }).verifyComplete(); + } + + @Test // GH-2821 + public void insertShouldConsiderInheritance() { + + SpecialDoc specialDoc = new SpecialDoc(); + specialDoc.id = "id-special"; + specialDoc.value = "normal-value"; + specialDoc.specialValue = "special-value"; 
+		createBulkOps(BulkMode.ORDERED, SpecialDoc.class).insert(Arrays.asList(specialDoc)).execute().then()
+				.as(StepVerifier::create).verifyComplete();
+
+		template.findOne(where("_id", specialDoc.id), BaseDoc.class, COLLECTION_NAME).as(StepVerifier::create)
+				.consumeNextWith(doc -> {
+
+					assertThat(doc).isNotNull();
+					assertThat(doc).isInstanceOf(SpecialDoc.class);
+				}).verifyComplete();
+	}
+
+	@Test // GH-4797
+	@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0")
+	public void updateShouldConsiderSorting() {
+
+		insertSomeDocuments();
+
+		createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) //
+				.updateOne(new Query().with(Sort.by(DESC, "renamedField")), new Update().set("bsky", "altnps")).execute() //
+				.as(StepVerifier::create) //
+				.consumeNextWith(result -> assertThat(result.getModifiedCount()).isOne()) //
+				.verifyComplete();
+
+		template.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()).as(StepVerifier::create) //
+				.consumeNextWith(raw -> assertThat(raw).containsEntry("bsky", "altnps")) //
+				.verifyComplete();
+	}
+
+	@Test // GH-4797
+	@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0")
+	public void replaceShouldConsiderSorting() {
+
+		insertSomeDocuments();
+
+		BaseDocWithRenamedField target = new BaseDocWithRenamedField();
+		target.value = "replacement";
+
+		createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) //
+				.replaceOne(new Query().with(Sort.by(DESC, "renamedField")), target).execute() //
+				.as(StepVerifier::create) //
+				.consumeNextWith(result -> assertThat(result.getModifiedCount()).isOne()) //
+				.verifyComplete();
+
+		template.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()).as(StepVerifier::create) //
+				.consumeNextWith(raw -> assertThat(raw).containsEntry("value", target.value)) //
+				.verifyComplete();
+	}
+
+	private void insertSomeDocuments() {
+
+		template.execute(COLLECTION_NAME, collection -> {
+			return Flux.from(collection.insertMany(
+					List.of(rawDoc("1", "value1").append("rn_f", "001"), rawDoc("2", "value1").append("rn_f", "002"),
+							rawDoc("3", "value2").append("rn_f", "003"), rawDoc("4", "value2").append("rn_f", "004"))));
+		}).then().as(StepVerifier::create).verifyComplete();
+	}
+
+	private DefaultReactiveBulkOperations createBulkOps(BulkMode mode) {
+		return createBulkOps(mode, null);
+	}
+
+	private DefaultReactiveBulkOperations createBulkOps(BulkMode mode, Class<?> entityType) {
+
+		Optional<? extends MongoPersistentEntity<?>> entity = entityType != null
+				?
Optional.of(template.getConverter().getMappingContext().getPersistentEntity(entityType)) + : Optional.empty(); + + ReactiveBulkOperationContext bulkOperationContext = new ReactiveBulkOperationContext(mode, entity, + new QueryMapper(template.getConverter()), new UpdateMapper(template.getConverter()), null, null); + + DefaultReactiveBulkOperations bulkOps = new DefaultReactiveBulkOperations(template, COLLECTION_NAME, + bulkOperationContext); + bulkOps.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED); + + return bulkOps; + } + + private static BaseDoc newDoc(String id) { + + BaseDoc doc = new BaseDoc(); + doc.id = id; + + return doc; + } + + private static BaseDoc newDoc(String id, String value) { + + BaseDoc doc = newDoc(id); + doc.value = value; + + return doc; + } + + private static Query where(String field, String value) { + return new Query().addCriteria(Criteria.where(field).is(value)); + } + + private static Update set(String field, String value) { + return new Update().set(field, value); + } + + private static Document rawDoc(String id, String value) { + return new Document("_id", id).append("value", value); + } + + static class BaseDocWithRenamedField extends BaseDoc { + + @Field("rn_f") + String renamedField; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsUnitTests.java new file mode 100644 index 0000000000..3b4cb322bc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsUnitTests.java @@ -0,0 +1,347 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.List; +import java.util.Optional; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.DefaultBulkOperationsUnitTests.NullExceptionTranslator; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.MongoWriteException; +import com.mongodb.WriteError; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.WriteModel; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class DefaultReactiveBulkOperationsUnitTests { + + ReactiveMongoTemplate template; + @Mock ReactiveMongoDatabaseFactory factory; + + @Mock MongoDatabase database; + 
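+	// The deep-stubbed collection mock lets fluent driver calls be chained without stubbing
+	// every intermediate step, and the captor below grabs the List<WriteModel<Document>> that
+	// is eventually handed to MongoCollection#bulkWrite. Verification pattern used throughout:
+	//
+	//   verify(collection).bulkWrite(captor.capture(), any());
+	//   WriteModel<Document> first = captor.getValue().get(0); // e.g. an UpdateOneModel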
+	@Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection<Document> collection;
+	@Captor ArgumentCaptor<List<WriteModel<Document>>> captor;
+
+	private MongoConverter converter;
+	private MongoMappingContext mappingContext;
+
+	private DefaultReactiveBulkOperations ops;
+
+	@BeforeEach
+	void setUp() {
+
+		when(factory.getMongoDatabase()).thenReturn(Mono.just(database));
+		when(factory.getExceptionTranslator()).thenReturn(new NullExceptionTranslator());
+		when(database.getCollection(anyString(), eq(Document.class))).thenReturn(collection);
+		when(collection.bulkWrite(anyList(), any())).thenReturn(Mono.just(mock(BulkWriteResult.class)));
+
+		mappingContext = new MongoMappingContext();
+		mappingContext.afterPropertiesSet();
+
+		converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
+		template = new ReactiveMongoTemplate(factory, converter);
+
+		ops = new DefaultReactiveBulkOperations(template, "collection-1",
+				new ReactiveBulkOperationContext(BulkMode.ORDERED,
+						Optional.of(mappingContext.getPersistentEntity(SomeDomainType.class)), new QueryMapper(converter),
+						new UpdateMapper(converter), null, null));
+	}
+
+	@Test // GH-2821
+	void updateOneShouldUseCollationWhenPresent() {
+
+		ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen"))
+				.execute().subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+		assertThat(captor.getValue().get(0)).isInstanceOf(UpdateOneModel.class);
+		assertThat(((UpdateOneModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
+	}
+
+	@Test // GH-2821
+	void replaceOneShouldUseCollationWhenPresent() {
+
+		ops.replaceOne(new BasicQuery("{}").collation(Collation.of("de")), new SomeDomainType()).execute().subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		assertThat(captor.getValue().get(0)).isInstanceOf(ReplaceOneModel.class);
+		assertThat(((ReplaceOneModel<Document>) captor.getValue().get(0)).getReplaceOptions().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
+	}
+
+	@Test // GH-2821
+	void removeShouldUseCollationWhenPresent() {
+
+		ops.remove(new BasicQuery("{}").collation(Collation.of("de"))).execute().subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		assertThat(captor.getValue().get(0)).isInstanceOf(DeleteManyModel.class);
+		assertThat(((DeleteManyModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
+	}
+
+	@Test // GH-2821
+	void bulkUpdateShouldMapQueryAndUpdateCorrectly() {
+
+		ops.updateOne(query(where("firstName").is("danerys")), Update.update("firstName", "queen danerys")).execute()
+				.subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
+		assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
+		assertThat(updateModel.getUpdate()).isEqualTo(new Document("$set", new Document("first_name", "queen danerys")));
+	}
+
+	@Test // GH-2821
+	void bulkRemoveShouldMapQueryCorrectly() {
+
+		ops.remove(query(where("firstName").is("danerys"))).execute().subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		DeleteManyModel<Document> deleteModel = (DeleteManyModel<Document>) captor.getValue().get(0);
+		assertThat(deleteModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
+	}
+
+	@Test // GH-2821
+	void bulkReplaceOneShouldMapQueryCorrectly() {
+
+		SomeDomainType replacement = new SomeDomainType();
+		replacement.firstName = "Minsu";
+		replacement.lastName = "Kim";
+
+		ops.replaceOne(query(where("firstName").is("danerys")), replacement).execute().subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		ReplaceOneModel<Document> replaceModel = (ReplaceOneModel<Document>) captor.getValue().get(0);
+		assertThat(replaceModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
+		assertThat(replaceModel.getReplacement().getString("first_name")).isEqualTo("Minsu");
+		assertThat(replaceModel.getReplacement().getString("lastName")).isEqualTo("Kim");
+	}
+
+	@Test // GH-2821
+	void bulkInsertInvokesEntityCallbacks() {
+
+		BeforeConvertPersonCallback beforeConvertCallback = spy(new BeforeConvertPersonCallback());
+		BeforeSavePersonCallback beforeSaveCallback = spy(new BeforeSavePersonCallback());
+		AfterSavePersonCallback afterSaveCallback = spy(new AfterSavePersonCallback());
+
+		ops = new DefaultReactiveBulkOperations(template, "collection-1",
+				new ReactiveBulkOperationContext(BulkMode.ORDERED,
+						Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
+						new UpdateMapper(converter), null,
+						ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback, afterSaveCallback)));
+
+		Person entity = new Person("init");
+		ops.insert(entity);
+
+		ArgumentCaptor<Person> personArgumentCaptor = ArgumentCaptor.forClass(Person.class);
+		verifyNoInteractions(beforeConvertCallback);
+		verifyNoInteractions(beforeSaveCallback);
+
+		ops.execute().then().as(StepVerifier::create).verifyComplete();
+
+		verify(beforeConvertCallback).onBeforeConvert(personArgumentCaptor.capture(), eq("collection-1"));
+		verify(beforeSaveCallback).onBeforeSave(personArgumentCaptor.capture(), any(), eq("collection-1"));
+		verify(afterSaveCallback).onAfterSave(personArgumentCaptor.capture(), any(), eq("collection-1"));
+		assertThat(personArgumentCaptor.getAllValues()).extracting("firstName").containsExactly("init", "before-convert",
+				"before-save");
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		InsertOneModel<Document> insertModel = (InsertOneModel<Document>) captor.getValue().get(0);
+		assertThat(insertModel.getDocument()).containsEntry("firstName", "after-save");
+	}
+
+	@Test // GH-2821
+	void bulkReplaceOneEmitsEventsCorrectly() {
+
+		ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);
+
+		ops = new DefaultReactiveBulkOperations(template, "collection-1",
+				new ReactiveBulkOperationContext(BulkMode.ORDERED,
+						Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
+						new UpdateMapper(converter), eventPublisher, null));
+
+		ops.replaceOne(query(where("firstName").is("danerys")), new SomeDomainType());
+
+		verify(eventPublisher, never()).publishEvent(any(BeforeConvertEvent.class));
+		verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class));
+		verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));
+
+		ops.execute().then().as(StepVerifier::create).verifyComplete();
+
+		verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
+		verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
+		verify(eventPublisher).publishEvent(any(AfterSaveEvent.class));
+	}
+
+	@Test // GH-2821
+	void bulkInsertEmitsEventsCorrectly() {
+
+		ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);
+
+		ops = new DefaultReactiveBulkOperations(template, "collection-1",
+				new ReactiveBulkOperationContext(BulkMode.ORDERED,
+						Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
+						new UpdateMapper(converter), eventPublisher, null));
+
+		ops.insert(new SomeDomainType());
+
+		verify(eventPublisher, never()).publishEvent(any(BeforeConvertEvent.class));
+		verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class));
+		verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));
+
+		ops.execute().then().as(StepVerifier::create).verifyComplete();
+
+		verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
+		verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
+		verify(eventPublisher).publishEvent(any(AfterSaveEvent.class));
+	}
+
+	@Test // GH-2821
+	void noAfterSaveEventOnFailure() {
+
+		ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);
+
+		when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoWriteException(
+				new WriteError(89, "NetworkTimeout", new BsonDocument("hi", new BsonString("there"))), null));
+
+		ops = new DefaultReactiveBulkOperations(template, "collection-1",
+				new ReactiveBulkOperationContext(BulkMode.ORDERED,
+						Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
+						new UpdateMapper(converter), eventPublisher, null));
+
+		ops.insert(new SomeDomainType());
+
+		ops.execute().as(StepVerifier::create).verifyError();
+
+		verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));
+	}
+
+	@Test // GH-2821
+	void appliesArrayFilterWhenPresent() {
+
+		ops.updateOne(new BasicQuery("{}"), new Update().filterArray(Criteria.where("element").gte(100))).execute()
+				.subscribe();
+
+		verify(collection).bulkWrite(captor.capture(), any());
+
+		UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
+		assertThat(updateModel.getOptions().getArrayFilters().get(0))
+				.isEqualTo(new org.bson.Document("element", new Document("$gte", 100)));
+	}
+
+	static class BeforeConvertPersonCallback implements ReactiveBeforeConvertCallback<Person> {
+
+		@Override
+		public Mono<Person> onBeforeConvert(Person entity, String collection) {
+			return Mono.just(new Person("before-convert"));
+		}
+	}
+
+	static class BeforeSavePersonCallback implements ReactiveBeforeSaveCallback<Person> {
+
+		@Override
+		public Mono<Person> onBeforeSave(Person entity, Document document, String collection) {
+
+			document.put("firstName", "before-save");
+			return Mono.just(new Person("before-save"));
+		}
+	}
+
+	static class AfterSavePersonCallback implements ReactiveAfterSaveCallback<Person> {
+
+		@Override
+		public Mono<Person> onAfterSave(Person entity, Document document, String collection) {
+
+			document.put("firstName", "after-save");
+			return Mono.just(new Person("after-save"));
+		}
+	}
+
+	class SomeDomainType {
+
+		@Id String id;
+		DefaultBulkOperationsUnitTests.Gender gender;
+		@Field("first_name") String firstName;
+		@Field String lastName;
+	}
+
+	enum Gender {
+		M, F
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java
new file mode 100644
index 0000000000..5ecce43102
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java
@@ -0,0 +1,229 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.index.PartialIndexFilter.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.test.StepVerifier; + +import java.util.function.Predicate; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Collation.CaseFirst; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @author Mathieu Ouellet + */ +@ExtendWith(MongoTemplateExtension.class) +public class DefaultReactiveIndexOperationsTests { + + @Template(initialEntitySet = DefaultIndexOperationsIntegrationTestsSample.class) // + static ReactiveMongoTestTemplate template; + + String collectionName = template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class); + + DefaultReactiveIndexOperations indexOps = new DefaultReactiveIndexOperations(template, collectionName, + new QueryMapper(template.getConverter())); + + @BeforeEach + public void setUp() { + template.getCollection(collectionName).flatMapMany(MongoCollection::dropIndexes) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1518 + public void shouldCreateIndexWithCollationCorrectly() { + + IndexDefinition id = new Index().named("with-collation").on("xyz", Direction.ASC) + .collation(Collation.of("de_AT").caseFirst(CaseFirst.off())); + + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + Document expected = new Document("locale", "de_AT") // + .append("caseLevel", false) // + .append("caseFirst", "off") // + .append("strength", 3) // + .append("numericOrdering", false) // + .append("alternate", "non-ignorable") // + .append("maxVariable", "punct") // + .append("normalization", false) // + .append("backwards", false); + + indexOps.getIndexInfo().filter(this.indexByName("with-collation")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + + assertThat(indexInfo.getCollation()).isPresent(); + + // version is set by MongoDB server - 
we remove it to avoid errors when upgrading server version. + Document result = indexInfo.getCollation().get(); + result.remove("version"); + + assertThat(result).isEqualTo(expected); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1682, DATAMONGO-2198 + public void shouldApplyPartialFilterCorrectly() { + + IndexDefinition id = new Index().named("partial-with-criteria").on("k3y", Direction.ASC) + .partial(of(where("q-t-y").gte(10))); + + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("partial-with-criteria")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"q-t-y\" : { \"$gte\" : 10 } }")); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1682, DATAMONGO-2198 + public void shouldApplyPartialFilterWithMappedPropertyCorrectly() { + + IndexDefinition id = new Index().named("partial-with-mapped-criteria").on("k3y", Direction.ASC) + .partial(of(where("quantity").gte(10))); + + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("partial-with-mapped-criteria")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); + }).verifyComplete(); + } + + @Test // DATAMONGO-1682, DATAMONGO-2198 + public void shouldApplyPartialDBOFilterCorrectly() { + + IndexDefinition id = new Index().named("partial-with-dbo").on("k3y", Direction.ASC) + .partial(of(new org.bson.Document("qty", new org.bson.Document("$gte", 10)))); + + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("partial-with-dbo")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); + }) // + .verifyComplete(); + + } + + @Test // DATAMONGO-1682, DATAMONGO-2198 + public void shouldFavorExplicitMappingHintViaClass() { + + IndexDefinition id = new Index().named("partial-with-inheritance").on("k3y", Direction.ASC) + .partial(of(where("age").gte(10))); + + indexOps = new DefaultReactiveIndexOperations(template, + this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class), + new QueryMapper(template.getConverter()), MappingToSameCollection.class); + + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("partial-with-inheritance")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"a_g_e\" : { \"$gte\" : 10 } }")); + }) // + .verifyComplete(); + } + + @Test // GH-4348 + void indexShouldNotBeHiddenByDefault() { + + IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC); + + indexOps.ensureIndex(index).then().as(StepVerifier::create).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("my-index")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(indexInfo.isHidden()).isFalse(); + }) // + .verifyComplete(); + } + + @Test // GH-4348 + void shouldCreateHiddenIndex() { + + IndexDefinition index = new 
Index().named("my-hidden-index").on("a", Direction.ASC).hidden(); + + indexOps.ensureIndex(index).then().as(StepVerifier::create).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("my-hidden-index")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(indexInfo.isHidden()).isTrue(); + }) // + .verifyComplete(); + } + + @Test // GH-4348 + void alterIndexShouldAllowHiding() { + + template.execute(collectionName, collection -> { + return collection.createIndex(new Document("a", 1), new IndexOptions().name("my-index")); + }).then().as(StepVerifier::create).verifyComplete(); + + indexOps.alterIndex("my-index", org.springframework.data.mongodb.core.index.IndexOptions.hidden()) + .as(StepVerifier::create).verifyComplete(); + indexOps.getIndexInfo().filter(this.indexByName("my-index")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(indexInfo.isHidden()).isTrue(); + }) // + .verifyComplete(); + } + + Predicate indexByName(String name) { + return indexInfo -> indexInfo.getName().equals(name); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "default-index-operations-tests") + static class DefaultIndexOperationsIntegrationTestsSample { + + @Field("qty") Integer quantity; + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "default-index-operations-tests") + static class MappingToSameCollection + extends DefaultIndexOperationsIntegrationTests.DefaultIndexOperationsIntegrationTestsSample { + + @Field("a_g_e") Integer age; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsUnitTests.java new file mode 100644 index 0000000000..e863a7df8b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsUnitTests.java @@ -0,0 +1,141 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; + +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +public class DefaultReactiveIndexOperationsUnitTests { + + private ReactiveMongoTemplate template; + + @Mock ReactiveMongoDatabaseFactory factory; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + @Mock Publisher publisher; + + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); + when(db.getCollection(any(), any(Class.class))).thenReturn(collection); + when(collection.createIndex(any(), any(IndexOptions.class))).thenReturn(publisher); + + this.mappingContext = new MongoMappingContext(); + this.converter = spy(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + this.template = new ReactiveMongoTemplate(factory, converter); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotSetCollectionIfNoDefaultDefined() { + + indexOpsFor(Jedi.class).ensureIndex(new Index("firstname", Direction.DESC)).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void ensureIndexUsesDefaultCollationIfNoneDefinedInOptions() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC)).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotUseDefaultCollationIfExplicitlySpecifiedInTheIndex() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC).collation(Collation.of("en_US"))) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + 
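+		// The Index carried its own collation ("en_US"); it must win over the type-level
+		// default ("de_AT") declared on Sith via @Document.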
assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + private DefaultReactiveIndexOperations indexOpsFor(Class type) { + return new DefaultReactiveIndexOperations(template, template.getCollectionName(type), + new QueryMapper(template.getConverter()), type); + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "DefaultReactiveIndexOperationsUnitTests.Jedi(name=" + this.getName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + @Field("firstname") String name; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java index cdea7a7da3..6331e1dbc7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,15 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -31,39 +31,42 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.BasicDBObject; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link DefaultScriptOperations}. 
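 * <p>
 * A minimal sketch of the server-side JavaScript API exercised here, using names from this
 * class (the feature is only available on older MongoDB servers, hence the version gate below):
 *
 * <pre class="code">
 * ScriptOperations scriptOps = template.scriptOps();
 * scriptOps.register(new NamedMongoScript("echo", "function(x) { return x; }"));
 * Object result = scriptOps.call("echo", 10); // JavaScript numbers come back as Double, i.e. 10.0
 * </pre>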
- * + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfMongoServerVersion(isLessThan = "4.1.0") @ContextConfiguration public class DefaultScriptOperationsTests { + static @Client MongoClient mongoClient; + @Configuration static class Config { private static final String DB_NAME = "script-tests"; @Bean - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return mongoClient; } @Bean public MongoTemplate template() throws Exception { - return new MongoTemplate(mongo(), DB_NAME); + return new MongoTemplate(mongoClient(), DB_NAME); } - } static final String JAVASCRIPT_COLLECTION_NAME = "system.js"; @@ -75,119 +78,94 @@ public MongoTemplate template() throws Exception { @Autowired MongoTemplate template; DefaultScriptOperations scriptOps; - @Before + @BeforeEach public void setUp() { - template.getCollection(JAVASCRIPT_COLLECTION_NAME).remove(new BasicDBObject()); + template.getCollection(JAVASCRIPT_COLLECTION_NAME).deleteMany(new Document()); this.scriptOps = new DefaultScriptOperations(template); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void executeShouldDirectlyRunExecutableMongoScript() { - assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, 10), is((Object) 10D)); + assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, 10)).isEqualTo((Object) 10D); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void saveShouldStoreCallableScriptCorrectly() { Query query = query(where("_id").is(SCRIPT_NAME)); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(false)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isFalse(); scriptOps.register(CALLABLE_SCRIPT); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(true)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isTrue(); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void saveShouldStoreExecutableScriptCorrectly() { NamedMongoScript script = scriptOps.register(EXECUTABLE_SCRIPT); Query query = query(where("_id").is(script.getName())); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(true)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isTrue(); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void executeShouldRunCallableScriptThatHasBeenSavedBefore() { scriptOps.register(CALLABLE_SCRIPT); Query query = query(where("_id").is(SCRIPT_NAME)); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(true)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isTrue(); Object result = scriptOps.call(CALLABLE_SCRIPT.getName(), 10); - assertThat(result, is((Object) 10D)); + assertThat(result).isEqualTo(10D); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void existsShouldReturnTrueIfScriptAvailableOnServer() { scriptOps.register(CALLABLE_SCRIPT); - assertThat(scriptOps.exists(SCRIPT_NAME), is(true)); + assertThat(scriptOps.exists(SCRIPT_NAME)).isTrue(); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void existsShouldReturnFalseIfScriptNotAvailableOnServer() { - assertThat(scriptOps.exists(SCRIPT_NAME), is(false)); + assertThat(scriptOps.exists(SCRIPT_NAME)).isFalse(); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void 
callShouldExecuteExistingScript() { scriptOps.register(CALLABLE_SCRIPT); Object result = scriptOps.call(SCRIPT_NAME, 10); - assertThat(result, is((Object) 10D)); + assertThat(result).isEqualTo((Object) 10D); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = UncategorizedDataAccessException.class) + @Test // DATAMONGO-479 public void callShouldThrowExceptionWhenCallingScriptThatDoesNotExist() { - scriptOps.call(SCRIPT_NAME, 10); + assertThatExceptionOfType(UncategorizedDataAccessException.class).isThrownBy(() -> scriptOps.call(SCRIPT_NAME, 10)); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void scriptNamesShouldContainNameOfRegisteredScript() { scriptOps.register(CALLABLE_SCRIPT); - assertThat(scriptOps.getScriptNames(), hasItems("echo")); + assertThat(scriptOps.getScriptNames()).contains("echo"); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void scriptNamesShouldReturnEmptySetWhenNoScriptRegistered() { - assertThat(scriptOps.getScriptNames(), is(empty())); + assertThat(scriptOps.getScriptNames()).isEmpty(); + } + + @Test // DATAMONGO-1465 + public void executeShouldNotQuoteStrings() { + assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, "spring-data")).isEqualTo((Object) "spring-data"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java index 833bf1028a..7418f17c39 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,116 +15,88 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.core.IsNull.*; -import static org.mockito.Matchers.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; /** * Unit tests for {@link DefaultScriptOperations}. 
- * + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 */ -@RunWith(MockitoJUnitRunner.class) -public class DefaultScriptOperationsUnitTests { +@ExtendWith(MockitoExtension.class) +class DefaultScriptOperationsUnitTests { - DefaultScriptOperations scriptOps; + private DefaultScriptOperations scriptOps; @Mock MongoOperations mongoOperations; - @Before - public void setUp() { + @BeforeEach + void setUp() { this.scriptOps = new DefaultScriptOperations(mongoOperations); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void rejectsNullExecutableMongoScript() { - scriptOps.register((ExecutableMongoScript) null); + @Test // DATAMONGO-479 + void rejectsNullExecutableMongoScript() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.register((ExecutableMongoScript) null)); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void rejectsNullNamedMongoScript() { - scriptOps.register((NamedMongoScript) null); + @Test // DATAMONGO-479 + void rejectsNullNamedMongoScript() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.register((NamedMongoScript) null)); } - /** - * @see DATAMONGO-479 - */ - @Test - public void saveShouldUseCorrectCollectionName() { + @Test // DATAMONGO-479 + void saveShouldUseCorrectCollectionName() { scriptOps.register(new NamedMongoScript("foo", "function...")); verify(mongoOperations, times(1)).save(any(NamedMongoScript.class), eq("system.js")); } - /** - * @see DATAMONGO-479 - */ - @Test - public void saveShouldGenerateScriptNameForExecutableMongoScripts() { + @Test // DATAMONGO-479 + void saveShouldGenerateScriptNameForExecutableMongoScripts() { scriptOps.register(new ExecutableMongoScript("function...")); ArgumentCaptor captor = ArgumentCaptor.forClass(NamedMongoScript.class); verify(mongoOperations, times(1)).save(captor.capture(), eq("system.js")); - Assert.assertThat(captor.getValue().getName(), notNullValue()); + assertThat(captor.getValue().getName()).isNotNull(); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void executeShouldThrowExceptionWhenScriptIsNull() { - scriptOps.execute(null); + @Test // DATAMONGO-479 + void executeShouldThrowExceptionWhenScriptIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.execute(null)); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void existsShouldThrowExceptionWhenScriptNameIsNull() { - scriptOps.exists(null); + @Test // DATAMONGO-479 + void existsShouldThrowExceptionWhenScriptNameIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.exists(null)); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void existsShouldThrowExceptionWhenScriptNameIsEmpty() { - scriptOps.exists(""); + @Test // DATAMONGO-479 + void existsShouldThrowExceptionWhenScriptNameIsEmpty() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.exists("")); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void callShouldThrowExceptionWhenScriptNameIsNull() { - scriptOps.call(null); + @Test // DATAMONGO-479 + void callShouldThrowExceptionWhenScriptNameIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.call(null)); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) - public void 
callShouldThrowExceptionWhenScriptNameIsEmpty() { - scriptOps.call(""); + @Test // DATAMONGO-479 + void callShouldThrowExceptionWhenScriptNameIsEmpty() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.call("")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java new file mode 100644 index 0000000000..ed468f8ed2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java @@ -0,0 +1,103 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Iterator; +import java.util.List; + +import org.bson.Document; + +import com.mongodb.BasicDBList; + +/** + * Helper classes to ease assertions on {@link Document}s. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Jongwoo Han + */ +public abstract class DocumentTestUtils { + + private DocumentTestUtils() {} + + /** + * Expects the field with the given key to be not {@literal null} and a {@link Document} in turn and returns it. + * + * @param source the {@link Document} to lookup the nested one + * @param key the key of the field to lookup the nested {@link Document} + * @return + */ + public static Document getAsDocument(Document source, String key) { + return getTypedValue(source, key, Document.class); + } + + /** + * Expects the field with the given key to be not {@literal null} and a {@link BasicDBList}. + * + * @param source the {@link Document} to lookup the {@link List} in + * @param key the key of the field to find the {@link List} in + * @return + */ + public static List getAsDBList(Document source, String key) { + return getTypedValue(source, key, List.class); + } + + /** + * Expects the list element with the given index to be a non-{@literal null} {@link Document} and returns it. 
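+ * A typical use together with {@link #getAsDBList(Document, String)}, for example (field name
+ * {@code "values"} is assumed):
+ *
+ * <pre class="code">
+ * List&lt;Object&gt; values = getAsDBList(source, "values");
+ * Document first = getAsDocument(values, 0); // fails the running test if missing or mistyped
+ * </pre>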
+ * + * @param source the {@link List} to look up the {@link Document} element in + * @param index the index of the element expected to contain a {@link Document} + * @return + */ + public static Document getAsDocument(List<Object> source, int index) { + + assertThat(source.size()).isGreaterThanOrEqualTo(index + 1); + Object value = source.get(index); + assertThat(value).isInstanceOf(Document.class); + return (Document) value; + } + + @SuppressWarnings("unchecked") + public static <T> T getTypedValue(Document source, String key, Class<T> type) { + + Object value = source.get(key); + assertThat(value).isNotNull(); + assertThat(value).isInstanceOf(type); + + return (T) value; + } + + public static void assertTypeHint(Document document, Class<?> type) { + assertTypeHint(document, type.getName()); + } + + public static void assertTypeHint(Document document, String expectedTypeString) { + + Iterator<String> keyIterator = document.keySet().iterator(); + while (keyIterator.hasNext()) { + String key = keyIterator.next(); + if (key.equals("_class")) { + assertThat(document.get(key)).isEqualTo(expectedTypeString); + assertThat(keyIterator.hasNext()).isFalse(); + return; + } + } + + fail(String.format("Expected to find type info %s in %s.", expectedTypeString, document)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java new file mode 100644 index 0000000000..a2197463e6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java @@ -0,0 +1,187 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; +import java.util.Map; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; + +/** + * Unit tests for {@link EntityOperations}. 
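+ * Covers time-series field validation and key extraction from entities, raw documents, and projection proxies.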
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +class EntityOperationsUnitTests { + + ConversionService conversionService = new DefaultConversionService(); + + EntityOperations operations = new EntityOperations( + new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoTestMappingContext.newTestContext())); + + @Test // GH-3731 + void shouldReportInvalidTimeField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidTimeField.class).getCollectionOptions()) + .withMessageContaining("Time series field 'foo' does not exist"); + } + + @Test // GH-3731 + void shouldReportInvalidMetaField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidMetaField.class).getCollectionOptions()) + .withMessageContaining("Meta field 'foo' does not exist"); + } + + @Test // DATAMONGO-2293 + void populateIdShouldReturnTargetBeanWhenIdIsNull() { + assertThat(initAdaptibleEntity(new DomainTypeWithIdProperty()).populateIdIfNecessary(null)).isNotNull(); + } + + @Test // GH-4308 + void shouldExtractKeysFromEntity() { + + WithNestedDocument object = new WithNestedDocument("foo"); + + Map<String, Object> keys = operations.forEntity(object).extractKeys(new Document("id", 1), + WithNestedDocument.class); + + assertThat(keys).containsEntry("id", "foo"); + } + + @Test // GH-4308 + void shouldExtractKeysFromDocument() { + + Document object = new Document("id", "foo"); + + Map<String, Object> keys = operations.forEntity(object).extractKeys(new Document("id", 1), Document.class); + + assertThat(keys).containsEntry("id", "foo"); + } + + @Test // GH-4308 + void shouldExtractKeysFromNestedEntity() { + + WithNestedDocument object = new WithNestedDocument("foo", new WithNestedDocument("bar"), null); + + Map<String, Object> keys = operations.forEntity(object).extractKeys(new Document("nested.id", 1), + WithNestedDocument.class); + + assertThat(keys).containsEntry("nested.id", "bar"); + } + + @Test // GH-4308 + void shouldExtractKeysFromNestedEntityDocument() { + + WithNestedDocument object = new WithNestedDocument("foo", new WithNestedDocument("bar"), + new Document("john", "doe")); + + Map<String, Object> keys = operations.forEntity(object).extractKeys(new Document("document.john", 1), + WithNestedDocument.class); + + assertThat(keys).containsEntry("document.john", "doe"); + } + + @Test // GH-4308 + void shouldExtractKeysFromNestedDocument() { + + Document object = new Document("document", new Document("john", "doe")); + + Map<String, Object> keys = operations.forEntity(object).extractKeys(new Document("document.john", 1), + Document.class); + + assertThat(keys).containsEntry("document.john", "doe"); + } + + @Test // GH-4308 + void shouldExtractIdPropertyNameFromRawDocument() { + + Document object = new Document("_id", "id-1").append("value", "val"); + + Map<String, Object> keys = operations.forEntity(object).extractKeys(new Document("value", 1), DomainTypeWithIdProperty.class); + + assertThat(keys).containsEntry("id", "id-1"); + } + + @Test // GH-4308 + void shouldExtractValuesFromProxy() { + + ProjectionInterface source = new SpelAwareProxyProjectionFactory().createProjection(ProjectionInterface.class, new Document("_id", "id-1").append("value", "val")); + + Map<String, Object> keys = operations.forEntity(source).extractKeys(new Document("value", 1), DomainTypeWithIdProperty.class); + + assertThat(keys).isEqualTo(new Document("id", "id-1").append("value", "val")); + } + + <T> EntityOperations.AdaptibleEntity<T> initAdaptibleEntity(T source) { + return operations.forEntity(source, conversionService); + } + + private static class 
DomainTypeWithIdProperty { + + @Id String id; + String value; + } + + @TimeSeries(timeField = "foo") + static class InvalidTimeField { + + } + + @TimeSeries(timeField = "time", metaField = "foo") + static class InvalidMetaField { + Instant time; + } + + class WithNestedDocument { + + String id; + + WithNestedDocument nested; + + Document document; + + public WithNestedDocument() {} + + public WithNestedDocument(String id) { + this.id = id; + } + + public WithNestedDocument(String id, WithNestedDocument nested, Document document) { + + this.id = id; + this.nested = nested; + this.document = document; + } + } + + interface ProjectionInterface { + String getValue(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java new file mode 100644 index 0000000000..05f0695839 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; + +/** + * Unit tests for {@link ExecutableAggregationOperationSupport}. 
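+ * Verifies collection-name resolution and result-type propagation for typed, untyped, and streaming aggregations.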
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +public class ExecutableAggregationOperationSupportUnitTests { + + @Mock MongoTemplate template; + private ExecutableAggregationOperationSupport opSupport; + + @BeforeEach + void setUp() { + opSupport = new ExecutableAggregationOperationSupport(template); + } + + @Test // DATAMONGO-1563 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(null)); + } + + @Test // DATAMONGO-1563 + void throwsExceptionOnNullCollectionWhenUsed() { + assertThatIllegalArgumentException() + .isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1563 + void throwsExceptionOnEmptyCollectionWhenUsed() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection("")); + } + + @Test // DATAMONGO-1563 + void throwsExceptionOnNullAggregation() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).by(null)); + } + + @Test // DATAMONGO-1563 + void aggregateWithUntypedAggregationAndExplicitCollection() { + + opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all(); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + verify(template).aggregate(any(Aggregation.class), eq("star-wars"), captor.capture()); + assertThat(captor.getValue()).isEqualTo(Person.class); + } + + @Test // DATAMONGO-1563 + void aggregateWithUntypedAggregation() { + + when(template.getCollectionName(any(Class.class))).thenReturn("person"); + + opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).all(); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + + assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); + } + + @Test // DATAMONGO-1563 + void aggregateWithTypeAggregation() { + + when(template.getCollectionName(any(Class.class))).thenReturn("person"); + + opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).all(); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + + assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); + } + + @Test // DATAMONGO-1563 + void aggregateStreamWithUntypedAggregationAndExplicitCollection() { + + opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).stream(); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + verify(template).aggregateStream(any(Aggregation.class), eq("star-wars"), captor.capture()); + assertThat(captor.getValue()).isEqualTo(Person.class); + } + + @Test // DATAMONGO-1563 + void aggregateStreamWithUntypedAggregation() { + + when(template.getCollectionName(any(Class.class))).thenReturn("person"); + + opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).stream(); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture()); + 
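+ // both captures should resolve to the domain type: one from the collection-name lookup, one from the stream call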
assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); + } + + @Test // DATAMONGO-1563 + void aggregateStreamWithTypeAggregation() { + + when(template.getCollectionName(any(Class.class))).thenReturn("person"); + + opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).stream(); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture()); + + assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); + } + + static class Person {} + + static class Jedi {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java new file mode 100644 index 0000000000..eac248e69a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java @@ -0,0 +1,976 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; + +import java.util.Date; +import java.util.Objects; +import java.util.stream.Stream; + +import org.bson.BsonString; +import org.bson.BsonValue; +import org.bson.Document; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.GeoResults; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import 
org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link ExecutableFindOperationSupport}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoTemplateExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class ExecutableFindOperationSupportTests implements StateFunctions { + + private static final String STAR_WARS = "star-wars"; + private static final String STAR_WARS_PLANETS = "star-wars-universe"; + + @Template(database = "executable-find-operation-support-tests", initialEntitySet = { Person.class, Planet.class }) // + private static MongoTestTemplate template; + + private Person han; + private Person luke; + + private Planet alderan; + private Planet dantooine; + + @Override + public void clear() { + template.flush(); + } + + @Override + public void setupState() { + template.indexOps(Planet.class).ensureIndex( + new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); + + initPersons(); + initPlanets(); + } + + @Test // DATAMONGO-1563 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(null)); + } + + @Test // DATAMONGO-1563 + void returnTypeIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).as(null)); + } + + @Test // DATAMONGO-1563 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1563 + void findAll() { + assertThat(template.query(Person.class).all()).containsExactlyInAnyOrder(han, luke); + } + + @Test // DATAMONGO-1563 + void findAllWithCollection() { + assertThat(template.query(Human.class).inCollection(STAR_WARS).all()).hasSize(2); + } + + @Test // DATAMONGO-1563 + void findAllWithProjection() { + assertThat(template.query(Person.class).as(Jedi.class).all()).hasOnlyElementsOfType(Jedi.class).hasSize(2); + } + + @Test // DATAMONGO-2041 + @DirtiesState + void findAllWithProjectionOnEmbeddedType() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + template.save(luke); + + assertThat(template.query(Person.class).as(PersonDtoProjection.class).matching(query(where("id").is(luke.id))) + .firstValue()).hasFieldOrPropertyWithValue("father", luke.father); + } + + @Test // DATAMONGO-1733 + void findByReturningAllValuesAsClosedInterfaceProjection() { + + assertThat(template.query(Person.class).as(PersonProjection.class).all()) + .hasOnlyElementsOfTypes(PersonProjection.class); + } + + @Test // DATAMONGO-1563 + void findAllBy() { + + assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).all()) + .containsExactlyInAnyOrder(luke); + } + + @Test // DATAMONGO-1563 + void findAllByWithCollectionUsingMappingInformation() { + + assertThat(template.query(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all()) + .hasSize(1).hasOnlyElementsOfType(Jedi.class); + } + + @Test // DATAMONGO-1563 + void findAllByWithCollection() { + assertThat(template.query(Human.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all()) + .hasSize(1); + } + + @Test // DATAMONGO-2323 + void findAllAsDocument() { + assertThat( + template.query(Document.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all()) + .hasSize(1); + } + + @Test // DATAMONGO-1563 + void findAllByWithProjection() { + + 
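+ // the Jedi projection maps the shared 'firstname' field, so matching Person documents come back as Jedi instances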
assertThat(template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).all()) + .hasOnlyElementsOfType(Jedi.class).hasSize(1); + } + + @Test // DATAMONGO-1563 + void findBy() { + assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).one()).contains(luke); + } + + @Test // DATAMONGO-2416 + void findByCriteria() { + assertThat(template.query(Person.class).matching(where("firstname").is("luke")).one()).contains(luke); + } + + @Test // DATAMONGO-1563 + void findByNoMatch() { + assertThat(template.query(Person.class).matching(query(where("firstname").is("spock"))).one()).isEmpty(); + } + + @Test // DATAMONGO-1563 + void findByTooManyResults() { + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class) + .isThrownBy(() -> template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one()); + } + + @Test // DATAMONGO-1726 + void findByReturningOneValue() { + assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).oneValue()).isEqualTo(luke); + } + + @Test // DATAMONGO-1726 + void findByReturningOneValueButTooManyResults() { + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class).isThrownBy( + () -> template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).oneValue()); + } + + @Test // DATAMONGO-1726 + void findByReturningFirstValue() { + + assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).firstValue()) + .isEqualTo(luke); + } + + @Test // DATAMONGO-1726 + void findByReturningFirstValueForManyResults() { + + assertThat(template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).firstValue()) + .isIn(han, luke); + } + + @Test // DATAMONGO-1733 + void findByReturningFirstValueAsClosedInterfaceProjection() { + + PersonProjection result = template.query(Person.class).as(PersonProjection.class) + .matching(query(where("firstname").is("han"))).firstValue(); + + assertThat(result).isInstanceOf(PersonProjection.class); + assertThat(result.getFirstname()).isEqualTo("han"); + } + + @Test // DATAMONGO-1733 + void findByReturningFirstValueAsOpenInterfaceProjection() { + + PersonSpELProjection result = template.query(Person.class).as(PersonSpELProjection.class) + .matching(query(where("firstname").is("han"))).firstValue(); + + assertThat(result).isInstanceOf(PersonSpELProjection.class); + assertThat(result.getName()).isEqualTo("han"); + } + + @Test // DATAMONGO-1563 + void streamAll() { + + try (Stream<Person> stream = template.query(Person.class).stream()) { + assertThat(stream).containsExactlyInAnyOrder(han, luke); + } + } + + @Test // DATAMONGO-1563 + void streamAllWithCollection() { + + try (Stream<Human> stream = template.query(Human.class).inCollection(STAR_WARS).stream()) { + assertThat(stream).hasSize(2); + } + } + + @Test // DATAMONGO-1563 + void streamAllWithProjection() { + + try (Stream<Jedi> stream = template.query(Person.class).as(Jedi.class).stream()) { + assertThat(stream).hasOnlyElementsOfType(Jedi.class).hasSize(2); + } + } + + @Test // DATAMONGO-1733 + void streamAllReturningResultsAsClosedInterfaceProjection() { + + TerminatingFind<PersonProjection> operation = template.query(Person.class).as(PersonProjection.class); + + assertThat(operation.stream()) // + .hasSize(2) // + .allSatisfy(it -> { + assertThat(it).isInstanceOf(PersonProjection.class); + assertThat(it.getFirstname()).isNotBlank(); + }); + } + + @Test // DATAMONGO-1733 + void streamAllReturningResultsAsOpenInterfaceProjection() { + + 
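+ // open projection: getName() is computed from the @Value SpEL expression against the backing document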
TerminatingFind<PersonSpELProjection> operation = template.query(Person.class).as(PersonSpELProjection.class); + + assertThat(operation.stream()) // + .hasSize(2) // + .allSatisfy(it -> { + assertThat(it).isInstanceOf(PersonSpELProjection.class); + assertThat(it.getName()).isNotBlank(); + }); + } + + @Test // DATAMONGO-1563 + void streamAllBy() { + + try (Stream<Person> stream = template.query(Person.class).matching(query(where("firstname").is("luke"))).stream()) { + assertThat(stream).containsExactlyInAnyOrder(luke); + } + } + + @Test // DATAMONGO-1563 + void findAllNearBy() { + + GeoResults<Planet> results = template.query(Planet.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)) + .all(); + assertThat(results.getContent()).hasSize(2); + assertThat(results.getContent().get(0).getDistance()).isNotNull(); + } + + @Test // DATAMONGO-1563 + void findAllNearByWithCollectionAndProjection() { + + GeoResults<Human> results = template.query(Object.class).inCollection(STAR_WARS_PLANETS).as(Human.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all(); + + assertThat(results.getContent()).hasSize(2); + assertThat(results.getContent().get(0).getDistance()).isNotNull(); + assertThat(results.getContent().get(0).getContent()).isInstanceOf(Human.class); + assertThat(results.getContent().get(0).getContent().getId()).isEqualTo("alderan"); + } + + @Test // DATAMONGO-1733 + void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { + + GeoResults<PlanetProjection> results = template.query(Planet.class).as(PlanetProjection.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all(); + + assertThat(results.getContent()).allSatisfy(it -> { + + assertThat(it.getContent()).isInstanceOf(PlanetProjection.class); + assertThat(it.getContent().getName()).isNotBlank(); + }); + } + + @Test // DATAMONGO-1733 + void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { + + GeoResults<PlanetSpELProjection> results = template.query(Planet.class).as(PlanetSpELProjection.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all(); + + assertThat(results.getContent()).allSatisfy(it -> { + + assertThat(it.getContent()).isInstanceOf(PlanetSpELProjection.class); + assertThat(it.getContent().getId()).isNotBlank(); + }); + } + + @Test // DATAMONGO-1728 + void firstShouldReturnFirstEntryInCollection() { + assertThat(template.query(Person.class).first()).isNotEmpty(); + } + + @Test // DATAMONGO-1734 + void countShouldReturnNrOfElementsInCollectionWhenNoQueryPresent() { + assertThat(template.query(Person.class).count()).isEqualTo(2); + } + + @Test // DATAMONGO-1734 + void countShouldReturnNrOfElementsMatchingQuery() { + + assertThat(template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).count()) + .isEqualTo(1); + } + + @Test // DATAMONGO-1734 + void existsShouldReturnTrueIfAtLeastOneElementExistsInCollection() { + assertThat(template.query(Person.class).exists()).isTrue(); + } + + @Test // DATAMONGO-1734 + @DirtiesState + void existsShouldReturnFalseIfNoElementExistsInCollection() { + + template.remove(new BasicQuery("{}"), STAR_WARS); + + assertThat(template.query(Person.class).exists()).isFalse(); + } + + @Test // DATAMONGO-1734 + void existsShouldReturnTrueIfAtLeastOneElementMatchesQuery() { + + assertThat(template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).exists()) + .isTrue(); + } + + @Test // DATAMONGO-1734 + void existsShouldReturnFalseWhenNoElementMatchesQuery() { 
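+ // 'spock' is not part of the fixture data inserted by initPersons()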
assertThat(template.query(Person.class).matching(query(where("firstname").is("spock"))).exists()).isFalse(); + } + + @Test // DATAMONGO-1734 + void returnsTargetObjectDirectlyIfProjectionInterfaceIsImplemented() { + assertThat(template.query(Person.class).as(Contact.class).all()).allMatch(it -> it instanceof Person); + } + + @Test // DATAMONGO-1761 + void distinctReturnsEmptyListIfNoMatchFound() { + assertThat(template.query(Person.class).distinct("actually-not-property-in-use").as(String.class).all()).isEmpty(); + } + + @Test // DATAMONGO-1761 + void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTypeSpecifiedThatCanBeConvertedDirectlyByACodec() { + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.lastname = luke.lastname; + + template.save(anakin); + + assertThat(template.query(Person.class).distinct("lastname").as(String.class).all()) + .containsExactlyInAnyOrder("solo", "skywalker"); + } + + @Test // DATAMONGO-1761 + void distinctReturnsSimpleFieldValuesCorrectly() { + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = "dark-lord"; + + Person padme = new Person(); + padme.firstname = "padme"; + padme.ability = 42L; + + Person jaja = new Person(); + jaja.firstname = "jaja"; + jaja.ability = new Date(); + + template.save(anakin); + template.save(padme); + template.save(jaja); + + assertThat(template.query(Person.class).distinct("ability").all()).containsExactlyInAnyOrder(anakin.ability, + padme.ability, jaja.ability); + } + + @Test // DATAMONGO-1761 + void distinctReturnsComplexValuesCorrectly() { + + Sith sith = new Sith(); + sith.rank = "lord"; + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = sith; + + template.save(anakin); + + assertThat(template.query(Person.class).distinct("ability").all()).containsExactlyInAnyOrder(anakin.ability); + } + + @Test // DATAMONGO-1761 + void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { + + Sith sith = new Sith(); + sith.rank = "lord"; + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = sith; + + template.save(anakin); + + assertThat(template.query(Person.class).distinct("ability").as(Sith.class).all()) + .containsExactlyInAnyOrder((Sith) anakin.ability); + } + + @Test // DATAMONGO-1761 + void distinctReturnsComplexValuesCorrectlyHavingReturnTypeDocumentSpecified() { + + Sith sith = new Sith(); + sith.rank = "lord"; + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = sith; + + template.save(anakin); + + assertThat(template.query(Person.class).distinct("ability").as(Document.class).all()) + .containsExactlyInAnyOrder(new Document("rank", "lord").append("_class", Sith.class.getName())); + } + + @Test // DATAMONGO-1761 + void distinctMapsFieldNameCorrectly() { + + assertThat(template.query(Jedi.class).inCollection(STAR_WARS).distinct("name").as(String.class).all()) + .containsExactlyInAnyOrder("han", "luke"); + } + + @Test // DATAMONGO-1761 + void distinctReturnsRawValuesIfReturnTypeIsBsonValue() { + + assertThat(template.query(Person.class).distinct("lastname").as(BsonValue.class).all()) + .containsExactlyInAnyOrder(new BsonString("solo"), new BsonString("skywalker")); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefinedByTheDomainType() { + + template.save(new Document("darth", "vader"), STAR_WARS); + + 
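+ // 'darth' is not declared on Person, yet the raw value is still converted to its Java type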
assertThat(template.query(Person.class).distinct("darth").all()).containsExactlyInAnyOrder("vader"); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsMappedDomainTypeForProjections() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + template.save(luke); + + assertThat(template.query(Person.class).distinct("father").as(Jedi.class).all()) + .containsExactlyInAnyOrder(new Jedi("anakin")); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctAllowsQueryUsingObjectSourceType() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + template.save(luke); + + assertThat(template.query(Object.class).inCollection(STAR_WARS).distinct("father").as(Jedi.class).all()) + .containsExactlyInAnyOrder(new Jedi("anakin")); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTypePresent() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + template.save(luke); + + Person expected = new Person(); + expected.firstname = luke.father.firstname; + + assertThat(template.query(Person.class).distinct("father").all()).containsExactlyInAnyOrder(expected); + } + + @Test // DATAMONGO-1761 + void distinctThrowsExceptionWhenExplicitMappingTypeCannotBeApplied() { + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> template.query(Person.class).distinct("firstname").as(Long.class).all()); + } + + @Test // DATAMONGO-2507 + void distinctAppliesFilterQuery() { + + assertThat(template.query(Person.class).inCollection(STAR_WARS).distinct("firstname") // + .matching(where("lastname").is(luke.lastname)) // + .as(String.class) // + .all() // + ).containsExactlyInAnyOrder("luke"); + } + + @Test // GH-2860 + void projectionOnDbRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDbRef = alderan; + + template.save(source); + + WithDbRefProjection target = template.query(WithRefs.class).as(WithDbRefProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDbRef()).isEqualTo(alderan); + } + + @Test // GH-2860 + @Disabled("GH-3913") + @DirtiesState + void propertyProjectionOnDbRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDbRef = alderan; + + template.save(source); + + WithDbRefPropertyProjection target = template.query(WithRefs.class).as(WithDbRefPropertyProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDbRef().getName()).isEqualTo(alderan.getName()); + } + + @Test // GH-2860 + @DirtiesState + void projectionOnDocRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDocRef = alderan; + + template.save(source); + + WithDocumentRefProjection target = template.query(WithRefs.class).as(WithDocumentRefProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDocRef()).isEqualTo(alderan); + } + + @Test // GH-2860 + @DirtiesState + void propertyProjectionOnDocRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDocRef = alderan; + + template.save(source); + + WithDocRefPropertyProjection target = template.query(WithRefs.class).as(WithDocRefPropertyProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + 
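+ // the nested interface projection resolves the planet referenced via @DocumentReference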
assertThat(target.getPlanetDocRef().getName()).isEqualTo(alderan.getName()); + } + + interface Contact {} + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person implements Contact { + + @Id String id; + String firstname; + String lastname; + Object ability; + Person father; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(ability, person.ability) + && Objects.equals(father, person.father); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, ability, father); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + this.getFather() + + ")"; + } + } + + interface PersonProjection { + String getFirstname(); + } + + public interface PersonSpELProjection { + + @Value("#{target.firstname}") + String getName(); + } + + // TODO: Without getters/setters, not identified as projection/properties + static class PersonDtoProjection { + + @Field("firstname") String name; + Person father; + + public String getName() { + return this.name; + } + + public Person getFather() { + return this.father; + } + + public void setName(String name) { + this.name = name; + } + + public void setFather(Person father) { + this.father = father; + } + } + + static class Human { + + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Human(id=" + this.getId() + ")"; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } + } + + static class Sith { + + String rank; + + public String getRank() { + return this.rank; + } + + public void setRank(String rank) { + this.rank = rank; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != 
o.getClass()) { + return false; + } + Sith sith = (Sith) o; + return Objects.equals(rank, sith.rank); + } + + @Override + public int hashCode() { + return Objects.hash(rank); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Sith(rank=" + this.getRank() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS_PLANETS) + static class Planet { + + @Id String name; + Point coordinates; + + public Planet(String name, Point coordinates) { + this.name = name; + this.coordinates = coordinates; + } + + public String getName() { + return this.name; + } + + public Point getCoordinates() { + return this.coordinates; + } + + public void setName(String name) { + this.name = name; + } + + public void setCoordinates(Point coordinates) { + this.coordinates = coordinates; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Planet planet = (Planet) o; + return Objects.equals(name, planet.name) && Objects.equals(coordinates, planet.coordinates); + } + + @Override + public int hashCode() { + return Objects.hash(name, coordinates); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Planet(name=" + this.getName() + ", coordinates=" + + this.getCoordinates() + ")"; + } + } + + interface PlanetProjection { + String getName(); + } + + interface PlanetSpELProjection { + + @Value("#{target.name}") + String getId(); + } + + static class WithRefs { + + @Id String id; + + String noRef; + + @DBRef Planet planetDbRef; + + @DocumentReference Planet planetDocRef; + + public String getId() { + return this.id; + } + + public String getNoRef() { + return this.noRef; + } + + public Planet getPlanetDbRef() { + return this.planetDbRef; + } + + public Planet getPlanetDocRef() { + return this.planetDocRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setNoRef(String noRef) { + this.noRef = noRef; + } + + public void setPlanetDbRef(Planet planetDbRef) { + this.planetDbRef = planetDbRef; + } + + public void setPlanetDocRef(Planet planetDocRef) { + this.planetDocRef = planetDocRef; + } + + public String toString() { + return "ExecutableFindOperationSupportTests.WithRefs(id=" + this.getId() + ", noRef=" + this.getNoRef() + + ", planetDbRef=" + this.getPlanetDbRef() + ", planetDocRef=" + this.getPlanetDocRef() + ")"; + } + } + + interface WithDbRefProjection { + Planet getPlanetDbRef(); + } + + interface WithDocumentRefProjection { + Planet getPlanetDocRef(); + } + + interface WithDbRefPropertyProjection { + PlanetProjection getPlanetDbRef(); + } + + interface WithDocRefPropertyProjection { + PlanetProjection getPlanetDocRef(); + } + + private void initPersons() { + + han = new Person(); + han.firstname = "han"; + han.lastname = "solo"; + han.id = "id-1"; + + luke = new Person(); + luke.firstname = "luke"; + luke.lastname = "skywalker"; + luke.id = "id-2"; + + template.save(han); + template.save(luke); + } + + private void initPlanets() { + + alderan = new Planet("alderan", new Point(-73.9836, 40.7538)); + dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193)); + + template.save(alderan); + template.save(dantooine); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java new file mode 
100644 index 0000000000..d5e5d603c0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java @@ -0,0 +1,171 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.Arrays; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; + +/** + * Unit tests for {@link ExecutableInsertOperationSupport}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class ExecutableInsertOperationSupportUnitTests { + + private static final String STAR_WARS = "star-wars"; + + @Mock MongoTemplate template; + @Mock BulkOperations bulkOperations; + + private ExecutableInsertOperationSupport ops; + + private Person luke, han; + + @BeforeEach + void setUp() { + + ops = new ExecutableInsertOperationSupport(template); + + luke = new Person(); + luke.id = "id-1"; + luke.firstname = "luke"; + + han = new Person(); + han.firstname = "han"; + han.id = "id-2"; + } + + @Test // DATAMONGO-1563 + void nullCollectionShouldThrowException() { + assertThatIllegalArgumentException().isThrownBy(() -> ops.insert(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1563 + void nullBulkModeShouldThrowException() { + assertThatIllegalArgumentException().isThrownBy(() -> ops.insert(Person.class).withBulkMode(null)); + } + + @Test // DATAMONGO-1563 + void insertShouldUseDerivedCollectionName() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + + ops.insert(Person.class).one(luke); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).insert(eq(luke), eq(STAR_WARS)); + + assertThat(captor.getAllValues()).containsExactly(Person.class); + } + + @Test // DATAMONGO-1563 + void insertShouldUseExplicitCollectionName() { + + ops.insert(Person.class).inCollection(STAR_WARS).one(luke); + + verify(template, never()).getCollectionName(any(Class.class)); + verify(template).insert(eq(luke), eq(STAR_WARS)); + } + + @Test // DATAMONGO-1563 + void insertCollectionShouldDelegateCorrectly() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + + ops.insert(Person.class).all(Arrays.asList(luke, han)); + + verify(template).getCollectionName(any(Class.class)); + verify(template).insert(anyList(), eq(STAR_WARS)); + } + + @Test // DATAMONGO-1563 + void bulkInsertCollectionShouldDelegateCorrectly() { 
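+ // stub collection-name resolution and the fluent bulk API so the delegation can be verified without a server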
+ when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + when(template.bulkOps(any(), any(), any())).thenReturn(bulkOperations); + when(bulkOperations.insert(anyList())).thenReturn(bulkOperations); + + ops.insert(Person.class).bulk(Arrays.asList(luke, han)); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(any(Class.class)); + verify(template).bulkOps(eq(BulkMode.ORDERED), captor.capture(), eq(STAR_WARS)); + verify(bulkOperations).insert(anyList()); + verify(bulkOperations).execute(); + } + + @Test // DATAMONGO-1563 + void bulkInsertWithBulkModeShouldDelegateCorrectly() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + when(template.bulkOps(any(), any(), any())).thenReturn(bulkOperations); + when(bulkOperations.insert(anyList())).thenReturn(bulkOperations); + + ops.insert(Person.class).withBulkMode(BulkMode.UNORDERED).bulk(Arrays.asList(luke, han)); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(any(Class.class)); + verify(template).bulkOps(eq(BulkMode.UNORDERED), captor.capture(), eq(STAR_WARS)); + verify(bulkOperations).insert(anyList()); + verify(bulkOperations).execute(); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person { + + @Id String id; + String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public String toString() { + return "ExecutableInsertOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupportUnitTests.java new file mode 100644 index 0000000000..167852f723 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupportUnitTests.java @@ -0,0 +1,212 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link ExecutableMapReduceOperationSupport}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Beyond the Shadows - Brent Weeks + */ +@ExtendWith(MockitoExtension.class) +class ExecutableMapReduceOperationSupportUnitTests { + + private static final String STAR_WARS = "star-wars"; + private static final String MAP_FUNCTION = "function() { emit(this.id, this.firstname) }"; + private static final String REDUCE_FUNCTION = "function(id, name) { return sum(id, name); }"; + + @Mock MongoTemplate template; + + private ExecutableMapReduceOperationSupport mapReduceOpsSupport; + + @BeforeEach + void setUp() { + mapReduceOpsSupport = new ExecutableMapReduceOperationSupport(template); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullTemplate() { + assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMapReduceOperationSupport(null)); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> mapReduceOpsSupport.mapReduce(null)); + } + + @Test // DATAMONGO-1929 + void usesExtractedCollectionName() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesExplicitCollectionName() { + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .inCollection("the-night-angel").all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq("the-night-angel"), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesMapReduceOptionsWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + MapReduceOptions options = MapReduceOptions.options(); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).with(options).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + eq(options), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesQueryWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + Query query = new BasicQuery("{ 'lastname' : 'skywalker' }"); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).matching(query).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Person.class)); + } + 
+ @Test // DATAMONGO-2416 + void usesCriteriaWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + Query query = Query.query(where("lastname").is("skywalker")); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .matching(where("lastname").is("skywalker")).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesProjectionWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).as(Jedi.class).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Jedi.class)); + } + + interface Contact {} + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person implements Contact { + + @Id String id; + String firstname; + String lastname; + Object ability; + Person father; + + public Person() {} + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + public String toString() { + return "ExecutableMapReduceOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + + this.getFather() + ")"; + } + } + + static class Jedi { + + @Field("firstname") // + String name; + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ExecutableMapReduceOperationSupportUnitTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java new file mode 100644 index 0000000000..621e2a0764 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java @@ -0,0 +1,174 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.result.DeleteResult; + +/** + * Integration tests for {@link ExecutableRemoveOperationSupport}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MongoTemplateExtension.class) +class ExecutableRemoveOperationSupportTests { + + private static final String STAR_WARS = "star-wars"; + + @Template(initialEntitySet = Person.class) // + private static MongoTestTemplate template; + + private Person han; + private Person luke; + + @BeforeEach + void setUp() { + + template.flush(); + + han = new Person(); + han.firstname = "han"; + han.id = "id-1"; + + luke = new Person(); + luke.firstname = "luke"; + luke.id = "id-2"; + + template.save(han); + template.save(luke); + } + + @Test // DATAMONGO-1563 + void removeAll() { + + DeleteResult result = template.remove(Person.class).all(); + + assertThat(result.getDeletedCount()).isEqualTo(2L); + } + + @Test // DATAMONGO-1563 + void removeAllMatching() { + + DeleteResult result = template.remove(Person.class).matching(query(where("firstname").is("han"))).all(); + + assertThat(result.getDeletedCount()).isEqualTo(1L); + } + + @Test // DATAMONGO-2416 + void removeAllMatchingCriteria() { + + DeleteResult result = template.remove(Person.class).matching(where("firstname").is("han")).all(); + + assertThat(result.getDeletedCount()).isEqualTo(1L); + } + + @Test // DATAMONGO-1563 + void removeAllMatchingWithAlternateDomainTypeAndCollection() { + + DeleteResult result = template.remove(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))) + .all(); + + assertThat(result.getDeletedCount()).isEqualTo(1L); + } + + @Test // DATAMONGO-1563 + void removeAndReturnAllMatching() { + + List result = template.remove(Person.class).matching(query(where("firstname").is("han"))).findAndRemove(); + + assertThat(result).containsExactly(han); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person { + + @Id String id; + String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ExecutableRemoveOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } + } + + static class Jedi { + + 
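+ // shares the 'firstname' document field with Person, acting as an alternate view on the same collection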
@Field("firstname") // + String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ExecutableRemoveOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java new file mode 100644 index 0000000000..e7f50dab53 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java @@ -0,0 +1,368 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Objects; +import java.util.Optional; + +import org.bson.BsonString; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.result.UpdateResult; + +/** + * Integration tests for {@link ExecutableUpdateOperationSupport}. 
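+ * <p> + * The fluent API under test chains {@code update(type)}, {@code matching(query)} and {@code apply(update)}, terminated by {@code first()}, {@code all()}, {@code upsert()}, {@code findAndModify()}, {@code findAndReplace()} or {@code replaceFirst()}.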
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MongoTemplateExtension.class) +class ExecutableUpdateOperationSupportTests { + + private static final String STAR_WARS = "star-wars"; + + @Template(initialEntitySet = { Human.class, Jedi.class, Person.class }) // + private static MongoTestTemplate template; + + private Person han; + private Person luke; + + @BeforeEach + void setUp() { + + template.remove(Person.class).all(); + + han = new Person(); + han.firstname = "han"; + han.id = "id-1"; + + luke = new Person(); + luke.firstname = "luke"; + luke.id = "id-2"; + + template.save(han); + template.save(luke); + } + + @Test // DATAMONGO-1563 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(null)); + } + + @Test // DATAMONGO-1563 + void updateIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).apply(null)); + } + + @Test // DATAMONGO-1563 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1563 + void findAndModifyOptionsAreRequiredOnSet() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.update(Person.class).apply(new Update()).withOptions(null)); + } + + @Test // DATAMONGO-1563 + void updateFirst() { + + UpdateResult result = template.update(Person.class).apply(new Update().set("firstname", "Han")).first(); + + assertThat(result.getModifiedCount()).isEqualTo(1L); + assertThat(result.getUpsertedId()).isNull(); + } + + @Test // DATAMONGO-1563 + void updateAll() { + + UpdateResult result = template.update(Person.class).apply(new Update().set("firstname", "Han")).all(); + + assertThat(result.getModifiedCount()).isEqualTo(2L); + assertThat(result.getUpsertedId()).isNull(); + } + + @Test // DATAMONGO-1563 + void updateAllMatching() { + + UpdateResult result = template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) + .all(); + + assertThat(result.getModifiedCount()).isEqualTo(1L); + assertThat(result.getUpsertedId()).isNull(); + } + + @Test // DATAMONGO-2416 + void updateAllMatchingCriteria() { + + UpdateResult result = template.update(Person.class).matching(where("id").is(han.getId())) + .apply(new Update().set("firstname", "Han")) + .all(); + + assertThat(result.getModifiedCount()).isEqualTo(1L); + assertThat(result.getUpsertedId()).isNull(); + } + + @Test // DATAMONGO-1563 + void updateWithDifferentDomainClassAndCollection() { + + UpdateResult result = template.update(Jedi.class).inCollection(STAR_WARS) + .matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).all(); + + assertThat(result.getModifiedCount()).isEqualTo(1L); + assertThat(result.getUpsertedId()).isNull(); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1719 + void findAndModifyValue() { + + Person result = template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) + .findAndModifyValue(); + + assertThat(result).isEqualTo(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1563 + void findAndModify() { + + Optional<Person> result = template.update(Person.class).matching(queryHan()) + .apply(new Update().set("firstname", "Han")).findAndModify(); + + 
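+ // FindAndModifyOptions.returnNew defaults to false, so findAndModify() yields the document as it was before the update, while the store already holds the new firstname.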
assertThat(result).contains(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1563 + void findAndModifyWithDifferentDomainTypeAndCollection() { + + Optional<Jedi> result = template.update(Jedi.class).inCollection(STAR_WARS) + .matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).findAndModify(); + + assertThat(result.get()).hasFieldOrPropertyWithValue("name", "han"); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1563 + void findAndModifyWithOptions() { + + Optional<Person> result = template.update(Person.class).matching(queryHan()) + .apply(new Update().set("firstname", "Han")).withOptions(FindAndModifyOptions.options().returnNew(true)) + .findAndModify(); + + assertThat(result.get()).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Han"); + } + + @Test // DATAMONGO-1563 + void upsert() { + + UpdateResult result = template.update(Person.class).matching(query(where("id").is("id-3"))) + .apply(new Update().set("firstname", "Chewbacca")).upsert(); + + assertThat(result.getModifiedCount()).isEqualTo(0L); + assertThat(result.getUpsertedId()).isEqualTo(new BsonString("id-3")); + } + + @Test // DATAMONGO-1827 + void findAndReplaceValue() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Person result = template.update(Person.class).matching(queryHan()).replaceWith(luke).findAndReplaceValue(); + + assertThat(result).isEqualTo(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Luke"); + } + + @Test // DATAMONGO-1827 + void findAndReplace() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Optional<Person> result = template.update(Person.class).matching(queryHan()).replaceWith(luke).findAndReplace(); + + assertThat(result).contains(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Luke"); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithCollection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Optional<Person> result = template.update(Person.class).inCollection(STAR_WARS).matching(queryHan()) + .replaceWith(luke).findAndReplace(); + + assertThat(result).contains(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Luke"); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithOptions() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Person result = template.update(Person.class).matching(queryHan()).replaceWith(luke) + .withOptions(FindAndReplaceOptions.options().returnNew()).findAndReplaceValue(); + + assertThat(result).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + } + + @Test // GH-4463 + void replace() { + + Person luke = new Person(); + luke.id = han.id; + luke.firstname = "Luke"; + + UpdateResult result = template.update(Person.class).matching(queryHan()).replaceWith(luke).replaceFirst(); + assertThat(result.getModifiedCount()).isEqualTo(1L); + } + + @Test // GH-4463 + void replaceWithOptions() { + + Person luke = new Person(); + luke.id = "upserted-luke"; + luke.firstname = "Luke"; + + UpdateResult result = template.update(Person.class).matching(query(where("firstname")
.is("c3p0"))).replaceWith(luke).withOptions(ReplaceOptions.replaceOptions().upsert()).replaceFirst(); + assertThat(result.getUpsertedId()).isEqualTo(new BsonString("upserted-luke")); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithProjection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Jedi result = template.update(Person.class).matching(queryHan()).replaceWith(luke).as(Jedi.class) + .findAndReplaceValue(); + + assertThat(result.getName()).isEqualTo(han.firstname); + } + + private Query queryHan() { + return query(where("id").is(han.getId())); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person { + + @Id String id; + String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ExecutableUpdateOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } + } + + static class Human { + + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ExecutableUpdateOperationSupportTests.Human(id=" + this.getId() + ")"; + } + } + + static class Jedi { + + @Field("firstname") // + String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ExecutableUpdateOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java index 3b80e1ad91..59938113fd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java index 7fd4a093e1..c9f979d2d0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,51 +15,41 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; - -import com.mongodb.BasicDBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link GeoCommandStatistics}. - * + * * @author Oliver Gierke + * @author Mark Paluch * @soundtrack Fruitcake - Jeff Coffin (The Inside of the Outside) */ public class GeoCommandStatisticsUnitTests { - /** - * @see DATAMONGO-1361 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1361 public void rejectsNullCommandResult() { - GeoCommandStatistics.from(null); + assertThatIllegalArgumentException().isThrownBy(() -> GeoCommandStatistics.from(null)); } - /** - * @see DATAMONGO-1361 - */ - @Test + @Test // DATAMONGO-1361 public void fallsBackToNanIfNoAverageDistanceIsAvailable() { - GeoCommandStatistics statistics = GeoCommandStatistics.from(new BasicDBObject("stats", null)); - assertThat(statistics.getAverageDistance(), is(Double.NaN)); + GeoCommandStatistics statistics = GeoCommandStatistics.from(new Document("stats", null)); + assertThat(statistics.getAverageDistance()).isNaN(); - statistics = GeoCommandStatistics.from(new BasicDBObject("stats", new BasicDBObject())); - assertThat(statistics.getAverageDistance(), is(Double.NaN)); + statistics = GeoCommandStatistics.from(new Document("stats", new Document())); + assertThat(statistics.getAverageDistance()).isNaN(); } - /** - * @see DATAMONGO-1361 - */ - @Test + @Test // DATAMONGO-1361 public void returnsAverageDistanceIfPresent() { GeoCommandStatistics statistics = GeoCommandStatistics - .from(new BasicDBObject("stats", new BasicDBObject("avgDistance", 1.5))); + .from(new Document("stats", new Document("avgDistance", 1.5))); - assertThat(statistics.getAverageDistance(), is(1.5)); + assertThat(statistics.getAverageDistance()).isEqualTo(1.5); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java index 6f6c846659..004bda1544 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,10 +19,12 @@ /** * Server application than can be run as an app or unit test. - * + * * @author Mark Pollack * @author Oliver Gierke + * @deprecated since 4.5. */ +@Deprecated(since = "4.5", forRemoval = true) public class JmxServer { public static void main(String[] args) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java new file mode 100644 index 0000000000..3afcef93d0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java @@ -0,0 +1,356 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; + +import reactor.test.StepVerifier; + +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MongoTemplateExtension.class) +public class JsonSchemaQueryTests { + + public static final String DATABASE_NAME = "json-schema-query-tests"; + + static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + @Template(database = DATABASE_NAME, initialEntitySet = Person.class) // + static MongoTestTemplate template; + + Person jellyBelly, roseSpringHeart, kazmardBoombub; + + @BeforeEach + public void setUp() { + + template.flush(); + + jellyBelly = new Person(); + jellyBelly.id = "1"; + jellyBelly.name = "Jelly Belly"; + jellyBelly.gender = Gender.PIXY; + jellyBelly.address = new Address(); + jellyBelly.address.city = "Candy Hill"; + jellyBelly.address.street = "Apple Mint Street"; + jellyBelly.value = 42; + + roseSpringHeart = new Person(); + roseSpringHeart.id = "2"; + roseSpringHeart.name = "Rose SpringHeart"; + roseSpringHeart.gender = Gender.UNICORN; + roseSpringHeart.address = new Address(); + roseSpringHeart.address.city = "Rainbow Valley"; + roseSpringHeart.address.street = "Twinkle Ave."; + roseSpringHeart.value = 42L; + + kazmardBoombub = new Person(); + kazmardBoombub.id = "3"; + kazmardBoombub.name = "Kazmard Boombub"; + kazmardBoombub.gender = Gender.GOBLIN; + kazmardBoombub.value = "green"; + + template.save(jellyBelly); + template.save(roseSpringHeart); + template.save(kazmardBoombub); + + } + + @Test // DATAMONGO-1835 + public void createsWorkingSchema() { + + try { + template.dropCollection("person_schema"); + } catch (Exception e) {} + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(template.getConverter()).createSchemaFor(Person.class); + + template.createCollection("person_schema", CollectionOptions.empty().schema(schema)); + } + + @Test // DATAMONGO-1835 + public void queriesBooleanType() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties(JsonSchemaProperty.bool("alive")).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)).hasSize(3); + assertThat(template.find(query(Criteria.where("alive").type(Type.BOOLEAN)), Person.class)).hasSize(3); + } + + @Test // DATAMONGO-1835 + public void findsDocumentsWithRequiredFieldsCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("address").build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + 
.containsExactlyInAnyOrder(jellyBelly, roseSpringHeart); + } + + @Test // DATAMONGO-1835 + public void findsDocumentsWithRequiredFieldsReactively() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("address").build(); + + new ReactiveMongoTemplate(reactiveClient, DATABASE_NAME) + .find(query(matchingDocumentStructure(schema)), Person.class).as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); + } + + @Test // DATAMONGO-1835 + public void findsDocumentsWithBsonFieldTypesCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().property(int32("value")).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + .containsExactlyInAnyOrder(jellyBelly); + } + + @Test // DATAMONGO-1835 + public void findsDocumentsWithJsonFieldTypesCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().property(number("value")).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + .containsExactlyInAnyOrder(jellyBelly, roseSpringHeart); + } + + @Test // DATAMONGO-1835 + public void combineSchemaWithOtherCriteria() { + + MongoJsonSchema schema = MongoJsonSchema.builder().property(number("value")).build(); + + assertThat( + template.find(query(matchingDocumentStructure(schema).and("name").is(roseSpringHeart.name)), Person.class)) + .containsExactlyInAnyOrder(roseSpringHeart); + } + + @Test // DATAMONGO-1835 + public void usesMappedFieldNameForRequiredProperties() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + .containsExactlyInAnyOrder(jellyBelly, roseSpringHeart, kazmardBoombub); + } + + @Test // DATAMONGO-1835 + public void usesMappedFieldNameForProperties() { + + MongoJsonSchema schema = MongoJsonSchema.builder().property(string("name").matching("^R.*")).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + .containsExactlyInAnyOrder(roseSpringHeart); + } + + @Test // DATAMONGO-1835 + public void mapsNestedFieldName() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("address") // + .property(object("address").properties(string("street").matching("^Apple.*"))).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + .containsExactlyInAnyOrder(jellyBelly); + } + + @Test // DATAMONGO-1835 + public void mapsEnumValuesCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() + .property(untyped("gender").possibleValues(Gender.PIXY, Gender.GOBLIN)).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)) + .containsExactlyInAnyOrder(jellyBelly, kazmardBoombub); + } + + @Test // DATAMONGO-1835 + public void useTypeOperatorOnFieldLevel() { + assertThat(template.find(query(where("value").type(Type.intType())), Person.class)).containsExactly(jellyBelly); + } + + @Test // DATAMONGO-1835 + public void useTypeOperatorWithMultipleTypesOnFieldLevel() { + + assertThat(template.find(query(where("value").type(Type.intType(), Type.stringType())), Person.class)) + .containsExactlyInAnyOrder(jellyBelly, kazmardBoombub); + } + + @Test // DATAMONGO-1835 + public void findsWithSchemaReturningRawDocument() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("address").build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Document.class, + template.getCollectionName(Person.class))).hasSize(2); 
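+ // Reading into the raw org.bson.Document type bypasses entity mapping; the $jsonSchema filter is still evaluated server-side, hence the same two matches as above.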
+ } + + static class Person { + + @Id String id; + + @Field("full_name") // + String name; + Gender gender; + Address address; + Object value; + + boolean alive; + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Gender getGender() { + return this.gender; + } + + public Address getAddress() { + return this.address; + } + + public Object getValue() { + return this.value; + } + + public boolean isAlive() { + return this.alive; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setGender(Gender gender) { + this.gender = gender; + } + + public void setAddress(Address address) { + this.address = address; + } + + public void setValue(Object value) { + this.value = value; + } + + public void setAlive(boolean alive) { + this.alive = alive; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return alive == person.alive && Objects.equals(id, person.id) && Objects.equals(name, person.name) + && gender == person.gender && Objects.equals(address, person.address) && Objects.equals(value, person.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, gender, address, value, alive); + } + + public String toString() { + return "JsonSchemaQueryTests.Person(id=" + this.getId() + ", name=" + this.getName() + ", gender=" + + this.getGender() + ", address=" + this.getAddress() + ", value=" + this.getValue() + ", alive=" + + this.isAlive() + ")"; + } + } + + static class Address { + + String city; + + @Field("str") // + String street; + + public String getCity() { + return this.city; + } + + public String getStreet() { + return this.street; + } + + public void setCity(String city) { + this.city = city; + } + + public void setStreet(String street) { + this.street = street; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(city, address.city) && Objects.equals(street, address.street); + } + + @Override + public int hashCode() { + return Objects.hash(city, street); + } + + public String toString() { + return "JsonSchemaQueryTests.Address(city=" + this.getCity() + ", street=" + this.getStreet() + ")"; + } + } + + static enum Gender { + PIXY, UNICORN, GOBLIN + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java new file mode 100644 index 0000000000..adaecad5da --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java @@ -0,0 +1,765 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import static org.springframework.data.mongodb.test.util.Assertions.assertThatExceptionOfType; + +import java.util.Collections; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.bson.BsonDocument; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.mapping.RangeEncrypted; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.spel.spi.Function; + +/** + * Unit tests for {@link MappingMongoJsonSchemaCreator}. 
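+ * <p> + * The creator derives MongoDB {@code $jsonSchema} documents from mapping metadata; the CSFLE and queryable-encryption tests additionally narrow the schema to {@code @Encrypted} properties via {@code MongoJsonSchemaCreator.encryptedOnly()}.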
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +class MappingMongoJsonSchemaCreatorUnitTests { + + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private MappingMongoJsonSchemaCreator schemaCreator; + + @BeforeEach + void setUp() { + + mappingContext = new MongoMappingContext(); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + schemaCreator = new MappingMongoJsonSchemaCreator(converter); + } + + @Test // DATAMONGO-1849 + void simpleTypes() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(VariousFieldTypes.class); + + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(Document.parse(VARIOUS_FIELD_TYPES)); + } + + @Test // DATAMONGO-1849 + void withRemappedIdType() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(WithExplicitMongoIdTypeMapping.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(WITH_EXPLICIT_MONGO_ID_TYPE_MAPPING); + } + + @Test // DATAMONGO-1849 + void cyclic() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(Cyclic.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(CYCLIC); + } + + @Test // DATAMONGO-1849 + void converterRegistered() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + MongoCustomConversions mcc = new MongoCustomConversions( + Collections.singletonList(SimpleToDocumentConverter.INSTANCE)); + converter.setCustomConversions(mcc); + converter.afterPropertiesSet(); + + schemaCreator = new MappingMongoJsonSchemaCreator(converter); + + MongoJsonSchema schema = schemaCreator.createSchemaFor(WithNestedDomainType.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo( + "{ 'type' : 'object', 'properties' : { '_id' : { 'type' : 'object' }, 'nested' : { 'type' : 'object' } } }"); + } + + @Test // GH-3800 + void csfle/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Patient.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema.toBsonDocument()).isEqualTo(BsonDocument.parse(PATIENT)); + } + + @Test // GH-3800 + void csfleCyclic/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Cyclic.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema).isNotNull(); + } + + @Test // GH-3800 + void csfleWithKeyFromProperties() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromProperty.class); + + assertThat(schema.schemaDocument().toBsonDocument()).isEqualTo(BsonDocument.parse(ENC_FROM_PROPERTY_SCHEMA)); + } + + @Test // GH-3800 + void csfleWithKeyFromMethod() { + + GenericApplicationContext applicationContext = 
new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromMethod.class); + + assertThat(schema.schemaDocument().toBsonDocument()).isEqualTo(BsonDocument.parse(ENC_FROM_METHOD_SCHEMA)); + } + + // --> Combining Schemas and Properties + + @Test // GH-3870 + void shouldAllowToSpecifyPolymorphicTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("objectValue").withTypes(A.class, B.class).createSchemaFor(SomeTestObject.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema) // + .containsEntry("properties.objectValue.properties.aNonEncrypted", new Document("type", "string")) // + .containsEntry("properties.objectValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.objectValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void shouldAllowToSpecifyNestedPolymorphicTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("value.objectValue").withTypes(A.class, B.class) // + .createSchemaFor(WrapperAroundA.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.value.properties.objectValue.properties.aNonEncrypted", + new Document("type", "string")) // + .containsEntry("properties.value.properties.objectValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.value.properties.objectValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + + } + + @Test // GH-3870 + void shouldAllowToSpecifyGenericTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("genericValue").withTypes(A.class, B.class).createSchemaFor(SomeTestObject.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.genericValue.properties.aNonEncrypted", new Document("type", "string")) // + .containsEntry("properties.genericValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.genericValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void encryptionFilterShouldCaptureSpecifiedPolymorphicTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("objectValue").withTypes(A.class, B.class) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(SomeTestObject.class); + + assertThat(schema.schemaDocument()) // + .doesNotContainKey("properties.objectValue.properties.aNonEncrypted") // + .containsEntry("properties.objectValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.objectValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void allowsToCreateCombinedSchemaWhenPropertiesDoNotOverlap() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create().mergedSchemaFor(A.class, B.class, C.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.aNonEncrypted", new Document("type", "string")) // + .containsEntry("properties.aEncrypted", ENCRYPTED_BSON_STRING) // + 
.containsEntry("properties.bEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.cEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void combinedSchemaFailsOnPropertyClash() { + + MongoJsonSchema schemaA = MongoJsonSchemaCreator.create() // + .createSchemaFor(A.class); + MongoJsonSchema schemaAButDifferent = MongoJsonSchemaCreator.create() // + .createSchemaFor(PropertyClashWithA.class); + + MongoJsonSchema targetSchema = schemaA.mergeWith(schemaAButDifferent); + + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(targetSchema::schemaDocument); + } + + @Test // GH-3870 + void combinedSchemaAllowsToCompensateErrors() { + + MongoJsonSchema schemaA = MongoJsonSchemaCreator.create() // + .createSchemaFor(A.class); + MongoJsonSchema schemaAButDifferent = MongoJsonSchemaCreator.create() // + .createSchemaFor(PropertyClashWithA.class); + + MongoJsonSchema schema = schemaA.mergeWith(Collections.singleton(schemaAButDifferent), + (path, a, b) -> Resolution.ofValue(path, "object")); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.aNonEncrypted", new Document("type", "object")); + } + + @Test // GH-3870 + void bsonTypeVsJustTypeValueResolutionIsDoneByDefault() { + + MongoJsonSchema schemaUsingType = MongoJsonSchema.builder() + .property(JsonSchemaProperty.named("value").ofType(Type.jsonTypeOf("string"))).build(); + MongoJsonSchema schemaUsingBsonType = MongoJsonSchema.builder() + .property(JsonSchemaProperty.named("value").ofType(Type.bsonTypeOf("string"))).build(); + + MongoJsonSchema targetSchema = MongoJsonSchema.merge(schemaUsingType, schemaUsingBsonType); + + assertThat(targetSchema.schemaDocument()) // + .containsEntry("properties.value", new Document("type", "string")); + } + + @Test // GH-4454 + void wrapEncryptedEntityTypeLikeProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(WithEncryptedEntityLikeProperty.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.domainTypeValue", Document.parse("{'encrypt': {'bsonType': 'object' } }")); + } + + @Test // GH-4185 + void qeRangeEncryptedProperties() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(QueryableEncryptedRoot.class); + + String expectedForInt = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'int', + 'queries' : [ + { 'queryType' : 'range', 'contention' : { '$numberLong' : '0' }, 'max' : 200, 'min' : 0, 'sparsity' : 1, 'trimFactor' : 1 } + ] + }}"""; + + String expectedForRootLong = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'long', + 'queries' : [ + { 'queryType' : 'range', contention : { '$numberLong' : '0' }, 'sparsity' : 0 } + ] + }}"""; + + String expectedForNestedLong = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'long', + 'queries' : [ + { 'queryType' : 'range', contention : { '$numberLong' : '1' }, 'max' : { '$numberLong' : '1' }, 'min' : { '$numberLong' : '-1' }, 'sparsity' : 1, 'trimFactor' : 1 } + ] + }}"""; + + assertThat(schema.schemaDocument()) // + .doesNotContainKey("properties.unencrypted") // + .containsEntry("properties.encryptedInt", Document.parse(expectedForInt)) + .containsEntry("properties.encryptedLong", Document.parse(expectedForRootLong)) + .containsEntry("properties.nested.properties.encrypted_long", 
Document.parse(expectedForNestedLong)); + + } + + // --> TYPES AND JSON + + // --> ENUM + + private static final String JUST_SOME_ENUM = "{ 'type' : 'string', 'enum' : ['ONE', 'TWO'] }"; + + enum JustSomeEnum { + ONE, TWO + } + + // --> VARIOUS FIELD TYPES + + static final String VARIOUS_FIELD_TYPES = "" + // + "{" + // + " 'type' : 'object'," + // + " 'required' : ['primitiveInt']," + // + " 'properties' : {" + // + " 'id' : { 'type' : 'string' }," + // + " 're-named-property' : { 'type' : 'string' }," + // + " 'retypedProperty' : { 'bsonType' : 'javascript' }," + // + " 'primitiveInt' : { 'bsonType' : 'int' }," + // + " 'booleanProperty' : { 'type' : 'boolean' }," + // + " 'longProperty' : { 'bsonType' : 'long' }," + // + " 'intProperty' : { 'bsonType' : 'int' }," + // + " 'dateProperty' : { 'bsonType' : 'date' }," + // + " 'arrayProperty' : { 'type' : 'array' }," + // + " 'binaryDataProperty' : { 'bsonType' : 'binData' }," + // + " 'collectionProperty' : { 'type' : 'array' }," + // + " 'simpleTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'string' } }," + // + " 'complexTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'object', 'properties' : { 'field' : { 'type' : 'string'} } } }" + + // + " 'enumTypeCollectionProperty' : { 'type' : 'array', 'items' : " + JUST_SOME_ENUM + " }" + // + " 'mapProperty' : { 'type' : 'object' }," + // + " 'objectProperty' : { 'type' : 'object' }," + // + " 'enumProperty' : " + JUST_SOME_ENUM + " }" + // + "}"; + + static class VariousFieldTypes { + + @Field("id") String id; + @Field("re-named-property") String renamedProperty; + @Field(targetType = FieldType.SCRIPT) String retypedProperty; + @Transient String transientProperty; + int primitiveInt; + Boolean booleanProperty; + Long longProperty; + Integer intProperty; + Date dateProperty; + Object[] arrayProperty; + byte[] binaryDataProperty; + List<Object> collectionProperty; + List<String> simpleTypeCollectionProperty; + List<SomeDomainType> complexTypeCollectionProperty; + List<JustSomeEnum> enumTypeCollectionProperty; + Map<String, String> mapProperty; + Object objectProperty; + JustSomeEnum enumProperty; + } + + static class SomeDomainType { + String field; + } + + // --> NESTED DOMAIN TYPE + + static final String WITH_NESTED_DOMAIN_TYPE = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " '_id' : { 'type' : 'object' }," + // + " 'nested' : " + VARIOUS_FIELD_TYPES + // + " }" + // + "}"; + + static class WithNestedDomainType { + + String id; + VariousFieldTypes nested; + } + + // --> EXPLICIT MONGO_ID MAPPING + + final String WITH_EXPLICIT_MONGO_ID_TYPE_MAPPING = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " '_id' : { 'bsonType' : 'objectId' }," + // + " 'nested' : " + VARIOUS_FIELD_TYPES + // + " }" + // + "}"; + + static class WithExplicitMongoIdTypeMapping { + + @MongoId(targetType = FieldType.OBJECT_ID) String id; + VariousFieldTypes nested; + } + + // --> OH NO - A CYCLIC PROPERTY RELATIONSHIP 😱 + + static final String CYCLIC_FIN = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'root' : { 'type' : 'string' }" + // + " 'cyclic' : { 'type' : 'object' }" + // + " }" + // + "}"; + + static final String CYCLIC_2 = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'nested2' : { 'type' : 'string' }," + // + " 'cyclic' : " + CYCLIC_FIN + // + " }" + // + "}"; + + class Cyclic2 { + + String nested2; + Cyclic cyclic; + } + + static final String CYCLIC_1 = "" + // + "{" + // + " 'type' : 'object'," + // + " "
'properties' : {" + // + " 'nested1' : { 'type' : 'string' }," + // + " 'cyclic2' : " + CYCLIC_2 + // + " }" + // + "}"; + + class Cyclic1 { + + String nested1; + Cyclic2 cyclic2; + } + + static final String CYCLIC = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'root' : { 'type' : 'string' }," + // + " 'cyclic1' : " + CYCLIC_1 + // + " }" + // + "}"; + + class Cyclic { + + String root; + Cyclic1 cyclic1; + } + + @WritingConverter + enum SimpleToDocumentConverter + implements org.springframework.core.convert.converter.Converter<VariousFieldTypes, org.bson.Document> { + INSTANCE; + + @Override + public org.bson.Document convert(VariousFieldTypes source) { + return null; + } + } + + static final String PATIENT = "{" + // + " 'type': 'object'," + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': 'xKVup8B1Q+CkHaVRx+qa+g=='," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'properties': {" + // + " 'ssn': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }," + // + " 'bloodType': {" + // + " 'encrypt': {" + // + " 'bsonType': 'string'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'medicalRecords': {" + // + " 'encrypt': {" + // + " 'bsonType': 'array'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'insurance': {" + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==") + static class Patient { + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer ssn; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + String bloodType; + + String keyAltNameField; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + List<Map<String, String>> medicalRecords; + + Insurance insurance; + } + + static class Insurance { + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_PROPERTY_ENTITY_KEY = "C5a5aMB7Ttq4wSJTFeRn8g=="; + static final String ENC_FROM_PROPERTY_PROPERTY_KEY = "Mw6mdTVPQfm4quqSCLVB3g=="; + static final String ENC_FROM_PROPERTY_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_PROPERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{entityKey}") + static class EncryptionMetadataFromProperty { + + @Encrypted(keyId = "#{propertyKey}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_METHOD_ENTITY_KEY
= "4fPYFM9qSgyRAjgQ2u+IMQ=="; + static final String ENC_FROM_METHOD_PROPERTY_KEY = "+idiseKwTVCJfSKC3iUeYQ=="; + static final String ENC_FROM_METHOD_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_METHOD_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_METHOD_PROPERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}") + static class EncryptionMetadataFromMethod { + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + public static class EncryptionExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + @Override + public Map<String, Object> getProperties() { + + Map<String, Object> properties = new LinkedHashMap<>(); + properties.put("entityKey", ENC_FROM_PROPERTY_ENTITY_KEY); + properties.put("propertyKey", ENC_FROM_PROPERTY_PROPERTY_KEY); + return properties; + } + + @Override + public Map<String, Function> getFunctions() { + try { + return Collections.singletonMap("keyId", + new Function(EncryptionExtension.class.getMethod("keyId", String.class), this)); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + return Collections.emptyMap(); + } + + public String keyId(String target) { + + if (target.equals("EncryptionMetadataFromMethod")) { + return ENC_FROM_METHOD_ENTITY_KEY; + } + + if (target.equals("EncryptionMetadataFromMethod.policyNumber")) { + return ENC_FROM_METHOD_PROPERTY_KEY; + } + + return "xKVup8B1Q+CkHaVRx+qa+g=="; + } + } + + private static final Document ENCRYPTED_BSON_STRING = Document + .parse("{'encrypt': { 'bsonType': 'string','algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'} }"); + + static class SomeTestObject<T> { + T genericValue; + Object objectValue; + } + + static class RootWithGenerics<S, T> { + S sValue; + T tValue; + } + + static class SubWithFixedGeneric extends RootWithGenerics { + + } + + static class Concrete extends SubWithFixedGeneric { + + } + + static class WrapperAroundA { + + SomeTestObject value; + } + + static class A { + + String aNonEncrypted; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String aEncrypted; + } + + static class B { + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String bEncrypted; + } + + static class C extends A { + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String cEncrypted; + } + + static class PropertyClashWithA { + Integer aNonEncrypted; + } + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + static class WithEncryptedEntityLikeProperty { + @Encrypted SomeDomainType domainTypeValue; + } + + static class QueryableEncryptedRoot { + + String unencrypted; + + @RangeEncrypted(contentionFactor = 0L, rangeOptions = "{ 'min': 0, 'max': 200, 'trimFactor': 1, 'sparsity': 1}") // + Integer encryptedInt; + + @Encrypted(algorithm = "Range") + @Queryable(contentionFactor = 0L, queryType = "range", queryAttributes = "{ 
'sparsity': 0 }") // + Long encryptedLong; + + NestedRangeEncrypted nested; + + } + + static class NestedRangeEncrypted { + + @Field("encrypted_long") + @RangeEncrypted(contentionFactor = 1L, + rangeOptions = "{ 'min': { '$numberLong' : '-1' }, 'max': { '$numberLong' : '1' }, 'trimFactor': 1, 'sparsity': 1}") // + Long encryptedLong; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Message.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Message.java deleted file mode 100644 index dc83a87ac3..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Message.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import java.util.Date; - -import org.bson.types.ObjectId; - -public class Message { - - private ObjectId id; - - private String text; - - private Date timestamp; - - public Message() { - } - - public Message(String text) { - super(); - this.text = text; - this.timestamp = new Date(); - } - - public Message(String text, Date timestamp) { - super(); - this.text = text; - this.timestamp = timestamp; - } - - public ObjectId getId() { - return id; - } - - public void setId(ObjectId id) { - this.id = id; - } - - public String getText() { - return text; - } - - public void setText(String text) { - this.text = text; - } - - public Date getTimestamp() { - return timestamp; - } - - public void setTimestamp(Date timestamp) { - this.timestamp = timestamp; - } - - @Override - public String toString() { - return "Message [id=" + id + ", text=" + text + ", timestamp=" + timestamp + "]"; - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java index 924742ee0b..f8a5c1128a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java @@ -1,60 +1,58 @@ -/* - * Copyright 2002-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import com.mongodb.CommandResult; -import com.mongodb.DB; -import com.mongodb.Mongo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * This test class assumes that you are already running the MongoDB server. - * - * @author Mark Pollack - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") -public class MongoAdminIntegrationTests { - +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.client.MongoClient; + +/** + * This test class assumes that you are already running the MongoDB server. + * + * @author Mark Pollack + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:infrastructure.xml") +public class MongoAdminIntegrationTests { + private static final Log logger = LogFactory.getLog(MongoAdminIntegrationTests.class); - - @SuppressWarnings("unused") - private DB testAdminDb; - - @Autowired - Mongo mongo; - - @Before - public void setUp() { - mongo.getDB("testAdminDb").dropDatabase(); - testAdminDb = mongo.getDB("testAdminDb"); - - } - - @Test - public void serverStats() { - // CommandResult result = testAdminDb.getStats(); - CommandResult result = mongo.getDB("admin").command("serverStatus"); - logger.info("stats = " + result); - } -} \ No newline at end of file + + @Autowired MongoClient mongoClient; + + MongoAdmin mongoAdmin; + + @Before + public void setUp() { + mongoAdmin = new MongoAdmin(mongoClient); + } + + @Test + public void serverStats() { + logger.info("stats = " + mongoAdmin.getServerStatus()); + } + + @Test + public void databaseStats() { + logger.info(mongoAdmin.getDatabaseStats("testAdminDb")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientFactoryBeanUnitTests.java new file mode 100644 index 0000000000..868190db5d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientFactoryBeanUnitTests.java @@ -0,0 +1,92 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; + +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; + +/** + * Unit tests for {@link MongoClientFactoryBean}. + * + * @author Christoph Strobl + */ +class MongoClientFactoryBeanUnitTests { + + static final String CONNECTION_STRING_STRING = "mongodb://db1.example.net:27017,db2.example.net:2500/?replicaSet=test&connectTimeoutMS=300000"; + static final ConnectionString CONNECTION_STRING = new ConnectionString(CONNECTION_STRING_STRING); + + @Test // DATAMONGO-2427 + void connectionStringParametersNotOverriddenByDefaults() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setConnectionString(CONNECTION_STRING); + factoryBean.setMongoClientSettings(MongoClientSettings.builder().build()); + + MongoClientSettings settings = factoryBean.computeClientSetting(); + + assertThat(settings.getClusterSettings().getRequiredReplicaSetName()).isEqualTo("test"); + assertThat(settings.getSocketSettings().getConnectTimeout(TimeUnit.MILLISECONDS)).isEqualTo(300000); + assertThat(settings.getClusterSettings().getHosts()).hasSize(2); + } + + @Test // DATAMONGO-2427 + void hostPortParametersNotOverriddenByDefaults() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setPort(2500); + factoryBean.setHost("db2.example.net"); + factoryBean.setReplicaSet("rs0"); + factoryBean.setMongoClientSettings(MongoClientSettings.builder().build()); + + MongoClientSettings settings = factoryBean.computeClientSetting(); + + assertThat(settings.getClusterSettings().getRequiredReplicaSetName()).isEqualTo("rs0"); + assertThat(settings.getClusterSettings().getHosts()).containsExactly(new ServerAddress("db2.example.net", 2500)); + } + + @Test // DATAMONGO-2427 + void explicitSettingsOverrideConnectionStringOnes() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setConnectionString(CONNECTION_STRING); + factoryBean.setMongoClientSettings( + MongoClientSettings.builder().applyToClusterSettings(it -> it.requiredReplicaSetName("rs0")) + .applyToSocketSettings(it -> it.connectTimeout(100, TimeUnit.MILLISECONDS)).build()); + + MongoClientSettings settings = factoryBean.computeClientSetting(); + + assertThat(settings.getClusterSettings().getRequiredReplicaSetName()).isEqualTo("rs0"); + assertThat(settings.getSocketSettings().getConnectTimeout(TimeUnit.MILLISECONDS)).isEqualTo(100); + assertThat(settings.getClusterSettings().getHosts()).hasSize(2); + } + + @Test // DATAMONGO-2427 + void hostAndPortPlusConnectionStringError() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setConnectionString(CONNECTION_STRING); + factoryBean.setHost("localhost"); + factoryBean.setPort(27017); + 
assertThatExceptionOfType(IllegalStateException.class).isThrownBy(factoryBean::createInstance); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBeanIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanIntegrationTests.java similarity index 71% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBeanIntegrationTests.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanIntegrationTests.java index 7bd5388d56..7c6e33ec5e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBeanIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,10 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.data.mongodb.config.ReadPreferencePropertyEditor; @@ -27,19 +27,16 @@ import com.mongodb.ReadPreference; /** - * Integration tests for {@link MongoClientOptionsFactoryBean}. - * + * Integration tests for {@link MongoClientSettingsFactoryBean}. 
+ * * @author Christoph Strobl */ -public class MongoClientOptionsFactoryBeanIntegrationTests { +public class MongoClientSettingsFactoryBeanIntegrationTests { - /** - * @see DATAMONGO-1158 - */ - @Test + @Test // DATAMONGO-1158 public void convertsReadPreferenceConcernCorrectly() { - RootBeanDefinition definition = new RootBeanDefinition(MongoClientOptionsFactoryBean.class); + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); definition.getPropertyValues().addPropertyValue("readPreference", "NEAREST"); DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); @@ -47,7 +44,7 @@ public void convertsReadPreferenceConcernCorrectly() { factory.registerBeanDefinition("factory", definition); - MongoClientOptionsFactoryBean bean = factory.getBean("&factory", MongoClientOptionsFactoryBean.class); - assertThat(ReflectionTestUtils.getField(bean, "readPreference"), is((Object) ReadPreference.nearest())); + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "readPreference")).isEqualTo((Object) ReadPreference.nearest()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanUnitTests.java new file mode 100644 index 0000000000..2ddaca7f24 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanUnitTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.data.mongodb.config.ReadConcernPropertyEditor; +import org.springframework.data.mongodb.config.ReadPreferencePropertyEditor; +import org.springframework.data.mongodb.config.UUidRepresentationPropertyEditor; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; + +/** + * Unit tests for {@link MongoClientSettingsFactoryBean}. 
+ * + * @author Christoph Strobl + */ +public class MongoClientSettingsFactoryBeanUnitTests { + + @Test // DATAMONGO-2384 + public void convertsReadPreferenceConcernCorrectly() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("readPreference", "NEAREST"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerCustomEditor(ReadPreference.class, ReadPreferencePropertyEditor.class); + + factory.registerBeanDefinition("factory", definition); + + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "readPreference")).isEqualTo(ReadPreference.nearest()); + } + + @Test // DATAMONGO-2384 + public void convertsReadConcernConcernCorrectly() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("readConcern", "MAJORITY"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerCustomEditor(ReadPreference.class, ReadConcernPropertyEditor.class); + + factory.registerBeanDefinition("factory", definition); + + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "readConcern")).isEqualTo(ReadConcern.MAJORITY); + } + + @Test // DATAMONGO-2427 + public void convertsUuidRepresentationCorrectly() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("uUidRepresentation", "STANDARD"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerCustomEditor(ReadPreference.class, UUidRepresentationPropertyEditor.class); + + factory.registerBeanDefinition("factory", definition); + + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "uUidRepresentation")).isEqualTo(UuidRepresentation.STANDARD); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoDbUtilsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoDbUtilsIntegrationTests.java deleted file mode 100644 index f2032acc57..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoDbUtilsIntegrationTests.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright 2012-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; - -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.springframework.dao.DataAccessException; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.scheduling.concurrent.ThreadPoolExecutorFactoryBean; - -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.MongoException; - -/** - * Integration tests for {@link MongoDbUtils}. - * - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -public class MongoDbUtilsIntegrationTests { - - static final String AUTHENTICATION_DATABASE_NAME = "admin"; - static final String DATABASE_NAME = "dbAuthTests"; - static final UserCredentials CREDENTIALS = new UserCredentials("admin", "admin"); - - static Mongo mongo; - static MongoTemplate template; - static ThreadPoolExecutorFactoryBean factory; - static ExecutorService service; - - Exception exception; - - @BeforeClass - public static void setUp() throws Exception { - - mongo = new MongoClient(); - template = new MongoTemplate(mongo, DATABASE_NAME); - - factory = new ThreadPoolExecutorFactoryBean(); - factory.setCorePoolSize(2); - factory.setMaxPoolSize(10); - factory.setWaitForTasksToCompleteOnShutdown(true); - factory.afterPropertiesSet(); - - service = factory.getObject(); - - assumeFalse(isMongo3Driver()); - } - - @AfterClass - public static void tearDown() { - - factory.destroy(); - - // Remove test database - - template.execute(new DbCallback<Void>() { - public Void doInDB(DB db) throws MongoException, DataAccessException { - db.dropDatabase(); - return null; - } - }); - } - - /** - * @see DATAMONGO-585 - */ - @Test - public void authenticatesCorrectlyInMultithreadedEnvironment() throws Exception { - - // Create sample user - template.execute(new DbCallback<Void>() { - public Void doInDB(DB db) throws MongoException, DataAccessException { - - ReflectiveDbInvoker.addUser(db, "admin", "admin".toCharArray()); - return null; - } - }); - - Callable<Void> callable = new Callable<Void>() { - public Void call() throws Exception { - - try { - DB db = MongoDbUtils.getDB(mongo, DATABASE_NAME, CREDENTIALS); - assertThat(db, is(notNullValue())); - } catch (Exception o_O) { - MongoDbUtilsIntegrationTests.this.exception = o_O; - } - - return null; - } - }; - - List<Callable<Void>> callables = new ArrayList<Callable<Void>>(); - - for (int i = 0; i < 10; i++) { - callables.add(callable); - } - - service.invokeAll(callables); - - if (exception != null) { - fail("Exception occurred!"
+ exception); - } - } - - /** - * @see DATAMONGO-789 - */ - @Test - public void authenticatesCorrectlyWithAuthenticationDB() throws Exception { - - // Create sample user - template.execute(new DbCallback<Void>() { - public Void doInDB(DB db) throws MongoException, DataAccessException { - - ReflectiveDbInvoker.addUser(db.getSisterDB("admin"), "admin", "admin".toCharArray()); - return null; - } - }); - - Callable<Void> callable = new Callable<Void>() { - public Void call() throws Exception { - - try { - DB db = MongoDbUtils.getDB(mongo, DATABASE_NAME, CREDENTIALS, AUTHENTICATION_DATABASE_NAME); - assertThat(db, is(notNullValue())); - } catch (Exception o_O) { - MongoDbUtilsIntegrationTests.this.exception = o_O; - } - - return null; - } - }; - - List<Callable<Void>> callables = new ArrayList<Callable<Void>>(); - - for (int i = 0; i < 10; i++) { - callables.add(callable); - } - - service.invokeAll(callables); - - if (exception != null) { - fail("Exception occurred!" + exception); - } - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoDbUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoDbUtilsUnitTests.java deleted file mode 100644 index b7cf04196c..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoDbUtilsUnitTests.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright 2012-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.mockito.Matchers.*; -import static org.mockito.Mockito.*; - -import java.util.List; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.CannotGetMongoDbConnectionException; -import org.springframework.data.mongodb.util.MongoClientVersion; -import org.springframework.transaction.support.TransactionSynchronization; -import org.springframework.transaction.support.TransactionSynchronizationManager; -import org.springframework.transaction.support.TransactionSynchronizationUtils; - -import com.mongodb.DB; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; - -/** - * Unit tests for {@link MongoDbUtils}.
- * - * @author Oliver Gierke - * @author Randy Watler - * @author Christoph Strobl - */ -@RunWith(MockitoJUnitRunner.class) -public class MongoDbUtilsUnitTests { - - @Mock Mongo mongo; - @Mock MongoClient mongoClientMock; - @Mock DB dbMock; - - @Before - public void setUp() throws Exception { - - when(mongo.getDB(anyString())).thenReturn(dbMock).thenReturn(mock(DB.class)); - when(mongoClientMock.getDB(anyString())).thenReturn(dbMock); - - TransactionSynchronizationManager.initSynchronization(); - } - - @After - public void tearDown() { - - for (Object key : TransactionSynchronizationManager.getResourceMap().keySet()) { - TransactionSynchronizationManager.unbindResource(key); - } - - TransactionSynchronizationManager.clearSynchronization(); - } - - @Test - public void returnsNewInstanceForDifferentDatabaseName() { - - DB first = MongoDbUtils.getDB(mongo, "first"); - DB second = MongoDbUtils.getDB(mongo, "second"); - assertThat(second, is(not(first))); - } - - @Test - public void returnsSameInstanceForSameDatabaseName() { - - DB first = MongoDbUtils.getDB(mongo, "first"); - assertThat(first, is(notNullValue())); - assertThat(MongoDbUtils.getDB(mongo, "first"), is(sameInstance(first))); - } - - /** - * @see DATAMONGO-737 - */ - @Test - public void handlesTransactionSynchronizationLifecycle() { - - // ensure transaction synchronization manager has no registered - // transaction synchronizations or bound resources at start of test - assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(true)); - assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true)); - - // access database for one mongo instance, (registers transaction - // synchronization and binds transaction resource) - MongoDbUtils.getDB(mongo, "first"); - - // ensure transaction synchronization manager has registered - // transaction synchronizations and bound resources - assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(false)); - assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(false)); - - // simulate transaction completion, (unbinds transaction resource) - try { - simulateTransactionCompletion(); - } catch (Exception e) { - fail("Unexpected exception thrown during transaction completion: " + e); - } - - // ensure transaction synchronization manager has no bound resources - // at end of test - assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true)); - } - - /** - * @see DATAMONGO-737 - */ - @Test - public void handlesTransactionSynchronizationsLifecycle() { - - // ensure transaction synchronization manager has no registered - // transaction synchronizations or bound resources at start of test - assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(true)); - assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true)); - - // access multiple databases for one mongo instance, (registers - // transaction synchronizations and binds transaction resources) - MongoDbUtils.getDB(mongo, "first"); - MongoDbUtils.getDB(mongo, "second"); - - // ensure transaction synchronization manager has registered - // transaction synchronizations and bound resources - assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(false)); - assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(false)); - - // simulate transaction completion, (unbinds transaction resources) - try { - simulateTransactionCompletion(); - } catch 
(Exception e) { - fail("Unexpected exception thrown during transaction completion: " + e); - } - - // ensure transaction synchronization manager has no bound - // transaction resources at end of test - assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true)); - } - - /** - * @see DATAMONGO-1218 - */ - @Test - @SuppressWarnings("deprecation") - public void getDBDAuthenticateViaAuthDbWhenCalledWithMongoInstance() { - - assumeThat(MongoClientVersion.isMongo3Driver(), is(false)); - - when(dbMock.getName()).thenReturn("db"); - - try { - MongoDbUtils.getDB(mongo, "db", new UserCredentials("shallan", "davar"), "authdb"); - } catch (CannotGetMongoDbConnectionException e) { - // need to catch that one since we cannot answer the reflective call sufficiently - } - - verify(mongo, times(1)).getDB("authdb"); - } - - /** - * @see DATAMONGO-1218 - */ - @Test - @SuppressWarnings("deprecation") - public void getDBDShouldSkipAuthenticationViaAuthDbWhenCalledWithMongoClientInstance() { - - MongoDbUtils.getDB(mongoClientMock, "db", new UserCredentials("dalinar", "kholin"), "authdb"); - - verify(mongoClientMock, never()).getDB("authdb"); - } - - /** - * Simulate transaction rollback/commit completion protocol on managed transaction synchronizations which will unbind - * managed transaction resources. Does not swallow exceptions for testing purposes. - * - * @see TransactionSynchronizationUtils#triggerBeforeCompletion() - * @see TransactionSynchronizationUtils#triggerAfterCompletion(int) - */ - private void simulateTransactionCompletion() { - - // triggerBeforeCompletion() implementation without swallowed exceptions - List<TransactionSynchronization> synchronizations = TransactionSynchronizationManager.getSynchronizations(); - for (TransactionSynchronization synchronization : synchronizations) { - synchronization.beforeCompletion(); - } - - // triggerAfterCompletion() implementation without swallowed exceptions - List<TransactionSynchronization> remainingSynchronizations = TransactionSynchronizationManager - .getSynchronizations(); - if (remainingSynchronizations != null) { - for (TransactionSynchronization remainingSynchronization : remainingSynchronizations) { - remainingSynchronization.afterCompletion(TransactionSynchronization.STATUS_ROLLED_BACK); - } - } - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBeanTests.java new file mode 100644 index 0000000000..a45b099640 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBeanTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.AutoEncryptionSettings; + +/** + * Integration tests for {@link MongoEncryptionSettingsFactoryBean}. + * + * @author Christoph Strobl + */ +public class MongoEncryptionSettingsFactoryBeanTests { + + @Test // DATAMONGO-2306 + public void createsAutoEncryptionSettings() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoEncryptionSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("bypassAutoEncryption", true); + definition.getPropertyValues().addPropertyValue("keyVaultNamespace", "ns"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoEncryptionSettingsFactoryBean bean = factory.getBean("&factory", MongoEncryptionSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "bypassAutoEncryption")).isEqualTo(true); + + AutoEncryptionSettings target = factory.getBean(AutoEncryptionSettings.class); + assertThat(target.getKeyVaultNamespace()).isEqualTo("ns"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java index 6f6fa5e766..9730e61e51 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,98 +15,106 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.assertj.core.api.Assertions.*; -import java.io.IOException; -import java.net.UnknownHostException; +import org.bson.BsonDocument; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; import org.springframework.core.NestedRuntimeException; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.dao.DuplicateKeyException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.InvalidDataAccessResourceUsageException; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.MongoTransactionException; import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.lang.Nullable; import com.mongodb.MongoCursorNotFoundException; import com.mongodb.MongoException; import com.mongodb.MongoInternalException; import com.mongodb.MongoSocketException; +import com.mongodb.MongoSocketReadTimeoutException; +import com.mongodb.MongoSocketWriteException; import com.mongodb.ServerAddress; /** * Unit tests for {@link MongoExceptionTranslator}. 
- * + * * @author Michal Vich * @author Oliver Gierke * @author Christoph Strobl + * @author Brice Vandeputte */ -@RunWith(MockitoJUnitRunner.class) -public class MongoExceptionTranslatorUnitTests { - - MongoExceptionTranslator translator; +class MongoExceptionTranslatorUnitTests { - @Mock com.mongodb.DuplicateKeyException exception; - @Mock MongoSocketException socketException; - @Mock MongoCursorNotFoundException cursorNotFoundException; + private static final String EXCEPTION_MESSAGE = "IOException"; + private MongoExceptionTranslator translator; - @Before - public void setUp() { + @BeforeEach + void setUp() { translator = new MongoExceptionTranslator(); } @Test - public void translateDuplicateKey() { + void translateDuplicateKey() { - DataAccessException translatedException = translator.translateExceptionIfPossible(exception); - expectExceptionWithCauseMessage(translatedException, DuplicateKeyException.class, null); + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible( + new com.mongodb.DuplicateKeyException(new BsonDocument(), new ServerAddress(), null)), + DuplicateKeyException.class, null); } - @Test - public void translateSocketException() { + @Test // GH-3568 + void translateSocketException() { - when(socketException.getMessage()).thenReturn("IOException"); - when(socketException.getCause()).thenReturn(new IOException("IOException")); - DataAccessException translatedException = translator.translateExceptionIfPossible(socketException); + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible(new MongoSocketException(EXCEPTION_MESSAGE, new ServerAddress())), + DataAccessResourceFailureException.class, EXCEPTION_MESSAGE); + } - expectExceptionWithCauseMessage(translatedException, DataAccessResourceFailureException.class, "IOException"); + @Test // GH-3568 + void translateSocketExceptionSubclasses() { - } + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible(new MongoSocketWriteException("intermediate message", + new ServerAddress(), new Exception(EXCEPTION_MESSAGE))), + DataAccessResourceFailureException.class, EXCEPTION_MESSAGE); - @Test - public void translateCursorNotFound() throws UnknownHostException { + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible(new MongoSocketReadTimeoutException("intermediate message", + new ServerAddress(), new Exception(EXCEPTION_MESSAGE))), + DataAccessResourceFailureException.class, EXCEPTION_MESSAGE); - when(cursorNotFoundException.getCode()).thenReturn(1); - when(cursorNotFoundException.getServerAddress()).thenReturn(new ServerAddress()); + } - DataAccessException translatedException = translator.translateExceptionIfPossible(cursorNotFoundException); + @Test + void translateCursorNotFound() { - expectExceptionWithCauseMessage(translatedException, DataAccessResourceFailureException.class); + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible(new MongoCursorNotFoundException(1L, new BsonDocument(), Mockito.mock(ServerAddress.class))), + DataAccessResourceFailureException.class); } @Test - public void translateToDuplicateKeyException() { + void translateToDuplicateKeyException() { checkTranslatedMongoException(DuplicateKeyException.class, 11000); checkTranslatedMongoException(DuplicateKeyException.class, 11001); } @Test - public void translateToDataAccessResourceFailureException() { + void translateToDataAccessResourceFailureException() { checkTranslatedMongoException(DataAccessResourceFailureException.class, 12000); 
checkTranslatedMongoException(DataAccessResourceFailureException.class, 13440); } @Test - public void translateToInvalidDataAccessApiUsageException() { + void translateToInvalidDataAccessApiUsageException() { checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 10003); checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 12001); @@ -116,7 +124,7 @@ public void translateToInvalidDataAccessApiUsageException() { } @Test - public void translateToUncategorizedMongoDbException() { + void translateToUncategorizedMongoDbException() { MongoException exception = new MongoException(0, ""); DataAccessException translatedException = translator.translateExceptionIfPossible(exception); @@ -125,7 +133,7 @@ public void translateToUncategorizedMongoDbException() { } @Test - public void translateMongoInternalException() { + void translateMongoInternalException() { MongoInternalException exception = new MongoInternalException("Internal exception"); DataAccessException translatedException = translator.translateExceptionIfPossible(exception); @@ -134,37 +142,81 @@ public void translateMongoInternalException() { } @Test - public void translateUnsupportedException() { + void translateUnsupportedException() { RuntimeException exception = new RuntimeException(); - assertThat(translator.translateExceptionIfPossible(exception), is(nullValue())); + assertThat(translator.translateExceptionIfPossible(exception)).isNull(); + } + + @Test // DATAMONGO-2045 + void translateSessionExceptions() { + + checkTranslatedMongoException(ClientSessionException.class, 206); + checkTranslatedMongoException(ClientSessionException.class, 213); + checkTranslatedMongoException(ClientSessionException.class, 228); + checkTranslatedMongoException(ClientSessionException.class, 264); + } + + @Test // DATAMONGO-2045 + void translateTransactionExceptions() { + + checkTranslatedMongoException(MongoTransactionException.class, 217); + checkTranslatedMongoException(MongoTransactionException.class, 225); + checkTranslatedMongoException(MongoTransactionException.class, 244); + checkTranslatedMongoException(MongoTransactionException.class, 251); + checkTranslatedMongoException(MongoTransactionException.class, 256); + checkTranslatedMongoException(MongoTransactionException.class, 257); + checkTranslatedMongoException(MongoTransactionException.class, 263); + checkTranslatedMongoException(MongoTransactionException.class, 267); + } + + @Test // DATAMONGO-2073 + public void translateTransientTransactionExceptions() { + + MongoException source = new MongoException(267, "PreparedTransactionInProgress"); + source.addLabel(MongoException.TRANSIENT_TRANSACTION_ERROR_LABEL); + + expectExceptionWithCauseMessage(translator.translateExceptionIfPossible(source), + UncategorizedMongoDbException.class, + "PreparedTransactionInProgress"); + assertThat(translator.isTransientFailure(source)).isTrue(); + assertThat(translator.isTransientFailure(translator.translateExceptionIfPossible(source))).isTrue(); + } + + @Test // DATAMONGO-2073 + public void translateMongoExceptionWithTransientLabel() { + + MongoException exception = new MongoException(0, ""); + exception.addLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL); + DataAccessException translatedException = translator.translateExceptionIfPossible(exception); + + expectExceptionWithCauseMessage(translatedException, UncategorizedMongoDbException.class); } private void checkTranslatedMongoException(Class<? extends Exception> clazz, int code) { - try { - translator.translateExceptionIfPossible(new
MongoException(code, "")); - fail("Expected exception of type " + clazz.getName() + "!"); - } catch (NestedRuntimeException e) { - Throwable cause = e.getRootCause(); - assertThat(cause, is(instanceOf(MongoException.class))); - assertThat(((MongoException) cause).getCode(), is(code)); - } + DataAccessException translated = translator.translateExceptionIfPossible(new MongoException(code, "")); + + assertThat(translated).as("Expected exception of type " + clazz.getName()).isNotNull(); + + Throwable cause = translated.getRootCause(); + assertThat(cause).isInstanceOf(MongoException.class); + assertThat(((MongoException) cause).getCode()).isEqualTo(code); } - private static void expectExceptionWithCauseMessage(NestedRuntimeException e, + private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e, Class<? extends NestedRuntimeException> type) { expectExceptionWithCauseMessage(e, type, null); } - private static void expectExceptionWithCauseMessage(NestedRuntimeException e, - Class<? extends NestedRuntimeException> type, String message) { + private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e, + Class<? extends NestedRuntimeException> type, @Nullable String message) { - assertThat(e, is(instanceOf(type))); + assertThat(e).isInstanceOf(type); if (message != null) { - assertThat(e.getRootCause(), is(notNullValue())); - assertThat(e.getRootCause().getMessage(), containsString(message)); + assertThat(e.getRootCause()).isNotNull(); + assertThat(e.getRootCause().getMessage()).contains(message); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoFactoryBeanIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoFactoryBeanIntegrationTests.java deleted file mode 100644 index 1d60455231..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoFactoryBeanIntegrationTests.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import org.junit.Test; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.beans.factory.support.RootBeanDefinition; -import org.springframework.data.mongodb.config.ServerAddressPropertyEditor; -import org.springframework.data.mongodb.config.WriteConcernPropertyEditor; -import org.springframework.test.util.ReflectionTestUtils; - -import com.mongodb.Mongo; -import com.mongodb.ServerAddress; -import com.mongodb.WriteConcern; - -/** - * Integration tests for {@link MongoFactoryBean}.
- * - * @author Oliver Gierke - * @author Thomas Darimont - */ -public class MongoFactoryBeanIntegrationTests { - - /** - * @see DATAMONGO-408 - */ - @Test - public void convertsWriteConcernCorrectly() { - - RootBeanDefinition definition = new RootBeanDefinition(MongoFactoryBean.class); - definition.getPropertyValues().addPropertyValue("writeConcern", "SAFE"); - - DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); - factory.registerCustomEditor(WriteConcern.class, WriteConcernPropertyEditor.class); - factory.registerBeanDefinition("factory", definition); - - MongoFactoryBean bean = factory.getBean("&factory", MongoFactoryBean.class); - assertThat(ReflectionTestUtils.getField(bean, "writeConcern"), is((Object) WriteConcern.SAFE)); - } - - /** - * @see DATAMONGO-693 - */ - @Test - public void createMongoInstanceWithHostAndEmptyReplicaSets() { - - RootBeanDefinition definition = new RootBeanDefinition(MongoFactoryBean.class); - definition.getPropertyValues().addPropertyValue("host", "localhost"); - definition.getPropertyValues().addPropertyValue("replicaPair", ""); - - DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); - factory.registerCustomEditor(ServerAddress.class, ServerAddressPropertyEditor.class); - factory.registerBeanDefinition("factory", definition); - - Mongo mongo = factory.getBean(Mongo.class); - assertNotNull(mongo); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java index 9614330608..8b88552860 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,23 @@ */ package org.springframework.data.mongodb.core; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.dao.DataAccessException; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.AbstractMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; @@ -34,20 +39,21 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.util.TypeInformation; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; import com.mongodb.DBRef; /** * Abstract base class for unit tests to specify behaviour we expect from {@link MongoOperations}. Subclasses return * instances of their implementation and thus can see if it correctly implements the {@link MongoOperations} interface. 
- + * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public abstract class MongoOperationsUnitTests { @Mock CollectionCallback<Object> collectionCallback; @@ -57,7 +63,7 @@ public abstract class MongoOperationsUnitTests { Person person; List<Person> persons; - @Before + @BeforeEach public final void operationsSetUp() { person = new Person("Oliver"); @@ -65,12 +71,12 @@ public final void operationsSetUp() { converter = new AbstractMongoConverter(null) { - public void write(Object t, DBObject dbo) { - dbo.put("firstName", person.getFirstName()); + public void write(Object t, Bson bson) { + ((Document) bson).put("firstName", person.getFirstName()); } @SuppressWarnings("unchecked") - public <S extends Object> S read(Class<S> clazz, DBObject dbo) { + public <S extends Object> S read(Class<S> clazz, Bson bson) { return (S) person; } @@ -82,7 +88,7 @@ public Object convertToMongoType(Object obj, TypeInformation<?> typeInformation) return null; } - public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { + public DBRef toDBRef(Object object, MongoPersistentProperty referringProperty) { return null; } @@ -90,26 +96,41 @@ public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { public MongoTypeMapper getTypeMapper() { return null; } + + @Override + public ProjectionFactory getProjectionFactory() { + return null; + } + + @Override + public CustomConversions getCustomConversions() { + return null; + } + + @Override + public <R> R project(EntityProjection<R, ?> descriptor, Bson bson) { + return null; + } }; } - @Test(expected = IllegalArgumentException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void rejectsNullForCollectionCallback() { - - getOperations().execute("test", (CollectionCallback) null); + assertThatIllegalArgumentException().isThrownBy(() -> getOperations().execute("test", (CollectionCallback) null)); } - @Test(expected = IllegalArgumentException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void rejectsNullForCollectionCallback2() { - getOperations().execute("collection", (CollectionCallback) null); + assertThatIllegalArgumentException() + .isThrownBy(() -> getOperations().execute("collection", (CollectionCallback) null)); } - @Test(expected = IllegalArgumentException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void rejectsNullForDbCallback() { - getOperations().execute((DbCallback) null); + assertThatIllegalArgumentException().isThrownBy(() -> getOperations().execute((DbCallback) null)); } @Test @@ -137,7 +158,7 @@ public void convertsExceptionForCreateCollection2() { new Execution() { @Override public void doWith(MongoOperations operations) { - operations.createCollection("foo", new CollectionOptions(1, 1, true)); + operations.createCollection("foo", CollectionOptions.empty().size(1).maxDocuments(1).capped()); } }.assertDataAccessException(); } @@ -187,7 +208,7 @@ public void convertsExceptionForExecuteCommand() { new Execution() { @Override public void doWith(MongoOperations operations) { - operations.executeCommand(new BasicDBObject()); + operations.executeCommand(new Document()); } }.assertDataAccessException(); } @@ -199,17 +220,7 @@ public void convertsExceptionForExecuteStringCommand() { public void doWith(MongoOperations operations) { operations.executeCommand(""); } - }.assertDataAccessException(); - } - - @Test - public void convertsExceptionForExecuteInSession() { - new Execution() { - @Override - public void
doWith(MongoOperations operations) { - operations.executeInSession(dbCallback); - } - }.assertDataAccessException(); + }.assertException(IllegalArgumentException.class); } @Test @@ -219,7 +230,7 @@ public void convertsExceptionForGetCollection() { public void doWith(MongoOperations operations) { operations.findAll(Object.class); } - }.assertDataAccessException(); + }.assertException(MappingException.class); } @Test @@ -302,10 +313,7 @@ public void doWith(MongoOperations operations) { }.assertDataAccessException(); } - /** - * @see DATAMONGO-341 - */ - @Test + @Test // DATAMONGO-341 public void geoNearRejectsNullNearQuery() { new Execution() { @@ -316,10 +324,7 @@ public void doWith(MongoOperations operations) { }.assertDataAccessException(); } - /** - * @see DATAMONGO-341 - */ - @Test + @Test // DATAMONGO-341 public void geoNearRejectsNullNearQueryifCollectionGiven() { new Execution() { @@ -330,10 +335,7 @@ public void doWith(MongoOperations operations) { }.assertDataAccessException(); } - /** - * @see DATAMONGO-341 - */ - @Test + @Test // DATAMONGO-341 public void geoNearRejectsNullEntityClass() { final NearQuery query = NearQuery.near(new Point(10, 20)); @@ -346,10 +348,7 @@ public void doWith(MongoOperations operations) { }.assertDataAccessException(); } - /** - * @see DATAMONGO-341 - */ - @Test + @Test // DATAMONGO-341 public void geoNearRejectsNullEntityClassIfCollectionGiven() { final NearQuery query = NearQuery.near(new Point(10, 20)); @@ -370,12 +369,7 @@ public void assertDataAccessException() { public void assertException(Class<? extends Exception> exception) { - try { - doWith(getOperationsForExceptionHandling()); - fail("Expected " + exception + " but completed without any!"); - } catch (Exception e) { - assertTrue("Expected " + exception + " but got " + e, exception.isInstance(e)); - } + assertThatThrownBy(() -> doWith(getOperationsForExceptionHandling())).isInstanceOf(exception); } public abstract void doWith(MongoOperations operations); @@ -384,14 +378,14 @@ public void assertException(Class<? extends Exception> exception) { /** * Expects an {@link MongoOperations} instance that will be used to check that invoking methods on it will only cause * {@link DataAccessException}s. - * + * * @return */ protected abstract MongoOperations getOperationsForExceptionHandling(); /** * Returns a plain {@link MongoOperations}. - * + * * @return */ protected abstract MongoOperations getOperations(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBeanUnitTests.java deleted file mode 100644 index 19db55a393..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBeanUnitTests.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.core.ReflectiveMongoOptionsInvoker.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; - -import javax.net.ssl.SSLSocketFactory; - -import org.junit.BeforeClass; -import org.junit.Test; - -import com.mongodb.MongoOptions; - -/** - * Unit tests for {@link MongoOptionsFactoryBean}. - * - * @author Oliver Gierke - * @author Mike Saavedra - * @author Christoph Strobl - */ -@SuppressWarnings("deprecation") -public class MongoOptionsFactoryBeanUnitTests { - - @BeforeClass - public static void validateMongoDriver() { - assumeFalse(isMongo3Driver()); - } - - /** - * @throws Exception - * @see DATADOC-280 - */ - @Test - public void setsMaxConnectRetryTime() throws Exception { - - MongoOptionsFactoryBean bean = new MongoOptionsFactoryBean(); - bean.setMaxAutoConnectRetryTime(27); - bean.afterPropertiesSet(); - - MongoOptions options = bean.getObject(); - assertThat(getMaxAutoConnectRetryTime(options), is(27L)); - } - - /** - * @throws Exception - * @see DATAMONGO-764 - */ - @Test - public void testSslConnection() throws Exception { - - MongoOptionsFactoryBean bean = new MongoOptionsFactoryBean(); - bean.setSsl(true); - bean.afterPropertiesSet(); - - MongoOptions options = bean.getObject(); - assertNotNull(options.getSocketFactory()); - assertTrue(options.getSocketFactory() instanceof SSLSocketFactory); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java new file mode 100644 index 0000000000..cfa28e9314 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApiVersion; + +/** + * Integration tests for {@link MongoServerApiFactoryBean}. 
+ * + * @author Christoph Strobl + */ +class MongoServerApiFactoryBeanTests { + + @Test // GH-3820 + void createsServerApiForVersionString() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "V1"); + definition.getPropertyValues().addPropertyValue("deprecationErrors", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "deprecationErrors")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).contains(true); + assertThat(target.getStrict()).isNotPresent(); + } + + @Test // GH-3820 + void createsServerApiForVersionNumber() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "1"); + definition.getPropertyValues().addPropertyValue("strict", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "strict")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).isNotPresent(); + assertThat(target.getStrict()).contains(true); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java new file mode 100644 index 0000000000..deaffab4b2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java @@ -0,0 +1,148 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Collation.Alternate; +import org.springframework.data.mongodb.core.query.Collation.ComparisonLevel; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +public class MongoTemplateCollationTests { + + public static final String COLLECTION_NAME = "collation-1"; + static @Client MongoClient mongoClient; + + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "collation-tests"; + } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Override + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + } + + @Autowired MongoTemplate template; + + @BeforeEach + public void setUp() { + template.dropCollection(COLLECTION_NAME); + } + + @Test // DATAMONGO-1518 + public void createCollectionWithCollation() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.just(Collation.of("en_US"))); + + Document collation = getCollationInfo(COLLECTION_NAME); + assertThat(collation.get("locale")).isEqualTo("en_US"); + } + + @Test // DATAMONGO-1518 + public void createCollectionWithCollationHavingLocaleVariant() { + + template.createCollection(COLLECTION_NAME, + CollectionOptions.just(Collation.of(new Locale("de", "AT", "phonebook")))); + + Document collation = getCollationInfo(COLLECTION_NAME); + assertThat(collation.get("locale")).isEqualTo("de_AT@collation=phonebook"); + } + + @Test // DATAMONGO-1518 + public void createCollectionWithCollationHavingStrength() { + + template.createCollection(COLLECTION_NAME, + CollectionOptions.just(Collation.of("en_US").strength(ComparisonLevel.primary().includeCase()))); + + Document collation = getCollationInfo(COLLECTION_NAME); + assertThat(collation.get("strength")).isEqualTo(1); + assertThat(collation.get("caseLevel")).isEqualTo(true); + } + + @Test // DATAMONGO-1518 + public void createCollectionWithCollationHavingBackwardsAndNumericOrdering() { + + template.createCollection(COLLECTION_NAME, + CollectionOptions.just(Collation.of("en_US").backwardDiacriticSort().numericOrderingEnabled())); + + Document collation = getCollationInfo(COLLECTION_NAME); + assertThat(collation.get("backwards")).isEqualTo(true); + assertThat(collation.get("numericOrdering")).isEqualTo(true); + } + + @Test // DATAMONGO-1518 + public void createCollationWithCollationHavingAlternate() { + + template.createCollection(COLLECTION_NAME, +
CollectionOptions.just(Collation.of("en_US").alternate(Alternate.shifted().punct()))); + + Document collation = getCollationInfo(COLLECTION_NAME); + assertThat(collation.get("alternate")).isEqualTo("shifted"); + assertThat(collation.get("maxVariable")).isEqualTo("punct"); + } + + private Document getCollationInfo(String collectionName) { + return getCollectionInfo(collectionName).get("options", Document.class).get("collation", Document.class); + } + + private Document getCollectionInfo(String collectionName) { + + return template.execute(db -> { + + Document result = db.runCommand( + new Document().append("listCollections", 1).append("filter", new Document("name", collectionName))); + return (Document) result.get("cursor", Document.class).get("firstBatch", List.class).get(0); + }); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java new file mode 100644 index 0000000000..498bfec17a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java @@ -0,0 +1,656 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxy; +import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.model.Filters; + +/** + * {@link org.springframework.data.mongodb.core.mapping.DBRef} related integration tests for + * {@link org.springframework.data.mongodb.core.MongoTemplate}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +public class MongoTemplateDbRefTests { + + @Template(database = "mongo-template-dbref-tests", + initialEntitySet = { RefCycleLoadingIntoDifferentTypeRoot.class, + RefCycleLoadingIntoDifferentTypeIntermediate.class, RefCycleLoadingIntoDifferentTypeRootView.class, + WithDBRefOnRawStringId.class, WithLazyDBRefOnRawStringId.class, WithRefToAnotherDb.class, + WithLazyRefToAnotherDb.class, WithListRefToAnotherDb.class, WithLazyListRefToAnotherDb.class }) // + static MongoTestTemplate template; + + @Template(database = "mongo-template-dbref-tests-other-db", initialEntitySet = JustSomeType.class) // + static MongoTestTemplate otherDbTemplate; + + @BeforeEach + public void setUp() { + + template.flush(); + otherDbTemplate.flush(); + } + + @Test // DATAMONGO-1703 + public void shouldLoadRefIntoDifferentTypeCorrectly() { + + // init root + RefCycleLoadingIntoDifferentTypeRoot root = new RefCycleLoadingIntoDifferentTypeRoot(); + root.id = "root-1"; + root.content = "jon snow"; + template.save(root); + + // init one and set view id ref to root.id + RefCycleLoadingIntoDifferentTypeIntermediate intermediate = new RefCycleLoadingIntoDifferentTypeIntermediate(); + intermediate.id = "one-1"; + intermediate.refToRootView = new RefCycleLoadingIntoDifferentTypeRootView(); + intermediate.refToRootView.id = root.id; + + template.save(intermediate); + + // add one ref to root + root.refToIntermediate = intermediate; + template.save(root); + + RefCycleLoadingIntoDifferentTypeRoot loaded = template.findOne(query(where("id").is(root.id)), + RefCycleLoadingIntoDifferentTypeRoot.class); + + assertThat(loaded.content).isEqualTo("jon snow"); + assertThat(loaded.getRefToIntermediate()).isInstanceOf(RefCycleLoadingIntoDifferentTypeIntermediate.class); + assertThat(loaded.getRefToIntermediate().getRefToRootView()) + .isInstanceOf(RefCycleLoadingIntoDifferentTypeRootView.class); + assertThat(loaded.getRefToIntermediate().getRefToRootView().getContent()).isEqualTo("jon snow"); + } + + @Test // DATAMONGO-1798 + public void stringDBRefLoading() { + + RawStringId ref = new RawStringId(); + ref.id = new ObjectId().toHexString(); + ref.value = "new value"; + + template.save(ref); + + WithDBRefOnRawStringId source = new WithDBRefOnRawStringId(); + source.id = "foo"; + source.value = ref; + + template.save(source); + + org.bson.Document result = template + .execute(db -> (org.bson.Document) db.getCollection(template.getCollectionName(WithDBRefOnRawStringId.class)) + .find(Filters.eq("_id", source.id)).limit(1).into(new ArrayList()).iterator().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("value")) + .isEqualTo(new com.mongodb.DBRef(template.getCollectionName(RawStringId.class), ref.getId())); + + WithDBRefOnRawStringId target = template.findOne(query(where("id").is(source.id)), WithDBRefOnRawStringId.class); + assertThat(target.value).isEqualTo(ref); + } + + @Test // DATAMONGO-1798 + public void stringDBRefLazyLoading() { + + RawStringId ref = new RawStringId(); + ref.id = new ObjectId().toHexString(); + ref.value = "new value"; + + template.save(ref); + + WithLazyDBRefOnRawStringId source = new WithLazyDBRefOnRawStringId(); + source.id = "foo"; + source.value = ref; + + template.save(source); + + org.bson.Document result = template.execute( + db -> (org.bson.Document) db.getCollection(template.getCollectionName(WithLazyDBRefOnRawStringId.class)) + .find(Filters.eq("_id", source.id)).limit(1).into(new 
ArrayList()).iterator().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("value")) + .isEqualTo(new com.mongodb.DBRef(template.getCollectionName(RawStringId.class), ref.getId())); + + WithLazyDBRefOnRawStringId target = template.findOne(query(where("id").is(source.id)), + WithLazyDBRefOnRawStringId.class); + + assertThat(target.value).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getValue()).isEqualTo(ref); + } + + @Test // DATAMONGO-2223 + public void shouldResolveSingleDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + otherDbTemplate.insert(one); + + WithRefToAnotherDb source = new WithRefToAnotherDb(); + source.value = one; + + template.save(source); + + WithRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), WithRefToAnotherDb.class); + assertThat(target.getValue()).isEqualTo(one); + } + + @Test // DATAMONGO-2223 + public void shouldResolveSingleLazyDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + otherDbTemplate.insert(one); + + WithLazyRefToAnotherDb source = new WithLazyRefToAnotherDb(); + source.value = one; + + template.save(source); + + WithLazyRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), WithLazyRefToAnotherDb.class); + LazyLoadingTestUtils.assertProxyIsResolved(target.value, false); + assertThat(target.getValue()).isEqualTo(one); + } + + @Test // DATAMONGO-2223 + public void shouldResolveListDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + otherDbTemplate.insertAll(Arrays.asList(one, two)); + + WithListRefToAnotherDb source = new WithListRefToAnotherDb(); + source.value = Arrays.asList(one, two); + + template.save(source); + + WithListRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), WithListRefToAnotherDb.class); + assertThat(target.getValue()).containsExactlyInAnyOrder(one, two); + } + + @Test // DATAMONGO-2223 + public void shouldResolveLazyListDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + otherDbTemplate.insertAll(Arrays.asList(one, two)); + + WithLazyListRefToAnotherDb source = new WithLazyListRefToAnotherDb(); + source.value = Arrays.asList(one, two); + + template.save(source); + + WithLazyListRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), + WithLazyListRefToAnotherDb.class); + LazyLoadingTestUtils.assertProxyIsResolved(target.value, false); + assertThat(target.getValue()).containsExactlyInAnyOrder(one, two); + } + + @Test // GH-2191 + void shouldAllowToSliceCollectionOfDbRefs() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + template.insertAll(Arrays.asList(one, two)); + + WithCollectionDbRef source = new WithCollectionDbRef(); + source.refs = Arrays.asList(one, two); + + template.save(source); + + Query theQuery = query(where("id").is(source.id)); + theQuery.fields().slice("refs", 1, 1); + + WithCollectionDbRef target = template.findOne(theQuery, WithCollectionDbRef.class); + assertThat(target.getRefs()).containsExactly(two); + } + + @Test // GH-2191 + void shouldAllowToSliceCollectionOfLazyDbRefs() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + 
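+ // slice("lazyrefs", 1, 1) becomes the projection { lazyrefs: { $slice: [1, 1] } }, so only the second stored reference ("two") is handed to lazy resolution.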
template.insertAll(Arrays.asList(one, two)); + + WithCollectionDbRef source = new WithCollectionDbRef(); + source.lazyrefs = Arrays.asList(one, two); + + template.save(source); + + Query theQuery = query(where("id").is(source.id)); + theQuery.fields().slice("lazyrefs", 1, 1); + + WithCollectionDbRef target = template.findOne(theQuery, WithCollectionDbRef.class); + LazyLoadingTestUtils.assertProxyIsResolved(target.lazyrefs, false); + assertThat(target.getLazyrefs()).containsExactly(two); + } + + @Document("cycle-with-different-type-root") + static class RefCycleLoadingIntoDifferentTypeRoot { + + @Id String id; + String content; + @DBRef RefCycleLoadingIntoDifferentTypeIntermediate refToIntermediate; + + public String getId() { + return this.id; + } + + public String getContent() { + return this.content; + } + + public RefCycleLoadingIntoDifferentTypeIntermediate getRefToIntermediate() { + return this.refToIntermediate; + } + + public void setId(String id) { + this.id = id; + } + + public void setContent(String content) { + this.content = content; + } + + public void setRefToIntermediate(RefCycleLoadingIntoDifferentTypeIntermediate refToIntermediate) { + this.refToIntermediate = refToIntermediate; + } + + public String toString() { + return "MongoTemplateDbRefTests.RefCycleLoadingIntoDifferentTypeRoot(id=" + this.getId() + ", content=" + + this.getContent() + ", refToIntermediate=" + this.getRefToIntermediate() + ")"; + } + } + + @Document("cycle-with-different-type-intermediate") + static class RefCycleLoadingIntoDifferentTypeIntermediate { + + @Id String id; + @DBRef RefCycleLoadingIntoDifferentTypeRootView refToRootView; + + public String getId() { + return this.id; + } + + public RefCycleLoadingIntoDifferentTypeRootView getRefToRootView() { + return this.refToRootView; + } + + public void setId(String id) { + this.id = id; + } + + public void setRefToRootView(RefCycleLoadingIntoDifferentTypeRootView refToRootView) { + this.refToRootView = refToRootView; + } + + public String toString() { + return "MongoTemplateDbRefTests.RefCycleLoadingIntoDifferentTypeIntermediate(id=" + this.getId() + + ", refToRootView=" + this.getRefToRootView() + ")"; + } + } + + @Document("cycle-with-different-type-root") + static class RefCycleLoadingIntoDifferentTypeRootView { + + @Id String id; + String content; + + public String getId() { + return this.id; + } + + public String getContent() { + return this.content; + } + + public void setId(String id) { + this.id = id; + } + + public void setContent(String content) { + this.content = content; + } + + public String toString() { + return "MongoTemplateDbRefTests.RefCycleLoadingIntoDifferentTypeRootView(id=" + this.getId() + ", content=" + + this.getContent() + ")"; + } + } + + static class RawStringId { + + @MongoId String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RawStringId that = (RawStringId) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDbRefTests.RawStringId(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static 
class WithCollectionDbRef { + + @Id String id; + + @DBRef List<JustSomeType> refs; + + @DBRef(lazy = true) List<JustSomeType> lazyrefs; + + public String getId() { + return this.id; + } + + public List<JustSomeType> getRefs() { + return this.refs; + } + + public List<JustSomeType> getLazyrefs() { + return this.lazyrefs; + } + + public void setId(String id) { + this.id = id; + } + + public void setRefs(List<JustSomeType> refs) { + this.refs = refs; + } + + public void setLazyrefs(List<JustSomeType> lazyrefs) { + this.lazyrefs = lazyrefs; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithCollectionDbRef(id=" + this.getId() + ", refs=" + this.getRefs() + + ", lazyrefs=" + this.getLazyrefs() + ")"; + } + } + + static class WithDBRefOnRawStringId { + + @Id String id; + @DBRef RawStringId value; + + public String getId() { + return this.id; + } + + public RawStringId getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(RawStringId value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithDBRefOnRawStringId(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithLazyDBRefOnRawStringId { + + @Id String id; + @DBRef(lazy = true) RawStringId value; + + public String getId() { + return this.id; + } + + public RawStringId getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(RawStringId value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithLazyDBRefOnRawStringId(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class WithRefToAnotherDb { + + @Id String id; + @DBRef(db = "mongo-template-dbref-tests-other-db") JustSomeType value; + + public String getId() { + return this.id; + } + + public JustSomeType getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(JustSomeType value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithLazyRefToAnotherDb { + + @Id String id; + @DBRef(lazy = true, db = "mongo-template-dbref-tests-other-db") JustSomeType value; + + public String getId() { + return this.id; + } + + public JustSomeType getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(JustSomeType value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithLazyRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithListRefToAnotherDb { + + @Id String id; + @DBRef(db = "mongo-template-dbref-tests-other-db") List<JustSomeType> value; + + public String getId() { + return this.id; + } + + public List<JustSomeType> getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(List<JustSomeType> value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithListRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithLazyListRefToAnotherDb { + + @Id String id; + @DBRef(lazy = true, db = "mongo-template-dbref-tests-other-db") List<JustSomeType> value; + + public String getId() { + return this.id; + } + + public List<JustSomeType> getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(List<JustSomeType> value) { + this.value
= value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithLazyListRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class JustSomeType { + + @Id String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + JustSomeType that = (JustSomeType) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDbRefTests.JustSomeType(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java new file mode 100644 index 0000000000..51b3b005a5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -0,0 +1,2315 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.annotation.Reference; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.model.Filters; + +/** + * {@link DocumentReference} related integration tests for {@link MongoTemplate}. 
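+ * <p> + * Covers writing references as plain id values, custom SpEL lookup queries, lazy resolution, and + * one-to-many style lookups via the self variable.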
+ * + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateDocumentReferenceTests { + + public static final String DB_NAME = "document-reference-tests"; + + static @Client MongoClient client; + + MongoTestTemplate template = new MongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureConversion(it -> { + it.customConverters(new ReferencableConverter(), new SimpleObjectRefWithReadingConverterToDocumentConverter(), + new DocumentToSimpleObjectRefWithReadingConverter()); + }); + + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + }); + }); + + @BeforeEach + public void setUp() { + template.flushDatabase(); + } + + @Test // GH-3602 + void writeSimpleTypeReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.simpleValueRef = new SimpleObjectRef("ref-1", "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef")).isEqualTo("ref-1"); + } + + @Test // GH-3782 + void writeTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.customIdTargetRef = new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), + "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRef")).isEqualTo(expectedIdValue); + } + + @Test // GH-3602 + void writeMapTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.mapValueRef = new LinkedHashMap<>(); + source.mapValueRef.put("frodo", new SimpleObjectRef("ref-1", "me-the-1-referenced-object")); + source.mapValueRef.put("bilbo", new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("mapValueRef", Map.class)).containsEntry("frodo", "ref-1").containsEntry("bilbo", "ref-2"); + } + + @Test // GH-3782 + void writeMapOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefMap = Collections.singletonMap("frodo", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefMap", Map.class)).containsEntry("frodo", expectedIdValue); + } + + @Test // GH-3602 + void writeCollectionOfSimpleTypeReference() { + + String rootCollectionName 
= template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.simpleValueRef = Arrays.asList(new SimpleObjectRef("ref-1", "me-the-1-referenced-object"), + new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef", List.class)).containsExactly("ref-1", "ref-2"); + } + + @Test // GH-3782 + void writeListOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefList = Collections.singletonList( + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefList", List.class)).containsExactly(expectedIdValue); + } + + @Test // GH-3602 + void writeObjectTypeReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.objectValueRef = new ObjectRefOfDocument("ref-1", "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("objectValueRef")).isEqualTo(source.getObjectValueRef().toReference()); + } + + @Test // GH-3602 + void writeCollectionOfObjectTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.objectValueRef = Arrays.asList(new ObjectRefOfDocument("ref-1", "me-the-1-referenced-object"), + new ObjectRefOfDocument("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("objectValueRef", List.class)).containsExactly( + source.getObjectValueRef().get(0).toReference(), source.getObjectValueRef().get(1).toReference()); + } + + @Test // GH-3602 + void readSimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionOfSimpleTypeObjectReference() { + + String rootCollectionName = 
template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Collections.singletonList("ref-1")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readLazySimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleLazyValueRef", "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + + LazyLoadingTestUtils.assertProxy(result.simpleLazyValueRef, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + assertThat(result.getSimpleLazyValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readSimpleTypeObjectReferenceFromFieldWithCustomName() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simple-value-ref-annotated-field-name", + "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRefWithAnnotatedFieldName()) + .isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionTypeObjectReferenceFromFieldWithCustomName() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simple-value-ref-annotated-field-name", + Collections.singletonList("ref-1")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRefWithAnnotatedFieldName()) + .containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); 
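+ // the raw document key is the @Field-annotated name, while the mapped Java property keeps its own name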
+ } + + @Test // GH-3602 + void readObjectReferenceFromDocumentType() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOfDocument.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRef", + new Document("id", "ref-1").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRef()).isEqualTo(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionObjectReferenceFromDocumentType() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOfDocument.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRef", + Collections.singletonList(new Document("id", "ref-1").append("property", "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRef()) + .containsExactly(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readObjectReferenceFromDocumentDeclaringCollectionName() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = "object-ref-of-document-with-embedded-collection-name"; + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append( + "objectValueRefWithEmbeddedCollectionName", + new Document("id", "ref-1").append("collection", "object-ref-of-document-with-embedded-collection-name") + .append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRefWithEmbeddedCollectionName()) + .isEqualTo(new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionObjectReferenceFromDocumentDeclaringCollectionName() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = "object-ref-of-document-with-embedded-collection-name"; + Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object"); + Document refSource2 = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append( + "objectValueRefWithEmbeddedCollectionName", + Arrays.asList( + new Document("id", 
"ref-2").append("collection", "object-ref-of-document-with-embedded-collection-name"), + new Document("id", "ref-1").append("collection", "object-ref-of-document-with-embedded-collection-name") + .append("property", "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRefWithEmbeddedCollectionName()).containsExactly( + new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-2", "me-the-2-referenced-object"), + new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-1-referenced-object")); + } + + @Test // GH-3602 + void useOrderFromAnnotatedSort() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object"); + Document refSource2 = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document refSource3 = new Document("_id", "ref-3").append("value", "me-the-3-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleSortedValueRef", + Arrays.asList("ref-1", "ref-3", "ref-2")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + db.getCollection(refCollectionName).insertOne(refSource3); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleSortedValueRef()).containsExactly( + new SimpleObjectRef("ref-3", "me-the-3-referenced-object"), + new SimpleObjectRef("ref-2", "me-the-2-referenced-object"), + new SimpleObjectRef("ref-1", "me-the-1-referenced-object")); + } + + @Test // GH-3602 + void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRefOnNonIdFields", + new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRefOnNonIdFields()) + .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test // GH-3602 + void readLazyObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", 
"ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("lazyObjectValueRefOnNonIdFields", + new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + + LazyLoadingTestUtils.assertProxy(result.lazyObjectValueRefOnNonIdFields, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + assertThat(result.getLazyObjectValueRefOnNonIdFields()) + .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test // GH-3602 + void readCollectionObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRefOnNonIdFields", + Collections.singletonList(new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", + "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRefOnNonIdFields()) + .containsExactly(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test // GH-3602 + void readMapOfReferences() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + + Document refSource1 = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-1-referenced-object"); + + Document refSource2 = new Document("_id", "ref-2").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-2-referenced-object"); + + Map refmap = new LinkedHashMap<>(); + refmap.put("frodo", "ref-1"); + refmap.put("bilbo", "ref-2"); + + Document source = new Document("_id", "id-1").append("value", "v1").append("mapValueRef", refmap); + + template.execute(db -> { + + db.getCollection(rootCollectionName).insertOne(source); + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + + assertThat(result.getMapValueRef()) + .containsEntry("frodo", new SimpleObjectRef("ref-1", "me-the-1-referenced-object")) + .containsEntry("bilbo", new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Test // GH-3602 + void loadLazyCyclicReference() { + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; 
+ + a.toB = b; + b.lazyToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + assertThat(loadedA).isNotNull(); + assertThat(loadedA.getToB()).isNotNull(); + LazyLoadingTestUtils.assertProxy(loadedA.getToB().lazyToA, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + } + + @Test // GH-3602 + void loadEagerCyclicReference() { + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.eagerToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + + assertThat(loadedA).isNotNull(); + assertThat(loadedA.getToB()).isNotNull(); + assertThat(loadedA.getToB().eagerToA).isSameAs(loadedA); + } + + @Test // GH-3602 + void loadAndStoreUnresolvedLazyDoesNotResolveTheProxy() { + + String collectionB = template.getCollectionName(WithRefB.class); + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.lazyToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + template.save(loadedA.getToB()); + + LazyLoadingTestUtils.assertProxy(loadedA.getToB().lazyToA, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + + Document target = template.execute(db -> { + return db.getCollection(collectionB).find(Filters.eq("_id", "b")).first(); + }); + assertThat(target.get("lazyToA", Object.class)).isEqualTo("a"); + } + + @Test // GH-3602 + void loadCollectionReferenceWithMissingRefs() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + + // ref-1 is missing. + Document refSource = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Arrays.asList("ref-1", "ref-2")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Test // GH-3805 + void loadEmptyCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. + Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedValueRef", + Collections.emptyList()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + + @Test // GH-3805 + void loadEmptyMapReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference map.
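+ // raw shape: { _id: "id-1", value: "v1", simplePreinitializedMapRef: { } }; the preinitialized map should stay empty rather than become null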
+ Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedMapRef", + new Document()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedMapRef).isEmpty(); + } + + @Test // GH-3805 + void loadNoExistingCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // no reference array at all + Document source = new Document("_id", "id-1").append("value", "v1"); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + + @Test // GH-3806 + void resolveReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithRequiredArgsCtor source = new WithRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithRequiredArgsCtor.class); + assertThat(target.publisher).isNotNull(); + } + + @Test // GH-3806 + void resolveLazyReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithLazyRequiredArgsCtor source = new WithLazyRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithLazyRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithLazyRequiredArgsCtor.class); + + // proxy not yet resolved + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + + // resolve the proxy by invoking a method on it + assertThat(target.getPublisher().getName()).isEqualTo("ppp"); + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + assertThat(proxy.isResolved()).isTrue(); + }); + } + + @Test // GH-3602 + void queryForReference() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + a.toB = b; + template.save(a); + + WithRefA a2 = new WithRefA(); + a2.id = "a2"; + template.save(a2); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("toB").is(b)).firstValue(); + assertThat(loadedA.getId()).isEqualTo(a.getId()); + } + + @Test // GH-3602 + void queryForReferenceInCollection() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + Document shouldBeFound = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Arrays.asList("ref-1", "ref-2")); + Document shouldNotBeFound = new Document("_id", "id-2").append("value", "v2").append("simpleValueRef", + Arrays.asList("ref-1")); + + template.execute(db -> { + + db.getCollection(rootCollectionName).insertOne(shouldBeFound); + db.getCollection(rootCollectionName).insertOne(shouldNotBeFound); + return null; + }); + + SimpleObjectRef objectRef = new SimpleObjectRef("ref-2", "some irrelevant value"); + + List<CollectionRefRoot> loaded = template.query(CollectionRefRoot.class) + .matching(where("simpleValueRef").in(objectRef)).all(); +
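+ // the entity passed to in(...) is reduced to its reference value ("ref-2") before the query runs, so only "id-1" matches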
assertThat(loaded).map(CollectionRefRoot::getId).containsExactly("id-1"); + } + + @Test // GH-3602 + void queryForReferenceOnIdField() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + a.toB = b; + template.save(a); + + WithRefA a2 = new WithRefA(); + a2.id = "a2"; + template.save(a2); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("toB.id").is(b.id)).firstValue(); + assertThat(loadedA.getId()).isEqualTo(a.getId()); + } + + @Test // GH-3602 + void updateReferenceWithEntityHavingPointerConversion() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + template.save(a); + + template.update(WithRefA.class).apply(new Update().set("toB", b)).first(); + + String collectionA = template.getCollectionName(WithRefA.class); + + Document target = template.execute(db -> { + return db.getCollection(collectionA).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("toB", "b"); + } + + @Test // GH-3602 + void updateReferenceWithEntityWithoutPointerConversion() { + + String collectionName = template.getCollectionName(SingleRefRoot.class); + SingleRefRoot refRoot = new SingleRefRoot(); + refRoot.id = "root-1"; + + SimpleObjectRef ref = new SimpleObjectRef("ref-1", "me the referenced object"); + + template.save(refRoot); + + template.update(SingleRefRoot.class).apply(new Update().set("simpleValueRef", ref)).first(); + + Document target = template.execute(db -> { + return db.getCollection(collectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", "ref-1"); + } + + @Test // GH-3602 + void updateReferenceWithValue() { + + WithRefA a = new WithRefA(); + a.id = "a"; + template.save(a); + + template.update(WithRefA.class).apply(new Update().set("toB", "b")).first(); + + String collectionA = template.getCollectionName(WithRefA.class); + + Document target = template.execute(db -> { + return db.getCollection(collectionA).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("toB", "b"); + } + + @Test // GH-4041 + void updateReferenceWithPushToCollection() { + + WithListOfRefs a = new WithListOfRefs(); + a.id = "a"; + template.save(a); + + WithListOfRefs b = new WithListOfRefs(); + b.id = "b"; + template.save(b); + + template.update(WithListOfRefs.class).matching(where("id").is(a.id)) + .apply(new Update().push("refs").each(new Object[] { b })).first(); + + String collection = template.getCollectionName(WithListOfRefs.class); + + Document target = template.execute(db -> { + return db.getCollection(collection).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("refs", Collections.singletonList("b")); + } + + @Test // GH-3782 + void updateReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + template.save(root); + + template.update(SingleRefRoot.class).apply(new Update().set("customIdTargetRef", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "b"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("customIdTargetRef", expectedIdValue); + } + + @Test // GH-3602 + void 
updateReferenceCollectionWithEntity() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.simpleValueRef = Collections.singletonList(new SimpleObjectRef("ref-1", "beastie")); + + template.save(root); + + template.update(CollectionRefRoot.class) + .apply(new Update().push("simpleValueRef").value(new SimpleObjectRef("ref-2", "boys"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", Arrays.asList("ref-1", "ref-2")); + } + + @Test // GH-3602 + void updateReferenceCollectionWithValue() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.simpleValueRef = Collections.singletonList(new SimpleObjectRef("ref-1", "beastie")); + + template.save(root); + + template.update(CollectionRefRoot.class).apply(new Update().push("simpleValueRef").value("ref-2")).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", Arrays.asList("ref-1", "ref-2")); + } + + @Test // GH-3602 + @Disabled("Property path resolution does not work inside maps, the key is considered :/") + void updateReferenceMapWithEntity() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.mapValueRef = Collections.singletonMap("beastie", new SimpleObjectRef("ref-1", "boys")); + + template.save(root); + + template.update(CollectionRefRoot.class) + .apply(new Update().set("mapValueRef.rise", new SimpleObjectRef("ref-2", "against"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("mapValueRef", new Document("beastie", "ref-1").append("rise", "ref-2")); + } + + @Test // GH-3602 + void updateReferenceMapWithValue() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.mapValueRef = Collections.singletonMap("beastie", new SimpleObjectRef("ref-1", "boys")); + + template.save(root); + + template.update(CollectionRefRoot.class).apply(new Update().set("mapValueRef.rise", "ref-2")).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("mapValueRef", new Document("beastie", "ref-1").append("rise", "ref-2")); + } + + @Test // GH-3602 + void useReadingWriterConverterPairForLoading() { + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + root.withReadingConverter = new SimpleObjectRefWithReadingConverter("ref-1", "value-1"); + + template.save(root.withReadingConverter); + + template.save(root); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(SingleRefRoot.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("withReadingConverter", + new Document("ref-key-from-custom-write-converter", root.withReadingConverter.id)); + + 
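+ // the registered WritingConverter produced the custom document above; its reading counterpart must restore the object on load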
SingleRefRoot loaded = template.findOne(query(where("id").is(root.id)), SingleRefRoot.class); + assertThat(loaded.withReadingConverter).isInstanceOf(SimpleObjectRefWithReadingConverter.class); + } + + @Test // GH-3602 + void deriveMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + book.publisher = publisher; + + template.save(book); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); + }); + + assertThat(target).containsEntry("publisher", new Document("acc", publisher.acronym).append("n", publisher.name)); + + Book result = template.findOne(query(where("id").is(book.id)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void updateDerivedMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + + template.save(book); + + template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher)) + .first(); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); + }); + + assertThat(target).containsEntry("publisher", new Document("acc", publisher.acronym).append("n", publisher.name)); + + Book result = template.findOne(query(where("id").is(book.id)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void queryDerivedMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + book.publisher = publisher; + + template.save(book); + book.publisher = publisher; + + Book result = template.findOne(query(where("publisher").is(publisher)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void allowsDirectUsageOfAtReference() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + UsingAtReference root = new UsingAtReference(); + root.id = "book-1"; + root.publisher = publisher; + + template.save(root); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("publisher", "p-1"); + + UsingAtReference result = template.findOne(query(where("id").is(root.id)), UsingAtReference.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void updateWhenUsingAtReferenceDirectly() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + UsingAtReference root = new UsingAtReference(); + root.id = "book-1"; + + template.save(root); + template.update(UsingAtReference.class).matching(where("id").is(root.id)).apply(new Update().set("publisher", publisher)).first(); + + Document target = template.execute(db -> { + 
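+ // read back the raw document; plain @Reference storage keeps just the publisher id ("p-1")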
return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("publisher", "p-1"); + } + + @Test // GH-3798 + void allowsOneToMayStyleLookupsUsingSelfVariable() { + + OneToManyStyleBook book1 = new OneToManyStyleBook(); + book1.id = "id-1"; + book1.publisherId = "p-100"; + + OneToManyStyleBook book2 = new OneToManyStyleBook(); + book2.id = "id-2"; + book2.publisherId = "p-200"; + + OneToManyStyleBook book3 = new OneToManyStyleBook(); + book3.id = "id-3"; + book3.publisherId = "p-100"; + + template.save(book1); + template.save(book2); + template.save(book3); + + OneToManyStylePublisher publisher = new OneToManyStylePublisher(); + publisher.id = "p-100"; + + template.save(publisher); + + OneToManyStylePublisher target = template.findOne(query(where("id").is(publisher.id)), OneToManyStylePublisher.class); + assertThat(target.books).containsExactlyInAnyOrder(book1, book3); + } + + @Test // GH-3847 + void writeReferenceWithIdStringThatIsAnObjectId() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + ObjectId id = new ObjectId(); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.simpleValueRef = new SimpleObjectRef(id.toHexString(), "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef")).isEqualTo(id); + } + + @Test // GH-3847 + void readWithIdStringThatIsAnObjectId() { + + ObjectId id = new ObjectId(); + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", id).append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", id); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRef()).isEqualTo(new SimpleObjectRef(id.toHexString(), "me-the-referenced-object")); + } + + @Test // GH-3847 + void readWriteTypeReferenceHavingFixedStringIdTargetType() { + + ObjectId id = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + ObjectRefHavingStringIdTargetType customStringIdTargetRef = new ObjectRefHavingStringIdTargetType(id.toHexString(), + "me-the-referenced-object"); + template.save(customStringIdTargetRef); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.customStringIdTargetRef = customStringIdTargetRef; + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customStringIdTargetRef")).isEqualTo(id.toHexString()); + + SingleRefRoot result = template.findOne(query(where("id").is("root-1")), SingleRefRoot.class); + assertThat(result.getCustomStringIdTargetRef()) + .isEqualTo(new ObjectRefHavingStringIdTargetType(id.toHexString(), "me-the-referenced-object")); + } + + @Test // GH-4484 + void resolveReferenceForOneToManyLookupWithSelfVariableWhenUsedInCtorArgument() { + + 
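+		// the publisher is saved without books; the constructor argument is populated on load
+		// through the '{ publisherId : ?#{#self._id} }' lookup declared on the books property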
OneToManyStylePublisherWithRequiredArgsCtor publisher = new OneToManyStylePublisherWithRequiredArgsCtor("p-100", null); + template.save(publisher); + + OneToManyStyleBook book1 = new OneToManyStyleBook(); + book1.id = "id-1"; + book1.publisherId = publisher.id; + + OneToManyStyleBook book2 = new OneToManyStyleBook(); + book2.id = "id-2"; + book2.publisherId = "p-200"; + + OneToManyStyleBook book3 = new OneToManyStyleBook(); + book3.id = "id-3"; + book3.publisherId = publisher.id; + + template.save(book1); + template.save(book2); + template.save(book3); + + OneToManyStylePublisherWithRequiredArgsCtor target = template.findOne(query(where("id").is(publisher.id)), OneToManyStylePublisherWithRequiredArgsCtor.class); + assertThat(target.books).containsExactlyInAnyOrder(book1, book3); + } + + static class SingleRefRoot { + + String id; + String value; + + @DocumentReference SimpleObjectRefWithReadingConverter withReadingConverter; + + @DocumentReference(lookup = "{ '_id' : ?#{#target} }") // + SimpleObjectRef simpleValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", lazy = true) // + SimpleObjectRef simpleLazyValueRef; + + @Field("simple-value-ref-annotated-field-name") // + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + SimpleObjectRef simpleValueRefWithAnnotatedFieldName; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }") // + ObjectRefOfDocument objectValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "#collection") // + ObjectRefOfDocumentWithEmbeddedCollectionName objectValueRefWithEmbeddedCollectionName; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // + ObjectRefOnNonIdField objectValueRefOnNonIdFields; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }", lazy = true) // + ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields; + + @DocumentReference ObjectRefHavingCustomizedIdTargetType customIdTargetRef; + + @DocumentReference ObjectRefHavingStringIdTargetType customStringIdTargetRef; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public SimpleObjectRefWithReadingConverter getWithReadingConverter() { + return this.withReadingConverter; + } + + public SimpleObjectRef getSimpleValueRef() { + return this.simpleValueRef; + } + + public SimpleObjectRef getSimpleLazyValueRef() { + return this.simpleLazyValueRef; + } + + public SimpleObjectRef getSimpleValueRefWithAnnotatedFieldName() { + return this.simpleValueRefWithAnnotatedFieldName; + } + + public ObjectRefOfDocument getObjectValueRef() { + return this.objectValueRef; + } + + public ObjectRefOfDocumentWithEmbeddedCollectionName getObjectValueRefWithEmbeddedCollectionName() { + return this.objectValueRefWithEmbeddedCollectionName; + } + + public ObjectRefOnNonIdField getObjectValueRefOnNonIdFields() { + return this.objectValueRefOnNonIdFields; + } + + public ObjectRefOnNonIdField getLazyObjectValueRefOnNonIdFields() { + return this.lazyObjectValueRefOnNonIdFields; + } + + public ObjectRefHavingCustomizedIdTargetType getCustomIdTargetRef() { + return this.customIdTargetRef; + } + + public ObjectRefHavingStringIdTargetType getCustomStringIdTargetRef() { + return this.customStringIdTargetRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setWithReadingConverter(SimpleObjectRefWithReadingConverter withReadingConverter) { + 
this.withReadingConverter = withReadingConverter; + } + + public void setSimpleValueRef(SimpleObjectRef simpleValueRef) { + this.simpleValueRef = simpleValueRef; + } + + public void setSimpleLazyValueRef(SimpleObjectRef simpleLazyValueRef) { + this.simpleLazyValueRef = simpleLazyValueRef; + } + + public void setSimpleValueRefWithAnnotatedFieldName(SimpleObjectRef simpleValueRefWithAnnotatedFieldName) { + this.simpleValueRefWithAnnotatedFieldName = simpleValueRefWithAnnotatedFieldName; + } + + public void setObjectValueRef(ObjectRefOfDocument objectValueRef) { + this.objectValueRef = objectValueRef; + } + + public void setObjectValueRefWithEmbeddedCollectionName( + ObjectRefOfDocumentWithEmbeddedCollectionName objectValueRefWithEmbeddedCollectionName) { + this.objectValueRefWithEmbeddedCollectionName = objectValueRefWithEmbeddedCollectionName; + } + + public void setObjectValueRefOnNonIdFields(ObjectRefOnNonIdField objectValueRefOnNonIdFields) { + this.objectValueRefOnNonIdFields = objectValueRefOnNonIdFields; + } + + public void setLazyObjectValueRefOnNonIdFields(ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields) { + this.lazyObjectValueRefOnNonIdFields = lazyObjectValueRefOnNonIdFields; + } + + public void setCustomIdTargetRef(ObjectRefHavingCustomizedIdTargetType customIdTargetRef) { + this.customIdTargetRef = customIdTargetRef; + } + + public void setCustomStringIdTargetRef(ObjectRefHavingStringIdTargetType customStringIdTargetRef) { + this.customStringIdTargetRef = customStringIdTargetRef; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.SingleRefRoot(id=" + this.getId() + ", value=" + this.getValue() + + ", withReadingConverter=" + this.getWithReadingConverter() + ", simpleValueRef=" + this.getSimpleValueRef() + + ", simpleLazyValueRef=" + this.getSimpleLazyValueRef() + ", simpleValueRefWithAnnotatedFieldName=" + + this.getSimpleValueRefWithAnnotatedFieldName() + ", objectValueRef=" + this.getObjectValueRef() + + ", objectValueRefWithEmbeddedCollectionName=" + this.getObjectValueRefWithEmbeddedCollectionName() + + ", objectValueRefOnNonIdFields=" + this.getObjectValueRefOnNonIdFields() + + ", lazyObjectValueRefOnNonIdFields=" + this.getLazyObjectValueRefOnNonIdFields() + ", customIdTargetRef=" + + this.getCustomIdTargetRef() + ", customStringIdTargetRef=" + this.getCustomStringIdTargetRef() + ")"; + } + } + + static class CollectionRefRoot { + + String id; + String value; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + List simpleValueRef; + + @DocumentReference + List simplePreinitializedValueRef = new ArrayList<>(); + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", sort = "{ '_id' : -1 } ") // + List simpleSortedValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + Map mapValueRef; + + @DocumentReference // + Map simplePreinitializedMapRef = new LinkedHashMap<>(); + + @Field("simple-value-ref-annotated-field-name") // + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + List simpleValueRefWithAnnotatedFieldName; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }") // + List objectValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") // + List objectValueRefWithEmbeddedCollectionName; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // + List objectValueRefOnNonIdFields; + + @DocumentReference List customIdTargetRefList; + + @DocumentReference Map customIdTargetRefMap; + + public String getId() { + 
return this.id; + } + + public String getValue() { + return this.value; + } + + public List getSimpleValueRef() { + return this.simpleValueRef; + } + + public List getSimplePreinitializedValueRef() { + return this.simplePreinitializedValueRef; + } + + public List getSimpleSortedValueRef() { + return this.simpleSortedValueRef; + } + + public Map getMapValueRef() { + return this.mapValueRef; + } + + public Map getSimplePreinitializedMapRef() { + return this.simplePreinitializedMapRef; + } + + public List getSimpleValueRefWithAnnotatedFieldName() { + return this.simpleValueRefWithAnnotatedFieldName; + } + + public List getObjectValueRef() { + return this.objectValueRef; + } + + public List getObjectValueRefWithEmbeddedCollectionName() { + return this.objectValueRefWithEmbeddedCollectionName; + } + + public List getObjectValueRefOnNonIdFields() { + return this.objectValueRefOnNonIdFields; + } + + public List getCustomIdTargetRefList() { + return this.customIdTargetRefList; + } + + public Map getCustomIdTargetRefMap() { + return this.customIdTargetRefMap; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setSimpleValueRef(List simpleValueRef) { + this.simpleValueRef = simpleValueRef; + } + + public void setSimplePreinitializedValueRef(List simplePreinitializedValueRef) { + this.simplePreinitializedValueRef = simplePreinitializedValueRef; + } + + public void setSimpleSortedValueRef(List simpleSortedValueRef) { + this.simpleSortedValueRef = simpleSortedValueRef; + } + + public void setMapValueRef(Map mapValueRef) { + this.mapValueRef = mapValueRef; + } + + public void setSimplePreinitializedMapRef(Map simplePreinitializedMapRef) { + this.simplePreinitializedMapRef = simplePreinitializedMapRef; + } + + public void setSimpleValueRefWithAnnotatedFieldName(List simpleValueRefWithAnnotatedFieldName) { + this.simpleValueRefWithAnnotatedFieldName = simpleValueRefWithAnnotatedFieldName; + } + + public void setObjectValueRef(List objectValueRef) { + this.objectValueRef = objectValueRef; + } + + public void setObjectValueRefWithEmbeddedCollectionName( + List objectValueRefWithEmbeddedCollectionName) { + this.objectValueRefWithEmbeddedCollectionName = objectValueRefWithEmbeddedCollectionName; + } + + public void setObjectValueRefOnNonIdFields(List objectValueRefOnNonIdFields) { + this.objectValueRefOnNonIdFields = objectValueRefOnNonIdFields; + } + + public void setCustomIdTargetRefList(List customIdTargetRefList) { + this.customIdTargetRefList = customIdTargetRefList; + } + + public void setCustomIdTargetRefMap(Map customIdTargetRefMap) { + this.customIdTargetRefMap = customIdTargetRefMap; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.CollectionRefRoot(id=" + this.getId() + ", value=" + this.getValue() + + ", simpleValueRef=" + this.getSimpleValueRef() + ", simplePreinitializedValueRef=" + + this.getSimplePreinitializedValueRef() + ", simpleSortedValueRef=" + this.getSimpleSortedValueRef() + + ", mapValueRef=" + this.getMapValueRef() + ", simplePreinitializedMapRef=" + + this.getSimplePreinitializedMapRef() + ", simpleValueRefWithAnnotatedFieldName=" + + this.getSimpleValueRefWithAnnotatedFieldName() + ", objectValueRef=" + this.getObjectValueRef() + + ", objectValueRefWithEmbeddedCollectionName=" + this.getObjectValueRefWithEmbeddedCollectionName() + + ", objectValueRefOnNonIdFields=" + this.getObjectValueRefOnNonIdFields() + ", customIdTargetRefList=" + + 
this.getCustomIdTargetRefList() + ", customIdTargetRefMap=" + this.getCustomIdTargetRefMap() + ")"; + } + } + + @FunctionalInterface + interface ReferenceAble { + Object toReference(); + } + + @org.springframework.data.mongodb.core.mapping.Document("simple-object-ref") + static class SimpleObjectRef { + + @Id String id; + String value; + + public SimpleObjectRef(String id, String value) { + this.id = id; + this.value = value; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SimpleObjectRef that = (SimpleObjectRef) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.SimpleObjectRef(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class SimpleObjectRefWithReadingConverter extends SimpleObjectRef { + + public SimpleObjectRefWithReadingConverter(String id, String value) { + super(id, value); + } + + } + + static class ObjectRefOfDocument implements ReferenceAble { + + @Id String id; + String value; + + public ObjectRefOfDocument(String id, String value) { + this.id = id; + this.value = value; + } + + @Override + public Object toReference() { + return new Document("id", id).append("property", "without-any-meaning"); + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefOfDocument that = (ObjectRefOfDocument) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefOfDocument(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class ObjectRefOfDocumentWithEmbeddedCollectionName implements ReferenceAble { + + @Id String id; + String value; + + public ObjectRefOfDocumentWithEmbeddedCollectionName(String id, String value) { + this.id = id; + this.value = value; + } + + @Override + public Object toReference() { + return new Document("id", id).append("collection", "object-ref-of-document-with-embedded-collection-name"); + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefOfDocumentWithEmbeddedCollectionName that = (ObjectRefOfDocumentWithEmbeddedCollectionName) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return 
"MongoTemplateDocumentReferenceTests.ObjectRefOfDocumentWithEmbeddedCollectionName(id=" + this.getId() + + ", value=" + this.getValue() + ")"; + } + } + + static class ObjectRefOnNonIdField implements ReferenceAble { + + @Id String id; + String value; + String refKey1; + String refKey2; + + public ObjectRefOnNonIdField(String id, String value, String refKey1, String refKey2) { + this.id = id; + this.value = value; + this.refKey1 = refKey1; + this.refKey2 = refKey2; + } + + @Override + public Object toReference() { + return new Document("refKey1", refKey1).append("refKey2", refKey2); + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public String getRefKey1() { + return this.refKey1; + } + + public String getRefKey2() { + return this.refKey2; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setRefKey1(String refKey1) { + this.refKey1 = refKey1; + } + + public void setRefKey2(String refKey2) { + this.refKey2 = refKey2; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefOnNonIdField that = (ObjectRefOnNonIdField) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value) && Objects.equals(refKey1, that.refKey1) + && Objects.equals(refKey2, that.refKey2); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, refKey1, refKey2); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefOnNonIdField(id=" + this.getId() + ", value=" + + this.getValue() + ", refKey1=" + this.getRefKey1() + ", refKey2=" + this.getRefKey2() + ")"; + } + } + + static class ObjectRefHavingCustomizedIdTargetType { + + @MongoId(targetType = FieldType.OBJECT_ID) String id; + String name; + + public ObjectRefHavingCustomizedIdTargetType(String id, String name) { + this.id = id; + this.name = name; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefHavingCustomizedIdTargetType that = (ObjectRefHavingCustomizedIdTargetType) o; + return Objects.equals(id, that.id) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefHavingCustomizedIdTargetType(id=" + this.getId() + ", name=" + + this.getName() + ")"; + } + } + + static class ObjectRefHavingStringIdTargetType { + + @MongoId(targetType = FieldType.STRING) String id; + String name; + + public ObjectRefHavingStringIdTargetType(String id, String name) { + this.id = id; + this.name = name; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefHavingStringIdTargetType that = (ObjectRefHavingStringIdTargetType) o; + 
return Objects.equals(id, that.id) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefHavingStringIdTargetType(id=" + this.getId() + ", name=" + + this.getName() + ")"; + } + } + + static class ReferencableConverter implements Converter> { + + @Nullable + @Override + public DocumentPointer convert(ReferenceAble source) { + return source::toReference; + } + } + + @WritingConverter + static class DocumentToSimpleObjectRefWithReadingConverter + implements Converter, SimpleObjectRefWithReadingConverter> { + + @Nullable + @Override + public SimpleObjectRefWithReadingConverter convert(DocumentPointer source) { + + Document document = client.getDatabase(DB_NAME).getCollection("simple-object-ref") + .find(Filters.eq("_id", source.getPointer().get("ref-key-from-custom-write-converter"))).first(); + return new SimpleObjectRefWithReadingConverter(document.getString("_id"), document.getString("value")); + } + } + + @WritingConverter + static class SimpleObjectRefWithReadingConverterToDocumentConverter + implements Converter> { + + @Nullable + @Override + public DocumentPointer convert(SimpleObjectRefWithReadingConverter source) { + return () -> new Document("ref-key-from-custom-write-converter", source.getId()); + } + } + + static class WithRefA/* to B */ implements ReferenceAble { + + @Id String id; + @DocumentReference // + WithRefB toB; + + @Override + public Object toReference() { + return id; + } + + public String getId() { + return this.id; + } + + public WithRefB getToB() { + return this.toB; + } + + public void setId(String id) { + this.id = id; + } + + public void setToB(WithRefB toB) { + this.toB = toB; + } + } + + static class WithRefB/* to A */ implements ReferenceAble { + + @Id String id; + @DocumentReference(lazy = true) // + WithRefA lazyToA; + + @DocumentReference // + WithRefA eagerToA; + + @Override + public Object toReference() { + return id; + } + + public String getId() { + return this.id; + } + + public WithRefA getLazyToA() { + return this.lazyToA; + } + + public WithRefA getEagerToA() { + return this.eagerToA; + } + + public void setId(String id) { + this.id = id; + } + + public void setLazyToA(WithRefA lazyToA) { + this.lazyToA = lazyToA; + } + + public void setEagerToA(WithRefA eagerToA) { + this.eagerToA = eagerToA; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.WithRefB(id=" + this.getId() + ", lazyToA=" + this.getLazyToA() + + ", eagerToA=" + this.getEagerToA() + ")"; + } + } + + static class ReferencedObject {} + + class ToDocumentPointerConverter implements Converter> { + + @Nullable + @Override + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("", source); + } + } + + static class Book { + + String id; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") // + Publisher publisher; + + public String getId() { + return this.id; + } + + public Publisher getPublisher() { + return this.publisher; + } + + public void setId(String id) { + this.id = id; + } + + public void setPublisher(Publisher publisher) { + this.publisher = publisher; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.Book(id=" + this.getId() + ", publisher=" + this.getPublisher() + ")"; + } + } + + static class Publisher { + + String id; + String acronym; + String name; + + public String getId() { + return id; + } + + public void 
setId(String id) { + this.id = id; + } + + public String getAcronym() { + return acronym; + } + + public void setAcronym(String acronym) { + this.acronym = acronym; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + static class UsingAtReference { + + String id; + + @Reference // + Publisher publisher; + + public String getId() { + return this.id; + } + + public Publisher getPublisher() { + return this.publisher; + } + + public void setId(String id) { + this.id = id; + } + + public void setPublisher(Publisher publisher) { + this.publisher = publisher; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.UsingAtReference(id=" + this.getId() + ", publisher=" + + this.getPublisher() + ")"; + } + } + + static class OneToManyStyleBook { + + @Id + String id; + + private String publisherId; + + public String getId() { + return this.id; + } + + public String getPublisherId() { + return this.publisherId; + } + + public void setId(String id) { + this.id = id; + } + + public void setPublisherId(String publisherId) { + this.publisherId = publisherId; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + OneToManyStyleBook that = (OneToManyStyleBook) o; + return Objects.equals(id, that.id) && Objects.equals(publisherId, that.publisherId); + } + + @Override + public int hashCode() { + return Objects.hash(id, publisherId); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.OneToManyStyleBook(id=" + this.getId() + ", publisherId=" + + this.getPublisherId() + ")"; + } + } + + static class OneToManyStylePublisher { + + @Id + String id; + + @ReadOnlyProperty + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") + List books; + + public String getId() { + return this.id; + } + + public List getBooks() { + return this.books; + } + + public void setId(String id) { + this.id = id; + } + + public void setBooks(List books) { + this.books = books; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.OneToManyStylePublisher(id=" + this.getId() + ", books=" + + this.getBooks() + ")"; + } + } + + static class WithRequiredArgsCtor { + + final String id; + + @DocumentReference final Publisher publisher; + + public WithRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + } + + static class WithLazyRequiredArgsCtor { + + final String id; + + @DocumentReference(lazy = true) final Publisher publisher; + + public WithLazyRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + + public String getId() { + return id; + } + + public Publisher getPublisher() { + return publisher; + } + } + + public static class WithListOfRefs { + + @Id private String id; + + @DocumentReference private List refs; + + public String getId() { + return this.id; + } + + public List getRefs() { + return this.refs; + } + + public void setId(String id) { + this.id = id; + } + + public void setRefs(List refs) { + this.refs = refs; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.WithListOfRefs(id=" + this.getId() + ", refs=" + this.getRefs() + ")"; + } + } + + static class OneToManyStylePublisherWithRequiredArgsCtor { + + @Id + String id; + + @ReadOnlyProperty + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") + List books; + + public 
OneToManyStylePublisherWithRequiredArgsCtor(String id, List books) { + this.id = id; + this.books = books; + } + + public String getId() { + return this.id; + } + + public List getBooks() { + return this.books; + } + + public void setId(String id) { + this.id = id; + } + + public void setBooks(List books) { + this.books = books; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.OneToManyStylePublisherWithRequiredArgsCtor(id=" + this.getId() + ", book=" + + this.getBooks() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateFieldProjectionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateFieldProjectionTests.java new file mode 100644 index 0000000000..1cbb5ab519 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateFieldProjectionTests.java @@ -0,0 +1,268 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Objects; +import java.util.function.Consumer; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression; +import org.springframework.data.mongodb.core.aggregation.StringOperators; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link org.springframework.data.mongodb.core.query.Field}. 
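+ * Projections are defined via {@code Query#fields()}, either from raw
+ * {@link org.springframework.data.mongodb.MongoExpression MongoExpressions} or from mapped aggregation
+ * expressions such as {@code StringOperators.valueOf("lastname").toUpper()}.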
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Giacomo Baso + */ +@ExtendWith(MongoTemplateExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.4") +class MongoTemplateFieldProjectionTests { + + private static @Template MongoTestTemplate template; + + private Person luke; + + @BeforeEach + void beforeEach() { + + luke = new Person(); + luke.id = "luke"; + luke.firstname = "luke"; + luke.lastname = "skywalker"; + + template.save(luke); + } + + @AfterEach + void afterEach() { + template.flush(Person.class, Wrapper.class); + } + + @Test // GH-3583 + void usesMongoExpressionAsIs() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(MongoExpression.create("'$toUpper' : '$last_name'")) + .as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void usesMongoExpressionWithPlaceholdersAsIs() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(MongoExpression.create("'$toUpper' : '$?0'", "last_name")) + .as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-4821 + void usesMongoExpressionWithLineBreaksAsIs() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(MongoExpression.create(""" + { + '$toUpper' : '$last_name' + } + """)) + .as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsAggregationExpressionToDomainType() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(StringOperators.valueOf("lastname").toUpper()).as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsAggregationSpELExpressionToDomainType() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(AggregationSpELExpression.expressionOf("toUpper(lastname)")).as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsNestedPathAggregationExpressionToDomainType() { + + Wrapper wrapper = new Wrapper(); + wrapper.id = "wrapper"; + wrapper.person = luke; + + template.save(wrapper); + + Query query = Query.query(Criteria.where("id").is(wrapper.id)); + query.fields().include("person.firstname", "person.id") + .project(StringOperators.valueOf("person.lastname").toUpper()).as("person.last_name"); + + Wrapper result = template.findOne(query, Wrapper.class); + assertThat(result.person).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsProjectionOnUnwrapped() { + + luke.address = new Address(); + luke.address.planet = "tatoine"; + + template.save(luke); + + Person result = findLuke(fields -> { + fields.project(StringOperators.valueOf("address.planet").toUpper()).as("planet"); + }); + + assertThat(result.address.planet).isEqualTo("TATOINE"); + } + + private Person findLuke(Consumer projection) { + + Query query = Query.query(Criteria.where("id").is(luke.id)); + projection.accept(query.fields()); + return template.findOne(query, Person.class); + } + + static class Wrapper { + + @Id String id; + Person person; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Wrapper wrapper = (Wrapper) o; + return Objects.equals(id, wrapper.id) && Objects.equals(person, wrapper.person); + } + + @Override + public int hashCode() { + return 
Objects.hash(id, person); + } + + public String toString() { + return "MongoTemplateFieldProjectionTests.Wrapper(id=" + this.id + ", person=" + this.person + ")"; + } + } + + static class Person { + + @Id String id; + String firstname; + + @Field("last_name") // + String lastname; + + @Unwrapped.Nullable Address address; + + Person toUpperCaseLastnameClone(Person source) { + + Person target = new Person(); + target.id = source.id; + target.firstname = source.firstname; + target.lastname = source.lastname.toUpperCase(); + target.address = source.address; + + return target; + } + + Person upperCaseLastnameClone() { + return toUpperCaseLastnameClone(this); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(address, person.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, address); + } + + public String toString() { + return "MongoTemplateFieldProjectionTests.Person(id=" + this.id + ", firstname=" + this.firstname + ", lastname=" + + this.lastname + ", address=" + this.address + ")"; + } + } + + static class Address { + + String planet; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(planet, address.planet); + } + + @Override + public int hashCode() { + return Objects.hash(planet); + } + + public String toString() { + return "MongoTemplateFieldProjectionTests.Address(planet=" + this.planet + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java index ffd4034664..75fbbd516a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,45 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.Arrays; +import java.util.Objects; + +import org.bson.Document; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.annotation.Id; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.MongoException; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.geojson.Geometry; +import com.mongodb.client.model.geojson.MultiPolygon; +import com.mongodb.client.model.geojson.PolygonCoordinates; +import com.mongodb.client.model.geojson.Position; /** * Integration test for {@link MongoTemplate}. 
- * + * * @author Oliver Gierke * @author Thomas Risberg + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:template-mapping.xml") public class MongoTemplateMappingTests { - @Autowired - @Qualifier("mongoTemplate1") - MongoTemplate template1; - - @Autowired - @Qualifier("mongoTemplate2") - MongoTemplate template2; + @Autowired @Qualifier("mongoTemplate1") MongoTemplate template1; - @Rule - public ExpectedException thrown = ExpectedException.none(); + @Autowired @Qualifier("mongoTemplate2") MongoTemplate template2; @Before public void setUp() { @@ -62,19 +61,133 @@ public void setUp() { } @Test - public void insertsEntityCorrectly1() throws Exception { + public void insertsEntityCorrectly1() { addAndRetrievePerson(template1); checkPersonPersisted(template1); - } @Test - public void insertsEntityCorrectly2() throws Exception { + public void insertsEntityCorrectly2() { addAndRetrievePerson(template2); checkPersonPersisted(template2); + } + + @Test // DATAMONGO-2357 + public void writesAndReadsEntityWithNativeMongoGeoJsonTypesCorrectly() { + + WithMongoGeoJson source = new WithMongoGeoJson(); + source.id = "id-2"; + source.multiPolygon = new MultiPolygon(Arrays.asList(new PolygonCoordinates(Arrays.asList(new Position(0, 0), + new Position(0, 1), new Position(1, 1), new Position(1, 0), new Position(0, 0))))); + + template1.save(source); + + assertThat(template1.findOne(query(where("id").is(source.id)), WithMongoGeoJson.class)).isEqualTo(source); + } + + @Test // DATAMONGO-2357 + public void writesAndReadsEntityWithOpenNativeMongoGeoJsonTypesCorrectly() { + + WithOpenMongoGeoJson source = new WithOpenMongoGeoJson(); + source.id = "id-2"; + source.geometry = new MultiPolygon(Arrays.asList(new PolygonCoordinates(Arrays.asList(new Position(0, 0), + new Position(0, 1), new Position(1, 1), new Position(1, 0), new Position(0, 0))))); + + template1.save(source); + + assertThat(template1.findOne(query(where("id").is(source.id)), WithOpenMongoGeoJson.class)).isEqualTo(source); + } + + static class WithMongoGeoJson { + + @Id String id; + MultiPolygon multiPolygon; + public String getId() { + return this.id; + } + + public MultiPolygon getMultiPolygon() { + return this.multiPolygon; + } + + public void setId(String id) { + this.id = id; + } + + public void setMultiPolygon(MultiPolygon multiPolygon) { + this.multiPolygon = multiPolygon; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithMongoGeoJson that = (WithMongoGeoJson) o; + return Objects.equals(id, that.id) && Objects.equals(multiPolygon, that.multiPolygon); + } + + @Override + public int hashCode() { + return Objects.hash(id, multiPolygon); + } + + public String toString() { + return "MongoTemplateMappingTests.WithMongoGeoJson(id=" + this.getId() + ", multiPolygon=" + + this.getMultiPolygon() + ")"; + } + } + + static class WithOpenMongoGeoJson { + + @Id String id; + Geometry geometry; + + public WithOpenMongoGeoJson() {} + + public String getId() { + return this.id; + } + + public Geometry getGeometry() { + return this.geometry; + } + + public void setId(String id) { + this.id = id; + } + + public void setGeometry(Geometry geometry) { + this.geometry = geometry; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithOpenMongoGeoJson that = 
(WithOpenMongoGeoJson) o; + return Objects.equals(id, that.id) && Objects.equals(geometry, that.geometry); + } + + @Override + public int hashCode() { + return Objects.hash(id, geometry); + } + + public String toString() { + return "MongoTemplateMappingTests.WithOpenMongoGeoJson(id=" + this.getId() + ", geometry=" + this.getGeometry() + + ")"; + } } private void addAndRetrievePerson(MongoTemplate template) { @@ -83,15 +196,15 @@ private void addAndRetrievePerson(MongoTemplate template) { template.insert(person); Person result = template.findById(person.getId(), Person.class); - assertThat(result.getFirstName(), is("Oliver")); - assertThat(result.getAge(), is(25)); + assertThat(result.getFirstName()).isEqualTo("Oliver"); + assertThat(result.getAge()).isEqualTo(25); } private void checkPersonPersisted(MongoTemplate template) { template.execute(Person.class, new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - DBObject dbo = collection.findOne(); - assertThat((String) dbo.get("name"), is("Oliver")); + public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + Document document = collection.find(new Document()).first(); + assertThat((String) document.get("name")).isEqualTo("Oliver"); return null; } }); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateReplaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateReplaceTests.java new file mode 100644 index 0000000000..6b8e158e55 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateReplaceTests.java @@ -0,0 +1,297 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.ReplaceOptions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.bson.BsonInt64; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; +import com.mongodb.client.result.UpdateResult; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateReplaceTests { + + static final String DB_NAME = "mongo-template-replace-tests"; + static final String RESTAURANT_COLLECTION = "restaurant"; + + static @Client MongoClient client; + private MongoTemplate template; + + @BeforeEach + void beforeEach() { + + template = new MongoTemplate(client, DB_NAME); + template.setEntityLifecycleEventsEnabled(false); + + initTestData(); + } + + @AfterEach() + void afterEach() { + clearTestData(); + } + + @Test // GH-4462 + void replacesExistingDocument() { + + UpdateResult result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant("Central Pork Cafe", "Manhattan")); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesFirstOnMoreThanOneMatch() { + + UpdateResult result = template + .replace(query(where("violations").exists(true)), new Restaurant("Central Pork Cafe", "Manhattan")); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDoc() { + + UpdateResult result = template.replace(query(where("r-name").is("Central Perk Cafe")), + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), + template.getCollectionName(Restaurant.class)); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); 
+ } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDocMappingQueryAgainstDomainType() { + + UpdateResult result = template.replace(query(where("name").is("Central Perk Cafe")), Restaurant.class, + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), ReplaceOptions.none(), template.getCollectionName(Restaurant.class)); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithMatchingId() { + + UpdateResult result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(1L, "Central Pork Cafe", "Manhattan", 0)); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithNewIdThrowsDataIntegrityViolationException() { + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(4L, "Central Pork Cafe", "Manhattan", 0))); + } + + @Test // GH-4462 + void doesNothingIfNoMatchFoundAndUpsertSetToFalse/* by default */() { + + UpdateResult result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(null, "Pizza Rat's Pizzaria", "Manhattan", 8)); + + assertThat(result.getMatchedCount()).isEqualTo(0); + assertThat(result.getModifiedCount()).isEqualTo(0); + + Document document = retrieve(collection -> collection.find(Filters.eq("r-name", "Pizza Rat's Pizzaria")).first()); + assertThat(document).isNull(); + } + + @Test // GH-4462 + void insertsIfNoMatchFoundAndUpsertSetToTrue() { + + UpdateResult result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(4L, "Pizza Rat's Pizzaria", "Manhattan", 8), replaceOptions().upsert()); + + assertThat(result.getMatchedCount()).isEqualTo(0); + assertThat(result.getModifiedCount()).isEqualTo(0); + assertThat(result.getUpsertedId()).isEqualTo(new BsonInt64(4L)); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 4)).first()); + assertThat(document).containsEntry("r-name", "Pizza Rat's Pizzaria"); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + void replaceConsidersSort() { + + UpdateResult result = template.replace(new Query().with(Sort.by(Direction.DESC, "name")), new Restaurant("resist", "Manhattan")); + + assertThat(result.getModifiedCount()).isOne(); + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()); + assertThat(document).containsEntry("r-name", "resist"); + } + + void initTestData() { + + List testData = Stream.of( // + "{ '_id' : 1, 'r-name' : 'Central Perk Cafe', 'Borough' : 'Manhattan' }", + "{ '_id' : 2, 'r-name' : 'Rock A Feller Bar and Grill', 'Borough' : 'Queens', 'violations' : 2 }", + "{ '_id' : 3, 'r-name' : 'Empire State Pub', 'Borough' : 'Brooklyn', 'violations' : 0 }") // + .map(Document::parse).collect(Collectors.toList()); + + doInCollection(collection -> collection.insertMany(testData)); + } + + void clearTestData() { + doInCollection(collection -> 
collection.deleteMany(new Document())); + } + + void doInCollection(Consumer> consumer) { + retrieve(collection -> { + consumer.accept(collection); + return "done"; + }); + } + + T retrieve(Function, T> fkt) { + return fkt.apply(client.getDatabase(DB_NAME).getCollection(RESTAURANT_COLLECTION)); + } + + @org.springframework.data.mongodb.core.mapping.Document(RESTAURANT_COLLECTION) + static class Restaurant { + + Long id; + + @Field("r-name") String name; + String borough; + Integer violations; + + Restaurant() {} + + Restaurant(String name, String borough) { + + this.name = name; + this.borough = borough; + } + + Restaurant(Long id, String name, String borough, Integer violations) { + + this.id = id; + this.name = name; + this.borough = borough; + this.violations = violations; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getRName() { + return name; + } + + public void setRName(String rName) { + this.name = rName; + } + + public String getBorough() { + return borough; + } + + public void setBorough(String borough) { + this.borough = borough; + } + + public int getViolations() { + return violations; + } + + public void setViolations(int violations) { + this.violations = violations; + } + + @Override + public String toString() { + return "Restaurant{" + "id=" + id + ", name='" + name + '\'' + ", borough='" + borough + '\'' + ", violations=" + + violations + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Restaurant that = (Restaurant) o; + return violations == that.violations && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && Objects.equals(borough, that.borough); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, borough, violations); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateScrollTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateScrollTests.java new file mode 100644 index 0000000000..766929c732 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateScrollTests.java @@ -0,0 +1,582 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.lang.reflect.Proxy; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.MongoTemplateTests.PersonWithIdPropertyOfTypeUUIDListener; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link org.springframework.data.domain.Window} queries. 
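+ * A {@link org.springframework.data.domain.ScrollPosition} obtained from a previous window is passed back into the
+ * query to fetch the next (or previous) chunk of results, e.g.:
+ * <pre class="code">
+ * Window&lt;Person&gt; window = template.scroll(query.with(ScrollPosition.keyset()), Person.class);
+ * window = template.scroll(query.with(window.positionAt(window.size() - 1)), Person.class);
+ * </pre>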
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ */
+@ExtendWith(MongoClientExtension.class)
+class MongoTemplateScrollTests {
+
+	static @Client MongoClient client;
+
+	public static final String DB_NAME = "mongo-template-scroll-tests";
+
+	ConfigurableApplicationContext context = new GenericApplicationContext();
+
+	MongoTestTemplate template = new MongoTestTemplate(cfg -> {
+
+		cfg.configureDatabaseFactory(it -> {
+
+			it.client(client);
+			it.defaultDb(DB_NAME);
+		});
+
+		cfg.configureMappingContext(it -> {
+			it.autocreateIndex(false);
+		});
+
+		cfg.configureApplicationContext(it -> {
+			it.applicationContext(context);
+			it.addEventListener(new PersonWithIdPropertyOfTypeUUIDListener());
+		});
+
+		cfg.configureAuditing(it -> {
+			it.auditingHandler(ctx -> {
+				return new IsNewAwareAuditingHandler(PersistentEntities.of(ctx));
+			});
+		});
+	});
+
+	private static int compareProxies(PersonInterfaceProjection actual, PersonInterfaceProjection expected) {
+		if (actual.getAge() != expected.getAge()) {
+			return -1;
+		}
+		if (!ObjectUtils.nullSafeEquals(actual.getFirstName(), expected.getFirstName())) {
+			return -1;
+		}
+
+		return 0;
+	}
+
+	@BeforeEach
+	void setUp() {
+		template.remove(Person.class).all();
+		template.remove(WithNestedDocument.class).all();
+		template.remove(WithRenamedField.class).all();
+	}
+
+	@Test // GH-4308
+	void shouldUseKeysetScrollingWithNestedSort() {
+
+		WithNestedDocument john20 = new WithNestedDocument(null, "John", 120, new WithNestedDocument("John", 20),
+				new Document("name", "bar"));
+		WithNestedDocument john40 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 40),
+				new Document("name", "baz"));
+		WithNestedDocument john41 = new WithNestedDocument(null, "John", 141, new WithNestedDocument("John", 41),
+				new Document("name", "foo"));
+
+		template.insertAll(Arrays.asList(john20, john40, john41));
+
+		Query q = new Query(where("name").regex("J.*")).with(Sort.by("nested.name", "nested.age", "document.name"))
+				.limit(2);
+		q.with(ScrollPosition.keyset());
+
+		Window<WithNestedDocument> window = template.scroll(q, WithNestedDocument.class);
+
+		assertThat(window.hasNext()).isTrue();
+		assertThat(window.isLast()).isFalse();
+		assertThat(window).hasSize(2);
+		assertThat(window).containsOnly(john20, john40);
+
+		window = template.scroll(q.with(window.positionAt(window.size() - 1)), WithNestedDocument.class);
+
+		assertThat(window.hasNext()).isFalse();
+		assertThat(window.isLast()).isTrue();
+		assertThat(window).hasSize(1);
+		assertThat(window).containsOnly(john41);
+	}
+
+	@Test // GH-4308
+	void shouldErrorOnNullValueForQuery() {
+
+		WithNestedDocument john20 = new WithNestedDocument(null, "John", 120, new WithNestedDocument("John", 20),
+				new Document("name", "bar"));
+		WithNestedDocument john40 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41),
+				new Document());
+		WithNestedDocument john41 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41),
+				new Document());
+		WithNestedDocument john42 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41),
+				new Document());
+		WithNestedDocument john43 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41),
+				new Document());
+		WithNestedDocument john44 = new WithNestedDocument(null, "John", 141, new WithNestedDocument("John", 41),
+				new Document("name", "foo"));
+
+		template.insertAll(Arrays.asList(john20, john40, john41, john42, john43, john44));
+	}
+
+	@Test // GH-4308
+	void shouldAllowReverseSort() {
+
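		// [Editorial note, not part of the patch] A minimal sketch of the keyset paging
		// pattern this test exercises, using only calls that appear in this test class.
		// A KeysetScrollPosition captures the sort keys of one window element; backward()
		// flips the scroll direction so the preceding window can be fetched:
		//
		//   Window<Person> window = template.scroll(q.with(ScrollPosition.keyset()), Person.class);
		//   KeysetScrollPosition pos = (KeysetScrollPosition) window.positionAt(window.size() - 1);
		//   Window<Person> previous = template.scroll(q.with(pos.backward()), Person.class);
		//
		// The query must carry a Sort, since keyset scrolling filters on the sort keys.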
+		Person jane_20 = new Person("Jane", 20);
+		Person jane_40 = new Person("Jane", 40);
+		Person jane_42 = new Person("Jane", 42);
+		Person john20 = new Person("John", 20);
+		Person john40_1 = new Person("John", 40);
+		Person john40_2 = new Person("John", 40);
+
+		template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42));
+		Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
+		q.with(ScrollPosition.keyset()).limit(6);
+
+		Window<Person> window = template.scroll(q, Person.class);
+
+		assertThat(window.hasNext()).isFalse();
+		assertThat(window.isLast()).isTrue();
+		assertThat(window).hasSize(6);
+
+		KeysetScrollPosition scrollPosition = (KeysetScrollPosition) window.positionAt(window.size() - 2);
+		window = template.scroll(q.with(scrollPosition.backward()).limit(2), Person.class);
+
+		assertThat(window).hasSize(2);
+		assertThat(window).containsOnly(jane_42, john20);
+		assertThat(window.hasNext()).isTrue();
+		assertThat(window.isLast()).isFalse();
+
+		window = template.scroll(q.with(window.positionAt(0)).limit(2), Person.class);
+
+		assertThat(window).hasSize(2);
+		assertThat(window).containsOnly(jane_20, jane_40);
+		assertThat(window.hasNext()).isFalse();
+		assertThat(window.isLast()).isTrue();
+	}
+
+	@Test // GH-4413
+	void shouldAllowInitialBackwardSort() {
+
+		Person jane_20 = new Person("Jane", 20);
+		Person jane_40 = new Person("Jane", 40);
+		Person jane_42 = new Person("Jane", 42);
+		Person john20 = new Person("John", 20);
+		Person john40_1 = new Person("John", 40);
+		Person john40_2 = new Person("John", 40);
+
+		template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42));
+		Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
+		q.with(ScrollPosition.keyset().backward()).limit(3);
+
+		Window<Person> window = template.scroll(q, Person.class);
+		assertThat(window).containsExactly(john20, john40_1, john40_2);
+
+		window = template.scroll(q.with(window.positionAt(0)).limit(3), Person.class);
+		assertThat(window).containsExactly(jane_20, jane_40, jane_42);
+	}
+
+	@ParameterizedTest // GH-4308
+	@MethodSource("positions")
+	public <T> void shouldApplyCursoringCorrectly(ScrollPosition scrollPosition, Class<T> resultType,
+			Function<Person, T> assertionConverter, @Nullable Comparator<T> comparator) {
+
+		Person john20 = new Person("John", 20);
+		Person john40_1 = new Person("John", 40);
+		Person john40_2 = new Person("John", 40);
+		Person jane_20 = new Person("Jane", 20);
+		Person jane_40 = new Person("Jane", 40);
+		Person jane_42 = new Person("Jane", 42);
+
+		template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42));
+		Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age")).limit(2);
+
+		Window<T> window = template.query(Person.class).inCollection("person").as(resultType).matching(q)
+				.scroll(scrollPosition);
+
+		assertThat(window.hasNext()).isTrue();
+		assertThat(window.isLast()).isFalse();
+		assertThat(window).hasSize(2);
+		assertWindow(window, comparator).containsOnly(assertionConverter.apply(jane_20), assertionConverter.apply(jane_40));
+
+		window = template.query(Person.class).inCollection("person").as(resultType).matching(q.limit(3))
+				.scroll(window.positionAt(window.size() - 1));
+
+		assertThat(window.hasNext()).isTrue();
+		assertThat(window.isLast()).isFalse();
+		assertThat(window).hasSize(3);
+		assertWindow(window, comparator).contains(assertionConverter.apply(jane_42), assertionConverter.apply(john20));
+		assertWindow(window, comparator).containsAnyOf(assertionConverter.apply(john40_1),
+				assertionConverter.apply(john40_2));
+
+		window = template.query(Person.class).inCollection("person").as(resultType).matching(q.limit(1))
+				.scroll(window.positionAt(window.size() - 1));
+
+		assertThat(window.hasNext()).isFalse();
+		assertThat(window.isLast()).isTrue();
+		assertThat(window).hasSize(1);
+		assertWindow(window, comparator).containsAnyOf(assertionConverter.apply(john40_1),
+				assertionConverter.apply(john40_2));
+	}
+
+	@ParameterizedTest // GH-4308
+	@MethodSource("renamedFieldProjectTargets")
+	<T> void scrollThroughResultsWithRenamedField(Class<T> resultType, Function<WithRenamedField, T> assertionConverter) {
+
+		WithRenamedField one = new WithRenamedField("id-1", "v1", null);
+		WithRenamedField two = new WithRenamedField("id-2", "v2", null);
+		WithRenamedField three = new WithRenamedField("id-3", "v3", null);
+
+		template.insertAll(Arrays.asList(one, two, three));
+
+		Query q = new Query(where("value").regex("v.*")).with(Sort.by(Sort.Direction.DESC, "value")).limit(2);
+		q.with(ScrollPosition.keyset());
+
+		Window<T> window = template.query(WithRenamedField.class).as(resultType).matching(q)
+				.scroll(ScrollPosition.keyset());
+
+		assertThat(window.hasNext()).isTrue();
+		assertThat(window.isLast()).isFalse();
+		assertThat(window).hasSize(2);
+		assertThat(window).containsOnly(assertionConverter.apply(three), assertionConverter.apply(two));
+
+		window = template.query(WithRenamedField.class).as(resultType).matching(q)
+				.scroll(window.positionAt(window.size() - 1));
+
+		assertThat(window.hasNext()).isFalse();
+		assertThat(window.isLast()).isTrue();
+		assertThat(window).hasSize(1);
+		assertThat(window).containsOnly(assertionConverter.apply(one));
+	}
+
+	static Stream<Arguments> positions() {
+
+		return Stream.of(args(ScrollPosition.keyset(), Person.class, Function.identity()), //
+				args(ScrollPosition.keyset(), Document.class, MongoTemplateScrollTests::toDocument), //
+				args(ScrollPosition.offset(), Person.class, Function.identity()), //
+				args(ScrollPosition.offset(), PersonDtoProjection.class, MongoTemplateScrollTests::toPersonDtoProjection), //
+				args(ScrollPosition.offset(), PersonInterfaceProjection.class,
+						MongoTemplateScrollTests::toPersonInterfaceProjection, MongoTemplateScrollTests::compareProxies));
+	}
+
+	static Stream<Arguments> renamedFieldProjectTargets() {
+		return Stream.of(Arguments.of(WithRenamedField.class, Function.identity()),
+				Arguments.of(Document.class, new Function<WithRenamedField, Document>() {
+					@Override
+					public Document apply(WithRenamedField withRenamedField) {
+						return new Document("_id", withRenamedField.getId()).append("_val", withRenamedField.getValue())
+								.append("_class", WithRenamedField.class.getName());
+					}
+				}));
+	}
+
+	static <T> org.assertj.core.api.IterableAssert<T> assertWindow(Window<T> window, @Nullable Comparator<T> comparator) {
+		return comparator != null ? assertThat(window).usingElementComparator(comparator) : assertThat(window);
+	}
+
+	private static Arguments args(ScrollPosition scrollPosition, Class<?> resultType,
+			Function<Person, ?> assertionConverter) {
+		return args(scrollPosition, resultType, assertionConverter, null);
+	}
+
+	private static Arguments args(ScrollPosition scrollPosition, Class<?> resultType,
+			Function<Person, ?> assertionConverter, @Nullable Comparator<?> comparator) {
+		return Arguments.of(scrollPosition, resultType, assertionConverter, comparator);
+	}
+
+	static Document toDocument(Person person) {
+
+		return new Document("_class", person.getClass().getName()).append("_id", person.getId()).append("active", true)
+				.append("firstName", person.getFirstName()).append("age", person.getAge());
+	}
+
+	static PersonDtoProjection toPersonDtoProjection(Person person) {
+
+		PersonDtoProjection dto = new PersonDtoProjection();
+		dto.firstName = person.getFirstName();
+		dto.age = person.getAge();
+		return dto;
+	}
+
+	static PersonInterfaceProjection toPersonInterfaceProjection(Person person) {
+
+		return new PersonInterfaceProjectionImpl(person);
+	}
+
+	static class PersonDtoProjection {
+
+		String firstName;
+		int age;
+
+		public String getFirstName() {
+			return this.firstName;
+		}
+
+		public int getAge() {
+			return this.age;
+		}
+
+		public void setFirstName(String firstName) {
+			this.firstName = firstName;
+		}
+
+		public void setAge(int age) {
+			this.age = age;
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			PersonDtoProjection that = (PersonDtoProjection) o;
+			return age == that.age && Objects.equals(firstName, that.firstName);
+		}
+
+		@Override
+		public int hashCode() {
+			return Objects.hash(firstName, age);
+		}
+
+		public String toString() {
+			return "MongoTemplateScrollTests.PersonDtoProjection(firstName=" + this.getFirstName() + ", age=" + this.getAge()
+					+ ")";
+		}
+	}
+
+	interface PersonInterfaceProjection {
+		String getFirstName();
+
+		int getAge();
+	}
+
+	static class PersonInterfaceProjectionImpl implements PersonInterfaceProjection {
+
+		final Person delegate;
+
+		public PersonInterfaceProjectionImpl(Person delegate) {
+			this.delegate = delegate;
+		}
+
+		@Override
+		public String getFirstName() {
+			return delegate.getFirstName();
+		}
+
+		@Override
+		public int getAge() {
+			return delegate.getAge();
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o instanceof Proxy) {
+				return true;
+			}
+			return false;
+		}
+
+		@Override
+		public int hashCode() {
+			return ObjectUtils.nullSafeHashCode(delegate);
+		}
+	}
+
+	static class WithRenamedField {
+
+		String id;
+
+		@Field("_val") String value;
+
+		WithRenamedField nested;
+
+		public WithRenamedField(String id, String value, WithRenamedField nested) {
+			this.id = id;
+			this.value = value;
+			this.nested = nested;
+		}
+
+		public WithRenamedField() {}
+
+		public String getId() {
+			return this.id;
+		}
+
+		public String getValue() {
+			return this.value;
+		}
+
+		public WithRenamedField getNested() {
+			return this.nested;
+		}
+
+		public void setId(String id) {
+			this.id = id;
+		}
+
+		public void setValue(String value) {
+			this.value = value;
+		}
+
+		public void setNested(WithRenamedField nested) {
+			this.nested = nested;
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			WithRenamedField that = (WithRenamedField) o;
+			return Objects.equals(id, that.id) &&
Objects.equals(value, that.value) && Objects.equals(nested, that.nested); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, nested); + } + + public String toString() { + return "MongoTemplateScrollTests.WithRenamedField(id=" + this.getId() + ", value=" + this.getValue() + ", nested=" + + this.getNested() + ")"; + } + } + + class WithNestedDocument { + + String id; + + String name; + + int age; + + WithNestedDocument nested; + + Document document; + + public WithNestedDocument(String name, int age) { + this.name = name; + this.age = age; + } + + @PersistenceCreator + public WithNestedDocument(String id, String name, int age, WithNestedDocument nested, Document document) { + this.id = id; + this.name = name; + this.age = age; + this.nested = nested; + this.document = document; + } + + public WithNestedDocument() {} + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public int getAge() { + return this.age; + } + + public WithNestedDocument getNested() { + return this.nested; + } + + public Document getDocument() { + return this.document; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setAge(int age) { + this.age = age; + } + + public void setNested(WithNestedDocument nested) { + this.nested = nested; + } + + public void setDocument(Document document) { + this.document = document; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithNestedDocument that = (WithNestedDocument) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && Objects.equals(nested, that.nested) && Objects.equals(document, that.document); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, age, nested, document); + } + + public String toString() { + return "MongoTemplateScrollTests.WithNestedDocument(id=" + this.getId() + ", name=" + this.getName() + ", age=" + + this.getAge() + ", nested=" + this.getNested() + ", document=" + this.getDocument() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index 86ce2a08b6..6aaec4011e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,32 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.core.ReflectiveWriteConcernInvoker.*; -import static org.springframework.data.mongodb.core.ReflectiveWriteResultInvoker.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.core.query.Update.*; +import java.lang.reflect.InvocationTargetException; +import java.math.BigDecimal; import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; - +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.temporal.ChronoUnit; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import org.bson.Document; import org.bson.types.ObjectId; -import org.joda.time.DateTime; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.convert.converter.Converter; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataIntegrityViolationException; @@ -50,51 +48,74 @@ import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.annotation.Version; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.context.PersistentEntities; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.convert.CustomConversions; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import 
org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators; +import org.springframework.data.mongodb.core.aggregation.ObjectOperators; +import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation; +import org.springframework.data.mongodb.core.aggregation.StringOperators; import org.springframework.data.mongodb.core.convert.LazyLoadingProxy; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.Index; -import org.springframework.data.mongodb.core.index.Index.Duplicates; import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.data.mongodb.core.mapping.Field; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.util.CloseableIterator; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.MongoVersion; +import org.springframework.lang.Nullable; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.util.ClassUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import com.mongodb.BasicDBObject; -import com.mongodb.CommandResult; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; import com.mongodb.DBObject; import com.mongodb.DBRef; -import com.mongodb.Mongo; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; -import com.mongodb.WriteResult; +import com.mongodb.client.FindIterable; +import com.mongodb.client.ListIndexesIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.result.DeleteResult; 
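// [Editorial note, not part of the patch] The com.mongodb.client.result imports here replace
// the legacy com.mongodb.WriteResult and DBCollection API removed in this hunk: write outcomes
// are now reported through UpdateResult and DeleteResult, so code such as
//   UpdateResult result = template.updateMulti(query, update, Person.class);
//   if (result.wasAcknowledged()) { long modified = result.getModifiedCount(); }
// takes the place of the old WriteResult.getN(), as the rewritten assertions later in this
// file show.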
+import com.mongodb.client.result.UpdateResult; /** * Integration test for {@link MongoTemplate}. - * + * * @author Oliver Gierke * @author Thomas Risberg * @author Amol Nayak @@ -102,103 +123,79 @@ * @author Thomas Darimont * @author Komi Innocent * @author Christoph Strobl + * @author Mark Paluch + * @author Laszlo Csontos + * @author duozhilin + * @author Jakub Zurawa + * @author Florian Lüdiger */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoClientExtension.class) public class MongoTemplateTests { - private static final org.springframework.data.util.Version TWO_DOT_FOUR = org.springframework.data.util.Version - .parse("2.4"); - private static final org.springframework.data.util.Version TWO_DOT_EIGHT = org.springframework.data.util.Version - .parse("2.8"); + public static final String DB_NAME = "mongo-template-tests"; - @Autowired MongoTemplate template; - @Autowired MongoDbFactory factory; + static @Client MongoClient client; - MongoTemplate mappingTemplate; - org.springframework.data.util.Version mongoVersion; + ConfigurableApplicationContext context = new GenericApplicationContext(); - @Rule public ExpectedException thrown = ExpectedException.none(); + MongoTestTemplate template = new MongoTestTemplate(cfg -> { - @Autowired - public void setMongo(Mongo mongo) throws Exception { + cfg.configureDatabaseFactory(it -> { - CustomConversions conversions = new CustomConversions(Arrays.asList(DateToDateTimeConverter.INSTANCE, - DateTimeToDateConverter.INSTANCE)); + it.client(client); + it.defaultDb(DB_NAME); + }); - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(new HashSet>(Arrays.asList(PersonWith_idPropertyOfTypeObjectId.class, - PersonWith_idPropertyOfTypeString.class, PersonWithIdPropertyOfTypeObjectId.class, - PersonWithIdPropertyOfTypeString.class, PersonWithIdPropertyOfTypeInteger.class, - PersonWithIdPropertyOfTypeBigInteger.class, PersonWithIdPropertyOfPrimitiveInt.class, - PersonWithIdPropertyOfTypeLong.class, PersonWithIdPropertyOfPrimitiveLong.class))); - mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); - mappingContext.initialize(); + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + it.initialEntitySet(AuditablePerson.class); + }); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); - MappingMongoConverter mappingConverter = new MappingMongoConverter(dbRefResolver, mappingContext); - mappingConverter.setCustomConversions(conversions); - mappingConverter.afterPropertiesSet(); + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + it.addEventListener(new PersonWithIdPropertyOfTypeUUIDListener()); + }); - this.mappingTemplate = new MongoTemplate(factory, mappingConverter); - } + cfg.configureAuditing(it -> { + it.auditingHandler(ctx -> { + return new IsNewAwareAuditingHandler(PersistentEntities.of(ctx)); + }); + }); + }); - @Before - public void setUp() { - cleanDb(); - queryMongoVersionIfNecessary(); - } + MongoTestTemplate mappingTemplate = new MongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureConversion(it -> { + it.customConverters(DateToDateTimeConverter.INSTANCE, DateTimeToDateConverter.INSTANCE); + }); + + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + }); + + cfg.configureApplicationContext(it -> { + it.applicationContext(new GenericApplicationContext()); + 
it.addEventListener(new PersonWithIdPropertyOfTypeUUIDListener()); + }); + }); + + MongoDatabaseFactory factory = template.getMongoDatabaseFactory(); - @After + @AfterEach public void cleanUp() { - cleanDb(); - } - private void queryMongoVersionIfNecessary() { + template.flush(); + template.flush("collection", "personX", "findandreplace"); - if (mongoVersion == null) { - CommandResult result = template.executeCommand("{ buildInfo: 1 }"); - mongoVersion = org.springframework.data.util.Version.parse(result.get("version").toString()); - } - } + mappingTemplate.flush(); - protected void cleanDb() { template.dropCollection(Person.class); - template.dropCollection(PersonWithAList.class); - template.dropCollection(PersonWith_idPropertyOfTypeObjectId.class); - template.dropCollection(PersonWith_idPropertyOfTypeString.class); - template.dropCollection(PersonWithIdPropertyOfTypeObjectId.class); - template.dropCollection(PersonWithIdPropertyOfTypeString.class); - template.dropCollection(PersonWithIdPropertyOfTypeInteger.class); - template.dropCollection(PersonWithIdPropertyOfTypeBigInteger.class); - template.dropCollection(PersonWithIdPropertyOfPrimitiveInt.class); - template.dropCollection(PersonWithIdPropertyOfTypeLong.class); - template.dropCollection(PersonWithIdPropertyOfPrimitiveLong.class); - template.dropCollection(PersonWithVersionPropertyOfTypeInteger.class); - template.dropCollection(TestClass.class); - template.dropCollection(Sample.class); - template.dropCollection(MyPerson.class); - template.dropCollection(TypeWithFieldAnnotation.class); - template.dropCollection(TypeWithDate.class); - template.dropCollection("collection"); - template.dropCollection("personX"); - template.dropCollection(Document.class); - template.dropCollection(ObjectWith3AliasedFields.class); - template.dropCollection(ObjectWith3AliasedFieldsAndNestedAddress.class); - template.dropCollection(BaseDoc.class); - template.dropCollection(ObjectWithEnumValue.class); - template.dropCollection(DocumentWithCollection.class); - template.dropCollection(DocumentWithCollectionOfSimpleType.class); - template.dropCollection(DocumentWithMultipleCollections.class); - template.dropCollection(DocumentWithNestedCollection.class); - template.dropCollection(DocumentWithEmbeddedDocumentWithCollection.class); - template.dropCollection(DocumentWithNestedList.class); - template.dropCollection(DocumentWithDBRefCollection.class); - template.dropCollection(SomeContent.class); - template.dropCollection(SomeTemplate.class); - template.dropCollection(Address.class); - template.dropCollection(DocumentWithCollectionOfSamples.class); - template.dropCollection(WithGeoJson.class); } @Test @@ -209,8 +206,8 @@ public void insertsSimpleEntityCorrectly() throws Exception { template.insert(person); List result = template.find(new Query(Criteria.where("_id").is(person.getId())), Person.class); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(person)); + assertThat(result.size()).isEqualTo(1); + assertThat(result).contains(person); } @Test @@ -228,10 +225,7 @@ public void bogusUpdateDoesNotTriggerException() throws Exception { mongoTemplate.updateFirst(q, u, Person.class); } - /** - * @see DATAMONGO-480 - */ - @Test + @Test // DATAMONGO-480 public void throwsExceptionForDuplicateIds() { MongoTemplate template = new MongoTemplate(factory); @@ -244,17 +238,13 @@ public void throwsExceptionForDuplicateIds() { try { template.insert(person); - fail("Expected DataIntegrityViolationException!"); + fail("Expected DataIntegrityViolationException"); } 
catch (DataIntegrityViolationException e) { - assertThat(e.getMessage(), containsString("E11000 duplicate key error")); + assertThat(e.getMessage()).contains("E11000 duplicate key error"); } } - /** - * @see DATAMONGO-480 - * @see DATAMONGO-799 - */ - @Test + @Test // DATAMONGO-480, DATAMONGO-799 public void throwsExceptionForUpdateWithInvalidPushOperator() { MongoTemplate template = new MongoTemplate(factory); @@ -266,20 +256,15 @@ public void throwsExceptionForUpdateWithInvalidPushOperator() { template.insert(person); - thrown.expect(DataIntegrityViolationException.class); - thrown.expectMessage("array"); - thrown.expectMessage("age"); - thrown.expectMessage("failed"); - Query query = new Query(Criteria.where("firstName").is("Amol")); Update upd = new Update().push("age", 29); - template.updateFirst(query, upd, Person.class); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.updateFirst(query, upd, Person.class)).withMessageContaining("array") + .withMessageContaining("age"); } - /** - * @see DATAMONGO-480 - */ - @Test + @Test // DATAMONGO-480 public void throwsExceptionForIndexViolationIfConfigured() { MongoTemplate template = new MongoTemplate(factory); @@ -296,21 +281,15 @@ public void throwsExceptionForIndexViolationIfConfigured() { try { template.save(person); - fail("Expected DataIntegrityViolationException!"); + fail("Expected DataIntegrityViolationException"); } catch (DataIntegrityViolationException e) { - assertThat(e.getMessage(), containsString("E11000 duplicate key error")); + assertThat(e.getMessage()).contains("E11000 duplicate key error"); } } - /** - * @see DATAMONGO-480 - */ - @Test + @Test // DATAMONGO-480 public void rejectsDuplicateIdInInsertAll() { - thrown.expect(DataIntegrityViolationException.class); - thrown.expectMessage("E11000 duplicate key error"); - MongoTemplate template = new MongoTemplate(factory); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); @@ -318,15 +297,35 @@ public void rejectsDuplicateIdInInsertAll() { Person person = new Person(id, "Amol"); person.setAge(28); - List records = new ArrayList(); + List records = new ArrayList<>(); records.add(person); records.add(person); - template.insertAll(records); + assertThatExceptionOfType(DataIntegrityViolationException.class).isThrownBy(() -> template.insertAll(records)) + .withMessageContaining("E11000 duplicate key error"); + } + + @Test // DATAMONGO-1687 + public void createCappedCollection() { + + template.createCollection(Person.class, CollectionOptions.empty().capped().size(1000).maxDocuments(1000)); + + org.bson.Document collectionOptions = getCollectionInfo(template.getCollectionName(Person.class)).get("options", + org.bson.Document.class); + assertThat(collectionOptions.get("capped")).isEqualTo(true); + } + + private org.bson.Document getCollectionInfo(String collectionName) { + + return template.execute(db -> { + + org.bson.Document result = db.runCommand(new org.bson.Document().append("listCollections", 1).append("filter", + new org.bson.Document("name", collectionName))); + return (org.bson.Document) result.get("cursor", org.bson.Document.class).get("firstBatch", List.class).get(0); + }); } @Test - @SuppressWarnings("deprecation") public void testEnsureIndex() throws Exception { Person p1 = new Person("Oliver"); @@ -336,84 +335,77 @@ public void testEnsureIndex() throws Exception { p2.setAge(40); template.insert(p2); - template.indexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique(Duplicates.DROP)); + 
template.indexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique()); + + MongoCollection coll = template.getCollection(template.getCollectionName(Person.class)); + List indexInfo = new ArrayList<>(); + coll.listIndexes().into(indexInfo); - DBCollection coll = template.getCollection(template.getCollectionName(Person.class)); - List indexInfo = coll.getIndexInfo(); - assertThat(indexInfo.size(), is(2)); - String indexKey = null; + assertThat(indexInfo.size()).isEqualTo(2); + Object indexKey = null; boolean unique = false; - boolean dropDupes = false; - for (DBObject ix : indexInfo) { + for (org.bson.Document ix : indexInfo) { + if ("age_-1".equals(ix.get("name"))) { - indexKey = ix.get("key").toString(); + indexKey = ix.get("key"); unique = (Boolean) ix.get("unique"); - if (mongoVersion.isLessThan(TWO_DOT_EIGHT)) { - dropDupes = (Boolean) ix.get("dropDups"); - assertThat(dropDupes, is(true)); - } else { - assertThat(ix.get("dropDups"), is(nullValue())); - } + assertThat(ix.get("dropDups")).isNull(); } } - assertThat(indexKey, is("{ \"age\" : -1}")); - assertThat(unique, is(true)); + assertThat(((org.bson.Document) indexKey)).containsEntry("age", -1); + assertThat(unique).isTrue(); List indexInfoList = template.indexOps(Person.class).getIndexInfo(); - assertThat(indexInfoList.size(), is(2)); + assertThat(indexInfoList.size()).isEqualTo(2); IndexInfo ii = indexInfoList.get(1); - assertThat(ii.isUnique(), is(true)); - - if (mongoVersion.isLessThan(TWO_DOT_EIGHT)) { - assertThat(ii.isDropDuplicates(), is(true)); - } else { - assertThat(ii.isDropDuplicates(), is(false)); - } - - assertThat(ii.isSparse(), is(false)); + assertThat(ii.isUnique()).isTrue(); + assertThat(ii.isSparse()).isFalse(); List indexFields = ii.getIndexFields(); IndexField field = indexFields.get(0); - assertThat(field, is(IndexField.create("age", Direction.DESC))); + assertThat(field).isEqualTo(IndexField.create("age", Direction.DESC)); } - /** - * @see DATAMONGO-746 - */ - @Test + @Test // DATAMONGO-746, DATAMONGO-2264 public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() throws Exception { - String command = "db." 
+ template.getCollectionName(Person.class) - + ".createIndex({'age':-1}, {'unique':true, 'sparse':true})"; - template.indexOps(Person.class).dropAllIndexes(); + template.dropCollection(Person.class); + + assertThat(template.indexOps(Person.class).getIndexInfo().isEmpty()).isTrue(); - assertThat(template.indexOps(Person.class).getIndexInfo().isEmpty(), is(true)); - factory.getDb().eval(command); + factory.getMongoDatabase().getCollection(template.getCollectionName(Person.class)) + .createIndex(new org.bson.Document("age", -1), new IndexOptions().name("age_-1").unique(true).sparse(true)); - List indexInfo = template.getCollection(template.getCollectionName(Person.class)).getIndexInfo(); - String indexKey = null; + ListIndexesIterable indexInfo = template.getCollection(template.getCollectionName(Person.class)) + .listIndexes(); + org.bson.Document indexKey = null; boolean unique = false; - for (DBObject ix : indexInfo) { + MongoCursor cursor = indexInfo.iterator(); + + while (cursor.hasNext()) { + + org.bson.Document ix = cursor.next(); + if ("age_-1".equals(ix.get("name"))) { - indexKey = ix.get("key").toString(); + indexKey = (org.bson.Document) ix.get("key"); unique = (Boolean) ix.get("unique"); } } - assertThat(indexKey, is("{ \"age\" : -1.0}")); - assertThat(unique, is(true)); + assertThat(indexKey).containsEntry("age", -1); + assertThat(unique).isTrue(); IndexInfo info = template.indexOps(Person.class).getIndexInfo().get(1); - assertThat(info.isUnique(), is(true)); - assertThat(info.isSparse(), is(true)); + assertThat(info.isUnique()).isTrue(); + assertThat(info.isSparse()).isTrue(); List indexFields = info.getIndexFields(); IndexField field = indexFields.get(0); - assertThat(field, is(IndexField.create("age", Direction.DESC))); + assertThat(field).isEqualTo(IndexField.create("age", Direction.DESC)); } @Test @@ -431,11 +423,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p1); // also try save mongoTemplate.save(p1); - assertThat(p1.getId(), notNullValue()); + assertThat(p1.getId()).isNotNull(); PersonWithIdPropertyOfTypeString p1q = mongoTemplate.findOne(new Query(where("id").is(p1.getId())), PersonWithIdPropertyOfTypeString.class); - assertThat(p1q, notNullValue()); - assertThat(p1q.getId(), is(p1.getId())); + assertThat(p1q).isNotNull(); + assertThat(p1q.getId()).isEqualTo(p1.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeString.class, 1); // String id - provided @@ -447,11 +439,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p2); // also try save mongoTemplate.save(p2); - assertThat(p2.getId(), notNullValue()); + assertThat(p2.getId()).isNotNull(); PersonWithIdPropertyOfTypeString p2q = mongoTemplate.findOne(new Query(where("id").is(p2.getId())), PersonWithIdPropertyOfTypeString.class); - assertThat(p2q, notNullValue()); - assertThat(p2q.getId(), is(p2.getId())); + assertThat(p2q).isNotNull(); + assertThat(p2q.getId()).isEqualTo(p2.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeString.class, 2); // String _id - generated @@ -462,11 +454,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p3); // also try save mongoTemplate.save(p3); - assertThat(p3.get_id(), notNullValue()); + assertThat(p3.get_id()).isNotNull(); PersonWith_idPropertyOfTypeString p3q = mongoTemplate.findOne(new Query(where("_id").is(p3.get_id())), PersonWith_idPropertyOfTypeString.class); - assertThat(p3q, 
notNullValue()); - assertThat(p3q.get_id(), is(p3.get_id())); + assertThat(p3q).isNotNull(); + assertThat(p3q.get_id()).isEqualTo(p3.get_id()); checkCollectionContents(PersonWith_idPropertyOfTypeString.class, 1); // String _id - provided @@ -478,11 +470,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p4); // also try save mongoTemplate.save(p4); - assertThat(p4.get_id(), notNullValue()); + assertThat(p4.get_id()).isNotNull(); PersonWith_idPropertyOfTypeString p4q = mongoTemplate.findOne(new Query(where("_id").is(p4.get_id())), PersonWith_idPropertyOfTypeString.class); - assertThat(p4q, notNullValue()); - assertThat(p4q.get_id(), is(p4.get_id())); + assertThat(p4q).isNotNull(); + assertThat(p4q.get_id()).isEqualTo(p4.get_id()); checkCollectionContents(PersonWith_idPropertyOfTypeString.class, 2); // ObjectId id - generated @@ -493,11 +485,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p5); // also try save mongoTemplate.save(p5); - assertThat(p5.getId(), notNullValue()); + assertThat(p5.getId()).isNotNull(); PersonWithIdPropertyOfTypeObjectId p5q = mongoTemplate.findOne(new Query(where("id").is(p5.getId())), PersonWithIdPropertyOfTypeObjectId.class); - assertThat(p5q, notNullValue()); - assertThat(p5q.getId(), is(p5.getId())); + assertThat(p5q).isNotNull(); + assertThat(p5q.getId()).isEqualTo(p5.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeObjectId.class, 1); // ObjectId id - provided @@ -509,11 +501,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p6); // also try save mongoTemplate.save(p6); - assertThat(p6.getId(), notNullValue()); + assertThat(p6.getId()).isNotNull(); PersonWithIdPropertyOfTypeObjectId p6q = mongoTemplate.findOne(new Query(where("id").is(p6.getId())), PersonWithIdPropertyOfTypeObjectId.class); - assertThat(p6q, notNullValue()); - assertThat(p6q.getId(), is(p6.getId())); + assertThat(p6q).isNotNull(); + assertThat(p6q.getId()).isEqualTo(p6.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeObjectId.class, 2); // ObjectId _id - generated @@ -524,11 +516,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p7); // also try save mongoTemplate.save(p7); - assertThat(p7.get_id(), notNullValue()); + assertThat(p7.get_id()).isNotNull(); PersonWith_idPropertyOfTypeObjectId p7q = mongoTemplate.findOne(new Query(where("_id").is(p7.get_id())), PersonWith_idPropertyOfTypeObjectId.class); - assertThat(p7q, notNullValue()); - assertThat(p7q.get_id(), is(p7.get_id())); + assertThat(p7q).isNotNull(); + assertThat(p7q.get_id()).isEqualTo(p7.get_id()); checkCollectionContents(PersonWith_idPropertyOfTypeObjectId.class, 1); // ObjectId _id - provided @@ -540,11 +532,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p8); // also try save mongoTemplate.save(p8); - assertThat(p8.get_id(), notNullValue()); + assertThat(p8.get_id()).isNotNull(); PersonWith_idPropertyOfTypeObjectId p8q = mongoTemplate.findOne(new Query(where("_id").is(p8.get_id())), PersonWith_idPropertyOfTypeObjectId.class); - assertThat(p8q, notNullValue()); - assertThat(p8q.get_id(), is(p8.get_id())); + assertThat(p8q).isNotNull(); + assertThat(p8q.get_id()).isEqualTo(p8.get_id()); checkCollectionContents(PersonWith_idPropertyOfTypeObjectId.class, 2); // Integer id - provided @@ -556,16 +548,14 @@ private void 
testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p9); // also try save mongoTemplate.save(p9); - assertThat(p9.getId(), notNullValue()); + assertThat(p9.getId()).isNotNull(); PersonWithIdPropertyOfTypeInteger p9q = mongoTemplate.findOne(new Query(where("id").in(p9.getId())), PersonWithIdPropertyOfTypeInteger.class); - assertThat(p9q, notNullValue()); - assertThat(p9q.getId(), is(p9.getId())); + assertThat(p9q).isNotNull(); + assertThat(p9q.getId()).isEqualTo(p9.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeInteger.class, 1); - /* - * @see DATAMONGO-602 - */ + // DATAMONGO-602 // BigInteger id - provided PersonWithIdPropertyOfTypeBigInteger p9bi = new PersonWithIdPropertyOfTypeBigInteger(); p9bi.setFirstName("Sven_9bi"); @@ -575,11 +565,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p9bi); // also try save mongoTemplate.save(p9bi); - assertThat(p9bi.getId(), notNullValue()); + assertThat(p9bi.getId()).isNotNull(); PersonWithIdPropertyOfTypeBigInteger p9qbi = mongoTemplate.findOne(new Query(where("id").in(p9bi.getId())), PersonWithIdPropertyOfTypeBigInteger.class); - assertThat(p9qbi, notNullValue()); - assertThat(p9qbi.getId(), is(p9bi.getId())); + assertThat(p9qbi).isNotNull(); + assertThat(p9qbi.getId()).isEqualTo(p9bi.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeBigInteger.class, 1); // int id - provided @@ -591,11 +581,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p10); // also try save mongoTemplate.save(p10); - assertThat(p10.getId(), notNullValue()); + assertThat(p10.getId()).isNotNull(); PersonWithIdPropertyOfPrimitiveInt p10q = mongoTemplate.findOne(new Query(where("id").in(p10.getId())), PersonWithIdPropertyOfPrimitiveInt.class); - assertThat(p10q, notNullValue()); - assertThat(p10q.getId(), is(p10.getId())); + assertThat(p10q).isNotNull(); + assertThat(p10q.getId()).isEqualTo(p10.getId()); checkCollectionContents(PersonWithIdPropertyOfPrimitiveInt.class, 1); // Long id - provided @@ -607,11 +597,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p11); // also try save mongoTemplate.save(p11); - assertThat(p11.getId(), notNullValue()); + assertThat(p11.getId()).isNotNull(); PersonWithIdPropertyOfTypeLong p11q = mongoTemplate.findOne(new Query(where("id").in(p11.getId())), PersonWithIdPropertyOfTypeLong.class); - assertThat(p11q, notNullValue()); - assertThat(p11q.getId(), is(p11.getId())); + assertThat(p11q).isNotNull(); + assertThat(p11q.getId()).isEqualTo(p11.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeLong.class, 1); // long id - provided @@ -623,22 +613,35 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p12); // also try save mongoTemplate.save(p12); - assertThat(p12.getId(), notNullValue()); + assertThat(p12.getId()).isNotNull(); PersonWithIdPropertyOfPrimitiveLong p12q = mongoTemplate.findOne(new Query(where("id").in(p12.getId())), PersonWithIdPropertyOfPrimitiveLong.class); - assertThat(p12q, notNullValue()); - assertThat(p12q.getId(), is(p12.getId())); + assertThat(p12q).isNotNull(); + assertThat(p12q.getId()).isEqualTo(p12.getId()); checkCollectionContents(PersonWithIdPropertyOfPrimitiveLong.class, 1); + + // DATAMONGO-1617 + // UUID id - provided + PersonWithIdPropertyOfTypeUUID p13 = new PersonWithIdPropertyOfTypeUUID(); + p13.setFirstName("Sven_10"); + 
p13.setAge(22); + // insert + mongoTemplate.insert(p13); + // also try save + mongoTemplate.save(p13); + assertThat(p13.getId()).isNotNull(); + PersonWithIdPropertyOfTypeUUID p13q = mongoTemplate.findOne(new Query(where("id").in(p13.getId())), + PersonWithIdPropertyOfTypeUUID.class); + assertThat(p13q).isNotNull(); + assertThat(p13q.getId()).isEqualTo(p13.getId()); + checkCollectionContents(PersonWithIdPropertyOfTypeUUID.class, 1); } private void checkCollectionContents(Class entityClass, int count) { - assertThat(template.findAll(entityClass).size(), is(count)); + assertThat(template.findAll(entityClass).size()).isEqualTo(count); } - /** - * @see DATAMONGO-234 - */ - @Test + @Test // DATAMONGO-234 public void testFindAndUpdate() { template.insert(new Person("Tom", 21)); @@ -648,28 +651,28 @@ public void testFindAndUpdate() { Query query = new Query(Criteria.where("firstName").is("Harry")); Update update = new Update().inc("age", 1); Person p = template.findAndModify(query, update, Person.class); // return old - assertThat(p.getFirstName(), is("Harry")); - assertThat(p.getAge(), is(23)); + assertThat(p.getFirstName()).isEqualTo("Harry"); + assertThat(p.getAge()).isEqualTo(23); p = template.findOne(query, Person.class); - assertThat(p.getAge(), is(24)); + assertThat(p.getAge()).isEqualTo(24); p = template.findAndModify(query, update, Person.class, "person"); - assertThat(p.getAge(), is(24)); + assertThat(p.getAge()).isEqualTo(24); p = template.findOne(query, Person.class); - assertThat(p.getAge(), is(25)); + assertThat(p.getAge()).isEqualTo(25); p = template.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class); - assertThat(p.getAge(), is(26)); + assertThat(p.getAge()).isEqualTo(26); - p = template.findAndModify(query, update, null, Person.class, "person"); - assertThat(p.getAge(), is(26)); + p = template.findAndModify(query, update, new FindAndModifyOptions(), Person.class, "person"); + assertThat(p.getAge()).isEqualTo(26); p = template.findOne(query, Person.class); - assertThat(p.getAge(), is(27)); + assertThat(p.getAge()).isEqualTo(27); Query query2 = new Query(Criteria.where("firstName").is("Mary")); p = template.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class); - assertThat(p.getFirstName(), is("Mary")); - assertThat(p.getAge(), is(1)); + assertThat(p.getFirstName()).isEqualTo("Mary"); + assertThat(p.getAge()).isEqualTo(1); } @@ -681,8 +684,8 @@ public void testFindAndUpdateUpsert() { Update update = new Update().set("age", 23); Person p = template.findAndModify(query, update, new FindAndModifyOptions().upsert(true).returnNew(true), Person.class); - assertThat(p.getFirstName(), is("Harry")); - assertThat(p.getAge(), is(23)); + assertThat(p.getFirstName()).isEqualTo("Harry"); + assertThat(p.getAge()).isEqualTo(23); } @Test @@ -698,9 +701,77 @@ public void testFindAndRemove() throws Exception { Message found2 = template.findAndRemove(q, Message.class); Message notFound = template.findAndRemove(q, Message.class); - assertThat(found1, notNullValue()); - assertThat(found2, notNullValue()); - assertThat(notFound, nullValue()); + assertThat(found1).isNotNull(); + assertThat(found2).isNotNull(); + assertThat(notFound).isNull(); + } + + @Test // DATAMONGO-1761 + public void testDistinct() { + + Address address1 = new Address(); + address1.state = "PA"; + address1.city = "Philadelphia"; + + Address address2 = new Address(); + address2.state = "PA"; + address2.city = " New York"; + + MyPerson person1 = new 
MyPerson(); + person1.name = "Ben"; + person1.address = address1; + + MyPerson person2 = new MyPerson(); + person2.name = "Eric"; + person2.address = address2; + + template.save(person1); + template.save(person2); + + assertThat(template.findDistinct("name", MyPerson.class, String.class)).containsExactlyInAnyOrder(person1.getName(), + person2.getName()); + assertThat(template.findDistinct(new BasicQuery("{'address.state' : 'PA'}"), "name", MyPerson.class, String.class)) + .containsExactlyInAnyOrder(person1.getName(), person2.getName()); + assertThat(template.findDistinct(new BasicQuery("{'address.state' : 'PA'}"), "name", + template.getCollectionName(MyPerson.class), MyPerson.class, String.class)) + .containsExactlyInAnyOrder(person1.getName(), person2.getName()); + } + + @Test // DATAMONGO-1761 + public void testDistinctCovertsResultToPropertyTargetTypeCorrectly() { + + template.insert(new Person("garvin")); + + assertThat(template.findDistinct("firstName", Person.class, Object.class)).allSatisfy(String.class::isInstance); + } + + @Test // DATAMONGO-1761 + public void testDistinctResolvesDbRefsCorrectly() { + + SomeContent content1 = new SomeContent(); + content1.text = "content-1"; + + SomeContent content2 = new SomeContent(); + content2.text = "content-2"; + + template.save(content1); + template.save(content2); + + SomeTemplate t1 = new SomeTemplate(); + t1.content = content1; + + SomeTemplate t2 = new SomeTemplate(); + t2.content = content2; + + SomeTemplate t3 = new SomeTemplate(); + t3.content = content2; + + template.insert(t1); + template.insert(t2); + template.insert(t3); + + assertThat(template.findDistinct("content", SomeTemplate.class, SomeContent.class)) + .containsExactlyInAnyOrder(content1, content2); } @Test @@ -731,9 +802,9 @@ public void testUsingAnInQueryWithObjectId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); Query q3 = new Query(Criteria.where("id").in(p3.getId())); List results3 = template.find(q3, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(1)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(1); } @Test @@ -764,9 +835,9 @@ public void testUsingAnInQueryWithStringId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfTypeString.class); Query q3 = new Query(Criteria.where("id").in(p3.getId(), p4.getId())); List results3 = template.find(q3, PersonWithIdPropertyOfTypeString.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } @Test @@ -801,15 +872,12 @@ public void testUsingAnInQueryWithLongId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfTypeLong.class); Query q3 = new Query(Criteria.where("id").in(1001L, 1004L)); List results3 = template.find(q3, PersonWithIdPropertyOfTypeLong.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } - /** - * @see DATAMONGO-602 - */ - @Test + @Test // DATAMONGO-602 public void testUsingAnInQueryWithBigIntegerId() throws Exception { 
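// [Editorial note, not part of the patch] A short sketch of the Criteria.in(...) variants the
// surrounding tests exercise; the names mirror testUsingInQueryWithList further below:
//   where("age").in(l1)            // Collection overload: matches any element of l1
//   where("age").in(l1.toArray())  // varargs expansion, equivalent result
//   where("age").in(l1, l2)        // mixing collections into varargs is rejected
// The last form is expected to fail, which that test guards with a try block.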
template.remove(new Query(), PersonWithIdPropertyOfTypeBigInteger.class); @@ -842,9 +910,9 @@ public void testUsingAnInQueryWithBigIntegerId() throws Exception { Query q3 = new Query(Criteria.where("id").in(new BigInteger("2666666666666666665069473312490162649510603601"), new BigInteger("2666666666666666665069473312490162649510603604"))); List results3 = template.find(q3, PersonWithIdPropertyOfTypeBigInteger.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } @Test @@ -879,9 +947,9 @@ public void testUsingAnInQueryWithPrimitiveIntId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfPrimitiveInt.class); Query q3 = new Query(Criteria.where("id").in(1001, 1003)); List results3 = template.find(q3, PersonWithIdPropertyOfPrimitiveInt.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } @Test @@ -906,7 +974,7 @@ public void testUsingInQueryWithList() throws Exception { p4.setAge(41); template.insert(p4); - List l1 = new ArrayList(); + List l1 = new ArrayList<>(); l1.add(11); l1.add(21); l1.add(41); @@ -914,10 +982,10 @@ public void testUsingInQueryWithList() throws Exception { List results1 = template.find(q1, PersonWithIdPropertyOfTypeObjectId.class); Query q2 = new Query(Criteria.where("age").in(l1.toArray())); List results2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(3)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(3); try { - List l2 = new ArrayList(); + List l2 = new ArrayList<>(); l2.add(31); Query q3 = new Query(Criteria.where("age").in(l1, l2)); template.find(q3, PersonWithIdPropertyOfTypeObjectId.class); @@ -951,8 +1019,8 @@ public void testUsingRegexQueryWithOptions() throws Exception { List results1 = template.find(q1, PersonWithIdPropertyOfTypeObjectId.class); Query q2 = new Query(Criteria.where("firstName").regex("S.*", "i")); List results2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results1.size(), is(1)); - assertThat(results2.size(), is(2)); + assertThat(results1.size()).isEqualTo(1); + assertThat(results2.size()).isEqualTo(2); } @Test @@ -979,9 +1047,9 @@ public void testUsingAnOrQuery() throws Exception { Query orQuery = new Query(new Criteria().orOperator(where("age").in(11, 21), where("age").is(31))); List results = template.find(orQuery, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results.size(), is(3)); + assertThat(results.size()).isEqualTo(3); for (PersonWithIdPropertyOfTypeObjectId p : results) { - assertThat(p.getAge(), isOneOf(11, 21, 31)); + assertThat(p.getAge()).isIn(11, 21, 31); } } @@ -1001,21 +1069,21 @@ public void testUsingUpdateWithMultipleSet() throws Exception { Update u = new Update().set("firstName", "Bob").set("age", 10); - WriteResult wr = template.updateMulti(new Query(), u, PersonWithIdPropertyOfTypeObjectId.class); + UpdateResult wr = template.updateMulti(new Query(), u, PersonWithIdPropertyOfTypeObjectId.class); - if (wasAcknowledged(wr)) { - assertThat(wr.getN(), is(2)); + if (wr.wasAcknowledged()) { + assertThat(wr.getModifiedCount()).isEqualTo(2L); } 
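		// [Editorial note, not part of the patch] The wasAcknowledged() guard above is needed
		// because UpdateResult count getters are only usable for acknowledged writes. Note the
		// semantic shift from the removed WriteResult.getN(), which roughly reported the number
		// of matched documents: getModifiedCount() excludes documents the update left unchanged,
		// while wr.getMatchedCount() is the closer equivalent of the old getN(). Here both
		// records are actually modified, so the two counts agree.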
Query q1 = new Query(Criteria.where("age").in(11, 21)); List r1 = template.find(q1, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(r1.size(), is(0)); + assertThat(r1.size()).isEqualTo(0); Query q2 = new Query(Criteria.where("age").is(10)); List r2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(r2.size(), is(2)); + assertThat(r2.size()).isEqualTo(2); for (PersonWithIdPropertyOfTypeObjectId p : r2) { - assertThat(p.getAge(), is(10)); - assertThat(p.getFirstName(), is("Bob")); + assertThat(p.getAge()).isEqualTo(10); + assertThat(p.getFirstName()).isEqualTo("Bob"); } } @@ -1029,11 +1097,11 @@ public void testRemovingDocument() throws Exception { Query q1 = new Query(Criteria.where("id").is(p1.getId())); PersonWithIdPropertyOfTypeObjectId found1 = template.findOne(q1, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(found1, notNullValue()); + assertThat(found1).isNotNull(); Query _q = new Query(Criteria.where("_id").is(p1.getId())); template.remove(_q, PersonWithIdPropertyOfTypeObjectId.class); PersonWithIdPropertyOfTypeObjectId notFound1 = template.findOne(q1, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(notFound1, nullValue()); + assertThat(notFound1).isNull(); PersonWithIdPropertyOfTypeObjectId p2 = new PersonWithIdPropertyOfTypeObjectId(); p2.setFirstName("Bubba_to_be_removed"); @@ -1042,10 +1110,10 @@ public void testRemovingDocument() throws Exception { Query q2 = new Query(Criteria.where("id").is(p2.getId())); PersonWithIdPropertyOfTypeObjectId found2 = template.findOne(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(found2, notNullValue()); + assertThat(found2).isNotNull(); template.remove(q2, PersonWithIdPropertyOfTypeObjectId.class); PersonWithIdPropertyOfTypeObjectId notFound2 = template.findOne(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(notFound2, nullValue()); + assertThat(notFound2).isNull(); } @Test @@ -1057,16 +1125,16 @@ public void testAddingToList() { Query q1 = new Query(Criteria.where("id").is(p.getId())); PersonWithAList p2 = template.findOne(q1, PersonWithAList.class); - assertThat(p2, notNullValue()); - assertThat(p2.getWishList().size(), is(0)); + assertThat(p2).isNotNull(); + assertThat(p2.getWishList().size()).isEqualTo(0); - p2.addToWishList("please work!"); + p2.addToWishList("please work"); template.save(p2); PersonWithAList p3 = template.findOne(q1, PersonWithAList.class); - assertThat(p3, notNullValue()); - assertThat(p3.getWishList().size(), is(1)); + assertThat(p3).isNotNull(); + assertThat(p3.getWishList().size()).isEqualTo(1); Friend f = new Friend(); p.setFirstName("Erik"); @@ -1076,9 +1144,9 @@ public void testAddingToList() { template.save(p3); PersonWithAList p4 = template.findOne(q1, PersonWithAList.class); - assertThat(p4, notNullValue()); - assertThat(p4.getWishList().size(), is(1)); - assertThat(p4.getFriends().size(), is(1)); + assertThat(p4).isNotNull(); + assertThat(p4.getWishList().size()).isEqualTo(1); + assertThat(p4.getFriends().size()).isEqualTo(1); } @@ -1101,43 +1169,40 @@ public void testFindOneWithSort() { // test query with a sort Query q2 = new Query(Criteria.where("age").gt(10)); - q2.with(new Sort(Direction.DESC, "age")); + q2.with(Sort.by(Direction.DESC, "age")); PersonWithAList p5 = template.findOne(q2, PersonWithAList.class); - assertThat(p5.getFirstName(), is("Mark")); + assertThat(p5.getFirstName()).isEqualTo("Mark"); } - @Test + @Test // DATAMONGO-2572 public void testUsingReadPreference() throws Exception { this.template.execute("readPref", new 
CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - assertThat(collection.getOptions(), is(0)); - assertThat(collection.getDB().getOptions(), is(0)); + public Object doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + // assertThat(collection.getOptions(), is(0)); + // assertThat(collection.read.getDB().getOptions(), is(0)); return null; } }); - MongoTemplate slaveTemplate = new MongoTemplate(factory); - slaveTemplate.setReadPreference(ReadPreference.secondary()); - slaveTemplate.execute("readPref", new CollectionCallback() { - public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException { - assertThat(collection.getReadPreference(), is(ReadPreference.secondary())); - assertThat(collection.getDB().getOptions(), is(0)); + MongoTemplate secondaryTemplate = new MongoTemplate(factory); + secondaryTemplate.setReadPreference(ReadPreference.secondary()); + secondaryTemplate.execute("readPref", new CollectionCallback() { + public Object doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + assertThat(collection.getReadPreference()).isEqualTo(ReadPreference.secondary()); + // assertThat(collection.getDB().getOptions(), is(0)); return null; } }); } - /** - * @see DATADOC-166 - */ - @Test + @Test // DATADOC-166, DATAMONGO-1762 public void removingNullIsANoOp() { - template.remove(null); + assertThatIllegalArgumentException().isThrownBy(() -> template.remove((Object) null)); } - /** - * @see DATADOC-240, DATADOC-212 - */ - @Test + @Test // DATADOC-240, DATADOC-212 public void updatesObjectIdsCorrectly() { PersonWithIdPropertyOfTypeObjectId person = new PersonWithIdPropertyOfTypeObjectId(); @@ -1150,9 +1215,9 @@ public void updatesObjectIdsCorrectly() { PersonWithIdPropertyOfTypeObjectId result = template.findById(person.getId(), PersonWithIdPropertyOfTypeObjectId.class); - assertThat(result, is(notNullValue())); - assertThat(result.getId(), is(person.getId())); - assertThat(result.getFirstName(), is("Carter")); + assertThat(result).isNotNull(); + assertThat(result.getId()).isEqualTo(person.getId()); + assertThat(result.getFirstName()).isEqualTo("Carter"); } @Test @@ -1162,24 +1227,25 @@ public void testWriteConcernResolver() { person.setId(new ObjectId()); person.setFirstName("Dave"); - template.setWriteConcern(noneOrUnacknowledged()); + template.setWriteConcern(WriteConcern.UNACKNOWLEDGED); template.save(person); - WriteResult result = template.updateFirst(query(where("id").is(person.getId())), update("firstName", "Carter"), + template.updateFirst(query(where("id").is(person.getId())), update("firstName", "Carter"), PersonWithIdPropertyOfTypeObjectId.class); FsyncSafeWriteConcernResolver resolver = new FsyncSafeWriteConcernResolver(); template.setWriteConcernResolver(resolver); Query q = query(where("_id").is(person.getId())); Update u = update("firstName", "Carter"); - result = template.updateFirst(q, u, PersonWithIdPropertyOfTypeObjectId.class); + template.updateFirst(q, u, PersonWithIdPropertyOfTypeObjectId.class); MongoAction lastMongoAction = resolver.getMongoAction(); - assertThat(lastMongoAction.getCollectionName(), is("personWithIdPropertyOfTypeObjectId")); - assertThat(lastMongoAction.getDefaultWriteConcern(), equalTo(noneOrUnacknowledged())); - assertThat(lastMongoAction.getDocument(), notNullValue()); - assertThat(lastMongoAction.getEntityType().toString(), 
is(PersonWithIdPropertyOfTypeObjectId.class.toString())); - assertThat(lastMongoAction.getMongoActionOperation(), is(MongoActionOperation.UPDATE)); - assertThat(lastMongoAction.getQuery(), equalTo(q.getQueryObject())); + assertThat(lastMongoAction.getCollectionName()).isEqualTo("personWithIdPropertyOfTypeObjectId"); + assertThat(lastMongoAction.getDefaultWriteConcern()).isEqualTo(WriteConcern.UNACKNOWLEDGED); + assertThat(lastMongoAction.getDocument()).isNotNull(); + assertThat(lastMongoAction.getEntityType().toString()) + .isEqualTo(PersonWithIdPropertyOfTypeObjectId.class.toString()); + assertThat(lastMongoAction.getMongoActionOperation()).isEqualTo(MongoActionOperation.UPDATE); + assertThat(lastMongoAction.getQuery()).isEqualTo(q.getQueryObject()); } private class FsyncSafeWriteConcernResolver implements WriteConcernResolver { @@ -1188,7 +1254,7 @@ private class FsyncSafeWriteConcernResolver implements WriteConcernResolver { public WriteConcern resolve(MongoAction action) { this.mongoAction = action; - return WriteConcern.FSYNC_SAFE; + return WriteConcern.JOURNALED; } public MongoAction getMongoAction() { @@ -1196,78 +1262,67 @@ public MongoAction getMongoAction() { } } - /** - * @see DATADOC-246 - */ - @Test + @Test // DATADOC-246 public void updatesDBRefsCorrectly() { DBRef first = new DBRef("foo", new ObjectId()); DBRef second = new DBRef("bar", new ObjectId()); - template.updateFirst(null, update("dbRefs", Arrays.asList(first, second)), ClassWithDBRefs.class); + template.updateFirst(new Query(), update("dbRefs", Arrays.asList(first, second)), ClassWithDBRefs.class); } class ClassWithDBRefs { List dbrefs; } - /** - * @see DATADOC-202 - */ - @Test + @Test // DATADOC-202 public void executeDocument() { + template.insert(new Person("Tom")); template.insert(new Person("Dick")); template.insert(new Person("Harry")); - final List names = new ArrayList(); + final List names = new ArrayList<>(); template.executeQuery(new Query(), template.getCollectionName(Person.class), new DocumentCallbackHandler() { - public void processDocument(DBObject dbObject) { - String name = (String) dbObject.get("firstName"); + public void processDocument(org.bson.Document document) { + String name = (String) document.get("firstName"); if (name != null) { names.add(name); } } }); - assertEquals(3, names.size()); + assertThat(names.size()).isEqualTo(3); // template.remove(new Query(), Person.class); } - /** - * @see DATADOC-202 - */ - @Test + @Test // DATADOC-202 public void executeDocumentWithCursorPreparer() { template.insert(new Person("Tom")); template.insert(new Person("Dick")); template.insert(new Person("Harry")); - final List names = new ArrayList(); + final List names = new ArrayList<>(); template.executeQuery(new Query(), template.getCollectionName(Person.class), new DocumentCallbackHandler() { - public void processDocument(DBObject dbObject) { - String name = (String) dbObject.get("firstName"); + public void processDocument(org.bson.Document document) { + String name = (String) document.get("firstName"); if (name != null) { names.add(name); } } }, new CursorPreparer() { - public DBCursor prepare(DBCursor cursor) { - cursor.limit(1); - return cursor; + public FindIterable prepare(FindIterable iterable) { + iterable.limit(1); + return iterable; } }); - assertEquals(1, names.size()); - // template.remove(new Query(), Person.class); + assertThat(names.size()).isEqualTo(1); + template.remove(new Query(), Person.class); } - /** - * @see DATADOC-183 - */ - @Test + @Test // DATADOC-183 public void 
countsDocumentsCorrectly() { - assertThat(template.count(new Query(), Person.class), is(0L)); + assertThat(template.count(new Query(), Person.class)).isEqualTo(0L); Person dave = new Person("Dave"); Person carter = new Person("Carter"); @@ -1275,51 +1330,39 @@ public void countsDocumentsCorrectly() { template.save(dave); template.save(carter); - assertThat(template.count(null, Person.class), is(2L)); - assertThat(template.count(query(where("firstName").is("Carter")), Person.class), is(1L)); + assertThat(template.count(new Query(), Person.class)).isEqualTo(2L); + assertThat(template.count(query(where("firstName").is("Carter")), Person.class)).isEqualTo(1L); } - /** - * @see DATADOC-183 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATADOC-183 public void countRejectsNullEntityClass() { - template.count(null, (Class) null); + assertThatIllegalArgumentException().isThrownBy(() -> template.count(null, (Class) null)); } - /** - * @see DATADOC-183 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATADOC-183 public void countRejectsEmptyCollectionName() { - template.count(null, ""); + assertThatIllegalArgumentException().isThrownBy(() -> template.count(null, "")); } - /** - * @see DATADOC-183 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATADOC-183 public void countRejectsNullCollectionName() { - template.count(null, (String) null); + assertThatIllegalArgumentException().isThrownBy(() -> template.count(null, (String) null)); } @Test public void returnsEntityWhenQueryingForDateTime() { - DateTime dateTime = new DateTime(2011, 3, 3, 12, 0, 0, 0); + LocalDateTime dateTime = LocalDateTime.of(2011, 3, 3, 12, 0, 0, 0); TestClass testClass = new TestClass(dateTime); mappingTemplate.save(testClass); - List testClassList = mappingTemplate.find(new Query(Criteria.where("myDate").is(dateTime.toDate())), + List testClassList = mappingTemplate.find(new Query(Criteria.where("myDate").is(dateTime)), TestClass.class); - assertThat(testClassList.size(), is(1)); - assertThat(testClassList.get(0).myDate, is(testClass.myDate)); + assertThat(testClassList.size()).isEqualTo(1); + assertThat(testClassList.get(0).myDate).isEqualTo(testClass.myDate); } - /** - * @see DATADOC-230 - */ - @Test + @Test // DATADOC-230 public void removesEntityFromCollection() { template.remove(new Query(), "mycollection"); @@ -1327,16 +1370,13 @@ public void removesEntityFromCollection() { Person person = new Person("Dave"); template.save(person, "mycollection"); - assertThat(template.findAll(TestClass.class, "mycollection").size(), is(1)); + assertThat(template.findAll(TestClass.class, "mycollection").size()).isEqualTo(1); template.remove(person, "mycollection"); - assertThat(template.findAll(Person.class, "mycollection").isEmpty(), is(true)); + assertThat(template.findAll(Person.class, "mycollection").isEmpty()).isTrue(); } - /** - * @see DATADOC-349 - */ - @Test + @Test // DATADOC-349 public void removesEntityWithAnnotatedIdIfIdNeedsMassaging() { String id = new ObjectId().toString(); @@ -1346,16 +1386,13 @@ public void removesEntityWithAnnotatedIdIfIdNeedsMassaging() { template.save(sample); - assertThat(template.findOne(query(where("id").is(id)), Sample.class).id, is(id)); + assertThat(template.findOne(query(where("id").is(id)), Sample.class).id).isEqualTo(id); template.remove(sample); - assertThat(template.findOne(query(where("id").is(id)), Sample.class), is(nullValue())); + assertThat(template.findOne(query(where("id").is(id)), Sample.class)).isNull(); } - /** - * @see 
DATAMONGO-423 - */ - @Test + @Test // DATAMONGO-423 public void executesQueryWithNegatedRegexCorrectly() { Sample first = new Sample(); @@ -1370,31 +1407,28 @@ public void executesQueryWithNegatedRegexCorrectly() { Query query = query(where("field").not().regex("Matthews")); List result = template.find(query, Sample.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).field, is("Beauford")); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).field).isEqualTo("Beauford"); } - /** - * @see DATAMONGO-447 - */ - @Test + @Test // DATAMONGO-447, GH-4707 public void storesAndRemovesTypeWithComplexId() { MyId id = new MyId(); + id.id = Instant.now().minusSeconds(2); id.first = "foo"; id.second = "bar"; + id.id = Instant.now().minusSeconds(3); TypeWithMyId source = new TypeWithMyId(); source.id = id; template.save(source); - template.remove(query(where("id").is(id)), TypeWithMyId.class); + assertThat(template.remove(query(where("id").is(id)), TypeWithMyId.class)).extracting(DeleteResult::getDeletedCount) + .isEqualTo(1L); } - /** - * @see DATAMONGO-506 - */ - @Test + @Test // DATAMONGO-506 public void exceutesBasicQueryCorrectly() { Address address = new Address(); @@ -1410,20 +1444,17 @@ public void exceutesBasicQueryCorrectly() { Query query = new BasicQuery("{'name' : 'Oleg'}"); List result = template.find(query, MyPerson.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0), hasProperty("name", is("Oleg"))); + assertThat(result).hasSize(1); + assertThat(result.get(0).getName()).isEqualTo("Oleg"); query = new BasicQuery("{'address.state' : 'PA' }"); result = template.find(query, MyPerson.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0), hasProperty("name", is("Oleg"))); + assertThat(result).hasSize(1); + assertThat(result.get(0).getName()).isEqualTo("Oleg"); } - /** - * @see DATAMONGO-279 - */ - @Test(expected = OptimisticLockingFailureException.class) + @Test // DATAMONGO-279 public void optimisticLockingHandling() { // Init version @@ -1435,8 +1466,8 @@ public void optimisticLockingHandling() { List result = template .findAll(PersonWithVersionPropertyOfTypeInteger.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0).version, is(0)); + assertThat(result).hasSize(1); + assertThat(result.get(0).version).isEqualTo(0); // Version change person = result.get(0); @@ -1444,24 +1475,23 @@ public void optimisticLockingHandling() { template.save(person); - assertThat(person.version, is(1)); + assertThat(person.version).isEqualTo(1); result = mappingTemplate.findAll(PersonWithVersionPropertyOfTypeInteger.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0).version, is(1)); + assertThat(result).hasSize(1); + assertThat(result.get(0).version).isEqualTo(1); // Optimistic lock exception person.version = 0; person.firstName = "Patryk3"; - template.save(person); + final PersonWithVersionPropertyOfTypeInteger toBeSaved = person; + + assertThatExceptionOfType(OptimisticLockingFailureException.class).isThrownBy(() -> template.save(toBeSaved)); } - /** - * @see DATAMONGO-562 - */ - @Test + @Test // DATAMONGO-562 public void optimisticLockingHandlingWithExistingId() { PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); @@ -1471,22 +1501,27 @@ public void optimisticLockingHandlingWithExistingId() { template.save(person); } - /** - * @see DATAMONGO-617 - */ - @Test + @Test // DATAMONGO-617 public void doesNotFailOnVersionInitForUnversionedEntity() { - DBObject dbObject = new 
BasicDBObject(); - dbObject.put("firstName", "Oliver"); + org.bson.Document document = new org.bson.Document(); + document.put("firstName", "Oliver"); - template.insert(dbObject, template.determineCollectionName(PersonWithVersionPropertyOfTypeInteger.class)); + template.insert(document, template.getCollectionName(PersonWithVersionPropertyOfTypeInteger.class)); } - /** - * @see DATAMONGO-539 - */ - @Test + @Test // DATAMONGO-1617 + public void doesNotFailOnInsertForEntityWithNonAutogeneratableId() { + + PersonWithIdPropertyOfTypeUUID person = new PersonWithIdPropertyOfTypeUUID(); + person.setFirstName("Laszlo"); + person.setAge(33); + + template.insert(person); + assertThat(person.getId()).isNotNull(); + } + + @Test // DATAMONGO-539 public void removesObjectFromExplicitCollection() { String collectionName = "explicit"; @@ -1495,97 +1530,72 @@ public void removesObjectFromExplicitCollection() { PersonWithConvertedId person = new PersonWithConvertedId(); person.name = "Dave"; template.save(person, collectionName); - assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty(), is(false)); + assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty()).isFalse(); template.remove(person, collectionName); - assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty(), is(true)); + assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty()).isTrue(); } - /** - * @see DATAMONGO-549 - */ + // DATAMONGO-549 public void savesMapCorrectly() { - Map map = new HashMap(); + Map map = new HashMap<>(); map.put("key", "value"); template.save(map, "maps"); } - /** - * @see DATAMONGO-549 - */ - @Test(expected = MappingException.class) + @Test // DATAMONGO-549, DATAMONGO-1730 public void savesMongoPrimitiveObjectCorrectly() { - template.save(new Object(), "collection"); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save(new Object(), "collection")); } - /** - * @see DATAMONGO-549 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-549 public void rejectsNullObjectToBeSaved() { - template.save(null); + assertThatIllegalArgumentException().isThrownBy(() -> template.save(null)); } - /** - * @see DATAMONGO-550 - */ - @Test - public void savesPlainDbObjectCorrectly() { + @Test // DATAMONGO-550 + public void savesPlainDocumentCorrectly() { - DBObject dbObject = new BasicDBObject("foo", "bar"); - template.save(dbObject, "collection"); + org.bson.Document document = new org.bson.Document("foo", "bar"); + template.save(document, "collection"); - assertThat(dbObject.containsField("_id"), is(true)); + assertThat(document.containsKey("_id")).isTrue(); } - /** - * @see DATAMONGO-550 - */ - @Test(expected = InvalidDataAccessApiUsageException.class) + @Test // DATAMONGO-550, DATAMONGO-1730 public void rejectsPlainObjectWithOutExplicitCollection() { - DBObject dbObject = new BasicDBObject("foo", "bar"); - template.save(dbObject, "collection"); + org.bson.Document document = new org.bson.Document("foo", "bar"); + template.save(document, "collection"); - template.findById(dbObject.get("_id"), DBObject.class); + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> template.findById(document.get("_id"), org.bson.Document.class)); } - /** - * @see DATAMONGO-550 - */ - @Test - public void readsPlainDbObjectById() { + @Test // DATAMONGO-550 + public void readsPlainDocumentById() { - DBObject dbObject = new BasicDBObject("foo", "bar"); - template.save(dbObject, 
"collection"); + org.bson.Document document = new org.bson.Document("foo", "bar"); + template.save(document, "collection"); - DBObject result = template.findById(dbObject.get("_id"), DBObject.class, "collection"); - assertThat(result.get("foo"), is(dbObject.get("foo"))); - assertThat(result.get("_id"), is(dbObject.get("_id"))); + org.bson.Document result = template.findById(document.get("_id"), org.bson.Document.class, "collection"); + assertThat(result.get("foo")).isEqualTo(document.get("foo")); + assertThat(result.get("_id")).isEqualTo(document.get("_id")); } - /** - * @see DATAMONGO-551 - */ - @Test + @Test // DATAMONGO-551 public void writesPlainString() { template.save("{ 'foo' : 'bar' }", "collection"); } - /** - * @see DATAMONGO-551 - */ - @Test(expected = MappingException.class) + @Test // DATAMONGO-551 public void rejectsNonJsonStringForSave() { - template.save("Foobar!", "collection"); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save("Foobar", "collection")); } - /** - * @see DATAMONGO-588 - */ - @Test + @Test // DATAMONGO-588 public void initializesVersionOnInsert() { PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); @@ -1593,13 +1603,28 @@ public void initializesVersionOnInsert() { template.insert(person); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); } - /** - * @see DATAMONGO-588 - */ - @Test + @Test // DATAMONGO-2195 + public void removeVersionedEntityConsidersVersion() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insert(person); + assertThat(person.version).isEqualTo(0); + template.update(PersonWithVersionPropertyOfTypeInteger.class).matching(query(where("id").is(person.id))) + .apply(new Update().set("firstName", "Walter")).first(); + + DeleteResult deleteResult = template.remove(person); + + assertThat(deleteResult.wasAcknowledged()).isTrue(); + assertThat(deleteResult.getDeletedCount()).isZero(); + assertThat(template.count(new Query(), PersonWithVersionPropertyOfTypeInteger.class)).isOne(); + } + + @Test // DATAMONGO-588 public void initializesVersionOnBatchInsert() { PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); @@ -1607,70 +1632,69 @@ public void initializesVersionOnBatchInsert() { template.insertAll(Arrays.asList(person)); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); } - /** - * @see DATAMONGO-568 - */ - @Test - public void queryCantBeNull() { + @Test // DATAMONGO-1992 + public void initializesIdAndVersionAndOfImmutableObject() { + + ImmutableVersioned versioned = new ImmutableVersioned(); + + ImmutableVersioned saved = template.insert(versioned); - List result = template.findAll(PersonWithIdPropertyOfTypeObjectId.class); - assertThat(template.find(null, PersonWithIdPropertyOfTypeObjectId.class), is(result)); + assertThat(saved).isNotSameAs(versioned); + assertThat(versioned.id).isNull(); + assertThat(versioned.version).isNull(); + + assertThat(saved.id).isNotNull(); + assertThat(saved.version).isEqualTo(0L); } - /** - * @see DATAMONGO-620 - */ - @Test + @Test // DATAMONGO-568, DATAMONGO-1762 + public void queryCantBeNull() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.find(null, PersonWithIdPropertyOfTypeObjectId.class)); + } + + @Test // DATAMONGO-620 public void versionsObjectIntoDedicatedCollection() { PersonWithVersionPropertyOfTypeInteger person = new 
PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; template.save(person, "personX"); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); template.save(person, "personX"); - assertThat(person.version, is(1)); + assertThat(person.version).isEqualTo(1); } - /** - * @see DATAMONGO-621 - */ - @Test + @Test // DATAMONGO-621 public void correctlySetsLongVersionProperty() { PersonWithVersionPropertyOfTypeLong person = new PersonWithVersionPropertyOfTypeLong(); person.firstName = "Dave"; template.save(person); - assertThat(person.version, is(0L)); + assertThat(person.version).isEqualTo(0L); } - /** - * @see DATAMONGO-622 - */ - @Test(expected = DuplicateKeyException.class) + @Test // DATAMONGO-622 public void preventsDuplicateInsert() { - template.setWriteConcern(WriteConcern.SAFE); + template.setWriteConcern(WriteConcern.ACKNOWLEDGED); PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; template.save(person); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); person.version = null; - template.save(person); + assertThatExceptionOfType(DuplicateKeyException.class).isThrownBy(() -> template.save(person)); } - /** - * @see DATAMONGO-629 - */ - @Test + @Test // DATAMONGO-629 public void countAndFindWithoutTypeInformation() { Person person = new Person(); @@ -1679,14 +1703,11 @@ public void countAndFindWithoutTypeInformation() { Query query = query(where("_id").is(person.getId())); String collectionName = template.getCollectionName(Person.class); - assertThat(template.find(query, HashMap.class, collectionName), hasSize(1)); - assertThat(template.count(query, collectionName), is(1L)); + assertThat(template.find(query, HashMap.class, collectionName)).hasSize(1); + assertThat(template.count(query, collectionName)).isEqualTo(1L); } - /** - * @see DATAMONGO-571 - */ - @Test + @Test // DATAMONGO-571, GH-3407 public void nullsPropertiesForVersionObjectUpdates() { VersionedPerson person = new VersionedPerson(); @@ -1694,19 +1715,22 @@ public void nullsPropertiesForVersionObjectUpdates() { person.lastname = "Matthews"; template.save(person); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); + person.firstname = null; person.lastname = null; template.save(person); person = template.findOne(query(where("id").is(person.id)), VersionedPerson.class); - assertThat(person.lastname, is(nullValue())); + assertThat(person.firstname).isNull(); + assertThat(person.lastname).isNull(); + + org.bson.Document document = template.findOne(query(where("_id").is(person.id)), org.bson.Document.class, + "versionedPerson"); + assertThat(document).doesNotContainKey("firstname").containsEntry("lastname", null); } - /** - * @see DATAMONGO-571 - */ - @Test + @Test // DATAMONGO-571 public void nullsValuesForUpdatesOfUnversionedEntity() { Person person = new Person("Dave"); @@ -1716,22 +1740,19 @@ public void nullsValuesForUpdatesOfUnversionedEntity() { template.save(person); person = template.findOne(query(where("id").is(person.getId())), Person.class); - assertThat(person.getFirstName(), is(nullValue())); + assertThat(person.getFirstName()).isNull(); } - /** - * @see DATAMONGO-679 - */ - @Test + @Test // DATAMONGO-679 public void savesJsonStringCorrectly() { - DBObject dbObject = new BasicDBObject().append("first", "first").append("second", "second"); + org.bson.Document document = new org.bson.Document().append("first", "first").append("second", "second"); - 
template.save(dbObject.toString(), "collection"); + template.save(document, "collection"); - List result = template.findAll(DBObject.class, "collection"); - assertThat(result.size(), is(1)); - assertThat(result.get(0).containsField("first"), is(true)); + List result = template.findAll(org.bson.Document.class, "collection"); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).containsKey("first")).isTrue(); } @Test @@ -1742,15 +1763,12 @@ public void executesExistsCorrectly() { Query query = query(where("id").is(sample.id)); - assertThat(template.exists(query, Sample.class), is(true)); - assertThat(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class)), is(true)); - assertThat(template.exists(query, Sample.class, template.getCollectionName(Sample.class)), is(true)); + assertThat(template.exists(query, Sample.class)).isTrue(); + assertThat(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class))).isTrue(); + assertThat(template.exists(query, Sample.class, template.getCollectionName(Sample.class))).isTrue(); } - /** - * @see DATAMONGO-675 - */ - @Test + @Test // DATAMONGO-675 public void updateConsidersMappingAnnotations() { TypeWithFieldAnnotation entity = new TypeWithFieldAnnotation(); @@ -1763,13 +1781,10 @@ public void updateConsidersMappingAnnotations() { FindAndModifyOptions options = new FindAndModifyOptions().returnNew(true); TypeWithFieldAnnotation result = template.findAndModify(query, update, options, TypeWithFieldAnnotation.class); - assertThat(result.emailAddress, is("new")); + assertThat(result.emailAddress).isEqualTo("new"); } - /** - * @see DATAMONGO-671 - */ - @Test + @Test // DATAMONGO-671 public void findsEntityByDateReference() { TypeWithDate entity = new TypeWithDate(); @@ -1779,14 +1794,35 @@ public void findsEntityByDateReference() { Query query = query(where("date").lt(new Date())); List result = template.find(query, TypeWithDate.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0).date, is(notNullValue())); + assertThat(result).hasSize(1); + assertThat(result.get(0).date).isNotNull(); } - /** - * @see DATAMONGO-540 - */ - @Test + @Test // GH-4390 + void nativeDriverDateTimeCodecShouldBeApplied/*when configured*/() { + + MongoTestTemplate ops = new MongoTestTemplate(cfg -> { + cfg.configureConversion(conversion -> { + conversion.customConversions( + MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs)); + }); + }); + + TypeWithDate source = new TypeWithDate(); + source.id = "id-1"; + source.date = Date.from(Instant.now()); + + ops.save(source); + + var dbDate = ops.execute(TypeWithDate.class, + collection -> collection.find(new org.bson.Document("_id", source.id)).first().get("date")); + + TypeWithDate target = ops.findOne(query(where("date").is(source.date)), TypeWithDate.class); + + assertThat(target.date).isEqualTo(source.date).isEqualTo(dbDate); + } + + @Test // DATAMONGO-540 public void findOneAfterUpsertForNonExistingObjectReturnsTheInsertedObject() { String idValue = "4711"; @@ -1798,15 +1834,12 @@ public void findOneAfterUpsertForNonExistingObjectReturnsTheInsertedObject() { template.upsert(query, update, Sample.class); Sample result = template.findOne(query, Sample.class); - assertThat(result, is(notNullValue())); - assertThat(result.field, is(fieldValue)); - assertThat(result.id, is(idValue)); + assertThat(result).isNotNull(); + assertThat(result.field).isEqualTo(fieldValue); + 
assertThat(result.id).isEqualTo(idValue); } - /** - * @see DATAMONGO-392 - */ - @Test + @Test // DATAMONGO-392 public void updatesShouldRetainTypeInformation() { Document doc = new Document(); @@ -1821,16 +1854,13 @@ public void updatesShouldRetainTypeInformation() { Document result = template.findOne(query, Document.class); - assertThat(result, is(notNullValue())); - assertThat(result.id, is(doc.id)); - assertThat(result.model, is(notNullValue())); - assertThat(result.model.value(), is(newModelValue)); + assertThat(result).isNotNull(); + assertThat(result.id).isEqualTo(doc.id); + assertThat(result.model).isNotNull(); + assertThat(result.model.value()).isEqualTo(newModelValue); } - /** - * @see DATAMONGO-702 - */ - @Test + @Test // DATAMONGO-702 public void queryShouldSupportRealAndAliasedPropertyNamesForFieldInclusions() { ObjectWith3AliasedFields obj = new ObjectWith3AliasedFields(); @@ -1848,16 +1878,13 @@ public void queryShouldSupportRealAndAliasedPropertyNamesForFieldInclusions() { ObjectWith3AliasedFields result = template.findOne(query, ObjectWith3AliasedFields.class); - assertThat(result.id, is(obj.id)); - assertThat(result.property1, is(nullValue())); - assertThat(result.property2, is(obj.property2)); - assertThat(result.property3, is(obj.property3)); + assertThat(result.id).isEqualTo(obj.id); + assertThat(result.property1).isNull(); + assertThat(result.property2).isEqualTo(obj.property2); + assertThat(result.property3).isEqualTo(obj.property3); } - /** - * @see DATAMONGO-702 - */ - @Test + @Test // DATAMONGO-702, DATAMONGO-2294 public void queryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() { ObjectWith3AliasedFields obj = new ObjectWith3AliasedFields(); @@ -1870,21 +1897,17 @@ public void queryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() { Query query = new Query(Criteria.where("id").is(obj.id)); query.fields() // - .exclude("property2") // real property name - .exclude("prop3"); // aliased property name + .exclude("property2", "prop3"); // real property name, aliased property name ObjectWith3AliasedFields result = template.findOne(query, ObjectWith3AliasedFields.class); - assertThat(result.id, is(obj.id)); - assertThat(result.property1, is(obj.property1)); - assertThat(result.property2, is(nullValue())); - assertThat(result.property3, is(nullValue())); + assertThat(result.id).isEqualTo(obj.id); + assertThat(result.property1).isEqualTo(obj.property1); + assertThat(result.property2).isNull(); + assertThat(result.property3).isNull(); } - /** - * @see DATAMONGO-702 - */ - @Test + @Test // DATAMONGO-702 public void findMultipleWithQueryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() { ObjectWith3AliasedFields obj0 = new ObjectWith3AliasedFields(); @@ -1908,28 +1931,25 @@ public void findMultipleWithQueryShouldSupportRealAndAliasedPropertyNamesForFiel List results = template.find(query, ObjectWith3AliasedFields.class); - assertThat(results, is(notNullValue())); - assertThat(results.size(), is(2)); + assertThat(results).isNotNull(); + assertThat(results.size()).isEqualTo(2); ObjectWith3AliasedFields result0 = results.get(0); - assertThat(result0, is(notNullValue())); - assertThat(result0.id, is(obj0.id)); - assertThat(result0.property1, is(obj0.property1)); - assertThat(result0.property2, is(nullValue())); - assertThat(result0.property3, is(nullValue())); + assertThat(result0).isNotNull(); + assertThat(result0.id).isEqualTo(obj0.id); + assertThat(result0.property1).isEqualTo(obj0.property1); + assertThat(result0.property2).isNull(); + 
assertThat(result0.property3).isNull(); ObjectWith3AliasedFields result1 = results.get(1); - assertThat(result1, is(notNullValue())); - assertThat(result1.id, is(obj1.id)); - assertThat(result1.property1, is(obj1.property1)); - assertThat(result1.property2, is(nullValue())); - assertThat(result1.property3, is(nullValue())); + assertThat(result1).isNotNull(); + assertThat(result1.id).isEqualTo(obj1.id); + assertThat(result1.property1).isEqualTo(obj1.property1); + assertThat(result1.property2).isNull(); + assertThat(result1.property3).isNull(); } - /** - * @see DATAMONGO-702 - */ - @Test + @Test // DATAMONGO-702 public void queryShouldSupportNestedPropertyNamesForFieldInclusions() { ObjectWith3AliasedFieldsAndNestedAddress obj = new ObjectWith3AliasedFieldsAndNestedAddress(); @@ -1953,19 +1973,16 @@ public void queryShouldSupportNestedPropertyNamesForFieldInclusions() { ObjectWith3AliasedFieldsAndNestedAddress result = template.findOne(query, ObjectWith3AliasedFieldsAndNestedAddress.class); - assertThat(result.id, is(obj.id)); - assertThat(result.property1, is(nullValue())); - assertThat(result.property2, is(obj.property2)); - assertThat(result.property3, is(nullValue())); - assertThat(result.address, is(notNullValue())); - assertThat(result.address.city, is(nullValue())); - assertThat(result.address.state, is(stateValue)); + assertThat(result.id).isEqualTo(obj.id); + assertThat(result.property1).isNull(); + assertThat(result.property2).isEqualTo(obj.property2); + assertThat(result.property3).isNull(); + assertThat(result.address).isNotNull(); + assertThat(result.address.city).isNull(); + assertThat(result.address.state).isEqualTo(stateValue); } - /** - * @see DATAMONGO-709 - */ - @Test + @Test // DATAMONGO-709 public void aQueryRestrictedWithOneRestrictedResultTypeShouldReturnOnlyInstancesOfTheRestrictedType() { BaseDoc doc0 = new BaseDoc(); @@ -1986,15 +2003,12 @@ public void aQueryRestrictedWithOneRestrictedResultTypeShouldReturnOnlyInstances Query query = Query.query(where("value").is("foo")).restrict(SpecialDoc.class); List result = template.find(query, BaseDoc.class); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(1)); - assertThat(result.get(0), is(instanceOf(SpecialDoc.class))); + assertThat(result).isNotNull(); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0)).isInstanceOf(SpecialDoc.class); } - /** - * @see DATAMONGO-709 - */ - @Test + @Test // DATAMONGO-709 public void aQueryRestrictedWithMultipleRestrictedResultTypesShouldReturnOnlyInstancesOfTheRestrictedTypes() { BaseDoc doc0 = new BaseDoc(); @@ -2015,16 +2029,13 @@ public void aQueryRestrictedWithMultipleRestrictedResultTypesShouldReturnOnlyIns Query query = Query.query(where("value").is("foo")).restrict(BaseDoc.class, VerySpecialDoc.class); List result = template.find(query, BaseDoc.class); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(2)); - assertThat(result.get(0).getClass(), is((Object) BaseDoc.class)); - assertThat(result.get(1).getClass(), is((Object) VerySpecialDoc.class)); + assertThat(result).isNotNull(); + assertThat(result.size()).isEqualTo(2); + assertThat(result.get(0).getClass()).isEqualTo((Object) BaseDoc.class); + assertThat(result.get(1).getClass()).isEqualTo((Object) VerySpecialDoc.class); } - /** - * @see DATAMONGO-709 - */ - @Test + @Test // DATAMONGO-709 public void aQueryWithNoRestrictedResultTypesShouldReturnAllInstancesWithinTheGivenCollection() { BaseDoc doc0 = new BaseDoc(); @@ -2045,17 +2056,14 @@ public void 
aQueryWithNoRestrictedResultTypesShouldReturnAllInstancesWithinTheGi Query query = Query.query(where("value").is("foo")); List result = template.find(query, BaseDoc.class); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(3)); - assertThat(result.get(0).getClass(), is((Object) BaseDoc.class)); - assertThat(result.get(1).getClass(), is((Object) SpecialDoc.class)); - assertThat(result.get(2).getClass(), is((Object) VerySpecialDoc.class)); + assertThat(result).isNotNull(); + assertThat(result.size()).isEqualTo(3); + assertThat(result.get(0).getClass()).isEqualTo((Object) BaseDoc.class); + assertThat(result.get(1).getClass()).isEqualTo((Object) SpecialDoc.class); + assertThat(result.get(2).getClass()).isEqualTo((Object) VerySpecialDoc.class); } - /** - * @see DATAMONGO-771 - */ - @Test + @Test // DATAMONGO-771 public void allowInsertWithPlainJsonString() { String id = "4711"; @@ -2065,15 +2073,24 @@ public void allowInsertWithPlainJsonString() { template.insert(json, "sample"); List result = template.findAll(Sample.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).id, is(id)); - assertThat(result.get(0).field, is(value)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).id).isEqualTo(id); + assertThat(result.get(0).field).isEqualTo(value); } - /** - * @see DATAMONGO-816 - */ - @Test + @Test // DATAMONGO-2028 + public void allowInsertOfDbObjectWithMappedTypes() { + + DBObject dbObject = new BasicDBObject("_id", "foo").append("duration", Duration.ofSeconds(100)); + template.insert(dbObject, "sample"); + List result = template.findAll(org.bson.Document.class, "sample"); + + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getString("_id")).isEqualTo("foo"); + assertThat(result.get(0).getString("duration")).isEqualTo("PT1M40S"); + } + + @Test // DATAMONGO-816 public void shouldExecuteQueryShouldMapQueryBeforeQueryExecution() { ObjectWithEnumValue o = new ObjectWithEnumValue(); @@ -2086,44 +2103,38 @@ public void shouldExecuteQueryShouldMapQueryBeforeQueryExecution() { new DocumentCallbackHandler() { @Override - public void processDocument(DBObject dbObject) throws MongoException, DataAccessException { + public void processDocument(org.bson.Document document) throws MongoException, DataAccessException { - assertThat(dbObject, is(notNullValue())); + assertThat(document).isNotNull(); - ObjectWithEnumValue result = template.getConverter().read(ObjectWithEnumValue.class, dbObject); + ObjectWithEnumValue result = template.getConverter().read(ObjectWithEnumValue.class, document); - assertThat(result.value, is(EnumValue.VALUE2)); + assertThat(result.value).isEqualTo(EnumValue.VALUE2); } }); } - /** - * @see DATAMONGO-811 - */ - @Test + @Test // DATAMONGO-811 public void updateFirstShouldIncreaseVersionForVersionedEntity() { VersionedPerson person = new VersionedPerson(); person.firstname = "Dave"; person.lastname = "Matthews"; template.save(person); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); Query qry = query(where("id").is(person.id)); VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterFirstSave.version, is(0L)); + assertThat(personAfterFirstSave.version).isEqualTo(0L); template.updateFirst(qry, Update.update("lastname", "Bubu"), VersionedPerson.class); VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterUpdateFirst.version, is(1L)); - 
assertThat(personAfterUpdateFirst.lastname, is("Bubu")); + assertThat(personAfterUpdateFirst.version).isEqualTo(1L); + assertThat(personAfterUpdateFirst.lastname).isEqualTo("Bubu"); } - /** - * @see DATAMONGO-811 - */ - @Test + @Test // DATAMONGO-811 public void updateFirstShouldIncreaseVersionOnlyForFirstMatchingEntity() { VersionedPerson person1 = new VersionedPerson(); @@ -2140,17 +2151,14 @@ public void updateFirstShouldIncreaseVersionOnlyForFirstMatchingEntity() { for (VersionedPerson p : template.find(q, VersionedPerson.class)) { if ("Metthews".equals(p.lastname)) { - assertThat(p.version, equalTo(Long.valueOf(1))); + assertThat(p.version).isEqualTo(Long.valueOf(1)); } else { - assertThat(p.version, equalTo(Long.valueOf(0))); + assertThat(p.version).isEqualTo(Long.valueOf(0)); } } } - /** - * @see DATAMONGO-811 - */ - @Test + @Test // DATAMONGO-811 public void updateMultiShouldIncreaseVersionOfAllUpdatedEntities() { VersionedPerson person1 = new VersionedPerson(); @@ -2166,14 +2174,11 @@ public void updateMultiShouldIncreaseVersionOfAllUpdatedEntities() { template.updateMulti(q, Update.update("lastname", "Metthews"), VersionedPerson.class); for (VersionedPerson p : template.find(q, VersionedPerson.class)) { - assertThat(p.version, equalTo(Long.valueOf(1))); + assertThat(p.version).isEqualTo(Long.valueOf(1)); } } - /** - * @see DATAMONGO-686 - */ - @Test + @Test // DATAMONGO-686 public void itShouldBePossibleToReuseAnExistingQuery() { Sample sample = new Sample(); @@ -2185,18 +2190,15 @@ public void itShouldBePossibleToReuseAnExistingQuery() { Query query = new Query(); query.addCriteria(where("_id").in("42", "43")); - assertThat(template.count(query, Sample.class), is(1L)); + assertThat(template.count(query, Sample.class)).isEqualTo(1L); - query.with(new PageRequest(0, 10)); - query.with(new Sort("field")); + query.with(PageRequest.of(0, 10)); + query.with(Sort.by("field")); - assertThat(template.find(query, Sample.class), is(not(empty()))); + assertThat(template.find(query, Sample.class)).isNotEmpty(); } - /** - * @see DATAMONGO-807 - */ - @Test + @Test // DATAMONGO-807 public void findAndModifyShouldRetrainTypeInformationWithinUpdatedType() { Document document = new Document(); @@ -2209,19 +2211,16 @@ public void findAndModifyShouldRetrainTypeInformationWithinUpdatedType() { template.findAndModify(query, update, Document.class); Document retrieved = template.findOne(query, Document.class); - assertThat(retrieved.model, instanceOf(ModelA.class)); - assertThat(retrieved.model.value(), equalTo("value2")); + assertThat(retrieved.model).isInstanceOf(ModelA.class); + assertThat(retrieved.model.value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentWithNestedCollectionWhenWholeCollectionIsReplaced() { DocumentWithNestedCollection doc = new DocumentWithNestedCollection(); - Map entry = new HashMap(); + Map entry = new HashMap<>(); entry.put("key1", new ModelA("value1")); doc.models.add(entry); @@ -2232,33 +2231,30 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW Query query = query(where("id").is(doc.id)); Update update = Update.update("models", Collections.singletonList(entry)); - assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithNestedCollection.class); 
DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - assertThat(retrieved.models.get(0).entrySet(), hasSize(2)); + assertThat(retrieved.models.get(0).entrySet()).hasSize(2); - assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get("key1")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key1").value()).isEqualTo("value1"); - assertThat(retrieved.models.get(0).get("key2"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key2").value(), equalTo("value2")); + assertThat(retrieved.models.get(0).get("key2")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key2").value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentWithNestedCollectionWhenFirstElementIsReplaced() { DocumentWithNestedCollection doc = new DocumentWithNestedCollection(); - Map entry = new HashMap(); + Map entry = new HashMap<>(); entry.put("key1", new ModelA("value1")); doc.models.add(entry); @@ -2269,33 +2265,30 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW Query query = query(where("id").is(doc.id)); Update update = Update.update("models.0", entry); - assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithNestedCollection.class); DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - assertThat(retrieved.models.get(0).entrySet(), hasSize(2)); + assertThat(retrieved.models.get(0).entrySet()).hasSize(2); - assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get("key1")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key1").value()).isEqualTo("value1"); - assertThat(retrieved.models.get(0).get("key2"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key2").value(), equalTo("value2")); + assertThat(retrieved.models.get(0).get("key2")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key2").value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldAddTypeInformationOnDocumentWithNestedCollectionObjectInsertedAtSecondIndex() { DocumentWithNestedCollection doc = new DocumentWithNestedCollection(); - Map entry = new HashMap(); + Map entry = new HashMap<>(); entry.put("key1", new ModelA("value1")); doc.models.add(entry); @@ -2304,33 +2297,30 @@ public void findAndModifyShouldAddTypeInformationOnDocumentWithNestedCollectionO Query query = query(where("id").is(doc.id)); Update update = Update.update("models.1", Collections.singletonMap("key2", new ModelA("value2"))); - assertThat(template.findOne(query, 
DocumentWithNestedCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithNestedCollection.class); DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - assertThat(retrieved.models.get(0).entrySet(), hasSize(1)); - assertThat(retrieved.models.get(1).entrySet(), hasSize(1)); + assertThat(retrieved.models.get(0).entrySet()).hasSize(1); + assertThat(retrieved.models.get(1).entrySet()).hasSize(1); - assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get("key1")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key1").value()).isEqualTo("value1"); - assertThat(retrieved.models.get(1).get("key2"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(1).get("key2").value(), equalTo("value2")); + assertThat(retrieved.models.get(1).get("key2")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(1).get("key2").value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenUpdatingPositionedElement() throws Exception { - List models = new ArrayList(); + List models = new ArrayList<>(); models.add(new ModelA("value1")); DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection( @@ -2341,26 +2331,23 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnEmbeddedD Query query = query(where("id").is(doc.id)); Update update = Update.update("embeddedDocument.models.0", new ModelA("value2")); - assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class); DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.embeddedDocument.models, hasSize(1)); - assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.embeddedDocument.models).hasSize(1); + assertThat(retrieved.embeddedDocument.models.get(0).value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenUpdatingSecondElement() throws Exception { - List models = new ArrayList(); + List models = new ArrayList<>(); models.add(new ModelA("value1")); DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection( @@ -2371,23 +2358,20 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocu Query query = query(where("id").is(doc.id)); Update update = Update.update("embeddedDocument.models.1", new ModelA("value2")); - assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue()); + 
assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class); DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.embeddedDocument.models, hasSize(2)); - assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value1")); - assertThat(retrieved.embeddedDocument.models.get(1).value(), is("value2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.embeddedDocument.models).hasSize(2); + assertThat(retrieved.embeddedDocument.models.get(0).value()).isEqualTo("value1"); + assertThat(retrieved.embeddedDocument.models.get(1).value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenRewriting() throws Exception { @@ -2402,27 +2386,24 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocu Update update = Update.update("embeddedDocument", new DocumentWithCollection(Arrays. asList(new ModelA("value2")))); - assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class); DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.embeddedDocument.models, hasSize(1)); - assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.embeddedDocument.models).hasSize(1); + assertThat(retrieved.embeddedDocument.models.get(0).value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWithNestedLists() { DocumentWithNestedList doc = new DocumentWithNestedList(); - List entry = new ArrayList(); + List entry = new ArrayList<>(); entry.add(new ModelA("value1")); doc.models.add(entry); @@ -2430,7 +2411,7 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWith Query query = query(where("id").is(doc.id)); - assertThat(template.findOne(query, DocumentWithNestedList.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedList.class)).isNotNull(); Update update = Update.update("models.0.1", new ModelA("value2")); @@ -2438,214 +2419,358 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWith DocumentWithNestedList retrieved = template.findOne(query, DocumentWithNestedList.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - assertThat(retrieved.models.get(0), hasSize(2)); + assertThat(retrieved.models.get(0)).hasSize(2); - assertThat(retrieved.models.get(0).get(0), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get(0).value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get(0)).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get(0).value()).isEqualTo("value1"); - 
assertThat(retrieved.models.get(0).get(1), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get(1).value(), equalTo("value2")); + assertThat(retrieved.models.get(0).get(1)).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get(1).value()).isEqualTo("value2"); } - /** - * @see DATAMONGO-407 - */ - @Test - public void updatesShouldRetainTypeInformationEvenForCollections() { + @Test // DATAMONGO-1827 + public void findAndReplaceShouldReplaceDocument() { - List models = Arrays. asList(new ModelA("foo")); + org.bson.Document doc = new org.bson.Document("foo", "bar"); + template.save(doc, "findandreplace"); - DocumentWithCollection doc = new DocumentWithCollection(models); - doc.id = "4711"; - template.insert(doc); + org.bson.Document replacement = new org.bson.Document("foo", "baz"); + org.bson.Document previous = template.findAndReplace(query(where("foo").is("bar")), replacement, + FindAndReplaceOptions.options(), org.bson.Document.class, "findandreplace"); - Query query = new Query(Criteria.where("id").is(doc.id)); - query.addCriteria(where("models.value").is("foo")); - String newModelValue = "bar"; - Update update = Update.update("models.$", new ModelA(newModelValue)); - template.updateFirst(query, update, DocumentWithCollection.class); + assertThat(previous).containsEntry("foo", "bar"); + assertThat(template.findOne(query(where("foo").is("baz")), org.bson.Document.class, "findandreplace")).isNotNull(); + } - Query findQuery = new Query(Criteria.where("id").is(doc.id)); - DocumentWithCollection result = template.findOne(findQuery, DocumentWithCollection.class); + @Test // DATAMONGO-1827 + @MongoVersion(asOf = "3.6") + public void findAndReplaceShouldErrorOnIdPresent() { - assertThat(result, is(notNullValue())); - assertThat(result.id, is(doc.id)); - assertThat(result.models, is(notNullValue())); - assertThat(result.models, hasSize(1)); - assertThat(result.models.get(0).value(), is(newModelValue)); - } + template.save(new MyPerson("Walter")); - /** - * @see DATAMONGO-812 - */ - @Test - public void updateMultiShouldAddValuesCorrectlyWhenUsingPushEachWithComplexTypes() { + MyPerson replacement = new MyPerson("Heisenberg"); + replacement.id = "invalid-id"; - assumeThat(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_FOUR), is(true)); + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> template.findAndReplace(query(where("name").is("Walter")), replacement)); + } - DocumentWithCollection document = new DocumentWithCollection(Collections. 
emptyList()); - template.save(document); - Query query = query(where("id").is(document.id)); - assumeThat(template.findOne(query, DocumentWithCollection.class).models, hasSize(1)); + @Test // DATAMONGO-1827 + public void findAndReplaceShouldErrorOnSkip() { - Update update = new Update().push("models").each(new ModelA("model-b"), new ModelA("model-c")); - template.updateMulti(query, update, DocumentWithCollection.class); + assertThatIllegalArgumentException().isThrownBy( + () -> template.findAndReplace(query(where("name").is("Walter")).skip(10), new MyPerson("Heisenberg"))); + } - assertThat(template.findOne(query, DocumentWithCollection.class).models, hasSize(3)); + @Test // DATAMONGO-1827 + public void findAndReplaceShouldErrorOnLimit() { + + assertThatIllegalArgumentException().isThrownBy( + () -> template.findAndReplace(query(where("name").is("Walter")).limit(10), new MyPerson("Heisenberg"))); } - /** - * @see DATAMONGO-812 - */ - @Test - public void updateMultiShouldAddValuesCorrectlyWhenUsingPushEachWithSimpleTypes() { + @Test // DATAMONGO-1827 + public void findAndReplaceShouldConsiderSortAndUpdateFirstIfMultipleFound() { - assumeThat(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_FOUR), is(true)); + MyPerson walter1 = new MyPerson("Walter 1"); + MyPerson walter2 = new MyPerson("Walter 2"); - DocumentWithCollectionOfSimpleType document = new DocumentWithCollectionOfSimpleType(); - document.values = Arrays.asList("spring"); - template.save(document); + template.save(walter1); + template.save(walter2); - Query query = query(where("id").is(document.id)); - assumeThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(1)); + MyPerson replacement = new MyPerson("Heisenberg"); - Update update = new Update().push("values").each("data", "mongodb"); - template.updateMulti(query, update, DocumentWithCollectionOfSimpleType.class); + template.findAndReplace(query(where("name").regex("Walter.*")).with(Sort.by(Direction.DESC, "name")), replacement); - assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(3)); + assertThat(template.findAll(MyPerson.class)).hasSize(2).contains(walter1).doesNotContain(walter2); } - /** - * @see DATAMONOGO-828 - */ - @Test - public void updateFirstShouldDoNothingWhenCalledForEntitiesThatDoNotExist() { + @Test // DATAMONGO-1827 + public void findAndReplaceShouldReplaceObject() { - Query q = query(where("id").is(Long.MIN_VALUE)); + MyPerson person = new MyPerson("Walter"); + template.save(person); - template.updateFirst(q, Update.update("lastname", "supercalifragilisticexpialidocious"), VersionedPerson.class); - assertThat(template.findOne(q, VersionedPerson.class), nullValue()); + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")); + + assertThat(previous.getName()).isEqualTo("Walter"); + assertThat(template.findOne(query(where("id").is(person.id)), MyPerson.class)).hasFieldOrPropertyWithValue("name", + "Heisenberg"); } - /** - * @see DATAMONGO-354 - */ - @Test - public void testUpdateShouldAllowMultiplePushAll() { + @Test // DATAMONGO-1827 + public void findAndReplaceShouldConsiderFields() { - DocumentWithMultipleCollections doc = new DocumentWithMultipleCollections(); - doc.id = "1234"; - doc.string1 = Arrays.asList("spring"); - doc.string2 = Arrays.asList("one"); + MyPerson person = new MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person); - template.save(doc); + Query query = 
query(where("name").is("Walter")); + query.fields().include("address"); - Update update = new Update().pushAll("string1", new Object[] { "data", "mongodb" }); - update.pushAll("string2", new String[] { "two", "three" }); + MyPerson previous = template.findAndReplace(query, new MyPerson("Heisenberg")); - Query findQuery = new Query(Criteria.where("id").is(doc.id)); - template.updateFirst(findQuery, update, DocumentWithMultipleCollections.class); + assertThat(previous.getName()).isNull(); + assertThat(previous.getAddress()).isEqualTo(person.address); + } - DocumentWithMultipleCollections result = template.findOne(findQuery, DocumentWithMultipleCollections.class); - assertThat(result.string1, hasItems("spring", "data", "mongodb")); - assertThat(result.string2, hasItems("one", "two", "three")); + @Test // DATAMONGO-1827 + public void findAndReplaceNonExistingWithUpsertFalse() { + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")); + + assertThat(previous).isNull(); + assertThat(template.findAll(MyPerson.class)).isEmpty(); } - /** - * @see DATAMONGO-404 - */ - @Test - public void updateWithPullShouldRemoveNestedItemFromDbRefAnnotatedCollection() { + @Test // DATAMONGO-1827 + public void findAndReplaceNonExistingWithUpsertTrue() { - Sample sample1 = new Sample("1", "A"); - Sample sample2 = new Sample("2", "B"); - template.save(sample1); - template.save(sample2); + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().upsert()); - DocumentWithDBRefCollection doc = new DocumentWithDBRefCollection(); - doc.id = "1"; - doc.dbRefAnnotatedList = Arrays.asList( // - sample1, // - sample2 // - ); - template.save(doc); + assertThat(previous).isNull(); + assertThat(template.findAll(MyPerson.class)).hasSize(1); + } - Update update = new Update().pull("dbRefAnnotatedList", doc.dbRefAnnotatedList.get(1)); + @Test // DATAMONGO-1827 + public void findAndReplaceShouldReplaceObjectReturingNew() { - Query qry = query(where("id").is("1")); - template.updateFirst(qry, update, DocumentWithDBRefCollection.class); + MyPerson person = new MyPerson("Walter"); + template.save(person); - DocumentWithDBRefCollection result = template.findOne(qry, DocumentWithDBRefCollection.class); + MyPerson updated = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().returnNew()); - assertThat(result, is(notNullValue())); - assertThat(result.dbRefAnnotatedList, hasSize(1)); - assertThat(result.dbRefAnnotatedList.get(0), is(notNullValue())); - assertThat(result.dbRefAnnotatedList.get(0).id, is((Object) "1")); + assertThat(updated.getName()).isEqualTo("Heisenberg"); } - /** - * @see DATAMONGO-404 - */ - @Test - public void updateWithPullShouldRemoveNestedItemFromDbRefAnnotatedCollectionWhenGivenAnIdValueOfComponentTypeEntity() { + @Test // DATAMONGO-1827 + public void findAndReplaceShouldProjectReturnedObjectCorrectly() { - Sample sample1 = new Sample("1", "A"); - Sample sample2 = new Sample("2", "B"); - template.save(sample1); - template.save(sample2); + template.save(new MyPerson("Walter")); - DocumentWithDBRefCollection doc = new DocumentWithDBRefCollection(); - doc.id = "1"; - doc.dbRefAnnotatedList = Arrays.asList( // - sample1, // - sample2 // - ); - template.save(doc); + MyPersonProjection projection = template.findAndReplace(query(where("name").is("Walter")), + new MyPerson("Heisenberg"), FindAndReplaceOptions.empty(), 
MyPerson.class, MyPersonProjection.class);

-		Update update = new Update().pull("dbRefAnnotatedList.id", "2");
+		assertThat(projection.getName()).isEqualTo("Walter");
+	}

-		Query qry = query(where("id").is("1"));
-		template.updateFirst(qry, update, DocumentWithDBRefCollection.class);
+	@Test // GH-4707
+	public void findAndReplaceUpsertsObjectWithComplexId() {

-		DocumentWithDBRefCollection result = template.findOne(qry, DocumentWithDBRefCollection.class);
+		MyId id = new MyId();
+		id.id = Instant.now().minusSeconds(2);
+		id.first = "foo";
+		id.second = "bar";
+		id.time = Instant.now().minusSeconds(3);
+
+		TypeWithMyId replacement = new TypeWithMyId();
+		replacement.value = "spring";

-		assertThat(result, is(notNullValue()));
-		assertThat(result.dbRefAnnotatedList, hasSize(1));
-		assertThat(result.dbRefAnnotatedList.get(0), is(notNullValue()));
-		assertThat(result.dbRefAnnotatedList.get(0).id, is((Object) "1"));
+		template.findAndReplace(query(where("id").is(id)), replacement, FindAndReplaceOptions.options().upsert());
+		template.doInCollection(TypeWithMyId.class, collection -> {
+
+			org.bson.Document dbValue = collection.find(new org.bson.Document("_id.first", "foo")).first();
+
+			assertThat(dbValue).isNotNull();
+			assertThat(dbValue.getEmbedded(List.of("_id", "_id"), Object.class)).isInstanceOf(Date.class);
+			assertThat(dbValue.getEmbedded(List.of("_id", "t"), Object.class)).isInstanceOf(Date.class);
+		});
+	}

-	/**
-	 * @see DATAMONGO-852
-	 */
-	@Test
+	@Test // GH-4609
+	public void shouldReadNestedProjection() {
+
+		MyPerson walter = new MyPerson("Walter");
+		walter.address = new Address("spring", "data");
+		template.save(walter);
+
+		PersonPWA result = template.query(MyPerson.class)
+				.as(PersonPWA.class)
+				.matching(where("id").is(walter.id))
+				.firstValue();
+
+		assertThat(result.getAddress().getCity()).isEqualTo("data");
+	}
+
+	interface PersonPWA {
+		String getName();
+		AddressProjection getAddress();
+	}
+
+	interface AddressProjection {
+		String getCity();
+	}
+
+	@Test // GH-4300
+	public void findAndReplaceShouldAllowNativeDomainTypesAndReturnAProjection() {
+
+		MyPerson person = new MyPerson("Walter");
+		person.address = new Address("TX", "Austin");
+		template.save(person);
+
+		MyPerson previous = template.findAndReplace(query(where("name").is("Walter")),
+				new org.bson.Document("name", "Heisenberg"), FindAndReplaceOptions.options(), org.bson.Document.class,
+				"myPerson", MyPerson.class);
+
+		assertThat(previous).isNotNull();
+		assertThat(previous.getAddress()).isEqualTo(person.address);
+
+		org.bson.Document loaded = template.execute(MyPerson.class, collection -> {
+			return collection.find(new org.bson.Document("name", "Heisenberg")).first();
+		});
+		assertThat(loaded.get("_id")).isEqualTo(new ObjectId(person.id));
+	}
+
+	@Test // DATAMONGO-407
+	public void updatesShouldRetainTypeInformationEvenForCollections() {
+
+		List<Model> models = Arrays.<Model> asList(new ModelA("foo"));
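+
+		// 'models' is declared against the 'Model' interface, so the mapping layer writes a '_class' type hint
+		// for each element; the positional 'models.$' update below is expected to retain that hint.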
+
+		DocumentWithCollection doc = new DocumentWithCollection(models);
+		doc.id = "4711";
+		template.insert(doc);
+
+		Query query = new Query(Criteria.where("id").is(doc.id));
+		query.addCriteria(where("models.value").is("foo"));
+		String newModelValue = "bar";
+		Update update = Update.update("models.$", new ModelA(newModelValue));
+		template.updateFirst(query, update, DocumentWithCollection.class);
+
+		Query findQuery = new Query(Criteria.where("id").is(doc.id));
+		DocumentWithCollection result = template.findOne(findQuery, DocumentWithCollection.class);
+
+		assertThat(result).isNotNull();
+		assertThat(result.id).isEqualTo(doc.id);
+		assertThat(result.models).isNotNull();
+		assertThat(result.models).hasSize(1);
+		assertThat(result.models.get(0).value()).isEqualTo(newModelValue);
+	}
+
+	@Test // DATAMONGO-812
+	@MongoVersion(asOf = "2.4")
+	public void updateMultiShouldAddValuesCorrectlyWhenUsingPushEachWithComplexTypes() {
+
+		DocumentWithCollection document = new DocumentWithCollection(Collections.<Model> emptyList());
+		template.save(document);
+		Query query = query(where("id").is(document.id));
+		assertThat(template.findOne(query, DocumentWithCollection.class).models).isEmpty();
+
+		Update update = new Update().push("models").each(new ModelA("model-b"), new ModelA("model-c"));
+		template.updateMulti(query, update, DocumentWithCollection.class);
+
+		assertThat(template.findOne(query, DocumentWithCollection.class).models).hasSize(2);
+	}
+
+	@Test // DATAMONGO-812
+	@MongoVersion(asOf = "2.4")
+	public void updateMultiShouldAddValuesCorrectlyWhenUsingPushEachWithSimpleTypes() {
+
+		DocumentWithCollectionOfSimpleType document = new DocumentWithCollectionOfSimpleType();
+		document.values = Arrays.asList("spring");
+		template.save(document);
+
+		Query query = query(where("id").is(document.id));
+		assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(1);
+
+		Update update = new Update().push("values").each("data", "mongodb");
+		template.updateMulti(query, update, DocumentWithCollectionOfSimpleType.class);
+
+		assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(3);
+	}
+
+	@Test // DATAMONGO-828
+	public void updateFirstShouldDoNothingWhenCalledForEntitiesThatDoNotExist() {
+
+		Query q = query(where("id").is(Long.MIN_VALUE));
+
+		template.updateFirst(q, Update.update("lastname", "supercalifragilisticexpialidocious"), VersionedPerson.class);
+		assertThat(template.findOne(q, VersionedPerson.class)).isNull();
+	}
+
+	@Test // DATAMONGO-404
+	public void updateWithPullShouldRemoveNestedItemFromDbRefAnnotatedCollection() {
+
+		Sample sample1 = new Sample("1", "A");
+		Sample sample2 = new Sample("2", "B");
+		template.save(sample1);
+		template.save(sample2);
+
+		DocumentWithDBRefCollection doc = new DocumentWithDBRefCollection();
+		doc.id = "1";
+		doc.dbRefAnnotatedList = Arrays.asList( //
+				sample1, //
+				sample2 //
+		);
+		template.save(doc);
+
+		Update update = new Update().pull("dbRefAnnotatedList", doc.dbRefAnnotatedList.get(1));
+
+		Query qry = query(where("id").is("1"));
+		template.updateFirst(qry, update, DocumentWithDBRefCollection.class);
+
+		DocumentWithDBRefCollection result = template.findOne(qry, DocumentWithDBRefCollection.class);
+
+		assertThat(result).isNotNull();
+		assertThat(result.dbRefAnnotatedList).hasSize(1);
+		assertThat(result.dbRefAnnotatedList.get(0)).isNotNull();
+		assertThat(result.dbRefAnnotatedList.get(0).id).isEqualTo((Object) "1");
+	}
+
+	@Test // DATAMONGO-404
+	public
void updateWithPullShouldRemoveNestedItemFromDbRefAnnotatedCollectionWhenGivenAnIdValueOfComponentTypeEntity() { + + Sample sample1 = new Sample("1", "A"); + Sample sample2 = new Sample("2", "B"); + template.save(sample1); + template.save(sample2); + + DocumentWithDBRefCollection doc = new DocumentWithDBRefCollection(); + doc.id = "1"; + doc.dbRefAnnotatedList = Arrays.asList( // + sample1, // + sample2 // + ); + template.save(doc); + + Update update = new Update().pull("dbRefAnnotatedList.id", "2"); + + Query qry = query(where("id").is("1")); + template.updateFirst(qry, update, DocumentWithDBRefCollection.class); + + DocumentWithDBRefCollection result = template.findOne(qry, DocumentWithDBRefCollection.class); + + assertThat(result).isNotNull(); + assertThat(result.dbRefAnnotatedList).hasSize(1); + assertThat(result.dbRefAnnotatedList.get(0)).isNotNull(); + assertThat(result.dbRefAnnotatedList.get(0).id).isEqualTo((Object) "1"); + } + + @Test // DATAMONGO-852 public void updateShouldNotBumpVersionNumberIfVersionPropertyIncludedInUpdate() { VersionedPerson person = new VersionedPerson(); person.firstname = "Dave"; person.lastname = "Matthews"; template.save(person); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); Query qry = query(where("id").is(person.id)); VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterFirstSave.version, is(0L)); + assertThat(personAfterFirstSave.version).isEqualTo(0L); template.updateFirst(qry, Update.update("lastname", "Bubu").set("version", 100L), VersionedPerson.class); VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterUpdateFirst.version, is(100L)); - assertThat(personAfterUpdateFirst.lastname, is("Bubu")); + assertThat(personAfterUpdateFirst.version).isEqualTo(100L); + assertThat(personAfterUpdateFirst.lastname).isEqualTo("Bubu"); } - /** - * @see DATAMONGO-468 - */ - @Test + @Test // DATAMONGO-468 public void shouldBeAbleToUpdateDbRefPropertyWithDomainObject() { Sample sample1 = new Sample("1", "A"); @@ -2665,35 +2790,29 @@ public void shouldBeAbleToUpdateDbRefPropertyWithDomainObject() { DocumentWithDBRefCollection updatedDoc = template.findOne(qry, DocumentWithDBRefCollection.class); - assertThat(updatedDoc, is(notNullValue())); - assertThat(updatedDoc.dbRefProperty, is(notNullValue())); - assertThat(updatedDoc.dbRefProperty.id, is(sample2.id)); - assertThat(updatedDoc.dbRefProperty.field, is(sample2.field)); + assertThat(updatedDoc).isNotNull(); + assertThat(updatedDoc.dbRefProperty).isNotNull(); + assertThat(updatedDoc.dbRefProperty.id).isEqualTo(sample2.id); + assertThat(updatedDoc.dbRefProperty.field).isEqualTo(sample2.field); } - /** - * @see DATAMONGO-862 - */ - @Test + @Test // DATAMONGO-862 public void testUpdateShouldWorkForPathsOnInterfaceMethods() { - DocumentWithCollection document = new DocumentWithCollection(Arrays. asList(new ModelA("spring"), - new ModelA("data"))); + DocumentWithCollection document = new DocumentWithCollection( + Arrays. 
asList(new ModelA("spring"), new ModelA("data"))); template.save(document); - Query query = query(where("id").is(document.id).and("models._id").exists(true)); + Query query = query(where("id").is(document.id).and("models.value").exists(true)); Update update = new Update().set("models.$.value", "mongodb"); template.findAndModify(query, update, DocumentWithCollection.class); DocumentWithCollection result = template.findOne(query(where("id").is(document.id)), DocumentWithCollection.class); - assertThat(result.models.get(0).value(), is("mongodb")); + assertThat(result.models.get(0).value()).isEqualTo("mongodb"); } - /** - * @see DATAMONGO-773 - */ - @Test + @Test // DATAMONGO-773 public void testShouldSupportQueryWithIncludedDbRefField() { Sample sample = new Sample("47111", "foo"); @@ -2710,17 +2829,14 @@ public void testShouldSupportQueryWithIncludedDbRefField() { List result = template.find(qry, DocumentWithDBRefCollection.class); - assertThat(result, is(notNullValue())); - assertThat(result, hasSize(1)); - assertThat(result.get(0), is(notNullValue())); - assertThat(result.get(0).dbRefProperty, is(notNullValue())); - assertThat(result.get(0).dbRefProperty.field, is(sample.field)); + assertThat(result).isNotNull(); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNotNull(); + assertThat(result.get(0).dbRefProperty).isNotNull(); + assertThat(result.get(0).dbRefProperty.field).isEqualTo(sample.field); } - /** - * @see DATAMONGO-566 - */ - @Test + @Test // DATAMONGO-566 public void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { Sample spring = new Sample("100", "spring"); @@ -2731,19 +2847,16 @@ public void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { Query qry = query(where("field").in("spring", "mongodb")); List result = template.findAllAndRemove(qry, Sample.class); - assertThat(result, hasSize(2)); + assertThat(result).hasSize(2); - assertThat( - template.getDb().getCollection("sample") - .find(new BasicDBObject("field", new BasicDBObject("$in", Arrays.asList("spring", "mongodb")))).count(), - is(0)); - assertThat(template.getDb().getCollection("sample").find(new BasicDBObject("field", "data")).count(), is(1)); + assertThat(template.getDb().getCollection("sample").countDocuments( + new org.bson.Document("field", new org.bson.Document("$in", Arrays.asList("spring", "mongodb"))))) + .isEqualTo(0L); + assertThat(template.getDb().getCollection("sample").countDocuments(new org.bson.Document("field", "data"))) + .isEqualTo(1L); } - /** - * @see DATAMONGO-1001 - */ - @Test + @Test // DATAMONGO-1001 public void shouldAllowSavingOfLazyLoadedDbRefs() { template.dropCollection(SomeTemplate.class); @@ -2767,14 +2880,11 @@ public void shouldAllowSavingOfLazyLoadedDbRefs() { loadedContent.setText("data"); template.save(loadedContent); - assertThat(template.findById(content.id, SomeContent.class).getText(), is("data")); + assertThat(template.findById(content.id, SomeContent.class).getText()).isEqualTo("data"); } - /** - * @see DATAMONGO-880 - */ - @Test + @Test // DATAMONGO-880 public void savingAndReassigningLazyLoadingProxies() { template.dropCollection(SomeTemplate.class); @@ -2803,14 +2913,11 @@ public void savingAndReassigningLazyLoadingProxies() { SomeMessage savedMessage = template.findById(message.id, SomeMessage.class); - assertThat(savedMessage.dbrefContent.text, is(content.text)); - assertThat(savedMessage.normalContent.text, is(content.text)); + assertThat(savedMessage.dbrefContent.text).isEqualTo(content.text); + 
assertThat(savedMessage.normalContent.text).isEqualTo(content.text); } - /** - * @see DATAMONGO-884 - */ - @Test + @Test // DATAMONGO-884 public void callingNonObjectMethodsOnLazyLoadingProxyShouldReturnNullIfUnderlyingDbrefWasDeletedInbetween() { template.dropCollection(SomeTemplate.class); @@ -2831,15 +2938,12 @@ public void callingNonObjectMethodsOnLazyLoadingProxyShouldReturnNullIfUnderlyin template.remove(content); - assertThat(savedTmpl.getContent().toString(), is("someContent:C1$LazyLoadingProxy")); - assertThat(savedTmpl.getContent(), is(instanceOf(LazyLoadingProxy.class))); - assertThat(savedTmpl.getContent().getText(), is(nullValue())); + assertThat(savedTmpl.getContent().toString()).isEqualTo("someContent:C1$LazyLoadingProxy"); + assertThat(savedTmpl.getContent()).isInstanceOf(LazyLoadingProxy.class); + assertThat(savedTmpl.getContent().getText()).isNull(); } - /** - * @see DATAMONGO-471 - */ - @Test + @Test // DATAMONGO-471 public void updateMultiShouldAddValuesCorrectlyWhenUsingAddToSetWithEach() { DocumentWithCollectionOfSimpleType document = new DocumentWithCollectionOfSimpleType(); @@ -2847,18 +2951,15 @@ public void updateMultiShouldAddValuesCorrectlyWhenUsingAddToSetWithEach() { template.save(document); Query query = query(where("id").is(document.id)); - assumeThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(1)); + assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(1); Update update = new Update().addToSet("values").each("data", "mongodb"); template.updateMulti(query, update, DocumentWithCollectionOfSimpleType.class); - assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(3)); + assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(3); } - /** - * @see DATAMONGO-1210 - */ - @Test + @Test // DATAMONGO-1210 public void findAndModifyAddToSetWithEachShouldNotAddDuplicatesNorTypeHintForSimpleDocuments() { DocumentWithCollectionOfSamples doc = new DocumentWithCollectionOfSamples(); @@ -2868,7 +2969,7 @@ public void findAndModifyAddToSetWithEachShouldNotAddDuplicatesNorTypeHintForSim Query query = query(where("id").is(doc.id)); - assertThat(template.findOne(query, DocumentWithCollectionOfSamples.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithCollectionOfSamples.class)).isNotNull(); Update update = new Update().addToSet("samples").each(new Sample(null, "sample2"), new Sample(null, "sample1")); @@ -2876,16 +2977,13 @@ public void findAndModifyAddToSetWithEachShouldNotAddDuplicatesNorTypeHintForSim DocumentWithCollectionOfSamples retrieved = template.findOne(query, DocumentWithCollectionOfSamples.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.samples, hasSize(2)); - assertThat(retrieved.samples.get(0).field, is("sample1")); - assertThat(retrieved.samples.get(1).field, is("sample2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.samples).hasSize(2); + assertThat(retrieved.samples.get(0).field).isEqualTo("sample1"); + assertThat(retrieved.samples.get(1).field).isEqualTo("sample2"); } - /** - * @see DATAMONGO-888 - */ - @Test + @Test // DATAMONGO-888 public void sortOnIdFieldPropertyShouldBeMappedCorrectly() { DoucmentWithNamedIdField one = new DoucmentWithNamedIdField(); @@ -2899,14 +2997,11 @@ public void sortOnIdFieldPropertyShouldBeMappedCorrectly() { template.save(one); template.save(two); - Query query = query(where("_id").in("1", "2")).with(new 
Sort(Direction.DESC, "someIdKey")); - assertThat(template.find(query, DoucmentWithNamedIdField.class), contains(two, one)); + Query query = query(where("_id").in("1", "2")).with(Sort.by(Direction.DESC, "someIdKey")); + assertThat(template.find(query, DoucmentWithNamedIdField.class)).containsExactly(two, one); } - /** - * @see DATAMONGO-888 - */ - @Test + @Test // DATAMONGO-888 public void sortOnAnnotatedFieldPropertyShouldBeMappedCorrectly() { DoucmentWithNamedIdField one = new DoucmentWithNamedIdField(); @@ -2920,14 +3015,11 @@ public void sortOnAnnotatedFieldPropertyShouldBeMappedCorrectly() { template.save(one); template.save(two); - Query query = query(where("_id").in("1", "2")).with(new Sort(Direction.DESC, "value")); - assertThat(template.find(query, DoucmentWithNamedIdField.class), contains(two, one)); + Query query = query(where("_id").in("1", "2")).with(Sort.by(Direction.DESC, "value")); + assertThat(template.find(query, DoucmentWithNamedIdField.class)).containsExactly(two, one); } - /** - * @see DATAMONGO-913 - */ - @Test + @Test // DATAMONGO-913 public void shouldRetrieveInitializedValueFromDbRefAssociationAfterLoad() { SomeContent content = new SomeContent(); @@ -2945,17 +3037,14 @@ public void shouldRetrieveInitializedValueFromDbRefAssociationAfterLoad() { SomeTemplate result = template.findOne(query(where("content").is(tmpl.getContent())), SomeTemplate.class); - assertThat(result, is(notNullValue())); - assertThat(result.getContent(), is(notNullValue())); - assertThat(result.getContent().getId(), is(notNullValue())); - assertThat(result.getContent().getName(), is(notNullValue())); - assertThat(result.getContent().getText(), is(content.getText())); + assertThat(result).isNotNull(); + assertThat(result.getContent()).isNotNull(); + assertThat(result.getContent().getId()).isNotNull(); + assertThat(result.getContent().getName()).isNotNull(); + assertThat(result.getContent().getText()).isEqualTo(content.getText()); } - /** - * @see DATAMONGO-913 - */ - @Test + @Test // DATAMONGO-913 public void shouldReuseExistingDBRefInQueryFromDbRefAssociationAfterLoad() { SomeContent content = new SomeContent(); @@ -2976,30 +3065,24 @@ public void shouldReuseExistingDBRefInQueryFromDbRefAssociationAfterLoad() { // Use lazy-loading-proxy in query result = template.findOne(query(where("content").is(result.getContent())), SomeTemplate.class); - assertNotNull(result.getContent().getName()); - assertThat(result.getContent().getName(), is(content.getName())); + assertThat(result.getContent().getName()).isNotNull(); + assertThat(result.getContent().getName()).isEqualTo(content.getName()); } - /** - * @see DATAMONGO-970 - */ - @Test - public void insertsAndRemovesBasicDbObjectCorrectly() { + @Test // DATAMONGO-970 + public void insertsAndRemovesBasicDocumentCorrectly() { - BasicDBObject object = new BasicDBObject("key", "value"); + org.bson.Document object = new org.bson.Document("key", "value"); template.insert(object, "collection"); - assertThat(object.get("_id"), is(notNullValue())); - assertThat(template.findAll(DBObject.class, "collection"), hasSize(1)); + assertThat(object.get("_id")).isNotNull(); + assertThat(template.findAll(Document.class, "collection")).hasSize(1); template.remove(object, "collection"); - assertThat(template.findAll(DBObject.class, "collection"), hasSize(0)); + assertThat(template.findAll(Document.class, "collection")).hasSize(0); } - /** - * @see DATAMONGO-1207 - */ - @Test + @Test // DATAMONGO-1207 public void ignoresNullElementsForInsertAll() { Address newYork = new 
Address("NY", "New York");

@@ -3009,14 +3092,38 @@ public void ignoresNullElementsForInsertAll() {

 		List<Address> result = template.findAll(Address.class);
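+		// per the test name (DATAMONGO-1207), the null element handed to insertAll is expected to be
+		// skipped rather than rejected, so only the two real addresses end up in the collection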
-		assertThat(result, hasSize(2));
-		assertThat(result, hasItems(newYork, washington));
+		assertThat(result).hasSize(2);
+		assertThat(result).contains(newYork, washington);
 	}

-	/**
-	 * @see DATAMONGO-1208
-	 */
-	@Test
+	@Test // DATAMONGO-1176
+	public void generatesIdForInsertAll() {
+
+		Person walter = new Person(null, "Walter");
+		Person jesse = new Person(null, "Jesse");
+
+		template.insertAll(Arrays.asList(walter, jesse));
+
+		List<Person> result = template.findAll(Person.class);
+
+		assertThat(result).hasSize(2);
+		assertThat(walter.getId()).isNotNull();
+		assertThat(jesse.getId()).isNotNull();
+	}
+
+	@Test // GH-4944
+	public void insertAllShouldConvertIdToTargetTypeBeforeSave() {
+
+		RawStringId walter = new RawStringId();
+		walter.value = "walter";
+
+		RawStringId returned = template.insertAll(List.of(walter)).iterator().next();
+		org.bson.Document document = template.execute(RawStringId.class, collection -> collection.find().first());
+
+		assertThat(returned.id).isEqualTo(document.get("_id"));
+	}
+
+	@Test // DATAMONGO-1208
 	public void takesSortIntoAccountWhenStreaming() {

 		Person youngestPerson = new Person("John", 20);

@@ -3025,17 +3132,13 @@ public void takesSortIntoAccountWhenStreaming() {

 		template.insertAll(Arrays.asList(oldestPerson, youngestPerson));

 		Query q = new Query();
-		q.with(new Sort(Direction.ASC, "age"));
-		CloseableIterator<Person> stream = template.stream(q, Person.class);
+		q.with(Sort.by(Direction.ASC, "age"));
+		List<Integer> streamResults = template.stream(q, Person.class).map(Person::getAge).toList();

-		assertThat(stream.next().getAge(), is(youngestPerson.getAge()));
-		assertThat(stream.next().getAge(), is(oldestPerson.getAge()));
+		assertThat(streamResults).containsExactly(youngestPerson.getAge(), oldestPerson.getAge());
 	}

-	/**
-	 * @see DATAMONGO-1208
-	 */
-	@Test
+	@Test // DATAMONGO-1208
 	public void takesLimitIntoAccountWhenStreaming() {

 		Person youngestPerson = new Person("John", 20);

@@ -3044,17 +3147,14 @@ public void takesLimitIntoAccountWhenStreaming() {

 		template.insertAll(Arrays.asList(oldestPerson, youngestPerson));

 		Query q = new Query();
-		q.with(new PageRequest(0, 1, new Sort(Direction.ASC, "age")));
-		CloseableIterator<Person> stream = template.stream(q, Person.class);
+		q.with(PageRequest.of(0, 1, Sort.by(Direction.ASC, "age")));
+		Iterator<Person> stream = template.stream(q, Person.class).iterator();

-		assertThat(stream.next().getAge(), is(youngestPerson.getAge()));
-		assertThat(stream.hasNext(), is(false));
+		assertThat(stream.next().getAge()).isEqualTo(youngestPerson.getAge());
+		assertThat(stream.hasNext()).isFalse();
 	}

-	/**
-	 * @see DATAMONGO-1204
-	 */
-	@Test
+	@Test // DATAMONGO-1204
 	public void resolvesCyclicDBRefCorrectly() {

 		SomeMessage message = new SomeMessage();

@@ -3072,15 +3172,12 @@ public void resolvesCyclicDBRefCorrectly() {

 		SomeMessage messageLoaded = template.findOne(query(where("id").is(message.id)), SomeMessage.class);
 		SomeContent contentLoaded = template.findOne(query(where("id").is(content.id)), SomeContent.class);

-		assertThat(messageLoaded.dbrefContent.id, is(contentLoaded.id));
-		assertThat(contentLoaded.dbrefMessage.id, is(messageLoaded.id));
+		assertThat(messageLoaded.dbrefContent.id).isEqualTo(contentLoaded.id);
+		assertThat(contentLoaded.dbrefMessage.id).isEqualTo(messageLoaded.id);
 	}

-	/**
-	 * @see DATAMONGO-1287
-	 */
-	@Test
-	public void shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstrcutorArgument() {
+	@Test // DATAMONGO-1287, DATAMONGO-2004
+	public
shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstructorArgument() { Document docInCtor = new Document(); docInCtor.id = "doc-in-ctor"; @@ -3093,14 +3190,11 @@ public void shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstr DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)), DocumentWithLazyDBrefUsedInPresistenceConstructor.class); - assertThat(loaded.refToDocUsedInCtor, not(instanceOf(LazyLoadingProxy.class))); - assertThat(loaded.refToDocNotUsedInCtor, nullValue()); + assertThat(loaded.refToDocUsedInCtor).isInstanceOf(LazyLoadingProxy.class); + assertThat(loaded.refToDocNotUsedInCtor).isNull(); } - /** - * @see DATAMONGO-1287 - */ - @Test + @Test // DATAMONGO-1287 public void shouldNotReuseLazyLoadedDBRefWhenTypeUsedInPersistenceConstrcutorButValueRefersToAnotherProperty() { Document docNotUsedInCtor = new Document(); @@ -3115,15 +3209,12 @@ public void shouldNotReuseLazyLoadedDBRefWhenTypeUsedInPersistenceConstrcutorBut DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)), DocumentWithLazyDBrefUsedInPresistenceConstructor.class); - assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class)); - assertThat(loaded.refToDocUsedInCtor, nullValue()); + assertThat(loaded.refToDocNotUsedInCtor).isInstanceOf(LazyLoadingProxy.class); + assertThat(loaded.refToDocUsedInCtor).isNull(); } - /** - * @see DATAMONGO-1287 - */ - @Test - public void shouldRespectParamterValueWhenAttemptingToReuseLazyLoadedDBRefUsedInPersistenceConstrcutor() { + @Test // DATAMONGO-1287, DATAMONGO-2004 + public void shouldRespectParameterValueWhenAttemptingToReuseLazyLoadedDBRefUsedInPersistenceConstructor() { Document docInCtor = new Document(); docInCtor.id = "doc-in-ctor"; @@ -3141,14 +3232,11 @@ public void shouldRespectParamterValueWhenAttemptingToReuseLazyLoadedDBRefUsedIn DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)), DocumentWithLazyDBrefUsedInPresistenceConstructor.class); - assertThat(loaded.refToDocUsedInCtor, not(instanceOf(LazyLoadingProxy.class))); - assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class)); + assertThat(loaded.refToDocUsedInCtor).isInstanceOf(LazyLoadingProxy.class); + assertThat(loaded.refToDocNotUsedInCtor).isInstanceOf(LazyLoadingProxy.class); } - /** - * @see DATAMONGO-1401 - */ - @Test + @Test // DATAMONGO-1401 public void updateShouldWorkForTypesContainingGeoJsonTypes() { WithGeoJson wgj = new WithGeoJson(); @@ -3161,155 +3249,1203 @@ public void updateShouldWorkForTypesContainingGeoJsonTypes() { wgj.description = "datamongo-1401-update"; template.save(wgj); - assertThat(template.findOne(query(where("id").is(wgj.id)), WithGeoJson.class).point, is(equalTo(wgj.point))); + assertThat(template.findOne(query(where("id").is(wgj.id)), WithGeoJson.class).point).isEqualTo(wgj.point); } - static class DoucmentWithNamedIdField { + @Test // DATAMONGO-1404 + public void updatesDateValueCorrectlyWhenUsingMinOperator() { - @Id String someIdKey; + Calendar cal = Calendar.getInstance(Locale.US); + cal.set(2013, 10, 13, 0, 0, 0); - @Field(value = "val") // - String value; + TypeWithDate twd = new TypeWithDate(); + twd.date = new Date(); + template.save(twd); + template.updateFirst(query(where("id").is(twd.id)), new Update().min("date", cal.getTime()), TypeWithDate.class); - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = 
prime * result + (someIdKey == null ? 0 : someIdKey.hashCode()); - result = prime * result + (value == null ? 0 : value.hashCode()); - return result; - } + TypeWithDate loaded = template.find(query(where("id").is(twd.id)), TypeWithDate.class).get(0); + assertThat(loaded.date).isEqualTo(cal.getTime()); + } - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof DoucmentWithNamedIdField)) { - return false; - } - DoucmentWithNamedIdField other = (DoucmentWithNamedIdField) obj; - if (someIdKey == null) { - if (other.someIdKey != null) { - return false; - } - } else if (!someIdKey.equals(other.someIdKey)) { - return false; - } - if (value == null) { - if (other.value != null) { - return false; - } - } else if (!value.equals(other.value)) { - return false; - } - return true; - } + @Test // DATAMONGO-1404 + public void updatesNumericValueCorrectlyWhenUsingMinOperator() { - } + TypeWithNumbers twn = new TypeWithNumbers(); + twn.byteVal = 100; + twn.doubleVal = 200D; + twn.floatVal = 300F; + twn.intVal = 400; + twn.longVal = 500L; - static class DocumentWithDBRefCollection { + // Note that $min operator uses String comparison for BigDecimal/BigInteger comparison according to BSON sort rules. + twn.bigIntegerVal = new BigInteger("600"); + twn.bigDeciamVal = new BigDecimal("700.0"); - @Id public String id; + template.save(twn); - @Field("db_ref_list")/** @see DATAMONGO-1058 */ - @org.springframework.data.mongodb.core.mapping.DBRef// - public List dbRefAnnotatedList; + byte byteVal = 90; + Update update = new Update()// + .min("byteVal", byteVal) // + .min("doubleVal", 190D) // + .min("floatVal", 290F) // + .min("intVal", 390) // + .min("longVal", 490) // + .min("bigIntegerVal", new BigInteger("590")) // + .min("bigDeciamVal", new BigDecimal("690")) // + ; - @org.springframework.data.mongodb.core.mapping.DBRef// - public Sample dbRefProperty; + template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); + + TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); + assertThat(loaded.byteVal).isEqualTo(byteVal); + assertThat(loaded.doubleVal).isEqualTo(190D); + assertThat(loaded.floatVal).isEqualTo(290F); + assertThat(loaded.intVal).isEqualTo(390); + assertThat(loaded.longVal).isEqualTo(490L); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("590")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("690")); } - static class DocumentWithCollection { + @Test // DATAMONGO-1404 + public void updatesDateValueCorrectlyWhenUsingMaxOperator() { - @Id String id; - List models; + Calendar cal = Calendar.getInstance(Locale.US); + cal.set(2013, 10, 13, 0, 0, 0); - DocumentWithCollection(List models) { - this.models = models; - } - } + TypeWithDate twd = new TypeWithDate(); + twd.date = cal.getTime(); + template.save(twd); - static class DocumentWithCollectionOfSimpleType { + cal.set(2019, 10, 13, 0, 0, 0); + template.updateFirst(query(where("id").is(twd.id)), new Update().max("date", cal.getTime()), TypeWithDate.class); - @Id String id; - List values; + TypeWithDate loaded = template.find(query(where("id").is(twd.id)), TypeWithDate.class).get(0); + assertThat(loaded.date).isEqualTo(cal.getTime()); } - static class DocumentWithCollectionOfSamples { - @Id String id; - List samples; - } + @Test // DATAMONGO-1404 + public void updatesNumericValueCorrectlyWhenUsingMaxOperator() { - static class DocumentWithMultipleCollections { - @Id 
String id; - List string1; - List string2; - } + TypeWithNumbers twn = new TypeWithNumbers(); + twn.byteVal = 100; + twn.doubleVal = 200D; + twn.floatVal = 300F; + twn.intVal = 400; + twn.longVal = 500L; - static class DocumentWithNestedCollection { - @Id String id; - List> models = new ArrayList>(); - } + // Note that $max operator uses String comparison for BigDecimal/BigInteger comparison according to BSON sort rules. + twn.bigIntegerVal = new BigInteger("600"); + twn.bigDeciamVal = new BigDecimal("700.0"); - static class DocumentWithNestedList { - @Id String id; - List> models = new ArrayList>(); - } + template.save(twn); - static class DocumentWithEmbeddedDocumentWithCollection { - @Id String id; - DocumentWithCollection embeddedDocument; + byte byteVal = 101; + Update update = new Update()// + .max("byteVal", byteVal) // + .max("doubleVal", 290D) // + .max("floatVal", 390F) // + .max("intVal", 490) // + .max("longVal", 590) // + .max("bigIntegerVal", new BigInteger("690")) // + .max("bigDeciamVal", new BigDecimal("790")) // + ; - DocumentWithEmbeddedDocumentWithCollection(DocumentWithCollection embeddedDocument) { - this.embeddedDocument = embeddedDocument; - } + template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); + + TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); + assertThat(loaded.byteVal).isEqualTo(byteVal); + assertThat(loaded.doubleVal).isEqualTo(290D); + assertThat(loaded.floatVal).isEqualTo(390F); + assertThat(loaded.intVal).isEqualTo(490); + assertThat(loaded.longVal).isEqualTo(590L); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("690")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("790")); } - static interface Model { - String value(); + @Test // DATAMONGO-1404 + public void updatesBigNumberValueUsingStringComparisonWhenUsingMaxOperator() { - String id(); - } + TypeWithNumbers twn = new TypeWithNumbers(); - static class ModelA implements Model { + // Note that $max operator uses String comparison for BigDecimal/BigInteger comparison according to BSON sort rules. + // Therefore "80" is considered greater than "700" + twn.bigIntegerVal = new BigInteger("600"); + twn.bigDeciamVal = new BigDecimal("700.0"); - @Id String id; - private String value; + template.save(twn); - ModelA(String value) { - this.value = value; - } + Update update = new Update()// + .max("bigIntegerVal", new BigInteger("70")) // + .max("bigDeciamVal", new BigDecimal("80")) // + ; - @Override - public String value() { - return this.value; - } + template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); - @Override - public String id() { - return id; - } + TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("70")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("80")); } - static class Document { + @Test // DATAMONGO-1404 + public void updatesBigNumberValueUsingStringComparisonWhenUsingMinOperator() { - @Id public String id; - public Model model; - } + TypeWithNumbers twn = new TypeWithNumbers(); - static class MyId { + // Note that $max operator uses String comparison for BigDecimal/BigInteger comparison according to BSON sort rules. 
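+		// (strictly, this test exercises the $min operator; the same string-based ordering applies,
+		// e.g. "80".compareTo("700") > 0 even though 80 < 700 numerically)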
+ // Therefore "80" is considered greater than "700" + twn.bigIntegerVal = new BigInteger("80"); + twn.bigDeciamVal = new BigDecimal("90.0"); - String first; + template.save(twn); + + Update update = new Update()// + .min("bigIntegerVal", new BigInteger("700")) // + .min("bigDeciamVal", new BigDecimal("800")) // + ; + + template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); + + TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("700")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("800")); + } + + @Test // DATAMONGO-1431, DATAMONGO-2323 + public void streamExecutionUsesExplicitCollectionName() { + + template.remove(new Query(), "some_special_collection"); + template.remove(new Query(), Document.class); + + Document document = new Document(); + + template.insert(document, "some_special_collection"); + + Stream stream = template.stream(new Query(), Document.class); + assertThat(stream).isEmpty(); + + Iterator stream2 = template + .stream(new Query(where("_id").is(document.id)), org.bson.Document.class, "some_special_collection").iterator(); + + assertThat(stream2.hasNext()).isTrue(); + assertThat(stream2.next().get("_id")).isEqualTo(new ObjectId(document.id)); + assertThat(stream2.hasNext()).isFalse(); + } + + @Test // DATAMONGO-1194 + public void shouldFetchListOfReferencesCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + DocumentWithDBRefCollection source = new DocumentWithDBRefCollection(); + source.dbRefAnnotatedList = Arrays.asList(two, one); + + template.save(source); + + assertThat(template.findOne(query(where("id").is(source.id)), DocumentWithDBRefCollection.class)).isEqualTo(source); + } + + @Test // DATAMONGO-1194 + public void shouldFetchListOfLazyReferencesCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + DocumentWithDBRefCollection source = new DocumentWithDBRefCollection(); + source.lazyDbRefAnnotatedList = Arrays.asList(two, one); + + template.save(source); + + DocumentWithDBRefCollection target = template.findOne(query(where("id").is(source.id)), + DocumentWithDBRefCollection.class); + + assertThat(target.lazyDbRefAnnotatedList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyDbRefAnnotatedList()).containsExactly(two, one); + } + + @Test // DATAMONGO-1194 + public void shouldFetchMapOfLazyReferencesCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + DocumentWithDBRefCollection source = new DocumentWithDBRefCollection(); + source.lazyDbRefAnnotatedMap = new LinkedHashMap<>(); + source.lazyDbRefAnnotatedMap.put("tyrion", two); + source.lazyDbRefAnnotatedMap.put("jon", one); + template.save(source); + + DocumentWithDBRefCollection target = template.findOne(query(where("id").is(source.id)), + DocumentWithDBRefCollection.class); + + assertThat(target.lazyDbRefAnnotatedMap).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.lazyDbRefAnnotatedMap.values()).containsExactly(two, one); + } + + @Test // DATAMONGO-2004 + public void shouldFetchLazyReferenceWithConstructorCreationCorrectly() { + + Sample one = new Sample("1", "jon snow"); + + template.save(one); + + 
DocumentWithLazyDBRefsAndConstructorCreation source = new DocumentWithLazyDBRefsAndConstructorCreation(null, one, + null, null); + + template.save(source); + + DocumentWithLazyDBRefsAndConstructorCreation target = template.findOne(query(where("id").is(source.id)), + DocumentWithLazyDBRefsAndConstructorCreation.class); + + assertThat(target.lazyDbRefProperty).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.lazyDbRefProperty).isEqualTo(one); + } + + @Test // DATAMONGO-2004 + public void shouldFetchMapOfLazyReferencesWithConstructorCreationCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + Map map = new LinkedHashMap<>(); + map.put("tyrion", two); + map.put("jon", one); + + DocumentWithLazyDBRefsAndConstructorCreation source = new DocumentWithLazyDBRefsAndConstructorCreation(null, null, + null, map); + + template.save(source); + + DocumentWithLazyDBRefsAndConstructorCreation target = template.findOne(query(where("id").is(source.id)), + DocumentWithLazyDBRefsAndConstructorCreation.class); + + assertThat(target.lazyDbRefAnnotatedMap).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.lazyDbRefAnnotatedMap.values()).containsExactly(two, one); + } + + @Test // DATAMONGO-2004 + public void shouldFetchListOfLazyReferencesWithConstructorCreationCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + List list = Arrays.asList(two, one); + + DocumentWithLazyDBRefsAndConstructorCreation source = new DocumentWithLazyDBRefsAndConstructorCreation(null, null, + list, null); + + template.save(source); + + DocumentWithLazyDBRefsAndConstructorCreation target = template.findOne(query(where("id").is(source.id)), + DocumentWithLazyDBRefsAndConstructorCreation.class); + + assertThat(target.lazyDbRefAnnotatedList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyDbRefAnnotatedList()).containsExactly(two, one); + } + + @Test // DATAMONGO-1513 + @DirtiesContext + public void populatesIdsAddedByEventListener() { + + context.addApplicationListener(new AbstractMongoEventListener() { + + @Override + public void onBeforeSave(BeforeSaveEvent event) { + event.getDocument().put("_id", UUID.randomUUID().toString()); + } + }); + + Document document = new Document(); + + template.insertAll(Collections.singletonList(document)); + + assertThat(document.id).isNotNull(); + } + + @Test // DATAMONGO-2189 + @DirtiesContext + public void afterSaveEventContainsSavedObjectUsingInsertAll() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insertAll(Collections.singletonList(source)); + + assertThat(saved.get()).isNotNull(); + assertThat(saved.get()).isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + + } + + @Test // DATAMONGO-2189 + @DirtiesContext + public void afterSaveEventContainsSavedObjectUsingInsert() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insert(source); + + assertThat(saved.get()).isNotNull(); + assertThat(saved.get()).isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + } + + @Test // DATAMONGO-1509 + public void findsByGenericNestedListElements() { + + List modelList = Collections.singletonList(new ModelA("value")); + DocumentWithCollection dwc = new DocumentWithCollection(modelList); + + 
template.insert(dwc); + + Query query = query(where("models").is(modelList)); + assertThat(template.findOne(query, DocumentWithCollection.class)).isEqualTo(dwc); + } + + @Test // DATAMONGO-1517 + @MongoVersion(asOf = "3.4") + public void decimal128TypeShouldBeSavedAndLoadedCorrectly() + throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { + + Class decimal128Type = ClassUtils.resolveClassName("org.bson.types.Decimal128", null); + + WithObjectTypeProperty source = new WithObjectTypeProperty(); + source.id = "decimal128-property-value"; + source.value = decimal128Type.getConstructor(BigDecimal.class).newInstance(new BigDecimal(100)); + + template.save(source); + + WithObjectTypeProperty loaded = template.findOne(query(where("id").is(source.id)), WithObjectTypeProperty.class); + assertThat(loaded.getValue()).isInstanceOf(decimal128Type); + } + + @Test // DATAMONGO-1718 + public void findAndRemoveAllWithoutExplicitDomainTypeShouldRemoveAndReturnEntitiesCorrectly() { + + Sample jon = new Sample("1", "jon snow"); + Sample bran = new Sample("2", "bran stark"); + Sample rickon = new Sample("3", "rickon stark"); + + template.save(jon); + template.save(bran); + template.save(rickon); + + List result = template.findAllAndRemove(query(where("field").regex(".*stark$")), + template.getCollectionName(Sample.class)); + + assertThat(result).hasSize(2); + assertThat(result).contains(bran, rickon); + assertThat(template.count(new BasicQuery("{}"), template.getCollectionName(Sample.class))).isEqualTo(1L); + } + + @Test // DATAMONGO-1779 + public void appliesQueryLimitToEmptyQuery() { + + Sample first = new Sample("1", "Dave Matthews"); + Sample second = new Sample("2", "Carter Beauford"); + + template.insertAll(Arrays.asList(first, second)); + + assertThat(template.find(new Query().limit(1), Sample.class)).hasSize(1); + } + + @Test // DATAMONGO-1870 + public void removeShouldConsiderLimit() { + + List samples = IntStream.range(0, 100) // + .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? "stark" : "lannister")) // + .collect(Collectors.toList()); + + template.bulkOps(BulkMode.UNORDERED, Sample.class).insert(samples).execute(); + + DeleteResult wr = template.remove(query(where("field").is("lannister")).limit(25), Sample.class); + + assertThat(wr.getDeletedCount()).isEqualTo(25L); + assertThat(template.count(new Query(), Sample.class)).isEqualTo(75L); + } + + @Test // DATAMONGO-1870 + public void removeShouldConsiderSkipAndSort() { + + List samples = IntStream.range(0, 100) // + .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? 
"stark" : "lannister")) // + .collect(Collectors.toList()); + + template.bulkOps(BulkMode.UNORDERED, Sample.class).insert(samples).execute(); + + DeleteResult wr = template.remove(new Query().skip(25).with(Sort.by("field")), Sample.class); + + assertThat(wr.getDeletedCount()).isEqualTo(75L); + assertThat(template.count(new Query(), Sample.class)).isEqualTo(25L); + assertThat(template.count(query(where("field").is("lannister")), Sample.class)).isEqualTo(25L); + assertThat(template.count(query(where("field").is("stark")), Sample.class)).isEqualTo(0L); + } + + @Test // DATAMONGO-1988 + public void findByNestedDocumentWithStringIdMappingToObjectIdMatchesDocumentsCorrectly() { + + DocumentWithNestedTypeHavingStringIdProperty source = new DocumentWithNestedTypeHavingStringIdProperty(); + source.id = "id-1"; + source.sample = new Sample(); + source.sample.id = new ObjectId().toHexString(); + + template.save(source); + + DocumentWithNestedTypeHavingStringIdProperty target = template + .query(DocumentWithNestedTypeHavingStringIdProperty.class) + .matching(query(where("sample.id").is(source.sample.id))).firstValue(); + + assertThat(target).isEqualTo(source); + } + + @Test // DATAMONGO-1992 + public void writesAuditingMetadataForImmutableTypes() { + + ImmutableAudited source = new ImmutableAudited(null, null); + ImmutableAudited result = template.save(source); + + assertThat(result).isNotSameAs(source).describedAs("Expected a different instances to be returned"); + assertThat(result.modified).isNotNull().describedAs("Auditing field must not be null"); + + ImmutableAudited read = template.findOne(query(where("id").is(result.getId())), ImmutableAudited.class); + + assertThat(read.modified).isEqualTo(result.modified.truncatedTo(ChronoUnit.MILLIS)) + .describedAs("Expected auditing information to be read"); + } + + @Test // DATAMONGO-1798 + public void saveAndLoadStringThatIsAnObjectIdAsString() { + + RawStringId source = new RawStringId(); + source.id = new ObjectId().toHexString(); + source.value = "new value"; + + template.save(source); + + org.bson.Document result = template + .execute(db -> (org.bson.Document) db.getCollection(template.getCollectionName(RawStringId.class)) + .find(Filters.eq("_id", source.id)).limit(1).into(new ArrayList()).iterator().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("_id")).isEqualTo(source.id); + + RawStringId target = template.findOne(query(where("id").is(source.id)), RawStringId.class); + assertThat(target).isEqualTo(source); + } + + @Test // GH-4184 + void insertHonorsExistingRawId() { + + RawStringId source = new RawStringId(); + source.id = "abc"; + source.value = "new value"; + + template.insert(source); + + org.bson.Document result = template + .execute(db -> db.getCollection(template.getCollectionName(RawStringId.class)).find().limit(1).cursor().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("_id")).isEqualTo("abc"); + + RawStringId target = template.findOne(query(where("id").is(source.id)), RawStringId.class); + assertThat(target).isEqualTo(source); + } + + @Test // GH-4026 + void saveShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.save(source); + + template.execute(RawStringId.class, collection -> { + + org.bson.Document first = collection.find(new org.bson.Document()).first(); + assertThat(first.get("_id")).isInstanceOf(String.class); + return null; + }); + } + + @Test // GH-4026 + void 
insertShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.insert(source); + + template.execute(RawStringId.class, collection -> { + + org.bson.Document first = collection.find(new org.bson.Document()).first(); + assertThat(first.get("_id")).isInstanceOf(String.class); + return null; + }); + } + + @Test // DATAMONGO-2193 + public void shouldNotConvertStringToObjectIdForNonIdField() { + + ObjectId outerId = new ObjectId(); + String innerId = new ObjectId().toHexString(); + + org.bson.Document source = new org.bson.Document() // + .append("_id", outerId) // + .append("inner", new org.bson.Document("id", innerId).append("value", "boooh")); + + template.getDb().getCollection(template.getCollectionName(Outer.class)).insertOne(source); + + Outer target = template.findOne(query(where("inner.id").is(innerId)), Outer.class); + assertThat(target).isNotNull(); + assertThat(target.id).isEqualTo(outerId); + assertThat(target.inner.id).isEqualTo(innerId); + } + + @Test // DATAMONGO-2294 + public void shouldProjectWithCollections() { + + MyPerson person = new MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person); + + Query queryByChainedInclude = query(where("name").is("Walter")); + queryByChainedInclude.fields().include("id").include("name"); + + Query queryByCollectionInclude = query(where("name").is("Walter")); + queryByCollectionInclude.fields().include("id", "name"); + + MyPerson first = template.findAndReplace(queryByChainedInclude, new MyPerson("Walter")); + MyPerson second = template.findAndReplace(queryByCollectionInclude, new MyPerson("Walter")); + + assertThat(first).isEqualTo(second); + assertThat(first.address).isNull(); + assertThat(second.address).isNull(); + } + + @Test // DATAMONGO-2451 + public void sortOnIdFieldWithExplicitTypeShouldWork() { + + template.dropCollection(WithIdAndFieldAnnotation.class); + + WithIdAndFieldAnnotation f = new WithIdAndFieldAnnotation(); + f.id = new ObjectId().toHexString(); + f.value = "value"; + + template.save(f); + + assertThat(template.find(new BasicQuery("{}").with(Sort.by("id")), WithIdAndFieldAnnotation.class)).isNotEmpty(); + } + + @Test // GH-3407 + void shouldWriteSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + doc.subdocument = subdoc; + + template.save(doc); + + org.bson.Document loaded = template.findById(doc.id, org.bson.Document.class, "withSubdocument"); + + assertThat(loaded.get("subdocument", org.bson.Document.class)).hasSize(3).containsEntry("firstname", "Walter") + .containsEntry("nickname", null); + } + + @Test // GH-3407 + void shouldUpdateSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + subdoc.nickname = "Heisenberg"; + doc.subdocument = subdoc; + + template.save(doc); + + String id = doc.id; + + doc.id = null; + subdoc.nickname = null; + template.update(WithSubdocument.class).replaceWith(doc).findAndReplaceValue(); + + org.bson.Document loaded = template.findById(id, org.bson.Document.class, "withSubdocument"); + + assertThat(loaded.get("subdocument", org.bson.Document.class)).hasSize(3).containsEntry("firstname", "Walter") + .containsEntry("nickname", null); + } + + @Test // 
GH-3407 + void shouldFindSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + doc.subdocument = subdoc; + + template.save(doc); + + org.bson.Document loaded = template.findOne(query(where("subdocument").is(subdoc)), org.bson.Document.class, + "withSubdocument"); + + assertThat(loaded).isNotNull(); + } + + @Test // GH-3811 + public void sliceShouldLimitCollectionValues() { + + DocumentWithCollectionOfSimpleType source = new DocumentWithCollectionOfSimpleType(); + source.id = "id-1"; + source.values = Arrays.asList("spring", "data", "mongodb"); + + template.save(source); + + Criteria criteria = Criteria.where("id").is(source.id); + Query query = Query.query(criteria); + query.fields().slice("values", 0, 1); + DocumentWithCollectionOfSimpleType target = template.findOne(query, DocumentWithCollectionOfSimpleType.class); + + assertThat(target.values).containsExactly("spring"); + } + + @Test // GH-2750 + void shouldExecuteQueryWithExpression() { + + TypeWithFieldAnnotation source1 = new TypeWithFieldAnnotation(); + source1.emailAddress = "spring.data@pivotal.com"; + + TypeWithFieldAnnotation source2 = new TypeWithFieldAnnotation(); + source2.emailAddress = "spring.data@vmware.com"; + + template.insertAll(List.of(source1, source2)); + + TypeWithFieldAnnotation loaded = template.query(TypeWithFieldAnnotation.class) + .matching(expr(StringOperators.valueOf("emailAddress").regexFind(".*@vmware.com$", "i"))).firstValue(); + + assertThat(loaded).isEqualTo(source2); + } + + @Test // GH-4300 + public void replaceShouldReplaceDocument() { + + org.bson.Document doc = new org.bson.Document("foo", "bar"); + String collectionName = "replace"; + template.save(doc, collectionName); + + org.bson.Document replacement = new org.bson.Document("foo", "baz"); + UpdateResult updateResult = template.replace(query(where("foo").is("bar")), replacement, + ReplaceOptions.replaceOptions(), collectionName); + + assertThat(updateResult.wasAcknowledged()).isTrue(); + assertThat(template.findOne(query(where("foo").is("baz")), org.bson.Document.class, collectionName)).isNotNull(); + } + + @Test // GH-4464 + void saveEntityWithDotInFieldName() { + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.value = "v1"; + + template.save(source); + + org.bson.Document raw = template.execute(WithFieldNameContainingDots.class, collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + assertThat(raw).containsEntry("field.name.with.dots", "v1"); + } + + @Test // GH-4464 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void queryEntityWithDotInFieldNameUsingExpr() { + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.value = "v1"; + + WithFieldNameContainingDots source2 = new WithFieldNameContainingDots(); + source2.id = "id-2"; + source2.value = "v2"; + + template.save(source); + template.save(source2); + + WithFieldNameContainingDots loaded = template.query(WithFieldNameContainingDots.class) // with property -> fieldname mapping + .matching(expr(ComparisonOperators.valueOf(ObjectOperators.getValueOf("value")).equalToValue("v1"))).firstValue(); + + assertThat(loaded).isEqualTo(source); + + loaded = template.query(WithFieldNameContainingDots.class) // using raw fieldname + 
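// editorial note (not part of the original change set): on MongoDB 5.0+, + // ObjectOperators.getValueOf renders $getField, which treats the raw name + // "field.name.with.dots" as a single literal key rather than a dotted path, + // which is why the raw-name variant below matches the mapped property above. + 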
.matching(expr(ComparisonOperators.valueOf(ObjectOperators.getValueOf("field.name.with.dots")).equalToValue("v1"))).firstValue(); + + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4464 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void updateEntityWithDotInFieldNameUsingAggregations() { + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.value = "v1"; + + template.save(source); + + template.update(WithFieldNameContainingDots.class) + .matching(where("id").is(source.id)) + .apply(AggregationUpdate.newUpdate(ReplaceWithOperation.replaceWithValue(ObjectOperators.setValueTo("value", "changed")))) + .first(); + + org.bson.Document raw = template.execute(WithFieldNameContainingDots.class, collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + assertThat(raw).containsEntry("field.name.with.dots", "changed"); + + template.update(WithFieldNameContainingDots.class) + .matching(where("id").is(source.id)) + .apply(AggregationUpdate.newUpdate(ReplaceWithOperation.replaceWithValue(ObjectOperators.setValueTo("field.name.with.dots", "changed-again")))) + .first(); + + raw = template.execute(WithFieldNameContainingDots.class, collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + assertThat(raw).containsEntry("field.name.with.dots", "changed-again"); + } + + @Test // GH-4464 + void savesMapWithDotInKey() { + + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(WithFieldNameContainingDots.class), client); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + template.getConverter().getMappingContext()); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), converter); + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.mapValue = Map.of("k1", "v1", "map.key.with.dot", "v2"); + + template.save(source); + + org.bson.Document raw = template.execute(WithFieldNameContainingDots.class, + collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + + assertThat(raw.get("mapValue", org.bson.Document.class)) + .containsEntry("k1", "v1") + .containsEntry("map.key.with.dot", "v2"); + } + + @Test // GH-4464 + void readsMapWithDotInKey() { + + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(WithFieldNameContainingDots.class), client); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + template.getConverter().getMappingContext()); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), converter); + + Map sourceMap = Map.of("k1", "v1", "sourceMap.key.with.dot", "v2"); + template.execute(WithFieldNameContainingDots.class, + collection -> { + collection.insertOne(new org.bson.Document("_id", "id-1").append("mapValue", sourceMap)); + return null; + } + ); + + WithFieldNameContainingDots loaded = template.query(WithFieldNameContainingDots.class) + .matching(where("id").is("id-1")) + .firstValue(); + + assertThat(loaded.mapValue).isEqualTo(sourceMap); + } + + private AtomicReference createAfterSaveReference() { + + AtomicReference saved = new AtomicReference<>(); + context.addApplicationListener(new AbstractMongoEventListener() { + + @Override + public void 
onAfterSave(AfterSaveEvent event) { + saved.set(event.getSource()); + } + }); + + return saved; + } + + static class TypeWithNumbers { + + @Id String id; + Integer intVal; + Float floatVal; + Long longVal; + Double doubleVal; + BigDecimal bigDeciamVal; + BigInteger bigIntegerVal; + Byte byteVal; + } + + static class DoucmentWithNamedIdField { + + @Id String someIdKey; + + @Field(value = "val") // + String value; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (someIdKey == null ? 0 : someIdKey.hashCode()); + result = prime * result + (value == null ? 0 : value.hashCode()); + return result; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof DoucmentWithNamedIdField)) { + return false; + } + DoucmentWithNamedIdField other = (DoucmentWithNamedIdField) obj; + if (someIdKey == null) { + if (other.someIdKey != null) { + return false; + } + } else if (!someIdKey.equals(other.someIdKey)) { + return false; + } + if (value == null) { + if (other.value != null) { + return false; + } + } else if (!value.equals(other.value)) { + return false; + } + return true; + } + + } + + static class DocumentWithDBRefCollection { + + @Id public String id; + + @Field("db_ref_list") // DATAMONGO-1058 + @org.springframework.data.mongodb.core.mapping.DBRef // + public List dbRefAnnotatedList; + + @org.springframework.data.mongodb.core.mapping.DBRef // + public Sample dbRefProperty; + + @Field("lazy_db_ref_list") // DATAMONGO-1194 + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) // + public List lazyDbRefAnnotatedList; + + @Field("lazy_db_ref_map") // DATAMONGO-1194 + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) public Map lazyDbRefAnnotatedMap; + + public String getId() { + return this.id; + } + + public List getDbRefAnnotatedList() { + return this.dbRefAnnotatedList; + } + + public Sample getDbRefProperty() { + return this.dbRefProperty; + } + + public List getLazyDbRefAnnotatedList() { + return this.lazyDbRefAnnotatedList; + } + + public Map getLazyDbRefAnnotatedMap() { + return this.lazyDbRefAnnotatedMap; + } + + public void setId(String id) { + this.id = id; + } + + public void setDbRefAnnotatedList(List dbRefAnnotatedList) { + this.dbRefAnnotatedList = dbRefAnnotatedList; + } + + public void setDbRefProperty(Sample dbRefProperty) { + this.dbRefProperty = dbRefProperty; + } + + public void setLazyDbRefAnnotatedList(List lazyDbRefAnnotatedList) { + this.lazyDbRefAnnotatedList = lazyDbRefAnnotatedList; + } + + public void setLazyDbRefAnnotatedMap(Map lazyDbRefAnnotatedMap) { + this.lazyDbRefAnnotatedMap = lazyDbRefAnnotatedMap; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentWithDBRefCollection that = (DocumentWithDBRefCollection) o; + return Objects.equals(id, that.id) && Objects.equals(dbRefAnnotatedList, that.dbRefAnnotatedList) + && Objects.equals(dbRefProperty, that.dbRefProperty) + && Objects.equals(lazyDbRefAnnotatedList, that.lazyDbRefAnnotatedList) + && Objects.equals(lazyDbRefAnnotatedMap, that.lazyDbRefAnnotatedMap); + } + + @Override + public int hashCode() { + return Objects.hash(id, dbRefAnnotatedList, dbRefProperty, lazyDbRefAnnotatedList, lazyDbRefAnnotatedMap); + } + + public String toString() { + return "MongoTemplateTests.DocumentWithDBRefCollection(id=" + 
this.getId() + ", dbRefAnnotatedList=" + + this.getDbRefAnnotatedList() + ", dbRefProperty=" + this.getDbRefProperty() + ", lazyDbRefAnnotatedList=" + + this.getLazyDbRefAnnotatedList() + ", lazyDbRefAnnotatedMap=" + this.getLazyDbRefAnnotatedMap() + ")"; + } + } + + static class DocumentWithLazyDBRefsAndConstructorCreation { + + @Id public String id; + + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) // + public Sample lazyDbRefProperty; + + @Field("lazy_db_ref_list") + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) // + public List lazyDbRefAnnotatedList; + + @Field("lazy_db_ref_map") + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) public Map lazyDbRefAnnotatedMap; + + public DocumentWithLazyDBRefsAndConstructorCreation(String id, Sample lazyDbRefProperty, + List lazyDbRefAnnotatedList, Map lazyDbRefAnnotatedMap) { + this.id = id; + this.lazyDbRefProperty = lazyDbRefProperty; + this.lazyDbRefAnnotatedList = lazyDbRefAnnotatedList; + this.lazyDbRefAnnotatedMap = lazyDbRefAnnotatedMap; + } + + public String getId() { + return this.id; + } + + public Sample getLazyDbRefProperty() { + return this.lazyDbRefProperty; + } + + public List getLazyDbRefAnnotatedList() { + return this.lazyDbRefAnnotatedList; + } + + public Map getLazyDbRefAnnotatedMap() { + return this.lazyDbRefAnnotatedMap; + } + + public void setId(String id) { + this.id = id; + } + + public void setLazyDbRefProperty(Sample lazyDbRefProperty) { + this.lazyDbRefProperty = lazyDbRefProperty; + } + + public void setLazyDbRefAnnotatedList(List lazyDbRefAnnotatedList) { + this.lazyDbRefAnnotatedList = lazyDbRefAnnotatedList; + } + + public void setLazyDbRefAnnotatedMap(Map lazyDbRefAnnotatedMap) { + this.lazyDbRefAnnotatedMap = lazyDbRefAnnotatedMap; + } + + public String toString() { + return "MongoTemplateTests.DocumentWithLazyDBRefsAndConstructorCreation(id=" + this.getId() + + ", lazyDbRefProperty=" + this.getLazyDbRefProperty() + ", lazyDbRefAnnotatedList=" + + this.getLazyDbRefAnnotatedList() + ", lazyDbRefAnnotatedMap=" + this.getLazyDbRefAnnotatedMap() + ")"; + } + } + + static class DocumentWithCollection { + + @Id String id; + List models; + + DocumentWithCollection(List models) { + this.models = models; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentWithCollection that = (DocumentWithCollection) o; + return Objects.equals(id, that.id) && Objects.equals(models, that.models); + } + + @Override + public int hashCode() { + return Objects.hash(id, models); + } + } + + static class DocumentWithCollectionOfSimpleType { + + @Id String id; + List values; + } + + static class DocumentWithCollectionOfSamples { + @Id String id; + List samples; + } + + static class DocumentWithNestedTypeHavingStringIdProperty { + + @Id String id; + Sample sample; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentWithNestedTypeHavingStringIdProperty that = (DocumentWithNestedTypeHavingStringIdProperty) o; + return Objects.equals(id, that.id) && Objects.equals(sample, that.sample); + } + + @Override + public int hashCode() { + return Objects.hash(id, sample); + } + } + + static class DocumentWithMultipleCollections { + @Id String id; + List string1; + List string2; + } + + static class DocumentWithNestedCollection { + @Id String id; + List> models = new 
ArrayList<>(); + } + + static class DocumentWithNestedList { + @Id String id; + List> models = new ArrayList<>(); + } + + static class DocumentWithEmbeddedDocumentWithCollection { + @Id String id; + DocumentWithCollection embeddedDocument; + + DocumentWithEmbeddedDocumentWithCollection(DocumentWithCollection embeddedDocument) { + this.embeddedDocument = embeddedDocument; + } + } + + static interface Model { + String value(); + + String id(); + } + + static class ModelA implements Model { + + @Id String id; + private String value; + + ModelA(String value) { + this.value = value; + } + + @Override + public String value() { + return this.value; + } + + @Override + public String id() { + return id; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ModelA modelA = (ModelA) o; + return Objects.equals(id, modelA.id) && Objects.equals(value, modelA.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + } + + static class Document { + + @Id public String id; + public Model model; + } + + static class MyId { + + String first; String second; + Instant id; + + @Field("t") Instant time; } static class TypeWithMyId { @Id MyId id; + String value; } static class Sample { @@ -3317,20 +4453,37 @@ static class Sample { @Id String id; String field; - public Sample() {} - public Sample(String id, String field) { this.id = id; this.field = field; } + + public Sample() {} + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sample sample = (Sample) o; + return Objects.equals(id, sample.id) && Objects.equals(field, sample.field); + } + + @Override + public int hashCode() { + return Objects.hash(id, field); + } } static class TestClass { - DateTime myDate; + LocalDateTime myDate; @PersistenceConstructor - TestClass(DateTime myDate) { + TestClass(LocalDateTime myDate) { this.myDate = myDate; } } @@ -3341,21 +4494,21 @@ static class PersonWithConvertedId { String name; } - static enum DateTimeToDateConverter implements Converter { + static enum DateTimeToDateConverter implements Converter { INSTANCE; - public Date convert(DateTime source) { - return source == null ? null : source.toDate(); + public Date convert(LocalDateTime source) { + return source == null ? null : java.util.Date.from(source.atZone(ZoneId.systemDefault()).toInstant()); } } - static enum DateToDateTimeConverter implements Converter { + static enum DateToDateTimeConverter implements Converter { INSTANCE; - public DateTime convert(Date source) { - return source == null ? null : new DateTime(source.getTime()); + public LocalDateTime convert(Date source) { + return source == null ? 
null : LocalDateTime.ofInstant(source.toInstant(), ZoneId.systemDefault()); } } @@ -3365,9 +4518,62 @@ public static class MyPerson { String name; Address address; + public MyPerson() {} + + public MyPerson(String name) { + this.name = name; + } + + public MyPerson(String id, String name, Address address) { + this.id = id; + this.name = name; + this.address = address; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + public String getName() { return name; } + + public void setName(String name) { + this.name = name; + } + + public Address getAddress() { + return address; + } + + public void setAddress(Address address) { + this.address = address; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + MyPerson myPerson = (MyPerson) o; + return Objects.equals(id, myPerson.id) && Objects.equals(name, myPerson.name) + && Objects.equals(address, myPerson.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, address); + } + } + + interface MyPersonProjection { + + String getName(); } static class Address { @@ -3383,7 +4589,7 @@ static class Address { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; @@ -3414,13 +4620,31 @@ public int hashCode() { static class VersionedPerson { @Version Long version; - String id, firstname, lastname; + String id, firstname; + @Field(write = Field.Write.ALWAYS) String lastname; } static class TypeWithFieldAnnotation { @Id ObjectId id; @Field("email") String emailAddress; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TypeWithFieldAnnotation that = (TypeWithFieldAnnotation) o; + return Objects.equals(id, that.id) && Objects.equals(emailAddress, that.emailAddress); + } + + @Override + public int hashCode() { + return Objects.hash(id, emailAddress); + } } static class TypeWithDate { @@ -3474,7 +4698,6 @@ public String getName() { public void setText(String text) { this.text = text; - } public String getId() { @@ -3484,6 +4707,24 @@ public String getId() { public String getText() { return text; } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeContent that = (SomeContent) o; + return Objects.equals(id, that.id) && Objects.equals(text, that.text) && Objects.equals(name, that.name) + && Objects.equals(dbrefMessage, that.dbrefMessage); + } + + @Override + public int hashCode() { + return Objects.hash(id, text, name, dbrefMessage); + } } static class SomeMessage { @@ -3514,4 +4755,326 @@ static class WithGeoJson { GeoJsonPoint point; } + static class WithObjectTypeProperty { + + @Id String id; + Object value; + + public WithObjectTypeProperty() {} + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Object getValue() { + return value; + } + + public void setValue(Object value) { + this.value = value; + } + } + + static class PersonWithIdPropertyOfTypeUUIDListener + extends AbstractMongoEventListener { + + @Override + public void onBeforeConvert(BeforeConvertEvent event) { + + PersonWithIdPropertyOfTypeUUID person = event.getSource(); + + if (person.getId() != null) { + return; + } + + person.setId(UUID.randomUUID()); + } + } + + public 
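// editorial sketch (reviewer addition, not part of the original change set): a + // hypothetical companion to PersonWithIdPropertyOfTypeUUIDListener above, reusing + // the same onBeforeConvert hook to default the timestamp of the Message fixture + // declared right below; the listener name is made up for illustration. + static class MessageTimestampDefaultingListener extends AbstractMongoEventListener<Message> { + + @Override + public void onBeforeConvert(BeforeConvertEvent<Message> event) { + + Message message = event.getSource(); + + if (message.getTimestamp() == null) { + message.setTimestamp(new Date()); + } + } + } + + public 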
static class Message { + + private ObjectId id; + + private String text; + + private Date timestamp; + + public Message() {} + + public Message(String text) { + super(); + this.text = text; + this.timestamp = new Date(); + } + + public Message(String text, Date timestamp) { + super(); + this.text = text; + this.timestamp = timestamp; + } + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public String getText() { + return text; + } + + public void setText(String text) { + this.text = text; + } + + public Date getTimestamp() { + return timestamp; + } + + public void setTimestamp(Date timestamp) { + this.timestamp = timestamp; + } + + @Override + public String toString() { + return "Message [id=" + id + ", text=" + text + ", timestamp=" + timestamp + "]"; + } + + } + + // DATAMONGO-1992 + static class ImmutableVersioned { + + final @Id String id; + final @Version Long version; + + public ImmutableVersioned() { + this(null, null); + } + + public ImmutableVersioned(String id, Long version) { + this.id = id; + this.version = version; + } + + ImmutableVersioned withVersion(Long version) { + return new ImmutableVersioned(id, version); + } + + ImmutableVersioned withId(String id) { + return new ImmutableVersioned(id, version); + } + } + + static class ImmutableAudited { + + final @Id String id; + final @LastModifiedDate Instant modified; + + ImmutableAudited(String id, Instant modified) { + this.id = id; + this.modified = modified; + } + + ImmutableAudited withId(String id) { + return new ImmutableAudited(id, modified); + } + + ImmutableAudited withModified(Instant modified) { + return new ImmutableAudited(id, modified); + } + + public String getId() { + return id; + } + + public Instant getModified() { + return modified; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ImmutableAudited that = (ImmutableAudited) o; + return Objects.equals(id, that.id) && Objects.equals(modified, that.modified); + } + + @Override + public int hashCode() { + return Objects.hash(id, modified); + } + } + + static class RawStringId { + + @MongoId String id; + String value; + + public RawStringId() {} + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RawStringId that = (RawStringId) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + } + + static class Outer { + + @Id ObjectId id; + Inner inner; + } + + static class Inner { + + @Field("id") String id; + String value; + } + + static class WithIdAndFieldAnnotation { + + @Id // + @Field(name = "_id") // + String id; + String value; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + } + + static class WithSubdocument { + + @Id // + @Field(name = "_id") // + String id; + SubdocumentWithWriteNull subdocument; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + 
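// editorial note: the GH-3407 tests above read this type back as a raw + // org.bson.Document from the "withSubdocument" collection to verify that + // Field.Write.ALWAYS keeps an unset nickname as an explicit null entry. + 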
public SubdocumentWithWriteNull getSubdocument() { + return subdocument; + } + + public void setSubdocument(SubdocumentWithWriteNull subdocument) { + this.subdocument = subdocument; + } + } + + static class SubdocumentWithWriteNull { + + final String firstname, lastname; + + @Field(write = Field.Write.ALWAYS) String nickname; + + public SubdocumentWithWriteNull(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + + public String getFirstname() { + return firstname; + } + + public String getLastname() { + return lastname; + } + + public String getNickname() { + return nickname; + } + + public void setNickname(String nickname) { + this.nickname = nickname; + } + } + + static class WithFieldNameContainingDots { + + String id; + + @Field(value = "field.name.with.dots", nameType = Type.KEY) + String value; + + Map mapValue; + + @Override + public String toString() { + return "WithFieldNameContainingDots{" + "id='" + id + '\'' + ", value='" + value + '\'' + ", mapValue=" + mapValue + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithFieldNameContainingDots withFieldNameContainingDots = (WithFieldNameContainingDots) o; + return Objects.equals(id, withFieldNameContainingDots.id) && Objects.equals(value, withFieldNameContainingDots.value) + && Objects.equals(mapValue, withFieldNameContainingDots.mapValue); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, mapValue); + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTransactionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTransactionTests.java new file mode 100644 index 0000000000..ec0ab192fa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTransactionTests.java @@ -0,0 +1,396 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static java.util.UUID.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.MongoTestUtils.*; + +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.SetSystemProperty; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Persistable; +import org.springframework.data.mongodb.CapturingTransactionOptionsResolver; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.MongoTransactionOptions; +import org.springframework.data.mongodb.MongoTransactionOptionsResolver; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.test.util.AfterTransactionAssertion; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.test.annotation.Rollback; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.transaction.AfterTransaction; +import org.springframework.test.context.transaction.BeforeTransaction; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.annotation.EnableTransactionManagement; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; + +/** + * @author Christoph Strobl + * @author Yan Kardziyaka + * @currentRead Shadow's Edge - Brent Weeks + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +@ContextConfiguration +@Transactional(transactionManager = "txManager") +@SetSystemProperty(key = "tx.read.concern", value = "local") +public class MongoTemplateTransactionTests { + + static final String DB_NAME = "template-tx-tests"; + static final String COLLECTION_NAME = "assassins"; + + static @ReplSetClient MongoClient mongoClient; + + @Configuration + @EnableTransactionManagement + static class Config extends 
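// editorial note: Config wraps the default MongoTransactionOptionsResolver in a + // CapturingTransactionOptionsResolver so individual tests can assert which + // MongoTransactionOptions a given @Transactional label actually produced. + 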
AbstractMongoClientConfiguration { + + @Bean + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DB_NAME; + } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Bean + CapturingTransactionOptionsResolver txOptionsResolver() { + return new CapturingTransactionOptionsResolver(MongoTransactionOptionsResolver.defaultResolver()); + } + + @Bean + MongoTransactionManager txManager(MongoDatabaseFactory dbFactory, + MongoTransactionOptionsResolver txOptionsResolver) { + return new MongoTransactionManager(dbFactory, txOptionsResolver, MongoTransactionOptions.NONE); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Bean + public TransactionOptionsTestService transactionOptionsTestService(MongoOperations operations) { + return new TransactionOptionsTestService<>(operations, Assassin.class); + } + } + + @Autowired MongoTemplate template; + @Autowired MongoClient client; + @Autowired TransactionOptionsTestService transactionOptionsTestService; + @Autowired CapturingTransactionOptionsResolver transactionOptionsResolver; + + List>> assertionList; + + @BeforeEach + public void setUp() { + + template.setReadPreference(ReadPreference.primary()); + assertionList = new CopyOnWriteArrayList<>(); + transactionOptionsResolver.clear(); // clean out leftovers from dirty context + } + + @BeforeTransaction + public void beforeTransaction() { + createOrReplaceCollection(DB_NAME, COLLECTION_NAME, client); + } + + @AfterTransaction + public void verifyDbState() { + + MongoCollection collection = client.getDatabase(DB_NAME).withReadPreference(ReadPreference.primary()) + .getCollection(COLLECTION_NAME); + + assertionList.forEach(it -> { + + boolean isPresent = collection.countDocuments(Filters.eq("_id", it.getId())) != 0; + + assertThat(isPresent) + .withFailMessage(String.format("After transaction entity %s should %s.", it.getPersistable(), + it.shouldBePresent() ? "be present" : "NOT be present")) + .isEqualTo(it.shouldBePresent()); + }); + } + + @Rollback(false) + @Test // DATAMONGO-1920 + public void shouldOperateCommitCorrectly() { + + Assassin hu = new Assassin("hu", "Hu Gibbet"); + template.save(hu); + + assertAfterTransaction(hu).isPresent(); + } + + @Test // DATAMONGO-1920 + public void shouldOperateRollbackCorrectly() { + + Assassin vi = new Assassin("vi", "Viridiana Sovari"); + template.save(vi); + + assertAfterTransaction(vi).isNotPresent(); + } + + @Test // DATAMONGO-1920 + public void shouldBeAbleToViewChangesDuringTransaction() throws InterruptedException { + + Assassin durzo = new Assassin("durzo", "Durzo Blint"); + template.save(durzo); + + Thread.sleep(100); + Assassin retrieved = template.findOne(query(where("id").is(durzo.getId())), Assassin.class); + + assertThat(retrieved).isEqualTo(durzo); + + assertAfterTransaction(durzo).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidMaxCommitTime() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + assertThatThrownBy(() -> transactionOptionsTestService.saveWithInvalidMaxCommitTime(assassin)) // + .isInstanceOf(IllegalArgumentException.class); + + assertAfterTransaction(assassin).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldCommitOnTransactionWithinMaxCommitTime() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + transactionOptionsTestService.saveWithinMaxCommitTime(assassin); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(Duration.ofMinutes(1), + MongoTransactionOptions::getMaxCommitTime); + + assertAfterTransaction(assassin).isPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowInvalidDataAccessApiUsageExceptionOnTransactionWithAvailableReadConcern() { + + assertThatThrownBy(() -> transactionOptionsTestService.availableReadConcernFind(randomUUID().toString())) // + .isInstanceOf(InvalidDataAccessApiUsageException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidReadConcern() { + + assertThatThrownBy(() -> transactionOptionsTestService.invalidReadConcernFind(randomUUID().toString())) // + .isInstanceOf(IllegalArgumentException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldReadTransactionOptionFromSystemProperty() { + + transactionOptionsTestService.environmentReadConcernFind(randomUUID().toString()); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns( + new ReadConcern(ReadConcernLevel.fromString(System.getProperty("tx.read.concern"))), + MongoTransactionOptions::getReadConcern); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldNotThrowOnTransactionWithMajorityReadConcern() { + assertThatNoException() // + .isThrownBy(() -> transactionOptionsTestService.majorityReadConcernFind(randomUUID().toString())); + } + + @Rollback(false) + @Test 
// GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowUncategorizedMongoDbExceptionOnTransactionWithPrimaryPreferredReadPreference() { + + assertThatThrownBy(() -> transactionOptionsTestService.findFromPrimaryPreferredReplica(randomUUID().toString())) // + .isInstanceOf(UncategorizedMongoDbException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidReadPreference() { + + assertThatThrownBy(() -> transactionOptionsTestService.findFromInvalidReplica(randomUUID().toString())) // + .isInstanceOf(IllegalArgumentException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldNotThrowOnTransactionWithPrimaryReadPreference() { + + assertThatNoException() // + .isThrownBy(() -> transactionOptionsTestService.findFromPrimaryReplica(randomUUID().toString())); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowTransactionSystemExceptionOnTransactionWithUnacknowledgedWriteConcern() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + assertThatThrownBy(() -> transactionOptionsTestService.unacknowledgedWriteConcernSave(assassin)) // + .isInstanceOf(TransactionSystemException.class); + + assertAfterTransaction(assassin).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidWriteConcern() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + assertThatThrownBy(() -> transactionOptionsTestService.invalidWriteConcernSave(assassin)) // + .isInstanceOf(IllegalArgumentException.class); + + assertAfterTransaction(assassin).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldCommitOnTransactionWithAcknowledgedWriteConcern() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + transactionOptionsTestService.acknowledgedWriteConcernSave(assassin); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(WriteConcern.ACKNOWLEDGED, + MongoTransactionOptions::getWriteConcern); + + assertAfterTransaction(assassin).isPresent(); + } + + // --- Just some helpers and test entities + + private AfterTransactionAssertion assertAfterTransaction(Assassin assassin) { + + AfterTransactionAssertion assertion = new AfterTransactionAssertion<>(assassin); + assertionList.add(assertion); + return assertion; + } + + @org.springframework.data.mongodb.core.mapping.Document(COLLECTION_NAME) + static class Assassin implements Persistable { + + @Id String id; + String name; + + public Assassin(String id, String name) { + this.id = id; + this.name = name; + } + + @Override + public boolean isNew() { + return id == null; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + 
return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Assassin assassin = (Assassin) o; + return Objects.equals(id, assassin.id) && Objects.equals(name, assassin.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "MongoTemplateTransactionTests.Assassin(id=" + this.getId() + ", name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 40af85d3e1..79a0bb1fcb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,150 +15,267 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.mockito.Matchers.*; import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.math.BigInteger; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.bson.conversions.Bson; import org.bson.types.ObjectId; -import org.hamcrest.core.Is; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.ArgumentMatcher; -import org.mockito.Matchers; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationListener; import org.springframework.context.support.GenericApplicationContext; +import org.springframework.context.support.StaticApplicationContext; import org.springframework.core.convert.converter.Converter; import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Transient; import 
org.springframework.data.annotation.Version; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.aggregation.Aggregation; -import org.springframework.data.mongodb.core.convert.CustomConversions; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.aggregation.*; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators.Gte; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Sharded; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.lang.Nullable; +import org.springframework.mock.env.MockEnvironment; import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.CollectionUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.CommandResult; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; -import com.mongodb.MapReduceCommand; -import com.mongodb.MapReduceOutput; -import com.mongodb.Mongo; +import com.mongodb.MongoClientSettings; import 
com.mongodb.MongoException; +import com.mongodb.MongoNamespace; +import com.mongodb.ReadConcern; import com.mongodb.ReadPreference; +import com.mongodb.ServerAddress; +import com.mongodb.ServerCursor; +import com.mongodb.WriteConcern; +import com.mongodb.client.AggregateIterable; +import com.mongodb.client.DistinctIterable; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.MapReduceAction; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; /** * Unit tests for {@link MongoTemplate}. - * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch + * @author Michael J. Simons + * @author Roman Puchkovskiy + * @author Yadhukrishna S Pai + * @author Jakub Zurawa + * @author Ben Foster */ -@RunWith(MockitoJUnitRunner.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class MongoTemplateUnitTests extends MongoOperationsUnitTests { - MongoTemplate template; - - @Mock MongoDbFactory factory; - @Mock Mongo mongo; - @Mock DB db; - @Mock DBCollection collection; - @Mock DBCursor cursor; - - MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); - MappingMongoConverter converter; - MongoMappingContext mappingContext; - - @Before - public void setUp() { - - when(cursor.copy()).thenReturn(cursor); - when(factory.getDb()).thenReturn(db); + private MongoTemplate template; + + @Mock MongoDatabaseFactory factory; + @Mock MongoClient mongo; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + @Mock MongoCollection collectionWithWriteConcern; + @Mock MongoCursor cursor; + @Mock FindIterable findIterable; + @Mock AggregateIterable aggregateIterable; + @Mock MapReduceIterable mapReduceIterable; + @Mock DistinctIterable distinctIterable; + @Mock UpdateResult updateResult; + @Mock DeleteResult deleteResult; + + private Document commandResultDocument = new Document(); + + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private MockEnvironment environment = new MockEnvironment(); + + @BeforeEach + void beforeEach() { + + when(findIterable.iterator()).thenReturn(cursor); + when(factory.getMongoDatabase()).thenReturn(db); when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); - when(db.getCollection(Mockito.any(String.class))).thenReturn(collection); - when(collection.find(Mockito.any(DBObject.class))).thenReturn(cursor); - when(cursor.limit(anyInt())).thenReturn(cursor); - when(cursor.sort(Mockito.any(DBObject.class))).thenReturn(cursor); - when(cursor.hint(anyString())).thenReturn(cursor); + when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + when(db.getCollection(any(String.class), eq(Document.class))).thenReturn(collection); + when(db.runCommand(any(), 
any(Class.class))).thenReturn(commandResultDocument); + when(collection.find(any(org.bson.Document.class), any(Class.class))).thenReturn(findIterable); + when(collection.mapReduce(any(), any(), eq(Document.class))).thenReturn(mapReduceIterable); + when(collection.countDocuments(any(Bson.class), any(CountOptions.class))).thenReturn(1L); + when(collection.estimatedDocumentCount(any())).thenReturn(1L); + when(collection.getNamespace()).thenReturn(new MongoNamespace("db.mock-collection")); + when(collection.aggregate(any(List.class), any())).thenReturn(aggregateIterable); + when(collection.withReadConcern(any())).thenReturn(collection); + when(collection.withReadPreference(any())).thenReturn(collection); + when(collection.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))).thenReturn(updateResult); + when(collection.withWriteConcern(any())).thenReturn(collectionWithWriteConcern); + when(collection.distinct(anyString(), any(Document.class), any())).thenReturn(distinctIterable); + when(collectionWithWriteConcern.deleteOne(any(Bson.class), any())).thenReturn(deleteResult); + when(collectionWithWriteConcern.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))).thenReturn(updateResult); + when(findIterable.projection(any())).thenReturn(findIterable); + when(findIterable.sort(any(org.bson.Document.class))).thenReturn(findIterable); + when(findIterable.collation(any())).thenReturn(findIterable); + when(findIterable.limit(anyInt())).thenReturn(findIterable); + when(mapReduceIterable.collation(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.sort(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.iterator()).thenReturn(cursor); + when(mapReduceIterable.filter(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.collectionName(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.databaseName(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.action(any())).thenReturn(mapReduceIterable); + when(aggregateIterable.collation(any())).thenReturn(aggregateIterable); + when(aggregateIterable.allowDiskUse(any())).thenReturn(aggregateIterable); + when(aggregateIterable.batchSize(anyInt())).thenReturn(aggregateIterable); + when(aggregateIterable.map(any())).thenReturn(aggregateIterable); + when(aggregateIterable.maxTime(anyLong(), any())).thenReturn(aggregateIterable); + when(aggregateIterable.into(any())).thenReturn(Collections.emptyList()); + when(aggregateIterable.hint(any())).thenReturn(aggregateIterable); + when(aggregateIterable.hintString(any())).thenReturn(aggregateIterable); + when(distinctIterable.collation(any())).thenReturn(distinctIterable); + when(distinctIterable.map(any())).thenReturn(distinctIterable); + when(distinctIterable.into(any())).thenReturn(Collections.emptyList()); this.mappingContext = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext); + mappingContext.setAutoIndexCreation(true); + mappingContext.setEnvironment(environment); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + this.converter = spy(new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext)); + when(this.converter.getEnvironment()).thenReturn(environment); + converter.afterPropertiesSet(); this.template = new MongoTemplate(factory, converter); } - @Test(expected = IllegalArgumentException.class) - public void 
rejectsNullDatabaseName() throws Exception { - new MongoTemplate(mongo, null); + @Test + void rejectsNullDatabaseName() { + assertThatIllegalArgumentException().isThrownBy(() -> new MongoTemplate(mongo, null)); + } + + @Test // DATAMONGO-1968 + void rejectsNullMongo() { + assertThatIllegalArgumentException().isThrownBy(() -> new MongoTemplate((MongoClient) null, "database")); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullMongo() throws Exception { - new MongoTemplate(null, "database"); + @Test // DATAMONGO-1968 + void rejectsNullMongoClient() { + assertThatIllegalArgumentException() + .isThrownBy(() -> new MongoTemplate((com.mongodb.client.MongoClient) null, "database")); } - @Test(expected = DataAccessException.class) - public void removeHandlesMongoExceptionProperly() throws Exception { + @Test // DATAMONGO-1870 + void removeHandlesMongoExceptionProperly() { + MongoTemplate template = mockOutGetDb(); - when(db.getCollection("collection")).thenThrow(new MongoException("Exception!")); - template.remove(null, "collection"); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.remove(null, "collection")); } @Test - public void defaultsConverterToMappingMongoConverter() throws Exception { + void defaultsConverterToMappingMongoConverter() { MongoTemplate template = new MongoTemplate(mongo, "database"); - assertTrue(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter); + assertThat(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter).isTrue(); } - @Test(expected = InvalidDataAccessApiUsageException.class) - public void rejectsNotFoundMapReduceResource() { + @Test + void rejectsNotFoundMapReduceResource() { GenericApplicationContext ctx = new GenericApplicationContext(); ctx.refresh(); template.setApplicationContext(ctx); - template.mapReduce("foo", "classpath:doesNotExist.js", "function() {}", Person.class); + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> template.mapReduce("foo", "classpath:doesNotExist.js", "function() {}", Person.class)); } - /** - * @see DATAMONGO-322 - */ - @Test(expected = InvalidDataAccessApiUsageException.class) - public void rejectsEntityWithNullIdIfNotSupportedIdType() { + @Test // DATAMONGO-322 + void rejectsEntityWithNullIdIfNotSupportedIdType() { Object entity = new NotAutogenerateableId(); - template.save(entity); + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class).isThrownBy(() -> template.save(entity)); } - /** - * @see DATAMONGO-322 - */ - @Test - public void storesEntityWithSetIdAlthoughNotAutogenerateable() { + @Test // DATAMONGO-322 + void storesEntityWithSetIdAlthoughNotAutogenerateable() { NotAutogenerateableId entity = new NotAutogenerateableId(); entity.id = 1; @@ -166,31 +283,36 @@ public void storesEntityWithSetIdAlthoughNotAutogenerateable() { template.save(entity); } - /** - * @see DATAMONGO-322 - */ - @Test - public void autogeneratesIdForEntityWithAutogeneratableId() { + @Test // DATAMONGO-322 + void autogeneratesIdForEntityWithAutogeneratableId() { this.converter.afterPropertiesSet(); MongoTemplate template = spy(this.template); - doReturn(new ObjectId()).when(template).saveDBObject(Mockito.any(String.class), Mockito.any(DBObject.class), - Mockito.any(Class.class)); + doReturn(new ObjectId()).when(template).saveDocument(any(String.class), any(Document.class), any(Class.class)); AutogenerateableId entity = new AutogenerateableId(); 
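// editorial note: saveDocument is stubbed above to hand back a fresh ObjectId; + // the save(entity) call below is then expected to populate the still-null id from it. + 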
-	/**
-	 * @see DATAMONGO-322
-	 */
-	@Test
-	public void autogeneratesIdForEntityWithAutogeneratableId() {
+	@Test // DATAMONGO-322
+	void autogeneratesIdForEntityWithAutogeneratableId() {
 
 		this.converter.afterPropertiesSet();
 
 		MongoTemplate template = spy(this.template);
-		doReturn(new ObjectId()).when(template).saveDBObject(Mockito.any(String.class), Mockito.any(DBObject.class),
-				Mockito.any(Class.class));
+		doReturn(new ObjectId()).when(template).saveDocument(any(String.class), any(Document.class), any(Class.class));
 
 		AutogenerateableId entity = new AutogenerateableId();
 		template.save(entity);
 
-		assertThat(entity.id, is(notNullValue()));
+		assertThat(entity.id).isNotNull();
 	}
 
-	/**
-	 * @see DATAMONGO-374
-	 */
-	@Test
-	public void convertsUpdateConstraintsUsingConverters() {
+	@Test // DATAMONGO-1912
+	void autogeneratesIdForMap() {
 
-		CustomConversions conversions = new CustomConversions(Collections.singletonList(MyConverter.INSTANCE));
+		MongoTemplate template = spy(this.template);
+		doReturn(new ObjectId()).when(template).saveDocument(any(String.class), any(Document.class), any(Class.class));
+
+		Map entity = new LinkedHashMap<>();
+		template.save(entity, "foo");
+
+		assertThat(entity).containsKey("_id");
+	}
+
+	@Test // DATAMONGO-374
+	void convertsUpdateConstraintsUsingConverters() {
+
+		CustomConversions conversions = new MongoCustomConversions(Collections.singletonList(MyConverter.INSTANCE));
 		this.converter.setCustomConversions(conversions);
 		this.converter.afterPropertiesSet();
@@ -200,85 +322,70 @@ public void convertsUpdateConstraintsUsingConverters() {
 
 		template.updateFirst(query, update, Wrapper.class);
 
 		QueryMapper queryMapper = new QueryMapper(converter);
-		DBObject reference = queryMapper.getMappedObject(update.getUpdateObject(), null);
+		Document reference = queryMapper.getMappedObject(update.getUpdateObject(), Optional.empty());
 
-		verify(collection, times(1)).update(Mockito.any(DBObject.class), eq(reference), anyBoolean(), anyBoolean());
+		verify(collection, times(1)).updateOne(any(org.bson.Document.class), eq(reference), any(UpdateOptions.class));
 	}
 
-	/**
-	 * @see DATAMONGO-474
-	 */
-	@Test
-	public void setsUnpopulatedIdField() {
+	@Test // DATAMONGO-474
+	void setsUnpopulatedIdField() {
 
 		NotAutogenerateableId entity = new NotAutogenerateableId();
 
 		template.populateIdIfNecessary(entity, 5);
-		assertThat(entity.id, is(5));
+		assertThat(entity.id).isEqualTo(5);
 	}
 
-	/**
-	 * @see DATAMONGO-474
-	 */
-	@Test
-	public void doesNotSetAlreadyPopulatedId() {
+	@Test // DATAMONGO-474
	void doesNotSetAlreadyPopulatedId() {
 
 		NotAutogenerateableId entity = new NotAutogenerateableId();
 		entity.id = 5;
 
 		template.populateIdIfNecessary(entity, 7);
-		assertThat(entity.id, is(5));
+		assertThat(entity.id).isEqualTo(5);
 	}
 
-	/**
-	 * @see DATAMONGO-868
-	 */
-	@Test
-	public void findAndModifyShouldBumpVersionByOneWhenVersionFieldNotIncludedInUpdate() {
+	@Test // DATAMONGO-868
+	void findAndModifyShouldBumpVersionByOneWhenVersionFieldNotIncludedInUpdate() {
 
 		VersionedEntity v = new VersionedEntity();
 		v.id = 1;
 		v.version = 0;
 
-		ArgumentCaptor<DBObject> captor = ArgumentCaptor.forClass(DBObject.class);
+		ArgumentCaptor<org.bson.Document> captor = ArgumentCaptor.forClass(org.bson.Document.class);
 
 		template.findAndModify(new Query(), new Update().set("id", "10"), VersionedEntity.class);
 
-		verify(collection, times(1)).findAndModify(Matchers.any(DBObject.class),
-				org.mockito.Matchers.isNull(DBObject.class), org.mockito.Matchers.isNull(DBObject.class), eq(false),
-				captor.capture(), eq(false), eq(false));
-		Assert.assertThat(captor.getValue().get("$inc"), Is.<Object> is(new BasicDBObject("version", 1L)));
+		verify(collection, times(1)).findOneAndUpdate(any(org.bson.Document.class), captor.capture(),
+				any(FindOneAndUpdateOptions.class));
+		assertThat(captor.getValue().get("$inc")).isEqualTo(new Document("version", 1L));
 	}
 
-	/**
-	 * @see DATAMONGO-868
-	 */
-	@Test
-	public void findAndModifyShouldNotBumpVersionByOneWhenVersionFieldAlreadyIncludedInUpdate() {
+	@Test // DATAMONGO-868
+	void findAndModifyShouldNotBumpVersionByOneWhenVersionFieldAlreadyIncludedInUpdate() {
 
 		VersionedEntity v = new VersionedEntity();
 		v.id = 1;
 		v.version = 0;
 
-		ArgumentCaptor<DBObject> captor = ArgumentCaptor.forClass(DBObject.class);
+		ArgumentCaptor<org.bson.Document> captor = ArgumentCaptor.forClass(org.bson.Document.class);
 
 		template.findAndModify(new Query(), new Update().set("version", 100), VersionedEntity.class);
 
-		verify(collection, times(1)).findAndModify(Matchers.any(DBObject.class), isNull(DBObject.class),
-				isNull(DBObject.class), eq(false), captor.capture(), eq(false), eq(false));
-		Assert.assertThat(captor.getValue().get("$set"), Is.<Object> is(new BasicDBObject("version", 100)));
-		Assert.assertThat(captor.getValue().get("$inc"), nullValue());
+		verify(collection, times(1)).findOneAndUpdate(any(org.bson.Document.class), captor.capture(),
+				any(FindOneAndUpdateOptions.class));
+
+		assertThat(captor.getValue().get("$set")).isEqualTo(new Document("version", 100));
+		assertThat(captor.getValue().get("$inc")).isNull();
 	}
 
-	/**
-	 * @see DATAMONGO-533
-	 */
-	@Test
-	public void registersDefaultEntityIndexCreatorIfApplicationContextHasOneForDifferentMappingContext() {
+	@Test // DATAMONGO-533
+	void registersDefaultEntityIndexCreatorIfApplicationContextHasOneForDifferentMappingContext() {
 
 		GenericApplicationContext applicationContext = new GenericApplicationContext();
 		applicationContext.getBeanFactory().registerSingleton("foo",
-				new MongoPersistentEntityIndexCreator(new MongoMappingContext(), factory));
+				new MongoPersistentEntityIndexCreator(new MongoMappingContext(), template));
 		applicationContext.refresh();
 
 		GenericApplicationContext spy = spy(applicationContext);
@@ -289,236 +396,338 @@ public void registersDefaultEntityIndexCreatorIfApplicationContextHasOneForDiffe
 
 		verify(spy, times(1)).addApplicationListener(argThat(new ArgumentMatcher<MongoPersistentEntityIndexCreator>() {
 
 			@Override
-			public boolean matches(Object argument) {
-
-				if (!(argument instanceof MongoPersistentEntityIndexCreator)) {
-					return false;
-				}
-
-				return ((MongoPersistentEntityIndexCreator) argument).isIndexCreatorFor(mappingContext);
+			public boolean matches(MongoPersistentEntityIndexCreator argument) {
+				return argument.isIndexCreatorFor(mappingContext);
 			}
 		}));
 	}
 
-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void findAllAndRemoveShouldRetrieveMatchingDocumentsPriorToRemoval() {
+	@Test // DATAMONGO-566
+	void findAllAndRemoveShouldRetrieveMatchingDocumentsPriorToRemoval() {
 
 		BasicQuery query = new BasicQuery("{'foo':'bar'}");
 		template.findAllAndRemove(query, VersionedEntity.class);
 
-		verify(collection, times(1)).find(Matchers.eq(query.getQueryObject()));
+		verify(collection, times(1)).find(Mockito.eq(query.getQueryObject()), any(Class.class));
 	}
 
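Editor's aside, not part of the patch: several tests above follow the same capture-then-assert shape. A minimal self-contained sketch of the ArgumentCaptor pattern, with a hypothetical collaborator standing in for the driver's MongoCollection:

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.bson.Document;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;

class ArgumentCaptorSketch {

	interface UpdateExecutor { // hypothetical collaborator
		void updateOne(Document filter, Document update);
	}

	@Test
	void capturesAndInspectsTheUpdateDocument() {

		UpdateExecutor executor = mock(UpdateExecutor.class);
		executor.updateOne(new Document(), new Document("$inc", new Document("version", 1L)));

		// Capture the update document handed to the mock ...
		ArgumentCaptor<Document> captor = ArgumentCaptor.forClass(Document.class);
		verify(executor).updateOne(any(Document.class), captor.capture());

		// ... and assert on the parts that matter instead of on the whole call.
		assertThat(captor.getValue().get("$inc")).isEqualTo(new Document("version", 1L));
	}
}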
-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void findAllAndRemoveShouldRemoveDocumentsReturedByFindQuery() {
+	@Test // GH-3648
+	void shouldThrowExceptionIfEntityReaderReturnsNull() {
+
+		when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
+		when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0)));
+		MappingMongoConverter converter = mock(MappingMongoConverter.class);
+		when(converter.getMappingContext()).thenReturn((MappingContext) mappingContext);
+		when(converter.getProjectionFactory()).thenReturn(new SpelAwareProxyProjectionFactory());
+		template = new MongoTemplate(factory, converter);
 
-		Mockito.when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
-		Mockito.when(cursor.next()).thenReturn(new BasicDBObject("_id", Integer.valueOf(0)))
-				.thenReturn(new BasicDBObject("_id", Integer.valueOf(1)));
+		assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.findAll(Person.class))
+				.withMessageContaining("returned null");
+	}
+
+	@Test // DATAMONGO-566
+	void findAllAndRemoveShouldRemoveDocumentsReturedByFindQuery() {
 
-		ArgumentCaptor<DBObject> queryCaptor = ArgumentCaptor.forClass(DBObject.class);
+		when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
+		when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0)))
+				.thenReturn(new org.bson.Document("_id", Integer.valueOf(1)));
+
+		ArgumentCaptor<org.bson.Document> queryCaptor = ArgumentCaptor.forClass(org.bson.Document.class);
 
 		BasicQuery query = new BasicQuery("{'foo':'bar'}");
 		template.findAllAndRemove(query, VersionedEntity.class);
 
-		verify(collection, times(1)).remove(queryCaptor.capture());
+		verify(collection, times(1)).deleteMany(queryCaptor.capture(), any());
 
-		DBObject idField = DBObjectTestUtils.getAsDBObject(queryCaptor.getValue(), "_id");
-		assertThat((Object[]) idField.get("$in"), is(new Object[] { Integer.valueOf(0), Integer.valueOf(1) }));
+		Document idField = DocumentTestUtils.getAsDocument(queryCaptor.getValue(), "_id");
+		assertThat((List<Object>) idField.get("$in")).containsExactly(Integer.valueOf(0), Integer.valueOf(1));
 	}
 
-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void findAllAndRemoveShouldNotTriggerRemoveIfFindResultIsEmpty() {
+	@Test // DATAMONGO-566
+	void findAllAndRemoveShouldNotTriggerRemoveIfFindResultIsEmpty() {
 
 		template.findAllAndRemove(new BasicQuery("{'foo':'bar'}"), VersionedEntity.class);
 
-		verify(collection, never()).remove(Mockito.any(DBObject.class));
+		verify(collection, never()).deleteMany(any(org.bson.Document.class));
 	}
 
-	/**
-	 * @see DATAMONGO-948
-	 */
-	@Test
-	public void sortShouldBeTakenAsIsWhenExecutingQueryWithoutSpecificTypeInformation() {
+	@Test // DATAMONGO-948
+	void sortShouldBeTakenAsIsWhenExecutingQueryWithoutSpecificTypeInformation() {
 
-		Query query = Query.query(Criteria.where("foo").is("bar")).with(new Sort("foo"));
+		Query query = Query.query(Criteria.where("foo").is("bar")).with(Sort.by("foo"));
 
 		template.executeQuery(query, "collection1", new DocumentCallbackHandler() {
 
 			@Override
-			public void processDocument(DBObject dbObject) throws MongoException, DataAccessException {
+			public void processDocument(Document document) throws MongoException, DataAccessException {
 				// nothing to do - just a test
 			}
 		});
 
-		ArgumentCaptor<DBObject> captor = ArgumentCaptor.forClass(DBObject.class);
-		verify(cursor, times(1)).sort(captor.capture());
-		assertThat(captor.getValue(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
+		ArgumentCaptor<org.bson.Document> captor = ArgumentCaptor.forClass(org.bson.Document.class);
+
+		verify(findIterable, times(1)).sort(captor.capture());
+		assertThat(captor.getValue()).isEqualTo(new Document("foo", 1));
 	}
 
-	/**
-	 * @see DATAMONGO-1166
-	 */
-	@Test
-	public void aggregateShouldHonorReadPreferenceWhenSet() {
+	@Test // DATAMONGO-1166, DATAMONGO-1824
+	void aggregateShouldHonorReadPreferenceWhenSet() {
 
-		when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
-				mock(CommandResult.class));
-		when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
 		template.setReadPreference(ReadPreference.secondary());
 
-		template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
+		template.aggregate(newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
 
-		verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
+		verify(collection).withReadPreference(eq(ReadPreference.secondary()));
 	}
 
-	/**
-	 * @see DATAMONGO-1166
-	 */
-	@Test
-	public void aggregateShouldIgnoreReadPreferenceWhenNotSet() {
+	@Test // DATAMONGO-1166, DATAMONGO-1824
+	void aggregateShouldIgnoreReadPreferenceWhenNotSet() {
 
-		when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
-				mock(CommandResult.class));
-		when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
+		template.aggregate(newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
 
-		template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
+		verify(collection, never()).withReadPreference(any());
+	}
+
+	@Test // GH-4277
+	void aggregateShouldHonorOptionsReadConcernWhenSet() {
 
-		verify(this.db, times(1)).command(Mockito.any(DBObject.class));
+		AggregationOptions options = AggregationOptions.builder().readConcern(ReadConcern.SNAPSHOT).build();
+		template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);
+
+		verify(collection).withReadConcern(ReadConcern.SNAPSHOT);
 	}
 
-	/**
-	 * @see DATAMONGO-1166
-	 */
-	@Test
-	public void geoNearShouldHonorReadPreferenceWhenSet() {
+	@Test // GH-4277
+	void aggregateShouldHonorOptionsReadPreferenceWhenSet() {
+
+		AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.secondary()).build();
+		template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);
+
+		verify(collection).withReadPreference(ReadPreference.secondary());
+	}
+
+	@Test // GH-4277
+	void aggregateStreamShouldHonorOptionsReadPreferenceWhenSet() {
+
+		AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.secondary()).build();
+		template.aggregateStream(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1",
+				Wrapper.class);
+
+		verify(collection).withReadPreference(ReadPreference.secondary());
+	}
+
+	@Test // GH-4644
+	void aggregateStreamShouldHonorMaxTimeIfSet() {
+
+		AggregationOptions options = AggregationOptions.builder().maxTime(Duration.ofSeconds(20)).build();
+
+		template.aggregateStream(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1",
+				Wrapper.class);
+
+		verify(aggregateIterable).maxTime(20000, TimeUnit.MILLISECONDS);
+	}
+
+	@Test // DATAMONGO-2153
+	void aggregateShouldHonorOptionsComment() {
+
+		AggregationOptions options = AggregationOptions.builder().comment("expensive").build();
+
+		template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);
+
+		verify(aggregateIterable).comment("expensive");
+	}
+
+	@Test // DATAMONGO-1836
+	void aggregateShouldHonorOptionsHint() {
+
+		Document hint = new Document("dummyField", 1);
+		AggregationOptions options = AggregationOptions.builder().hint(hint).build();
+
+		template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);
+
+		verify(aggregateIterable).hint(hint);
+	}
+
+	@Test // GH-4238
+	void aggregateShouldHonorOptionsHintString() {
+
+		AggregationOptions options = AggregationOptions.builder().hint("index-1").build();
+
+		template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);
+
+		verify(aggregateIterable).hintString("index-1");
+	}
+
+	@Test // GH-3542
+	void aggregateShouldUseRelaxedMappingByDefault() {
+
+		MongoTemplate template = new MongoTemplate(factory, converter) {
+
+			@Override
+			protected <O> AggregationResults<O> doAggregate(Aggregation aggregation, String collectionName,
+					Class<O> outputType, AggregationOperationContext context) {
+
+				assertThat(ReflectionTestUtils.getField(context, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed());
+				return super.doAggregate(aggregation, collectionName, outputType, context);
+			}
+		};
+
+		template.aggregate(
+				newAggregation(Jedi.class, Aggregation.unwind("foo")).withOptions(AggregationOptions.builder().build()),
+				Jedi.class);
+	}
+
+	@Test // GH-3542
+	void aggregateShouldUseStrictMappingIfOptionsIndicate() {
+
+		MongoTemplate template = new MongoTemplate(factory, converter) {
+
+			@Override
+			protected <O> AggregationResults<O> doAggregate(Aggregation aggregation, String collectionName,
+					Class<O> outputType, AggregationOperationContext context) {
+
+				assertThat(context).isInstanceOf(TypeBasedAggregationOperationContext.class);
+				return super.doAggregate(aggregation, collectionName, outputType, context);
+			}
+		};
+
+		assertThatExceptionOfType(InvalidPersistentPropertyPath.class)
+				.isThrownBy(() -> template.aggregate(newAggregation(Jedi.class, Aggregation.unwind("foo"))
+						.withOptions(AggregationOptions.builder().strictMapping().build()), Jedi.class));
+	}
+
+	@Test // DATAMONGO-1166, DATAMONGO-2264
+	void geoNearShouldHonorReadPreferenceWhenSet() {
 
-		when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
-				mock(CommandResult.class));
-		when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
 		template.setReadPreference(ReadPreference.secondary());
 
 		NearQuery query = NearQuery.near(new Point(1, 1));
 		template.geoNear(query, Wrapper.class);
 
-		verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
+		verify(collection).withReadPreference(eq(ReadPreference.secondary()));
 	}
 
-	/**
-	 * @see DATAMONGO-1166
-	 */
-	@Test
-	public void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
+	@Test // GH-4277
+	void geoNearShouldHonorReadPreferenceFromQuery() {
+
+		NearQuery query = NearQuery.near(new Point(1, 1));
+		query.withReadPreference(ReadPreference.secondary());
+
+		template.geoNear(query, Wrapper.class);
+
+		verify(collection).withReadPreference(eq(ReadPreference.secondary()));
+	}
 
-		when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
-				mock(CommandResult.class));
-		when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
+	@Test // GH-4277
+	void geoNearShouldHonorReadConcernFromQuery() {
 
 		NearQuery query = NearQuery.near(new Point(1, 1));
+		query.withReadConcern(ReadConcern.SNAPSHOT);
+
 		template.geoNear(query, Wrapper.class);
 
-		verify(this.db, times(1)).command(Mockito.any(DBObject.class));
+		verify(collection).withReadConcern(eq(ReadConcern.SNAPSHOT));
 	}
 
-	/**
-	 * @see DATAMONGO-1334
-	 */
-	@Test
-	public void mapReduceShouldUseZeroAsDefaultLimit() {
+	@Test // DATAMONGO-1166, DATAMONGO-2264
+	void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
+
+		NearQuery query = NearQuery.near(new Point(1, 1));
+		template.geoNear(query, Wrapper.class);
 
-		ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
+		verify(collection, never()).withReadPreference(any());
+	}
+
+	@Test // DATAMONGO-1334
+	@Disabled("TODO: mongo3 - a bit hard to test with the immutable object stuff")
+	void mapReduceShouldUseZeroAsDefaultLimit() {
 
-		MapReduceOutput output = mock(MapReduceOutput.class);
-		when(output.results()).thenReturn(Collections.<DBObject> emptySet());
-		when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
+		MongoCursor cursor = mock(MongoCursor.class);
+		MapReduceIterable output = mock(MapReduceIterable.class);
+		when(output.limit(anyInt())).thenReturn(output);
+		when(output.sort(any(Document.class))).thenReturn(output);
+		when(output.filter(any(Document.class))).thenReturn(output);
+		when(output.iterator()).thenReturn(cursor);
+		when(cursor.hasNext()).thenReturn(false);
+
+		when(collection.mapReduce(anyString(), anyString())).thenReturn(output);
 
 		Query query = new BasicQuery("{'foo':'bar'}");
 
 		template.mapReduce(query, "collection", "function(){}", "function(key,values){}", Wrapper.class);
 
-		verify(collection).mapReduce(captor.capture());
-
-		assertThat(captor.getValue().getLimit(), is(0));
+		verify(output, times(1)).limit(1);
 	}
 
-	/**
-	 * @see DATAMONGO-1334
-	 */
-	@Test
-	public void mapReduceShouldPickUpLimitFromQuery() {
+	@Test // DATAMONGO-1334
+	void mapReduceShouldPickUpLimitFromQuery() {
 
-		ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
+		MongoCursor cursor = mock(MongoCursor.class);
+		MapReduceIterable output = mock(MapReduceIterable.class);
+		when(output.limit(anyInt())).thenReturn(output);
+		when(output.sort(any())).thenReturn(output);
+		when(output.filter(any(Document.class))).thenReturn(output);
+		when(output.iterator()).thenReturn(cursor);
+		when(cursor.hasNext()).thenReturn(false);
 
-		MapReduceOutput output = mock(MapReduceOutput.class);
-		when(output.results()).thenReturn(Collections.<DBObject> emptySet());
-		when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
+		when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output);
 
 		Query query = new BasicQuery("{'foo':'bar'}");
 		query.limit(100);
 
 		template.mapReduce(query, "collection", "function(){}", "function(key,values){}", Wrapper.class);
 
-		verify(collection).mapReduce(captor.capture());
-
-		assertThat(captor.getValue().getLimit(), is(100));
+		verify(output, times(1)).limit(100);
 	}
 
-	/**
-	 * @see DATAMONGO-1334
-	 */
-	@Test
-	public void mapReduceShouldPickUpLimitFromOptions() {
+	@Test // DATAMONGO-1334
+	void mapReduceShouldPickUpLimitFromOptions() {
 
-		ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
+		MongoCursor cursor = mock(MongoCursor.class);
+		MapReduceIterable output = mock(MapReduceIterable.class);
+		when(output.limit(anyInt())).thenReturn(output);
+		when(output.sort(any())).thenReturn(output);
+		when(output.filter(any(Document.class))).thenReturn(output);
+		when(output.iterator()).thenReturn(cursor);
+		when(cursor.hasNext()).thenReturn(false);
 
-		MapReduceOutput output = mock(MapReduceOutput.class);
-		when(output.results()).thenReturn(Collections.<DBObject> emptySet());
-		when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
+		when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output);
 
 		Query query = new BasicQuery("{'foo':'bar'}");
 
 		template.mapReduce(query, "collection", "function(){}", "function(key,values){}",
 				new MapReduceOptions().limit(1000), Wrapper.class);
 
-		verify(collection).mapReduce(captor.capture());
-		assertThat(captor.getValue().getLimit(), is(1000));
+		verify(output, times(1)).limit(1000);
 	}
 
-	/**
-	 * @see DATAMONGO-1334
-	 */
-	@Test
-	public void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() {
+	@Test // DATAMONGO-1334
+	void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() {
 
-		ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
+		MongoCursor cursor = mock(MongoCursor.class);
+		MapReduceIterable output = mock(MapReduceIterable.class);
+		when(output.limit(anyInt())).thenReturn(output);
+		when(output.sort(any())).thenReturn(output);
+		when(output.filter(any())).thenReturn(output);
+		when(output.iterator()).thenReturn(cursor);
+		when(cursor.hasNext()).thenReturn(false);
 
-		MapReduceOutput output = mock(MapReduceOutput.class);
-		when(output.results()).thenReturn(Collections.<DBObject> emptySet());
-		when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
+		when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output);
 
 		template.mapReduce("collection", "function(){}", "function(key,values){}",
 				new MapReduceOptions().limit(1000), Wrapper.class);
 
-		verify(collection).mapReduce(captor.capture());
-		assertThat(captor.getValue().getLimit(), is(1000));
+		verify(output, times(1)).limit(1000);
 	}
 
-	/**
-	 * @see DATAMONGO-1334
-	 */
-	@Test
-	public void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferently() {
+	@Test // DATAMONGO-1334
+	void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferently() {
 
-		ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
+		MongoCursor cursor = mock(MongoCursor.class);
+		MapReduceIterable output = mock(MapReduceIterable.class);
+		when(output.limit(anyInt())).thenReturn(output);
+		when(output.sort(any())).thenReturn(output);
+		when(output.filter(any(Document.class))).thenReturn(output);
+		when(output.iterator()).thenReturn(cursor);
+		when(cursor.hasNext()).thenReturn(false);
 
-		MapReduceOutput output = mock(MapReduceOutput.class);
-		when(output.results()).thenReturn(Collections.<DBObject> emptySet());
-		when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
+		when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output);
 
 		Query query = new BasicQuery("{'foo':'bar'}");
 		query.limit(100);
@@ -526,73 +735,2284 @@ public void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferent
 
 		template.mapReduce(query, "collection", "function(){}", "function(key,values){}",
 				new MapReduceOptions().limit(1000), Wrapper.class);
 
-		verify(collection).mapReduce(captor.capture());
+		verify(output, times(1)).limit(1000);
+	}
+
+	@Test // DATAMONGO-1639
+	void beforeConvertEventForUpdateSeesNextVersion() {
+
+		when(updateResult.getModifiedCount()).thenReturn(1L);
 
-		assertThat(captor.getValue().getLimit(), is(1000));
+		final VersionedEntity entity = new VersionedEntity();
+		entity.id = 1;
+		entity.version = 0;
+
+		GenericApplicationContext context = new GenericApplicationContext();
+		context.refresh();
+		context.addApplicationListener(new AbstractMongoEventListener<VersionedEntity>() {
+
+			@Override
+			public void onBeforeConvert(BeforeConvertEvent<VersionedEntity> event) {
+				assertThat(event.getSource().version).isEqualTo(1);
+			}
+		});
+
+		template.setApplicationContext(context);
+
+		template.save(entity);
 	}
 
-	class AutogenerateableId {
+	@Test // DATAMONGO-1447
+	void shouldNotAppend$isolatedToNonMulitUpdate() {
 
-		@Id BigInteger id;
+		template.updateFirst(new Query(), new Update().isolated().set("jon", "snow"), Wrapper.class);
+
+		ArgumentCaptor<Bson> queryCaptor = ArgumentCaptor.forClass(Bson.class);
+		ArgumentCaptor<Bson> updateCaptor = ArgumentCaptor.forClass(Bson.class);
+
+		verify(collection).updateOne(queryCaptor.capture(), updateCaptor.capture(), any());
+
+		assertThat((Document) queryCaptor.getValue()).doesNotContainKey("$isolated");
+		assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated");
 	}
 
-	class NotAutogenerateableId {
+	@Test // DATAMONGO-1447
+	void shouldAppend$isolatedToUpdateMultiEmptyQuery() {
 
-		@Id Integer id;
+		template.updateMulti(new Query(), new Update().isolated().set("jon", "snow"), Wrapper.class);
 
-		public Pattern getId() {
-			return Pattern.compile(".");
-		}
+		ArgumentCaptor<Bson> queryCaptor = ArgumentCaptor.forClass(Bson.class);
+		ArgumentCaptor<Bson> updateCaptor = ArgumentCaptor.forClass(Bson.class);
+
+		verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any());
+
+		assertThat((Document) queryCaptor.getValue()).hasSize(1).containsEntry("$isolated", 1);
+		assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated");
 	}
 
-	static class VersionedEntity {
+	@Test // DATAMONGO-1447
+	void shouldAppend$isolatedToUpdateMultiQueryIfNotPresentAndUpdateSetsValue() {
 
-		@Id Integer id;
-		@Version Integer version;
+		Update update = new Update().isolated().set("jon", "snow");
+		Query query = new BasicQuery("{'eddard':'stark'}");
+
+		template.updateMulti(query, update, Wrapper.class);
+
+		ArgumentCaptor<Bson> queryCaptor = ArgumentCaptor.forClass(Bson.class);
+		ArgumentCaptor<Bson> updateCaptor = ArgumentCaptor.forClass(Bson.class);
+
+		verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any());
+
+		assertThat((Document) queryCaptor.getValue()).containsEntry("$isolated", 1).containsEntry("eddard", "stark");
+		assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated");
 	}
 
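Editor's aside, not part of the patch: the DATAMONGO-1447 tests pin down when Spring Data adds MongoDB's $isolated operator to the mapped query. A compact restatement of the behaviour they assert, with a hypothetical Wrapper stand-in type:

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class IsolatedOperatorSketch {

	void summarize(MongoTemplate template) {

		Update isolated = new Update().isolated().set("jon", "snow");
		Query query = new BasicQuery("{'eddard':'stark'}");

		// updateMulti: the mapped query gains {"$isolated": 1} because several
		// documents may be touched; an already present $isolated value is kept as-is.
		template.updateMulti(query, isolated, Wrapper.class);

		// updateFirst: $isolated is never added; a single-document update does not
		// need it, even when the Update was marked isolated().
		template.updateFirst(query, isolated, Wrapper.class);
	}

	static class Wrapper {} // hypothetical stand-in for the mapped domain type
}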
-	enum MyConverter implements Converter {
+	@Test // DATAMONGO-1447
+	void shouldNotAppend$isolatedToUpdateMultiQueryIfNotPresentAndUpdateDoesNotSetValue() {
 
-		INSTANCE;
+		Update update = new Update().set("jon", "snow");
+		Query query = new BasicQuery("{'eddard':'stark'}");
 
-		public String convert(AutogenerateableId source) {
-			return source.toString();
-		}
+		template.updateMulti(query, update, Wrapper.class);
+
+		ArgumentCaptor<Bson> queryCaptor = ArgumentCaptor.forClass(Bson.class);
+		ArgumentCaptor<Bson> updateCaptor = ArgumentCaptor.forClass(Bson.class);
+
+		verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any());
+
+		assertThat((Document) queryCaptor.getValue()).doesNotContainKey("$isolated").containsEntry("eddard", "stark");
+		assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated");
 	}
 
-	class Wrapper {
+	@Test // DATAMONGO-1447
+	void shouldNotOverwrite$isolatedToUpdateMultiQueryIfPresentAndUpdateDoesNotSetValue() {
 
-		AutogenerateableId foo;
+		Update update = new Update().set("jon", "snow");
+		Query query = new BasicQuery("{'eddard':'stark', '$isolated' : 1}");
+
+		template.updateMulti(query, update, Wrapper.class);
+
+		ArgumentCaptor<Bson> queryCaptor = ArgumentCaptor.forClass(Bson.class);
+		ArgumentCaptor<Bson> updateCaptor = ArgumentCaptor.forClass(Bson.class);
+
+		verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any());
+
+		assertThat((Document) queryCaptor.getValue()).containsEntry("$isolated", 1).containsEntry("eddard", "stark");
+		assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated");
 	}
 
-	/**
-	 * Mocks out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual
-	 * behaviour.
-	 *
-	 * @return
-	 */
-	private MongoTemplate mockOutGetDb() {
+	@Test // DATAMONGO-1447
+	void shouldNotOverwrite$isolatedToUpdateMultiQueryIfPresentAndUpdateSetsValue() {
 
-		MongoTemplate template = spy(this.template);
-		stub(template.getDb()).toReturn(db);
-		return template;
+		Update update = new Update().isolated().set("jon", "snow");
+		Query query = new BasicQuery("{'eddard':'stark', '$isolated' : 0}");
+
+		template.updateMulti(query, update, Wrapper.class);
+
+		ArgumentCaptor<Bson> queryCaptor = ArgumentCaptor.forClass(Bson.class);
+		ArgumentCaptor<Bson> updateCaptor = ArgumentCaptor.forClass(Bson.class);
+
+		verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any());
+
+		assertThat((Document) queryCaptor.getValue()).containsEntry("$isolated", 0).containsEntry("eddard", "stark");
+		assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated");
 	}
 
-	/* (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.core.MongoOperationsUnitTests#getOperations()
-	 */
-	@Override
-	protected MongoOperations getOperationsForExceptionHandling() {
-		MongoTemplate template = spy(this.template);
-		stub(template.getDb()).toThrow(new MongoException("Error!"));
-		return template;
+	@Test // DATAMONGO-1311
+	void executeQueryShouldUseBatchSizeWhenPresent() {
+
+		when(findIterable.batchSize(anyInt())).thenReturn(findIterable);
+
+		Query query = new Query().cursorBatchSize(1234);
+		template.find(query, Person.class);
+
+		verify(findIterable).batchSize(1234);
 	}
 
-	/* (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.core.MongoOperationsUnitTests#getOperations()
-	 */
-	@Override
-	protected MongoOperations getOperations() {
-		return this.template;
+	@Test // GH-4277
+	void findShouldUseReadConcernWhenPresent() {
 
BasicQuery("{'foo' : 'bar'}").withReadConcern(ReadConcern.SNAPSHOT), AutogenerateableId.class); + + verify(collection).withReadConcern(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void findShouldUseReadPreferenceWhenPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}").withReadPreference(ReadPreference.secondary()), + AutogenerateableId.class); + + verify(collection).withReadPreference(ReadPreference.secondary()); + } + + @Test // DATAMONGO-1518 + void executeQueryShouldUseCollationWhenPresent() { + + template.executeQuery(new BasicQuery("{}").collation(Collation.of("fr")), "collection-1", val -> {}); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518 + void streamQueryShouldUseCollationWhenPresent() { + + template.stream(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518 + void findShouldUseCollationWhenPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}").collation(Collation.of("fr")), AutogenerateableId.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518 + void findOneShouldUseCollationWhenPresent() { + + template.findOne(new BasicQuery("{'foo' : 'bar'}").collation(Collation.of("fr")), AutogenerateableId.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518 + void existsShouldUseCollationWhenPresent() { + + template.exists(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1518 + void findAndModfiyShoudUseCollationWhenPresent() { + + template.findAndModify(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1518 + void findAndRemoveShouldUseCollationWhenPresent() { + + template.findAndRemove(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); + verify(collection).findOneAndDelete(any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-2196 + void removeShouldApplyWriteConcern() { + + Person person = new Person(); + person.id = "id-1"; + + template.setWriteConcern(WriteConcern.UNACKNOWLEDGED); + template.remove(person); + + verify(collection).withWriteConcern(eq(WriteConcern.UNACKNOWLEDGED)); + verify(collectionWithWriteConcern).deleteOne(any(Bson.class), any()); + } + + @Test // DATAMONGO-1518 + void findAndRemoveManyShouldUseCollationWhenPresent() { + + template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class, + true); + + ArgumentCaptor options = 
+		ArgumentCaptor<DeleteOptions> options = ArgumentCaptor.forClass(DeleteOptions.class);
+		verify(collection).deleteMany(any(), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr");
+	}
+
+	@Test // DATAMONGO-1518
+	void updateOneShouldUseCollationWhenPresent() {
+
+		template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"),
+				AutogenerateableId.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateOne(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr");
+	}
+
+	@Test // DATAMONGO-1518
+	void updateManyShouldUseCollationWhenPresent() {
+
+		template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"),
+				AutogenerateableId.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateMany(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr");
+	}
+
+	@Test // GH-3218
+	void updateUsesHintStringFromQuery() {
+
+		template.updateFirst(new Query().withHint("index-1"), new Update().set("spring", "data"), Human.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getHintString()).isEqualTo("index-1");
+	}
+
+	@Test // GH-3218
+	void updateUsesHintDocumentFromQuery() {
+
+		template.updateFirst(new Query().withHint("{ name : 1 }"), new Update().set("spring", "data"), Human.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getHint()).isEqualTo(new Document("name", 1));
+	}
+
+	@Test // DATAMONGO-1518
+	void replaceOneShouldUseCollationWhenPresent() {
+
+		template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class);
+
+		ArgumentCaptor<com.mongodb.client.model.ReplaceOptions> options = ArgumentCaptor
+				.forClass(com.mongodb.client.model.ReplaceOptions.class);
+		verify(collection).replaceOne(any(), any(), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr");
+	}
+
+	@Test // DATAMONGO-1518, DATAMONGO-1824
+	void aggregateShouldUseCollationWhenPresent() {
+
+		Aggregation aggregation = newAggregation(project("id"))
+				.withOptions(newAggregationOptions().collation(Collation.of("fr")).build());
+		template.aggregate(aggregation, AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
+	@Test // DATAMONGO-1824
+	void aggregateShouldUseBatchSizeWhenPresent() {
+
+		Aggregation aggregation = newAggregation(project("id"))
+				.withOptions(newAggregationOptions().collation(Collation.of("fr")).cursorBatchSize(100).build());
+		template.aggregate(aggregation, AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).batchSize(100);
+	}
+
+	@Test // DATAMONGO-1518
+	void mapReduceShouldUseCollationWhenPresent() {
+
+		template.mapReduce("", "", "", MapReduceOptions.options().collation(Collation.of("fr")), AutogenerateableId.class);
+
+		verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
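Editor's aside, not part of the patch: the GH-3218 tests above fix how Query.withHint(String) is propagated to the driver. A short restatement of the dual behaviour they assert; the Human stand-in type is hypothetical:

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class HintPropagationSketch {

	void summarize(MongoTemplate template) {

		// A hint that parses as JSON travels as a driver hint *document* ...
		template.updateFirst(new Query().withHint("{ name : 1 }"), new Update().set("spring", "data"), Human.class);
		// -> UpdateOptions.getHint() equals new Document("name", 1)

		// ... anything else is passed through as an index *name*.
		template.updateFirst(new Query().withHint("index-1"), new Update().set("spring", "data"), Human.class);
		// -> UpdateOptions.getHintString() equals "index-1"
	}

	static class Human { String name; } // hypothetical stand-in for the test fixture type
}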
+	@Test // DATAMONGO-2027
+	void mapReduceShouldUseOutputCollectionWhenPresent() {
+
+		template.mapReduce("", "", "", MapReduceOptions.options().outputCollection("out-collection"),
+				AutogenerateableId.class);
+
+		verify(mapReduceIterable).collectionName(eq("out-collection"));
+	}
+
+	@Test // DATAMONGO-2027
+	void mapReduceShouldNotUseOutputCollectionForInline() {
+
+		template.mapReduce("", "", "", MapReduceOptions.options().actionInline().outputCollection("out-collection"),
+				AutogenerateableId.class);
+
+		verify(mapReduceIterable, never()).collectionName(any());
+	}
+
+	@Test // DATAMONGO-2027
+	void mapReduceShouldUseOutputActionWhenPresent() {
+
+		template.mapReduce("", "", "", MapReduceOptions.options().actionMerge().outputCollection("out-collection"),
+				AutogenerateableId.class);
+
+		verify(mapReduceIterable).action(eq(MapReduceAction.MERGE));
+	}
+
+	@Test // DATAMONGO-2027
+	void mapReduceShouldUseOutputDatabaseWhenPresent() {
+
+		template.mapReduce("", "", "",
+				MapReduceOptions.options().outputDatabase("out-database").outputCollection("out-collection"),
+				AutogenerateableId.class);
+
+		verify(mapReduceIterable).databaseName(eq("out-database"));
+	}
+
+	@Test // DATAMONGO-2027
+	void mapReduceShouldNotUseOutputDatabaseForInline() {
+
+		template.mapReduce("", "", "", MapReduceOptions.options().outputDatabase("out-database"), AutogenerateableId.class);
+
+		verify(mapReduceIterable, never()).databaseName(any());
+	}
+
+	@Test // DATAMONGO-1518, DATAMONGO-2264
+	void geoNearShouldUseCollationWhenPresent() {
+
+		NearQuery query = NearQuery.near(0D, 0D).query(new BasicQuery("{}").collation(Collation.of("fr")));
+		template.geoNear(query, AutogenerateableId.class);
+
+		verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
+	@Test // DATAMONGO-1880
+	void countShouldUseCollationWhenPresent() {
+
+		template.count(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class);
+
+		ArgumentCaptor<CountOptions> options = ArgumentCaptor.forClass(CountOptions.class);
+		verify(collection).countDocuments(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build());
+	}
+
+	@Test // DATAMONGO-2360
+	void countShouldApplyQueryHintIfPresent() {
+
+		Document queryHint = new Document("age", 1);
+		template.count(new BasicQuery("{}").withHint(queryHint), AutogenerateableId.class);
+
+		ArgumentCaptor<CountOptions> options = ArgumentCaptor.forClass(CountOptions.class);
+		verify(collection).countDocuments(any(), options.capture());
+
+		assertThat(options.getValue().getHint()).isEqualTo(queryHint);
+	}
+
+	@Test // DATAMONGO-2365
+	void countShouldApplyQueryHintAsIndexNameIfPresent() {
+
+		template.count(new BasicQuery("{}").withHint("idx-1"), AutogenerateableId.class);
+
+		ArgumentCaptor<CountOptions> options = ArgumentCaptor.forClass(CountOptions.class);
+		verify(collection).countDocuments(any(), options.capture());
+
+		assertThat(options.getValue().getHintString()).isEqualTo("idx-1");
+	}
+
+	@Test // DATAMONGO-1733
+	void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
+				PersonProjection.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(new Document("firstname", 1)));
+	}
+
+	@Test // DATAMONGO-1733
+	void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1),
+				Person.class, PersonProjection.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(new Document("bar", 1)));
+	}
+
+	@Test // DATAMONGO-1733
+	void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
+				PersonSpELProjection.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
+	}
+
+	@Test // DATAMONGO-1733, DATAMONGO-2041
+	void appliesFieldsToDtoProjection() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
+				Jedi.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(new Document("firstname", 1)));
+	}
+
+	@Test // DATAMONGO-1733
+	void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class,
+				Jedi.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(new Document("bar", 1)));
+	}
+
+	@Test // DATAMONGO-1733
+	void doesNotApplyFieldsWhenTargetIsNotAProjection() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
+				Person.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
+	}
+
+	@Test // DATAMONGO-1733
+	void doesNotApplyFieldsWhenTargetExtendsDomainType() {
+
+		template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
+				PersonExtended.class, CursorPreparer.NO_OP_PREPARER);
+
+		verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
+	}
+
+	@Test // DATAMONGO-1348, DATAMONGO-2264
+	void geoNearShouldMapQueryCorrectly() {
+
+		NearQuery query = NearQuery.near(new Point(1, 1));
+		query.query(Query.query(Criteria.where("customName").is("rand al'thor")));
+
+		template.geoNear(query, WithNamedFields.class);
+
+		ArgumentCaptor<List<Document>> capture = ArgumentCaptor.forClass(List.class);
+
+		verify(collection).aggregate(capture.capture(), eq(Document.class));
+		Document $geoNear = capture.getValue().iterator().next();
+
+		assertThat($geoNear).containsEntry("$geoNear.query.custom-named-field", "rand al'thor")
+				.doesNotContainKey("query.customName");
+	}
+
+	@Test // DATAMONGO-1348, DATAMONGO-2264
+	void geoNearShouldMapGeoJsonPointCorrectly() {
+
+		NearQuery query = NearQuery.near(new GeoJsonPoint(1, 2));
+		query.query(Query.query(Criteria.where("customName").is("rand al'thor")));
+
+		template.geoNear(query, WithNamedFields.class);
+
+		ArgumentCaptor<List<Document>> capture = ArgumentCaptor.forClass(List.class);
+
+		verify(collection).aggregate(capture.capture(), eq(Document.class));
+		Document $geoNear = capture.getValue().iterator().next();
+
+		assertThat($geoNear).containsEntry("$geoNear.near.type", "Point").containsEntry("$geoNear.near.coordinates.[0]", 1D)
+				.containsEntry("$geoNear.near.coordinates.[1]", 2D);
+	}
+
+	@Test // DATAMONGO-2155, GH-3407
+	void saveVersionedEntityShouldCallUpdateCorrectly() {
+
+		when(updateResult.getModifiedCount()).thenReturn(1L);
+
+		VersionedEntity entity = new VersionedEntity();
+		entity.id = 1;
+		entity.version = 10;
+
+		ArgumentCaptor<org.bson.Document> queryCaptor = ArgumentCaptor.forClass(org.bson.Document.class);
+		ArgumentCaptor<org.bson.Document> updateCaptor = ArgumentCaptor.forClass(org.bson.Document.class);
+
+		template.save(entity);
+
+		verify(collection, times(1)).replaceOne(queryCaptor.capture(), updateCaptor.capture(),
+				any(com.mongodb.client.model.ReplaceOptions.class));
+
+		assertThat(queryCaptor.getValue()).isEqualTo(new Document("_id", 1).append("version", 10));
+		assertThat(updateCaptor.getValue())
+				.isEqualTo(new Document("version", 11).append("_class", VersionedEntity.class.getName()).append("name", null));
+	}
+
+	@Test // DATAMONGO-1783
+	void usesQueryOffsetForCountOperation() {
+
+		template.count(new BasicQuery("{}").skip(100), AutogenerateableId.class);
+
+		ArgumentCaptor<CountOptions> options = ArgumentCaptor.forClass(CountOptions.class);
+		verify(collection).countDocuments(any(), options.capture());
+
+		assertThat(options.getValue().getSkip()).isEqualTo(100);
+	}
+
+	@Test // DATAMONGO-1783
+	void usesQueryLimitForCountOperation() {
+
+		template.count(new BasicQuery("{}").limit(10), AutogenerateableId.class);
+
+		ArgumentCaptor<CountOptions> options = ArgumentCaptor.forClass(CountOptions.class);
+		verify(collection).countDocuments(any(), options.capture());
+
+		assertThat(options.getValue().getLimit()).isEqualTo(10);
+	}
+
+	@Test // DATAMONGO-2215
+	void updateShouldApplyArrayFilters() {
+
+		template.updateFirst(new BasicQuery("{}"),
+				new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)),
+				EntityWithListOfSimple.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateOne(any(), any(Bson.class), options.capture());
+
+		Assertions.assertThat((List) options.getValue().getArrayFilters())
+				.contains(new org.bson.Document("element", new Document("$gte", 100)));
+	}
+
+	@Test // DATAMONGO-2215
+	void findAndModifyShouldApplyArrayFilters() {
+
+		template.findAndModify(new BasicQuery("{}"),
+				new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)),
+				EntityWithListOfSimple.class);
+
+		ArgumentCaptor<FindOneAndUpdateOptions> options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class);
+		verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture());
+
+		Assertions.assertThat((List) options.getValue().getArrayFilters())
+				.contains(new org.bson.Document("element", new Document("$gte", 100)));
+	}
+
+	@Test // DATAMONGO-1854
+	void streamQueryShouldUseDefaultCollationWhenPresent() {
+
+		template.stream(new BasicQuery("{}"), Sith.class);
+
+		verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void findShouldNotUseCollationWhenNoDefaultPresent() {
+
+		template.find(new BasicQuery("{'foo' : 'bar'}"), Jedi.class);
+
+		verify(findIterable, never()).collation(any());
+	}
+
+	@Test // DATAMONGO-1854
+	void findShouldUseDefaultCollationWhenPresent() {
+
+		template.find(new BasicQuery("{'foo' : 'bar'}"), Sith.class);
+
+		verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void findOneShouldUseDefaultCollationWhenPresent() {
+
+		template.findOne(new BasicQuery("{'foo' : 'bar'}"), Sith.class);
+
+		verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void existsShouldUseDefaultCollationWhenPresent() {
+
+		template.exists(new BasicQuery("{}"), Sith.class);
+
+		ArgumentCaptor<CountOptions> options = ArgumentCaptor.forClass(CountOptions.class);
+		verify(collection).countDocuments(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
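Editor's aside, not part of the patch: the long run of DATAMONGO-1854 tests that starts here all exercises a single rule. Assuming Sith carries a default collation on its mapping annotation (the fixture itself sits outside this hunk), the rule is: an explicit collation on the query wins, otherwise the type's default applies, otherwise no collation is sent at all. A hypothetical sketch:

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;

class DefaultCollationSketch {

	@Document(collation = "de_AT") // hypothetical equivalent of the Sith fixture
	static class Sith {
		@Field("firstname") String name;
	}

	static class Jedi { // no default collation
		String firstname;
	}

	void summarize(MongoTemplate template) {

		template.find(new BasicQuery("{}").collation(Collation.of("fr")), Sith.class); // explicit "fr" wins
		template.find(new BasicQuery("{}"), Sith.class); // falls back to the type default "de_AT"
		template.find(new BasicQuery("{}"), Jedi.class); // no default -> no collation is sent
	}
}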
+	@Test // DATAMONGO-1854
+	void findAndModfiyShoudUseDefaultCollationWhenPresent() {
+
+		template.findAndModify(new BasicQuery("{}"), new Update(), Sith.class);
+
+		ArgumentCaptor<FindOneAndUpdateOptions> options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class);
+		verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void findAndRemoveShouldUseDefaultCollationWhenPresent() {
+
+		template.findAndRemove(new BasicQuery("{}"), Sith.class);
+
+		ArgumentCaptor<FindOneAndDeleteOptions> options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class);
+		verify(collection).findOneAndDelete(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void createCollectionShouldNotCollationIfNotPresent() {
+
+		template.createCollection(AutogenerateableId.class);
+
+		ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class);
+		verify(db).createCollection(any(), options.capture());
+
+		Assertions.assertThat(options.getValue().getCollation()).isNull();
+	}
+
+	@Test // DATAMONGO-1854
+	void createCollectionShouldApplyDefaultCollation() {
+
+		template.createCollection(Sith.class);
+
+		ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class);
+		verify(db).createCollection(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void createCollectionShouldFavorExplicitOptionsOverDefaultCollation() {
+
+		template.createCollection(Sith.class, CollectionOptions.just(Collation.of("en_US")));
+
+		ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class);
+		verify(db).createCollection(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void createCollectionShouldUseDefaultCollationIfCollectionOptionsAreNull() {
+
+		template.createCollection(Sith.class, null);
+
+		ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class);
+		verify(db).createCollection(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void aggreateShouldUseDefaultCollationIfPresent() {
+
+		template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void aggreateShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() {
+
+		template.aggregateStream(newAggregation(Sith.class, project("id")).withOptions(
+				newAggregationOptions().collation(Collation.of("fr")).build()), AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void aggreateStreamShouldUseDefaultCollationIfPresent() {
+
+		template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void aggreateStreamShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() {
+
+		template.aggregateStream(newAggregation(Sith.class, project("id")).withOptions(
+				newAggregationOptions().collation(Collation.of("fr")).build()), AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
+	@Test // DATAMONGO-2390
+	void aggregateShouldNoApplyZeroOrNegativeMaxTime() {
+
+		template.aggregate(
+				newAggregation(Sith.class, project("id")).withOptions(newAggregationOptions().maxTime(Duration.ZERO).build()),
+				AutogenerateableId.class, Document.class);
+		template.aggregate(newAggregation(Sith.class, project("id")).withOptions(
+				newAggregationOptions().maxTime(Duration.ofSeconds(-1)).build()), AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable, never()).maxTime(anyLong(), any());
+	}
+
+	@Test // DATAMONGO-2390
+	void aggregateShouldApplyMaxTimeIfSet() {
+
+		template.aggregate(newAggregation(Sith.class, project("id")).withOptions(
+				newAggregationOptions().maxTime(Duration.ofSeconds(10)).build()), AutogenerateableId.class, Document.class);
+
+		verify(aggregateIterable).maxTime(eq(10000L), eq(TimeUnit.MILLISECONDS));
+	}
+
+	@Test // DATAMONGO-1854
+	void findAndReplaceShouldUseCollationWhenPresent() {
+
+		template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new AutogenerateableId());
+
+		ArgumentCaptor<FindOneAndReplaceOptions> options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class);
+		verify(collection).findOneAndReplace(any(), any(), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr");
+	}
+
+	@Test // DATAMONGO-1854
+	void findOneWithSortShouldUseCollationWhenPresent() {
+
+		template.findOne(new BasicQuery("{}").collation(Collation.of("fr")).with(Sort.by("id")), Sith.class);
+
+		verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void findOneWithSortShouldUseDefaultCollationWhenPresent() {
+
+		template.findOne(new BasicQuery("{}").with(Sort.by("id")), Sith.class);
+
+		verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void findAndReplaceShouldUseDefaultCollationWhenPresent() {
+
+		template.findAndReplace(new BasicQuery("{}"), new Sith());
+
+		ArgumentCaptor<FindOneAndReplaceOptions> options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class);
+		verify(collection).findOneAndReplace(any(), any(), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT");
+	}
+
+	@Test // DATAMONGO-1854
+	void findAndReplaceShouldUseCollationEvenIfDefaultCollationIsPresent() {
+
+		template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new Sith());
+
+		ArgumentCaptor<FindOneAndReplaceOptions> options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class);
+		verify(collection).findOneAndReplace(any(), any(), options.capture());
+
+		assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr");
+	}
+
+	@Test // DATAMONGO-1854
+	void findDistinctShouldUseDefaultCollationWhenPresent() {
+
+		template.findDistinct(new BasicQuery("{}"), "name", Sith.class, String.class);
+
+		verify(distinctIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void findDistinctPreferCollationFromQueryOverDefaultCollation() {
+
+		template.findDistinct(new BasicQuery("{}").collation(Collation.of("fr")), "name", Sith.class, String.class);
+
+		verify(distinctIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void updateFirstShouldUseDefaultCollationWhenPresent() {
+
+		template.updateFirst(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateOne(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void updateFirstShouldPreferExplicitCollationOverDefaultCollation() {
+
+		template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateOne(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void updateMultiShouldUseDefaultCollationWhenPresent() {
+
+		template.updateMulti(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateMany(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void updateMultiShouldPreferExplicitCollationOverDefaultCollation() {
+
+		template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class);
+
+		ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class);
+		verify(collection).updateMany(any(), any(Bson.class), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void removeShouldUseDefaultCollationWhenPresent() {
+
+		template.remove(new BasicQuery("{}"), Sith.class);
+
+		ArgumentCaptor<DeleteOptions> options = ArgumentCaptor.forClass(DeleteOptions.class);
+		verify(collection).deleteMany(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void removeShouldPreferExplicitCollationOverDefaultCollation() {
+
+		template.remove(new BasicQuery("{}").collation(Collation.of("fr")), Sith.class);
+
+		ArgumentCaptor<DeleteOptions> options = ArgumentCaptor.forClass(DeleteOptions.class);
+		verify(collection).deleteMany(any(), options.capture());
+
+		assertThat(options.getValue().getCollation())
+				.isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build());
+	}
+
+	@Test // DATAMONGO-1854
+	void mapReduceShouldUseDefaultCollationWhenPresent() {
+
+		template.mapReduce("", "", "", MapReduceOptions.options(), Sith.class);
+
+		verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
+	}
+
+	@Test // DATAMONGO-1854
+	void mapReduceShouldPreferExplicitCollationOverDefaultCollation() {
+
MapReduceOptions.options().collation(Collation.of("fr")), Sith.class); + + verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-2261 + void saveShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.save(entity); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.insert(entity); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertAllShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity2.id = "2"; + entity2.firstname = "luke"; + + template.insertAll(Arrays.asList(entity1, entity2)); + + verify(beforeConvertCallback, times(2)).onBeforeConvert(any(), anyString()); + verify(beforeSaveCallback, times(2)).onBeforeSave(any(), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void findAndReplaceShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.findAndReplace(new Query(), entity); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void publishesEventsAndEntityCallbacksInOrder() { + + BeforeConvertCallback<Person> beforeConvertCallback = new BeforeConvertCallback<Person>() { + + @Override + public Person onBeforeConvert(Person entity, String collection) { + + assertThat(entity.id).isEqualTo("before-convert-event"); + entity.id = "before-convert-callback"; + return entity; + } + }; + + BeforeSaveCallback<Person> beforeSaveCallback = new BeforeSaveCallback<Person>() { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + assertThat(entity.id).isEqualTo("before-save-event"); + entity.id = 
"before-save-callback"; + return entity; + } + }; + + AbstractMongoEventListener eventListener = new AbstractMongoEventListener() { + + @Override + public void onBeforeConvert(BeforeConvertEvent event) { + + assertThat(event.getSource().id).isEqualTo("init"); + event.getSource().id = "before-convert-event"; + } + + @Override + public void onBeforeSave(BeforeSaveEvent event) { + + assertThat(event.getSource().id).isEqualTo("before-convert-callback"); + event.getSource().id = "before-save-event"; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(ApplicationListener.class, () -> eventListener); + ctx.registerBean(BeforeConvertCallback.class, () -> beforeConvertCallback); + ctx.registerBean(BeforeSaveCallback.class, () -> beforeSaveCallback); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + Person saved = template.save(entity); + + assertThat(saved.id).isEqualTo("before-save-callback"); + } + + @Test // DATAMONGO-2261 + void beforeSaveCallbackAllowsTargetDocumentModifications() { + + BeforeSaveCallback beforeSaveCallback = new BeforeSaveCallback() { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + document.append("added-by", "callback"); + return entity; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(BeforeSaveCallback.class, () -> beforeSaveCallback); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person(); + entity.id = "luke-skywalker"; + entity.firstname = "luke"; + + template.save(entity); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(any(), captor.capture(), any(com.mongodb.client.model.ReplaceOptions.class)); + assertThat(captor.getValue()).containsEntry("added-by", "callback"); + } + + @Test // DATAMONGO-2307 + void beforeSaveCallbackAllowsTargetEntityModificationsUsingSave() { + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(BeforeSaveCallback.class, this::beforeSaveCallbackReturningNewPersonWithTransientAttribute); + ctx.refresh(); + + template.setApplicationContext(ctx); + + PersonWithTransientAttribute entity = new PersonWithTransientAttribute(); + entity.id = "luke-skywalker"; + entity.firstname = "luke"; + entity.isNew = true; + + PersonWithTransientAttribute savedPerson = template.save(entity); + assertThat(savedPerson.isNew).isFalse(); + } + + @Test // DATAMONGO-2307 + void beforeSaveCallbackAllowsTargetEntityModificationsUsingInsert() { + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(BeforeSaveCallback.class, this::beforeSaveCallbackReturningNewPersonWithTransientAttribute); + ctx.refresh(); + + template.setApplicationContext(ctx); + + PersonWithTransientAttribute entity = new PersonWithTransientAttribute(); + entity.id = "luke-skywalker"; + entity.firstname = "luke"; + entity.isNew = true; + + PersonWithTransientAttribute savedPerson = template.insert(entity); + assertThat(savedPerson.isNew).isFalse(); + } + + // TODO: additional tests for what is when saved. 
+ + @Test // DATAMONGO-2261 + void entityCallbacksAreNotSetByDefault() { + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNull(); + } + + @Test // DATAMONGO-2261 + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNotNull(); + } + + @Test // DATAMONGO-2261 + void setterForEntityCallbackOverridesContextInitializedOnes() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + EntityCallbacks callbacks = EntityCallbacks.create(); + template.setEntityCallbacks(callbacks); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2261 + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + + EntityCallbacks callbacks = EntityCallbacks.create(); + ApplicationContext ctx = new StaticApplicationContext(); + + template.setEntityCallbacks(callbacks); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFind() { + + template.find(new Query().allowSecondaryReads(), AutogenerateableId.class); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindOne() { + + template.findOne(new Query().allowSecondaryReads(), AutogenerateableId.class); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindDistinct() { + + template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForStream() { + + template.stream(new Query().allowSecondaryReads(), AutogenerateableId.class); + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update().set("total") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class); + + ArgumentCaptor<List<Document>> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { total : { $sum : [ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowMultipleAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update() // + .set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) // + .set("grade").toValue(ConditionalOperators.switchCases( // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"), // + 
CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D") // + ) // + .defaultTo("F"));// + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).containsExactly(Document.parse("{ $set: { average : { $avg: \"$tests\" } } }"), + Document.parse("{ $set: { grade: { $switch: {\n" + " branches: [\n" + + " { case: { $gte: [ \"$average\", 90 ] }, then: \"A\" },\n" + + " { case: { $gte: [ \"$average\", 80 ] }, then: \"B\" },\n" + + " { case: { $gte: [ \"$average\", 70 ] }, then: \"C\" },\n" + + " { case: { $gte: [ \"$average\", 60 ] }, then: \"D\" }\n" + + " ],\n" + " default: \"F\"\n" + " } } } }")); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationExpressionToDomainType() { + + AggregationUpdate update = AggregationUpdate.update().set("name") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { firstname : { $sum:[ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldPassOnUnsetCorrectly() { + + SetOperation setOperation = SetOperation.builder().set("status").toValue("Modified").and().set("comments") + .toValue(Fields.fields("misc1").and("misc2").asList()); + AggregationUpdate update = AggregationUpdate.update(); + update.set(setOperation); + update.unset("misc1", "misc2"); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Arrays.asList(Document.parse("{ $set: { status: \"Modified\", comments: [ \"$misc1\", \"$misc2\" ] } }"), + Document.parse("{ $unset: [ \"misc1\", \"misc2\" ] }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationUnsetToDomainType() { + + AggregationUpdate update = AggregationUpdate.update(); + update.unset("name"); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo(Collections.singletonList(Document.parse("{ $unset : \"firstname\" }"))); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyIfNotPresentInFilter() { + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void 
saveShouldAppendNonDefaultShardKeyToVersionedEntityIfNotPresentInFilter() { + + when(collection.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))) + .thenReturn(UpdateResult.acknowledged(1, 1L, null)); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("version", 1L).append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromExistingDocumentIfNotPresentInFilter() { + + when(findIterable.first()).thenReturn(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromGivenDocumentIfShardKeyIsImmutable() { + + template.save(new ShardedEntityWithNonDefaultImmutableShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + + verifyNoInteractions(findIterable); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendDefaultShardKeyIfNotPresentInFilter() { + + template.save(new ShardedEntityWithDefaultShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1")); + verify(findIterable, never()).first(); + } + + @Test // GH-3590 + void shouldIncludeValueFromNestedShardKeyPath() { + + WithShardKeyPointingToNested source = new WithShardKeyPointingToNested(); + source.id = "id-1"; + source.value = "v1"; + source.nested = new WithNamedFields(); + source.nested.customName = "cname"; + source.nested.name = "name"; + + template.save(source); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname")); + } + + @Test // DATAMONGO-2341 + void saveShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(findIterable.first()).thenReturn(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)); + + verify(findIterable).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // DATAMONGO-2341 + void saveVersionedShouldProjectOnShardKeyWhenLoadingExistingDocument() 
{ + + when(collection.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))) + .thenReturn(UpdateResult.acknowledged(1, 1L, null)); + when(findIterable.first()).thenReturn(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)); + + verify(findIterable).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // DATAMONGO-2479 + void findShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.iterator()).thenReturn(new OneElementCursor<>(document)); + + template.find(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findByIdShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.first()).thenReturn(document); + + template.findById("init", Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findOneShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.first()).thenReturn(document); + + template.findOne(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAllShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.iterator()).thenReturn(new OneElementCursor<>(document)); + + template.findAll(Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAndModifyShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndUpdate(any(Bson.class), any(Bson.class), any())).thenReturn(document); + + template.findAndModify(new Query(), new Update(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAndRemoveShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); 
+ + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndDelete(any(Bson.class), any())).thenReturn(document); + + template.findAndRemove(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAllAndRemoveShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.iterator()).thenReturn(new OneElementCursor<>(document)); + + template.findAllAndRemove(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(document); + + Person saved = template.findAndReplace(new Query(), entity); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(saved.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void saveShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity = new Person("init", "luke"); + + Person saved = template.save(entity); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity = new Person("init", "luke"); + + Person saved = template.insert(entity); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertAllShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity2.id = "2"; + entity2.firstname = "luke"; + + Collection<Person> saved = template.insertAll(Arrays.asList(entity1, entity2)); + + verify(afterSaveCallback, times(2)).onAfterSave(any(), any(), anyString()); + assertThat(saved.iterator().next().getId()).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + 
template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(document); + + Person saved = template.findAndReplace(new Query(), entity); + + verify(afterSaveCallback).onAfterSave(eq(new Person("init", "luke")), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldEmitAfterSaveEvent() { + + AbstractMongoEventListener<Person> eventListener = new AbstractMongoEventListener<Person>() { + + @Override + public void onAfterSave(AfterSaveEvent<Person> event) { + + assertThat(event.getSource().id).isEqualTo("init"); + event.getSource().id = "after-save-event"; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(ApplicationListener.class, () -> eventListener); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(document); + + Person saved = template.findAndReplace(new Query(), entity); + + assertThat(saved.id).isEqualTo("after-save-event"); + } + + @Test // DATAMONGO-2556 + void estimatedCountShouldBeDelegatedCorrectly() { + + template.estimatedCount(Person.class); + + verify(db).getCollection("star-wars", Document.class); + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-2911 + void insertErrorsOnCustomIteratorImplementation() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> template.insert(new TypeImplementingIterator())); + } + + @Test // GH-3570 + void saveErrorsOnCollectionLikeObjects() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> template.save(new ArrayList<>(Arrays.asList(1, 2, 3)), "myList")); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class); + + ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class); + + ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromString() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsPlainString.class); + + ArgumentCaptor<CreateCollectionOptions> options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.MINUTES)) + .isEqualTo(10); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromProperty() { + + environment.setProperty("my.timeout", 
"12m"); + + template.createCollection(TimeSeriesTypeWithExpireAfterFromProperty.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.MINUTES)) + .isEqualTo(12); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromIso8601String() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsIso8601Style.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.DAYS)) + .isEqualTo(1); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpression() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpression.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(11); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpressionReturningDuration() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(100); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithInvalidTimeoutExpiration() { + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> + template.createCollection(TimeSeriesTypeWithInvalidExpireAfter.class) + ); + } + + @Test // GH-3522 + void usedCountDocumentsForEmptyQueryByDefault() { + + template.count(new Query(), Human.class); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-3522 + void delegatesToEstimatedCountForEmptyQueryIfEnabled() { + + template.useEstimatedCount(true); + + template.count(new Query(), Human.class); + + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-3522 + void stillUsesCountDocumentsForNonEmptyQueryEvenIfEstimationEnabled() { + + template.useEstimatedCount(true); + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }"), Human.class); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-4374 + void countConsidersMaxTimeMs() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").maxTimeMsec(5000), Human.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getMaxTime(TimeUnit.MILLISECONDS)).isEqualTo(5000); + } + + @Test // GH-4374 + void countPassesOnComment() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").comment("rocks!"), Human.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getComment()).isEqualTo(BsonUtils.simpleToBsonValue("rocks!")); + } + + @Test // GH-3984 + void templatePassesOnTimeSeriesOptionsWhenNoTypeGiven() { + + template.createCollection("time-series-collection", CollectionOptions.timeSeries("time_stamp")); + + ArgumentCaptor options = 
ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").toString()); + } + + @Test // GH-4300 + void findAndReplaceAllowsDocumentSourceType() { + + template.findAndReplace(new Query(), new Document("spring", "data"), FindAndReplaceOptions.options().upsert(), + Document.class, "coll-1", Person.class); + + verify(db).getCollection(eq("coll-1"), eq(Document.class)); + verify(collection).findOneAndReplace((Bson) any(Bson.class), eq(new Document("spring", "data")), + any(FindOneAndReplaceOptions.class)); + } + + @Test // GH-4462 + void replaceShouldUseCollationWhenPresent() { + + template.replace(new BasicQuery("{}").collation(Collation.of("fr")), new AutogenerateableId()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // GH-4462 + void replaceShouldNotUpsertByDefault() { + + template.replace(new BasicQuery("{}"), new Sith()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + } + + @Test // GH-4462 + void replaceShouldUpsert() { + + template.replace(new BasicQuery("{}"), new Sith(), ReplaceOptions.replaceOptions().upsert()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isTrue(); + } + + @Test // GH-4462 + void replaceShouldUseDefaultCollationWhenPresent() { + + template.replace(new BasicQuery("{}"), new Sith(), ReplaceOptions.replaceOptions()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // GH-4462 + void replaceShouldUseHintIfPresent() { + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new Sith(), + ReplaceOptions.replaceOptions().upsert()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("index-to-use"); + } + + @Test // GH-4462 + void replaceShouldApplyWriteConcern() { + + template.setWriteConcernResolver(new WriteConcernResolver() { + public WriteConcern resolve(MongoAction action) { + + assertThat(action.getMongoActionOperation()).isEqualTo(MongoActionOperation.REPLACE); + return WriteConcern.UNACKNOWLEDGED; + } + }); + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new Sith(), + ReplaceOptions.replaceOptions().upsert()); + + verify(collection).withWriteConcern(eq(WriteConcern.UNACKNOWLEDGED)); + } + + @Test // GH-4099 + void passOnTimeSeriesExpireOption() { + + template.createCollection("time-series-collection", + CollectionOptions.timeSeries("time_stamp", options -> options.expireAfter(Duration.ofSeconds(10)))); + + ArgumentCaptor options = 
ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)).isEqualTo(10); + } + + @Test // GH-4099 + void doNotSetTimeSeriesExpireOptionForNegativeValue() { + + template.createCollection("time-series-collection", + CollectionOptions.timeSeries("time_stamp", options -> options.expireAfter(Duration.ofSeconds(-10)))); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)).isEqualTo(0L); + } + + + class AutogenerateableId { + + @Id BigInteger id; + } + + class NotAutogenerateableId { + + @Id Integer id; + + public Pattern getId() { + return Pattern.compile("."); + } + } + + static class VersionedEntity { + + @Id Integer id; + @Version Integer version; + + @Field(write = Field.Write.ALWAYS) String name; + } + + enum MyConverter implements Converter { + + INSTANCE; + + public String convert(AutogenerateableId source) { + return source.toString(); + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "star-wars") + static class Person { + + @Id String id; + String firstname; + + public Person() {} + + public Person(String id, String firstname) { + this.id = id; + this.firstname = firstname; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getFirstname() { + return firstname; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + } + + static class PersonExtended extends Person { + + String lastname; + } + + static class PersonWithTransientAttribute extends Person { + + @Transient boolean isNew = true; + } + + interface PersonProjection { + String getFirstname(); + } + + public interface PersonSpELProjection { + + @Value("#{target.firstname}") + String getName(); + } + + static class Human { + @Id String id; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + class Wrapper { + + AutogenerateableId foo; + } + + static class EntityWithListOfSimple { + List grades; + } + + static class WithNamedFields { + + @Id String id; + + String name; + @Field("custom-named-field") String customName; + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + + @Field("firstname") String name; + } + + @Sharded(shardKey = { "value", "nested.customName" }) + static class WithShardKeyPointingToNested { + String id; + String value; + WithNamedFields nested; + } + + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") Instant timestamp; + Object meta; + } 
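+ + // The fixtures below exercise the supported 'expireAfter' formats on @TimeSeries: a property + // placeholder ("${my.timeout}"), a plain duration string ("10m"), an ISO-8601 duration ("P1D"), + // SpEL expressions (including one evaluating to a java.time.Duration), and an invalid value + // that must be rejected. As a hypothetical sketch (type and field names are illustrative), + // a user-defined mapping could combine the time and meta fields with an expiry like so: + // + // @TimeSeries(timeField = "timestamp", metaField = "sensor", expireAfter = "${sensor.retention}") + // class Measurement { String id; Instant timestamp; String sensor; }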
+ + @TimeSeries(timeField = "timestamp", expireAfter = "${my.timeout}") + static class TimeSeriesTypeWithExpireAfterFromProperty { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "10m") + static class TimeSeriesTypeWithExpireAfterAsPlainString { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "P1D") + static class TimeSeriesTypeWithExpireAfterAsIso8601Style { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{10 + 1 + 's'}") + static class TimeSeriesTypeWithExpireAfterAsExpression { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{T(java.time.Duration).ofSeconds(100)}") + static class TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "123ops") + static class TimeSeriesTypeWithInvalidExpireAfter { + + String id; + Instant timestamp; + } + + static class TypeImplementingIterator implements Iterator { + + @Override + public boolean hasNext() { + return false; + } + + @Override + public Object next() { + return null; + } + } + + /** + * Mocks out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual + * behaviour. + * + * @return + */ + private MongoTemplate mockOutGetDb() { + + MongoTemplate template = spy(this.template); + when(template.getDb()).thenReturn(db); + return template; + } + + @Override + protected MongoOperations getOperationsForExceptionHandling() { + when(template.getMongoDatabaseFactory().getMongoDatabase()).thenThrow(new MongoException("Error")); + return template; + } + + @Override + protected MongoOperations getOperations() { + return this.template; + } + + private BeforeSaveCallback beforeSaveCallbackReturningNewPersonWithTransientAttribute() { + return (entity, document, collection) -> { + + // Return a completely new instance, ie in case of an immutable entity; + PersonWithTransientAttribute newEntity = new PersonWithTransientAttribute(); + newEntity.id = entity.id; + newEntity.firstname = entity.firstname; + newEntity.isNew = false; + return newEntity; + }; + } + + static class ValueCapturingEntityCallback { + + private final List values = new ArrayList<>(1); + + protected void capture(T value) { + values.add(value); + } + + public List getValues() { + return values; + } + + @Nullable + public T getValue() { + return CollectionUtils.lastElement(values); + } + + } + + static class ValueCapturingBeforeConvertCallback extends ValueCapturingEntityCallback + implements BeforeConvertCallback { + + @Override + public Person onBeforeConvert(Person entity, String collection) { + + capture(entity); + return entity; + } + } + + static class ValueCapturingBeforeSaveCallback extends ValueCapturingEntityCallback + implements BeforeSaveCallback { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + capture(entity); + return entity; + } + } + + static class ValueCapturingAfterSaveCallback extends ValueCapturingEntityCallback + implements AfterSaveCallback { + + @Override + public Person onAfterSave(Person entity, Document document, String collection) { + + capture(entity); + return new Person() { + { + id = "after-save"; + firstname = entity.firstname; + } + }; + } + } + + static class ValueCapturingAfterConvertCallback extends ValueCapturingEntityCallback + implements AfterConvertCallback { + + 
@Override + public Person onAfterConvert(Person entity, Document document, String collection) { + + capture(entity); + return new Person() { + { + id = "after-convert"; + firstname = entity.firstname; + } + }; + } + } + + static class OneElementCursor<T> implements MongoCursor<T> { + private final Iterator<T> iterator; + + OneElementCursor(T element) { + iterator = Collections.singletonList(element).iterator(); + } + + @Override + public void close() { + // nothing to close + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public T next() { + return iterator.next(); + } + + @Override + public int available() { + return 1; + } + + @Override + public T tryNext() { + if (iterator.hasNext()) { + return iterator.next(); + } else { + return null; + } + } + + @Override + public ServerCursor getServerCursor() { + throw new IllegalStateException("Not implemented"); + } + + @Override + public ServerAddress getServerAddress() { + throw new IllegalStateException("Not implemented"); + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnwrappedTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnwrappedTests.java new file mode 100644 index 0000000000..b8fc2986c2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnwrappedTests.java @@ -0,0 +1,197 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link Unwrapped}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +class MongoTemplateUnwrappedTests { + + private static @Template MongoTemplate template; + + @Test // DATAMONGO-1902 + void readWrite() { + + WithUnwrapped source = new WithUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat(template.findOne(query(where("id").is(source.id)), WithUnwrapped.class)).isEqualTo(source); + } + + @Test // DATAMONGO-1902 + void filterOnUnwrappedValue() { + + WithUnwrapped source = new WithUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat(template.findOne( + Query.query(where("embeddableValue.stringValue").is(source.embeddableValue.stringValue)), WithUnwrapped.class)) + .isEqualTo(source); + } + + @Test // DATAMONGO-1902 + void readWritePrefixed() { + + WithPrefixedUnwrapped source = new WithPrefixedUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat(template.findOne(query(where("id").is(source.id)), WithPrefixedUnwrapped.class)).isEqualTo(source); + } + + @Test // DATAMONGO-1902 + void filterOnPrefixedUnwrappedValue() { + + WithPrefixedUnwrapped source = new WithPrefixedUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat( + template.findOne(Query.query(where("embeddableValue.stringValue").is(source.embeddableValue.stringValue)), + WithPrefixedUnwrapped.class)).isEqualTo(source); + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType embeddableValue; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithUnwrapped that = (WithUnwrapped) o; + return Objects.equals(id, that.id) && Objects.equals(embeddableValue, that.embeddableValue); + } + + @Override + public int hashCode() { + return Objects.hash(id, embeddableValue); + } + + public String toString() { + return "MongoTemplateUnwrappedTests.WithUnwrapped(id=" + this.id + ", embeddableValue=" + this.embeddableValue + + ")"; + } + } + + static class WithPrefixedUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") UnwrappableType embeddableValue; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithPrefixedUnwrapped that = (WithPrefixedUnwrapped) o; + return Objects.equals(id, that.id) && Objects.equals(embeddableValue, that.embeddableValue); + } + + @Override + public int hashCode() { + return Objects.hash(id, embeddableValue); 
+ } + + public String toString() { + return "MongoTemplateUnwrappedTests.WithPrefixedUnwrapped(id=" + this.id + ", embeddableValue=" + + this.embeddableValue + ")"; + } + } + + static class UnwrappableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UnwrappableType that = (UnwrappableType) o; + return Objects.equals(stringValue, that.stringValue) && Objects.equals(listValue, that.listValue) + && Objects.equals(atFieldAnnotatedValue, that.atFieldAnnotatedValue); + } + + @Override + public int hashCode() { + return Objects.hash(stringValue, listValue, atFieldAnnotatedValue); + } + + public String toString() { + return "MongoTemplateUnwrappedTests.UnwrappableType(stringValue=" + this.stringValue + ", listValue=" + + this.listValue + ", atFieldAnnotatedValue=" + this.atFieldAnnotatedValue + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUpdateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUpdateTests.java new file mode 100644 index 0000000000..4249506d77 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUpdateTests.java @@ -0,0 +1,446 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; + +import com.mongodb.client.result.UpdateResult; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators; +import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation; +import org.springframework.data.mongodb.core.aggregation.SetOperation; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.BasicUpdate; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +@ExtendWith({ MongoTemplateExtension.class }) +class MongoTemplateUpdateTests { + + @Template(initialEntitySet = { Score.class, Versioned.class, Book.class }) // + static MongoTestTemplate template; + + @BeforeEach + void setUp() { + template.flush(); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithSet() { + + Score score1 = new Score(1, "Maya", Arrays.asList(10, 5, 10), Arrays.asList(10, 8), 0); + Score score2 = new Score(2, "Ryan", Arrays.asList(5, 6, 5), Arrays.asList(8, 8), 8); + + template.insertAll(Arrays.asList(score1, score2)); + + AggregationUpdate update = AggregationUpdate.update().set(SetOperation.builder() // + .set("totalHomework").toValueOf(ArithmeticOperators.valueOf("homework").sum()).and() // + .set("totalQuiz").toValueOf(ArithmeticOperators.valueOf("quiz").sum())) // + .set(SetOperation.builder() // + .set("totalScore") + .toValueOf(ArithmeticOperators.valueOf("totalHomework").add("totalQuiz").add("extraCredit"))); + + template.update(Score.class).apply(update).all(); + + assertThat(collection(Score.class).find(new org.bson.Document()).into(new ArrayList<>())).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{\"_id\" : 1, \"student\" : \"Maya\", \"homework\" : [ 10, 5, 10 ], \"quiz\" : [ 10, 8 ], \"extraCredit\" : 0, \"totalHomework\" : 25, \"totalQuiz\" : 18, \"totalScore\" : 43, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Score\"}"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"student\" : \"Ryan\", \"homework\" : [ 5, 6, 5 ], \"quiz\" : [ 8, 
8 ], \"extraCredit\" : 8, \"totalHomework\" : 16, \"totalQuiz\" : 16, \"totalScore\" : 40, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Score\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithSetToValue() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + template.insertAll(Arrays.asList(one)); + + AggregationUpdate update = AggregationUpdate.update().set("author").toValue(new Author("Ada", "Lovelace")); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(one.id))).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder(org.bson.Document.parse( + "{\"_id\" : 1, \"author\" : {\"first\" : \"Ada\", \"last\" : \"Lovelace\"}, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void versionedAggregateUpdateWithSet() { + + Versioned source = template.insert(Versioned.class).one(new Versioned("id-1", "value-0")); + + AggregationUpdate update = AggregationUpdate.update().set("value").toValue("changed"); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first(); + + assertThat( + collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1).into(new ArrayList<>())) + .containsExactly(new org.bson.Document("_id", source.id).append("version", 1L).append("value", "changed") + .append("_class", "org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Versioned")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void versionedAggregateUpdateTouchingVersionProperty() { + + Versioned source = template.insert(Versioned.class).one(new Versioned("id-1", "value-0")); + + AggregationUpdate update = AggregationUpdate.update() + .set(SetOperation.builder().set("value").toValue("changed").and().set("version").toValue(10L)); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first(); + + assertThat( + collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1).into(new ArrayList<>())) + .containsExactly(new org.bson.Document("_id", source.id).append("version", 10L).append("value", "changed") + .append("_class", "org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Versioned")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithUnset() { + + Book antelopeAntics = new Book(); + antelopeAntics.id = 1; + antelopeAntics.title = "Antelope Antics"; + antelopeAntics.isbn = "0001122223334"; + antelopeAntics.author = new Author("Auntie", "An"); + antelopeAntics.stock = new ArrayList<>(); + antelopeAntics.stock.add(new Warehouse("A", 5)); + antelopeAntics.stock.add(new Warehouse("B", 15)); + + Book beesBabble = new Book(); + beesBabble.id = 2; + beesBabble.title = "Bees Babble"; + beesBabble.isbn = "999999999333"; + beesBabble.author = new Author("Bee", "Bumble"); + beesBabble.stock = new ArrayList<>(); + beesBabble.stock.add(new Warehouse("A", 2)); + beesBabble.stock.add(new Warehouse("B", 5)); + + template.insertAll(Arrays.asList(antelopeAntics, beesBabble)); + + AggregationUpdate update = AggregationUpdate.update().unset("isbn", "stock"); + template.update(Book.class).apply(update).all(); + + 
assertThat(all(Book.class)).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{ \"_id\" : 1, \"title\" : \"Antelope Antics\", \"author\" : { \"last\" : \"An\", \"first\" : \"Auntie\" }, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\" }"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"title\" : \"Bees Babble\", \"author\" : { \"last\" : \"Bumble\", \"first\" : \"Bee\" }, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\" }")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithReplaceWith() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)); + + AggregationUpdate update = AggregationUpdate.update() + .replaceWith(ReplaceWithOperation.replaceWithValueOf("author")); + + template.update(Book.class).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder( + org.bson.Document.parse("{\"_id\" : 1, \"first\" : \"John\", \"last\" : \"Backus\"}"), + org.bson.Document.parse("{\"_id\" : 2, \"first\" : \"Grace\", \"last\" : \"Hopper\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithReplaceWithNewObject() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)); + + AggregationUpdate update = AggregationUpdate.update().replaceWith(new Author("Ada", "Lovelace")); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(one.id))).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder(org.bson.Document.parse( + "{\"_id\" : 1, \"first\" : \"Ada\", \"last\" : \"Lovelace\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Author\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"author\" : {\"first\" : \"Grace\", \"last\" : \"Hopper\"}, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregationUpdateUpsertsCorrectly() { + + AggregationUpdate update = AggregationUpdate.update().set("title").toValue("The Burning White"); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(1))).apply(update).upsert(); + + assertThat(all(Book.class)) + .containsExactly(org.bson.Document.parse("{\"_id\" : 1, \"title\" : \"The Burning White\" }")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateFirstMatch() { + + Book one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, two)); + + template.update(Book.class).apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")).first(); + + assertThat(all(Book.class)).containsExactly(org.bson.Document.parse( + "{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : 
\"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void findAndModifyAppliesAggregationUpdateCorrectly() { + + Book one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, two)); + + Book retrieved = template.update(Book.class).matching(Query.query(Criteria.where("id").is(one.id))) + .apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")).findAndModifyValue(); + assertThat(retrieved).isEqualTo(one); + + assertThat(all(Book.class)).containsExactly(org.bson.Document.parse( + "{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + + } + + @Test // DATAMMONGO-2423 + void nullValueShouldBePropagatedToDatabase() { + + Book currentRead = new Book(); + currentRead.id = 1; + currentRead.author = new Author("Brent", "Weeks"); + currentRead.title = "The Burning White"; + + template.save(currentRead); + + template.update(Book.class).apply(new Update().set("title", null)).first(); + + assertThat(collection(Book.class).find(new org.bson.Document("_id", currentRead.id)).first()).containsEntry("title", + null); + } + + @ParameterizedTest // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + @MethodSource("sortedUpdateBookArgs") + void updateFirstWithSort(Class domainType, Sort sort, UpdateDefinition update) { + + Book one = new Book(); + one.id = 1; + one.isbn = "001 001 300"; + one.title = "News isn't fake"; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.title = "love is love"; + two.isbn = "001 001 100"; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)); + + UpdateResult result = template.update(domainType) // + .inCollection(template.getCollectionName(Book.class))// + .matching(new Query().with(sort)).apply(update) // + .first(); + + assertThat(result.getModifiedCount()).isOne(); + assertThat(collection(Book.class).find(new org.bson.Document("_id", two.id)).first()).containsEntry("title", + "Science is real!"); + } + + @Test // GH-4918 + void updateShouldHonorVersionProvided() { + + Versioned source = template.insert(Versioned.class).one(new Versioned("id-1", "value-0")); + + Update update = new BasicUpdate("{ '$set' : { 'value' : 'changed' }, '$inc' : { 'version' : 10 } }"); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first(); + + assertThat( + collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1).into(new ArrayList<>())) + .containsExactly(new org.bson.Document("_id", source.id).append("version", 10L).append("value", "changed") + .append("_class", "org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Versioned")); + } + + private List all(Class type) { + return collection(type).find(new org.bson.Document()).into(new ArrayList<>()); + } + + private MongoCollection collection(Class type) { + return template.getCollection(template.getCollectionName(type)); + } + + private static Stream sortedUpdateBookArgs() { + + Update update = new Update().set("title", "Science is real!"); + 
AggregationUpdate aggUpdate = AggregationUpdate.update().set("title").toValue("Science is real!"); + + return Stream.of( // + Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), update), // typed, no field mapping + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.lastname"), update), // typed, map `lastname` + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), update), // typed, raw field name + Arguments.of(Object.class, Sort.by(Direction.ASC, "isbn"), update), // untyped, requires raw field name + Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), aggUpdate), // aggregation, no field mapping + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), aggUpdate) // aggregation, raw field name + ); + } + + @Document("scores") + static class Score { + + Integer id; + String student; + List<Integer> homework; + List<Integer> quiz; + Integer extraCredit; + + public Score(Integer id, String student, List<Integer> homework, List<Integer> quiz, Integer extraCredit) { + + this.id = id; + this.student = student; + this.homework = homework; + this.quiz = quiz; + this.extraCredit = extraCredit; + } + } + + static class Versioned { + + String id; + @Version Long version; + String value; + + public Versioned(String id, String value) { + this.id = id; + this.value = value; + } + } + + static class Book { + + @Id Integer id; + String title; + String isbn; + Author author; + @Field("copies") Collection<Warehouse> stock; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Book book = (Book) o; + return Objects.equals(id, book.id) && Objects.equals(title, book.title) && Objects.equals(isbn, book.isbn) + && Objects.equals(author, book.author) && Objects.equals(stock, book.stock); + } + + @Override + public int hashCode() { + return Objects.hash(id, title, isbn, author, stock); + } + } + + static class Author { + + @Field("first") String firstname; + @Field("last") String lastname; + + public Author(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + } + + static class Warehouse { + + public Warehouse(String location, Integer qty) { + this.location = location; + this.qty = qty; + } + + @Field("warehouse") String location; + Integer qty; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java new file mode 100644 index 0000000000..18da8c516d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java @@ -0,0 +1,298 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.validation.Validator.*; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions.ValidationOptions; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.lang.Nullable; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.model.ValidationAction; +import com.mongodb.client.model.ValidationLevel; + +/** + * Integration tests for {@link CollectionOptions#validation(ValidationOptions)} using + * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator}, + * {@link org.springframework.data.mongodb.core.validation.DocumentValidator} and + * {@link org.springframework.data.mongodb.core.validation.JsonSchemaValidator}. 
+ * + * @author Andreas Zink + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +public class MongoTemplateValidationTests { + + static final String COLLECTION_NAME = "validation-1"; + static @Client MongoClient mongoClient; + + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "validation-tests"; + } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Override + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + } + + @Autowired MongoTemplate template; + + @BeforeEach + public void setUp() { + template.dropCollection(COLLECTION_NAME); + } + + @Test // DATAMONGO-1322 + public void testCollectionWithSimpleCriteriaBasedValidation() { + + Criteria criteria = where("nonNullString").ne(null).type(2).and("rangedInteger").ne(null).type(16).gte(0).lte(122); + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().validator(criteria(criteria))); + + Document validator = getValidatorInfo(COLLECTION_NAME); + + assertThat(validator.get("nonNullString")).isEqualTo(new Document("$ne", null).append("$type", 2)); + assertThat(validator.get("rangedInteger")) + .isEqualTo(new Document("$ne", null).append("$type", 16).append("$gte", 0).append("$lte", 122)); + + template.save(new SimpleBean("hello", 101, null), COLLECTION_NAME); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.save(new SimpleBean(null, 101, null), COLLECTION_NAME)); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.save(new SimpleBean("hello", -1, null), COLLECTION_NAME)); + } + + @Test // DATAMONGO-1322 + public void testCollectionValidationActionError() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schemaValidationAction(ValidationAction.ERROR) + .validator(criteria(where("name").type(2)))); + + assertThat(getValidationActionInfo(COLLECTION_NAME)).isEqualTo(ValidationAction.ERROR.getValue()); + } + + @Test // DATAMONGO-1322 + public void testCollectionValidationActionWarn() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schemaValidationAction(ValidationAction.WARN) + .validator(criteria(where("name").type(2)))); + + assertThat(getValidationActionInfo(COLLECTION_NAME)).isEqualTo(ValidationAction.WARN.getValue()); + } + + @Test // DATAMONGO-1322 + public void testCollectionValidationLevelOff() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schemaValidationLevel(ValidationLevel.OFF) + .validator(criteria(where("name").type(2)))); + + assertThat(getValidationLevelInfo(COLLECTION_NAME)).isEqualTo(ValidationLevel.OFF.getValue()); + } + + @Test // DATAMONGO-1322 + public void testCollectionValidationLevelModerate() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schemaValidationLevel(ValidationLevel.MODERATE) + .validator(criteria(where("name").type(2)))); + + assertThat(getValidationLevelInfo(COLLECTION_NAME)).isEqualTo(ValidationLevel.MODERATE.getValue()); + } + + @Test // DATAMONGO-1322 + public void testCollectionValidationLevelStrict() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schemaValidationLevel(ValidationLevel.STRICT) +
.validator(criteria(where("name").type(2)))); + + assertThat(getValidationLevelInfo(COLLECTION_NAME)).isEqualTo(ValidationLevel.STRICT.getValue()); + } + + @Test // DATAMONGO-1322 + public void mapsFieldNameCorrectlyWhenGivenDomainTypeInformation() { + + template.createCollection(SimpleBean.class, + CollectionOptions.empty().validator(criteria(where("customFieldName").type(8)))); + + assertThat(getValidatorInfo(COLLECTION_NAME)).isEqualTo(new Document("customName", new Document("$type", 8))); + } + + @Test // DATAMONGO-1322 + public void usesDocumentValidatorCorrectly() { + + Document rules = new Document("customFieldName", new Document("$type", "bool")); + + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().validator(document(rules))); + + assertThat(getValidatorInfo(COLLECTION_NAME)) + .isEqualTo(new Document("customFieldName", new Document("$type", "bool"))); + } + + @Test // DATAMONGO-1322 + public void mapsDocumentValidatorFieldsCorrectly() { + + Document rules = new Document("customFieldName", new Document("$type", "bool")); + + template.createCollection(SimpleBean.class, CollectionOptions.empty().validator(document(rules))); + + assertThat(getValidatorInfo(COLLECTION_NAME)).isEqualTo(new Document("customName", new Document("$type", "bool"))); + } + + @Test // GH-4454 + public void failsJsonSchemaValidationForEncryptedDomainEntityProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create().createSchemaFor(BeanWithEncryptedDomainEntity.class); + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schema(schema)); + + BeanWithEncryptedDomainEntity person = new BeanWithEncryptedDomainEntity(); + person.encryptedDomainEntity = new SimpleBean("some string", 100, null); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.save(person)) + .withMessageContaining("Document failed validation"); + } + + private Document getCollectionOptions(String collectionName) { + return getCollectionInfo(collectionName).get("options", Document.class); + } + + private Document getValidatorInfo(String collectionName) { + return getCollectionOptions(collectionName).get("validator", Document.class); + } + + private String getValidationActionInfo(String collectionName) { + return getCollectionOptions(collectionName).get("validationAction", String.class); + } + + private String getValidationLevelInfo(String collectionName) { + return getCollectionOptions(collectionName).get("validationLevel", String.class); + } + + private Document getCollectionInfo(String collectionName) { + + return template.execute(db -> { + Document result = db.runCommand( + new Document().append("listCollections", 1).append("filter", new Document("name", collectionName))); + return (Document) result.get("cursor", Document.class).get("firstBatch", List.class).get(0); + }); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = COLLECTION_NAME) + static class SimpleBean { + + private @Nullable String nonNullString; + private @Nullable Integer rangedInteger; + private @Field("customName") Object customFieldName; + + public SimpleBean(@Nullable String nonNullString, @Nullable Integer rangedInteger, Object customFieldName) { + this.nonNullString = nonNullString; + this.rangedInteger = rangedInteger; + this.customFieldName = customFieldName; + } + + @Nullable + public String getNonNullString() { + return this.nonNullString; + } + + @Nullable + public Integer getRangedInteger() { + return this.rangedInteger; + } + + public Object 
getCustomFieldName() { + return this.customFieldName; + } + + public void setNonNullString(@Nullable String nonNullString) { + this.nonNullString = nonNullString; + } + + public void setRangedInteger(@Nullable Integer rangedInteger) { + this.rangedInteger = rangedInteger; + } + + public void setCustomFieldName(Object customFieldName) { + this.customFieldName = customFieldName; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SimpleBean that = (SimpleBean) o; + return Objects.equals(nonNullString, that.nonNullString) && Objects.equals(rangedInteger, that.rangedInteger) && Objects.equals(customFieldName, that.customFieldName); + } + + @Override + public int hashCode() { + return Objects.hash(nonNullString, rangedInteger, customFieldName); + } + + public String toString() { + return "MongoTemplateValidationTests.SimpleBean(nonNullString=" + this.getNonNullString() + ", rangedInteger=" + this.getRangedInteger() + ", customFieldName=" + this.getCustomFieldName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = COLLECTION_NAME) + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + static class BeanWithEncryptedDomainEntity { + @Encrypted SimpleBean encryptedDomainEntity; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateViewTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateViewTests.java new file mode 100644 index 0000000000..15fe90a34a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateViewTests.java @@ -0,0 +1,211 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.CollectionInfo; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for Views. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateViewTests { + + static @Client MongoClient client; + static final String DB_NAME = "mongo-template-view-tests"; + + private MongoTemplate template; + + Student alex = new Student(22001L, "Alex", 1, 4.0D); + Student bernie = new Student(21001L, "bernie", 2, 3.7D); + Student chris = new Student(20010L, "Chris", 3, 2.5D); + Student drew = new Student(22021L, "Drew", 1, 3.2D); + Student harley1 = new Student(17301L, "harley", 6, 3.1D); + Student farmer = new Student(21022L, "Farmer", 1, 2.2D); + Student george = new Student(20020L, "george", 3, 2.8D); + Student harley2 = new Student(18020, "Harley", 5, 2.8D); + + List<Student> students = Arrays.asList(alex, bernie, chris, drew, harley1, farmer, george, harley2); + + @BeforeEach + void beforeEach() { + template = new MongoTemplate(client, DB_NAME); + } + + @AfterEach + void afterEach() { + client.getDatabase(DB_NAME).drop(); + } + + @Test // GH-2594 + void createsViewFromPipeline() { + + template.insertAll(students); + + template.createView("firstYears", Student.class, match(where("year").is(1))); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewTarget()).isEqualTo("student"); + assertThat(collectionInfo.getViewPipeline()).containsExactly(new Document("$match", new Document("year", 1))); + } + + @Test // GH-2594 + void mapsPipelineAgainstDomainObject() { + + template.insertAll(students); + + template.createView("fakeStudents", Student.class, match(where("studentID").gte("22"))); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewPipeline()) + .containsExactly(new Document("$match", new Document("sID", new Document("$gte", "22")))); + } + + @Test // GH-2594 + void takesPipelineAsIsIfNoTypeDefined() { + + template.insertAll(students); + + template.createView("fakeStudents", "student", AggregationPipeline.of(match(where("studentID").gte("22"))), + ViewOptions.none()); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewPipeline()) + .containsExactly(new Document("$match", new Document("studentID", new Document("$gte", "22")))); + } + + @Test // GH-2594 + void readsFromView() { + + template.insertAll(students); + client.getDatabase(DB_NAME).createView("firstYears", "student", + Arrays.asList(new Document("$match", new Document("year", 1)))); + + assertThat(template.query(Student.class).inCollection("firstYears").all()).containsExactlyInAnyOrder(alex, drew, + farmer); + } + + @Test // GH-2594 + void appliesCollationToView() { + + template.insertAll(students); + + template.createView("firstYears", Student.class, AggregationPipeline.of(match(where("year").is(1))), + new ViewOptions().collation(Collation.of("en_US"))); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getCollation().getLocale()).isEqualTo("en_US"); + } + + private static class Student { + + @Field("sID") Long studentID; + + int year; + + double score; + + String name; + + public Student(long studentID, String name, int
year, double score) { + this.studentID = studentID; + this.name = name; + this.year = year; + this.score = score; + } + + public Long getStudentID() { + return this.studentID; + } + + public int getYear() { + return this.year; + } + + public double getScore() { + return this.score; + } + + public String getName() { + return this.name; + } + + public void setStudentID(Long studentID) { + this.studentID = studentID; + } + + public void setYear(int year) { + this.year = year; + } + + public void setScore(double score) { + this.score = score; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Student student = (Student) o; + return year == student.year && Double.compare(student.score, score) == 0 + && Objects.equals(studentID, student.studentID) && Objects.equals(name, student.name); + } + + @Override + public int hashCode() { + return Objects.hash(studentID, year, score, name); + } + + public String toString() { + return "MongoTemplateViewTests.Student(studentID=" + this.getStudentID() + ", year=" + this.getYear() + ", score=" + + this.getScore() + ", name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java index de8b22176c..8604fd960d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,39 +15,47 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.Collections; import java.util.Map; +import java.util.Optional; +import java.util.Set; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Integration tests for DATAMONGO-1289. 
- * + * * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class NoExplicitIdTests { + static @Client MongoClient mongoClient; + @Configuration - @EnableMongoRepositories(considerNestedRepositories = true) - static class Config extends AbstractMongoConfiguration { + @EnableMongoRepositories(considerNestedRepositories = true, includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = TypeWithoutExplicitIdPropertyRepository.class)) + static class Config extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -55,24 +63,31 @@ protected String getDatabaseName() { } @Override - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); } } @Autowired MongoOperations mongoOps; @Autowired TypeWithoutExplicitIdPropertyRepository repo; - @Before + @BeforeEach public void setUp() { mongoOps.dropCollection(TypeWithoutIdProperty.class); } - /** - * @see DATAMONGO-1289 - */ - @Test - public void saveAndRetrieveTypeWithoutIdPorpertyViaTemplate() { + @Test // DATAMONGO-1289 + public void saveAndRetrieveTypeWithoutIdPropertyViaTemplate() { TypeWithoutIdProperty noid = new TypeWithoutIdProperty(); noid.someString = "o.O"; @@ -82,14 +97,11 @@ public void saveAndRetrieveTypeWithoutIdPorpertyViaTemplate() { TypeWithoutIdProperty retrieved = mongoOps.findOne(query(where("someString").is(noid.someString)), TypeWithoutIdProperty.class); - assertThat(retrieved.someString, is(noid.someString)); + assertThat(retrieved.someString).isEqualTo(noid.someString); } - /** - * @see DATAMONGO-1289 - */ - @Test - public void saveAndRetrieveTypeWithoutIdPorpertyViaRepository() { + @Test // DATAMONGO-1289 + public void saveAndRetrieveTypeWithoutIdPropertyViaRepository() { TypeWithoutIdProperty noid = new TypeWithoutIdProperty(); noid.someString = "o.O"; @@ -97,15 +109,12 @@ public void saveAndRetrieveTypeWithoutIdPorpertyViaRepository() { repo.save(noid); TypeWithoutIdProperty retrieved = repo.findBySomeString(noid.someString); - assertThat(retrieved.someString, is(noid.someString)); + assertThat(retrieved.someString).isEqualTo(noid.someString); } - /** - * @see DATAMONGO-1289 - */ - @Test + @Test // DATAMONGO-1289 @SuppressWarnings("unchecked") - public void saveAndRetrieveTypeWithoutIdPorpertyViaRepositoryFindOne() { + public void saveAndRetrieveTypeWithoutIdPropertyViaRepositoryFindOne() { TypeWithoutIdProperty noid = new TypeWithoutIdProperty(); noid.someString = "o.O"; @@ -115,8 +124,8 @@ public void saveAndRetrieveTypeWithoutIdPorpertyViaRepositoryFindOne() { Map map = mongoOps.findOne(query(where("someString").is(noid.someString)), Map.class, "typeWithoutIdProperty"); - TypeWithoutIdProperty retrieved = repo.findOne(map.get("_id").toString()); - assertThat(retrieved.someString, is(noid.someString)); + Optional retrieved = repo.findById(map.get("_id").toString()); + assertThat(retrieved.get().someString).isEqualTo(noid.someString); } static class TypeWithoutIdProperty { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java index 
3d6f2148a2..bc126e05f0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,11 @@ package org.springframework.data.mongodb.core; import org.bson.types.ObjectId; +import org.springframework.lang.Nullable; public class Person { - private final ObjectId id; + private ObjectId id; private String firstName; @@ -89,13 +90,8 @@ public boolean isActive() { return active; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java index acfa990ddd..cacd564056 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java @@ -1,96 +1,98 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.AnnotationConfigApplicationContext; -import org.springframework.context.support.AbstractApplicationContext; - -/** - * @author Jon Brisbin - * @author Oliver Gierke - */ -public class PersonExample { - - private static final Logger LOGGER = LoggerFactory.getLogger(PersonExample.class); - - @Autowired private MongoOperations mongoOps; - - public static void main(String[] args) { - AbstractApplicationContext applicationContext = new AnnotationConfigApplicationContext(PersonExampleAppConfig.class); - PersonExample example = applicationContext.getBean(PersonExample.class); - example.doWork(); - applicationContext.close(); - } - - public void doWork() { - mongoOps.dropCollection("personexample"); - - PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString(); - p.setFirstName("Sven"); - p.setAge(22); - - mongoOps.save(p); - - PersonWithIdPropertyOfTypeString p2 = new PersonWithIdPropertyOfTypeString(); - p2.setFirstName("Jon"); - p2.setAge(23); - - mongoOps.save(p2); - - LOGGER.debug("Saved: " + p); - - p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class); - - LOGGER.debug("Found: " + p); - - // mongoOps.updateFirst(new Query(where("firstName").is("Sven")), new Update().set("age", 24)); - - // mongoOps.updateFirst(new Query(where("firstName").is("Sven")), update("age", 24)); - - p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class); - LOGGER.debug("Updated: " + p); - - List<PersonWithIdPropertyOfTypeString> folks = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class); - LOGGER.debug("Querying for all people..."); - for (PersonWithIdPropertyOfTypeString element : folks) { - LOGGER.debug(element.toString()); - } - - // mongoOps.remove( query(whereId().is(p.getId())), p.getClass()); - - mongoOps.remove(p); - - List<PersonWithIdPropertyOfTypeString> people = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class); - - LOGGER.debug("Number of people = : " + people.size()); - - } - - public void doWork2() { - mongoOps.dropCollection("personexample"); - - PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString(); - p.setFirstName("Sven"); - p.setAge(22); - - } - -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.support.AbstractApplicationContext; + +/** + * @author Jon Brisbin + * @author Oliver Gierke + */ +public class PersonExample { + + private static final Log LOGGER = LogFactory.getLog(PersonExample.class); + + @Autowired private MongoOperations mongoOps; + + public static void main(String[] args) { + AbstractApplicationContext applicationContext = new AnnotationConfigApplicationContext( + PersonExampleAppConfig.class); + PersonExample example = applicationContext.getBean(PersonExample.class); + example.doWork(); + applicationContext.close(); + } + + public void doWork() { + mongoOps.dropCollection("personexample"); + + PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString(); + p.setFirstName("Sven"); + p.setAge(22); + + mongoOps.save(p); + + PersonWithIdPropertyOfTypeString p2 = new PersonWithIdPropertyOfTypeString(); + p2.setFirstName("Jon"); + p2.setAge(23); + + mongoOps.save(p2); + + LOGGER.debug("Saved: " + p); + + p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class); + + LOGGER.debug("Found: " + p); + + // mongoOps.updateFirst(new Query(where("firstName").is("Sven")), new Update().set("age", 24)); + + // mongoOps.updateFirst(new Query(where("firstName").is("Sven")), update("age", 24)); + + p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class); + LOGGER.debug("Updated: " + p); + + List<PersonWithIdPropertyOfTypeString> folks = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class); + LOGGER.debug("Querying for all people..."); + for (PersonWithIdPropertyOfTypeString element : folks) { + LOGGER.debug(element.toString()); + } + + // mongoOps.remove( query(whereId().is(p.getId())), p.getClass()); + + mongoOps.remove(p); + + List<PersonWithIdPropertyOfTypeString> people = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class); + + LOGGER.debug("Number of people = : " + people.size()); + + } + + public void doWork2() { + mongoOps.dropCollection("personexample"); + + PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString(); + p.setFirstName("Sven"); + p.setAge(22); + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java index 28fa3b066d..31afdb91b6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java @@ -1,41 +1,41 @@ -/* - * Copyright 2002-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.data.mongodb.core; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -import com.mongodb.Mongo; -import com.mongodb.MongoClient; - -@Configuration -public class PersonExampleAppConfig { - - @Bean - public Mongo mongo() throws Exception { - return new MongoClient("localhost"); - } - - @Bean - public MongoTemplate mongoTemplate() throws Exception { - return new MongoTemplate(mongo(), "database"); - } - - @Bean - public PersonExample personExample() { - return new PersonExample(); - } -} +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.client.MongoClient; + +@Configuration +public class PersonExampleAppConfig { + + @Bean + public MongoClient mongoClient() { + return MongoTestUtils.client(); + } + + @Bean + public MongoTemplate mongoTemplate() throws Exception { + return new MongoTemplate(mongoClient(), "database"); + } + + @Bean + public PersonExample personExample() { + return new PersonExample(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonReadConverter.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonReadConverter.java index a4fd1021e6..888e659f1d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonReadConverter.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonReadConverter.java @@ -1,14 +1,12 @@ package org.springframework.data.mongodb.core; +import org.bson.Document; import org.bson.types.ObjectId; - import org.springframework.core.convert.converter.Converter; -import com.mongodb.DBObject; - -public class PersonReadConverter implements Converter<DBObject, Person> { +public class PersonReadConverter implements Converter<Document, Person> { - public Person convert(DBObject source) { + public Person convert(Document source) { Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name")); p.setAge((Integer) source.get("age")); return p; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java index bc54929ec5..cb347aa3e7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors.
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java index ac633465ac..dd397643ec 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java index 0df1b21873..0cc26b2419 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java index 9f7f76e1a9..36c8ea56ed 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java index cbe3822907..bfdb86b9af 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java index d70e59e114..f8eb961f27 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java index 5357a3eaf6..47caf5db27 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java index 05375d0b2f..fa5bb5d8ea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java new file mode 100644 index 0000000000..f9db1e4d04 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java @@ -0,0 +1,72 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; +import java.util.UUID; + +public class PersonWithIdPropertyOfTypeUUID { + + private UUID id; + private String firstName; + private int age; + + public UUID getId() { + return this.id; + } + + public String getFirstName() { + return this.firstName; + } + + public int getAge() { + return this.age; + } + + public void setId(UUID id) { + this.id = id; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonWithIdPropertyOfTypeUUID that = (PersonWithIdPropertyOfTypeUUID) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstName, that.firstName); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstName, age); + } + + public String toString() { + return "PersonWithIdPropertyOfTypeUUID(id=" + this.getId() + ", firstName=" + this.getFirstName() + ", age=" + + this.getAge() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java index aef61e4719..132f0830d4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,8 +23,7 @@ public class PersonWithVersionPropertyOfTypeInteger { String firstName; int age; - @Version - Integer version; + @Version Integer version; @Override public String toString() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java index f1653ca120..41ef862dd0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -27,7 +27,7 @@ public class PersonWithVersionPropertyOfTypeLong { @Override public String toString() { - return "PersonWithVersionPropertyOfTypeInteger [id=" + id + ", firstName=" + firstName + ", age=" + age - + ", version=" + version + "]"; + return "PersonWithVersionPropertyOfTypeLong [id=" + id + ", firstName=" + firstName + ", age=" + age + ", version=" + + version + "]"; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java index 399fa166cf..8f2ca7d9ff 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java index 89468f24bc..43bfe53dc0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java index 6a7a8c64e4..5017a6947e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java @@ -1,18 +1,36 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.springframework.data.mongodb.core; +import org.bson.Document; import org.springframework.core.convert.converter.Converter; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +/** + * @author Thomas Risberg + * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + */ +public class PersonWriteConverter implements Converter { -public class PersonWriteConverter implements Converter { - - public DBObject convert(Person source) { - DBObject dbo = new BasicDBObject(); - dbo.put("_id", source.getId()); - dbo.put("name", source.getFirstName()); - dbo.put("age", source.getAge()); - return dbo; + public Document convert(Person source) { + Document document = new Document(); + document.put("_id", source.getId()); + document.put("name", source.getFirstName()); + document.put("age", source.getAge()); + return document; } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java index 45ebcc6784..22ba43f9be 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java @@ -1,73 +1,73 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public class Portfolio { - - private String portfolioName; - private User user; - private List trades; - private Map positions; - private Map portfolioManagers; - - public Map getPortfolioManagers() { - return portfolioManagers; - } - - public void setPortfolioManagers(Map portfolioManagers) { - this.portfolioManagers = portfolioManagers; - } - - public Map getPositions() { - return positions; - } - - public void setPositions(Map positions) { - this.positions = positions; - } - - public Portfolio() { - trades = new ArrayList(); - } - - public String getPortfolioName() { - return portfolioName; - } - - public void setPortfolioName(String portfolioName) { - this.portfolioName = portfolioName; - } - - public List getTrades() { - return trades; - } - - public void setTrades(List trades) { - this.trades = trades; - } - - public User getUser() { - return user; - } - - public void setUser(User user) { - this.user = user; - } -} +/* + * Copyright 2010-2025 the original author or authors. 
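The rewritten converter above now emits org.bson.Document instead of the removed DBObject type. For reference, a sketch of how such a converter is typically registered; the configuration class and bean name are hypothetical and not part of this patch.

```java
import java.util.List;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;

// Hypothetical configuration: registering PersonWriteConverter so the
// mapping layer uses it when writing Person instances.
@Configuration
class ConverterConfig {

	@Bean
	MongoCustomConversions mongoCustomConversions() {
		return new MongoCustomConversions(List.of(new PersonWriteConverter()));
	}
}
```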
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class Portfolio { + + private String portfolioName; + private User user; + private List trades; + private Map positions; + private Map portfolioManagers; + + public Map getPortfolioManagers() { + return portfolioManagers; + } + + public void setPortfolioManagers(Map portfolioManagers) { + this.portfolioManagers = portfolioManagers; + } + + public Map getPositions() { + return positions; + } + + public void setPositions(Map positions) { + this.positions = positions; + } + + public Portfolio() { + trades = new ArrayList(); + } + + public String getPortfolioName() { + return portfolioName; + } + + public void setPortfolioName(String portfolioName) { + this.portfolioName = portfolioName; + } + + public List getTrades() { + return trades; + } + + public void setTrades(List trades) { + this.trades = trades; + } + + public User getUser() { + return user; + } + + public void setUser(User user) { + this.user = user; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java new file mode 100644 index 0000000000..52ee79aa1f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java @@ -0,0 +1,308 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.ExampleMatcher; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UntypedExampleMatcher; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for Query-by-example. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Oliver Gierke + */ +@ExtendWith(MongoTemplateExtension.class) +public class QueryByExampleTests { + + @Template(initialEntitySet = Person.class) // + static MongoTestTemplate operations; + + Person p1, p2, p3; + + @BeforeEach + public void setUp() { + + operations.flush(); + + p1 = new Person(); + p1.firstname = "bran"; + p1.middlename = "a"; + p1.lastname = "stark"; + + p2 = new Person(); + p2.firstname = "jon"; + p2.lastname = "snow"; + + p3 = new Person(); + p3.firstname = "arya"; + p3.lastname = "stark"; + + operations.save(p1); + operations.save(p2); + operations.save(p3); + } + + @Test // DATAMONGO-1245 + public void findByExampleShouldWorkForSimpleProperty() { + + Person sample = new Person(); + sample.lastname = "stark"; + + Query query = new Query(new Criteria().alike(Example.of(sample))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactlyInAnyOrder(p1, p3); + } + + @Test // DATAMONGO-1245 + public void findByExampleShouldWorkForMultipleProperties() { + + Person sample = new Person(); + sample.lastname = "stark"; + sample.firstname = "arya"; + + Query query = new Query(new Criteria().alike(Example.of(sample))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactly(p3); + } + + @Test // DATAMONGO-1245 + public void findByExampleShouldWorkForIdProperty() { + + Person p4 = new Person(); + operations.save(p4); + + Person sample = new Person(); + sample.id = p4.id; + + Query query = new Query(new Criteria().alike(Example.of(sample))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactly(p4); + } + + @Test // DATAMONGO-1245 + public void findByExampleShouldReturnEmptyListIfNotMatching() { + + Person sample = new Person(); + sample.firstname = "jon"; + sample.lastname = "stark"; + + Query query = new Query(new Criteria().alike(Example.of(sample))); + List result = operations.find(query, Person.class); + + assertThat(result).isEmpty(); + } + + @Test // DATAMONGO-1245 + public void findByExampleShouldReturnEverythingWhenSampleIsEmpty() { + + Person sample = new Person(); + + Query query = new Query(new Criteria().alike(Example.of(sample))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactlyInAnyOrder(p1, p2, p3); + } + + @Test // DATAMONGO-1245, GH-3544 + public void findByExampleWithCriteria() { + + Person sample = new Person(); + sample.lastname = "stark"; + 
+ Query query = new Query(new Criteria().alike(Example.of(sample)).and("firstname").regex(".*n.*")); + assertThat(operations.find(query, Person.class)).containsExactly(p1); + } + + @Test // DATAMONGO-1459 + public void findsExampleUsingAnyMatch() { + + Person probe = new Person(); + probe.lastname = "snow"; + probe.middlename = "a"; + + Query query = Query.query(Criteria.byExample(Example.of(probe, ExampleMatcher.matchingAny()))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactlyInAnyOrder(p1, p2); + } + + @Test // DATAMONGO-1768 + public void typedExampleMatchesNothingIfTypesDoNotMatch() { + + NotAPersonButStillMatchingFields probe = new NotAPersonButStillMatchingFields(); + probe.lastname = "stark"; + + Query query = new Query(new Criteria().alike(Example.of(probe))); + List result = operations.find(query, Person.class); + + assertThat(result).isEmpty(); + } + + @Test // DATAMONGO-1768 + public void exampleIgnoringClassTypeKeyMatchesCorrectly() { + + NotAPersonButStillMatchingFields probe = new NotAPersonButStillMatchingFields(); + probe.lastname = "stark"; + + Query query = new Query( + new Criteria().alike(Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_class")))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactlyInAnyOrder(p1, p3); + } + + @Test // DATAMONGO-1768 + public void untypedExampleMatchesCorrectly() { + + NotAPersonButStillMatchingFields probe = new NotAPersonButStillMatchingFields(); + probe.lastname = "stark"; + + Query query = new Query(new Criteria().alike(Example.of(probe, UntypedExampleMatcher.matching()))); + List result = operations.find(query, Person.class); + + assertThat(result).containsExactlyInAnyOrder(p1, p3); + } + + @Test // DATAMONGO-2314 + public void alikeShouldWorkOnNestedProperties() { + + PersonWrapper source1 = new PersonWrapper(); + source1.id = "with-child-doc-1"; + source1.child = p1; + + PersonWrapper source2 = new PersonWrapper(); + source2.id = "with-child-doc-2"; + source2.child = p2; + + operations.save(source1); + operations.save(source2); + + Query query = new Query( + new Criteria("child").alike(Example.of(p1, ExampleMatcher.matching().withIgnorePaths("_class")))); + List result = operations.find(query, PersonWrapper.class); + + assertThat(result).containsExactly(source1); + } + + @Document("dramatis-personae") + static class Person { + + @Id String id; + String firstname, middlename; + @Field("last_name") String lastname; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(middlename, person.middlename) && Objects.equals(lastname, person.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, middlename, lastname); + } + + public String toString() { + return "QueryByExampleTests.Person(id=" + this.id + ", firstname=" + this.firstname + ", middlename=" + + this.middlename + ", lastname=" + this.lastname + ")"; + } + } + + static class NotAPersonButStillMatchingFields { + + String firstname, middlename; + @Field("last_name") String lastname; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NotAPersonButStillMatchingFields that = (NotAPersonButStillMatchingFields) 
o; + return Objects.equals(firstname, that.firstname) && Objects.equals(middlename, that.middlename) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(firstname, middlename, lastname); + } + + public String toString() { + return "QueryByExampleTests.NotAPersonButStillMatchingFields(firstname=" + this.firstname + ", middlename=" + + this.middlename + ", lastname=" + this.lastname + ")"; + } + } + + @Document("dramatis-personae") + static class PersonWrapper { + + @Id String id; + Person child; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonWrapper that = (PersonWrapper) o; + return Objects.equals(id, that.id) && Objects.equals(child, that.child); + } + + @Override + public int hashCode() { + return Objects.hash(id, child); + } + + public String toString() { + return "QueryByExampleTests.PersonWrapper(id=" + this.id + ", child=" + this.child + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java index 333cfd7a72..8c1ef8348b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,125 +15,129 @@ */ package org.springframework.data.mongodb.core; -import static org.mockito.Matchers.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import java.util.concurrent.TimeUnit; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoTemplate.QueryCursorPreparer; -import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; -import com.mongodb.DBCursor; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.FindIterable; /** * Unit tests for {@link QueryCursorPreparer}. 
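For reference, a compact sketch (not part of the patch) of the query-by-example pattern the QueryByExampleTests above exercise; `operations` and the `Person` probe stand in for the test fixtures.

```java
import java.util.List;

import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class QueryByExampleSketch {

	// Finds all documents matching the populated probe fields, ignoring the type marker.
	static List<Person> findByProbe(MongoOperations operations, Person probe) {

		Example<Person> example = Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_class"));
		return operations.find(new Query(new Criteria().alike(example)), Person.class);
	}
}
```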
- * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch + * @author Anton Barkan */ -@RunWith(MockitoJUnitRunner.class) -public class QueryCursorPreparerUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class QueryCursorPreparerUnitTests { + + @Mock MongoDatabaseFactory factory; + @Mock MongoExceptionTranslator exceptionTranslatorMock; + @Mock FindIterable cursor; + + @BeforeEach + void setUp() { + + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock); + when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + when(cursor.batchSize(anyInt())).thenReturn(cursor); + when(cursor.comment(anyString())).thenReturn(cursor); + when(cursor.allowDiskUse(anyBoolean())).thenReturn(cursor); + when(cursor.maxTime(anyLong(), any())).thenReturn(cursor); + when(cursor.hint(any())).thenReturn(cursor); + when(cursor.noCursorTimeout(anyBoolean())).thenReturn(cursor); + when(cursor.collation(any())).thenReturn(cursor); + } - @Mock MongoDbFactory factory; - @Mock DBCursor cursor; + @Test // DATAMONGO-185 + void appliesHintsCorrectly() { - @Mock DBCursor cursorToUse; + Query query = query(where("foo").is("bar")).withHint("{ age: 1 }"); + prepare(query); - @Before - public void setUp() { - when(cursor.copy()).thenReturn(cursorToUse); + verify(cursor).hint(new Document("age", 1)); } - /** - * @see DATAMONGO-185 - */ - @Test - public void appliesHintsCorrectly() { + @Test // DATAMONGO-2365 + void appliesIndexNameAsHintCorrectly() { - Query query = query(where("foo").is("bar")).withHint("hint"); + Query query = query(where("foo").is("bar")).withHint("idx-1"); + prepare(query); - pepare(query); - - verify(cursorToUse).hint("hint"); + verify(cursor).hintString("idx-1"); } - /** - * @see DATAMONGO-957 - */ - @Test - public void doesNotApplyMetaWhenEmpty() { - - Query query = query(where("foo").is("bar")); - query.setMeta(new Meta()); + @Test // DATAMONGO-2319 + void appliesDocumentHintsCorrectly() { - pepare(query); + Query query = query(where("foo").is("bar")).withHint(Document.parse("{ age: 1 }")); + prepare(query); - verify(cursor, never()).copy(); - verify(cursorToUse, never()).addSpecial(any(String.class), anyObject()); + verify(cursor).hint(new Document("age", 1)); } - /** - * @see DATAMONGO-957 - */ - @Test - public void appliesMaxScanCorrectly() { - - Query query = query(where("foo").is("bar")).maxScan(100); + @Test // DATAMONGO-957 + void appliesCommentCorrectly() { - pepare(query); + Query query = query(where("foo").is("bar")).comment("spring data"); + prepare(query); - verify(cursorToUse).addSpecial(eq("$maxScan"), eq(100L)); + verify(cursor).comment("spring data"); } - /** - * @see DATAMONGO-957 - */ - @Test - public void appliesMaxTimeCorrectly() { - - Query query = query(where("foo").is("bar")).maxTime(1, TimeUnit.SECONDS); + @Test // DATAMONGO-2659 + void appliesAllowDiskUseCorrectly() { - pepare(query); + Query query = query(where("foo").is("bar")).allowDiskUse(true); + prepare(query); - verify(cursorToUse).addSpecial(eq("$maxTimeMS"), eq(1000L)); + verify(cursor).allowDiskUse(true); } - /** - * @see DATAMONGO-957 - */ - @Test - public void appliesCommentCorrectly() { + @Test // DATAMONGO-1480 + void appliesNoCursorTimeoutCorrectly() { - Query query = query(where("foo").is("bar")).comment("spring data"); + Query query = query(where("foo").is("bar")).noCursorTimeout(); - pepare(query); + prepare(query); - verify(cursorToUse).addSpecial(eq("$comment"), eq("spring 
data")); + verify(cursor).noCursorTimeout(eq(true)); } - /** - * @see DATAMONGO-957 - */ - @Test - public void appliesSnapshotCorrectly() { + @Test // DATAMONGO-1518 + void appliesCollationCorrectly() { + + prepare(new BasicQuery("{}").collation(Collation.of("fr"))); + + verify(cursor).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } - Query query = query(where("foo").is("bar")).useSnapshot(); + @Test // DATAMONGO-1311 + void appliesBatchSizeCorrectly() { - pepare(query); + prepare(new BasicQuery("{}").cursorBatchSize(100)); - verify(cursorToUse).addSpecial(eq("$snapshot"), eq(true)); + verify(cursor).batchSize(100); } - private DBCursor pepare(Query query) { + private FindIterable prepare(Query query) { CursorPreparer preparer = new MongoTemplate(factory).new QueryCursorPreparer(query, null); return preparer.prepare(cursor); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryOperationsUnitTests.java new file mode 100644 index 0000000000..4e103c17be --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryOperationsUnitTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.FieldLookupPolicy; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Unit tests for {@link QueryOperations}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class QueryOperationsUnitTests { + + static final AggregationOptions NO_MAPPING = AggregationOptions.builder().noMapping().build(); + static final AggregationOptions STRICT_MAPPING = AggregationOptions.builder().strictMapping().build(); + + @Mock QueryMapper queryMapper; + @Mock UpdateMapper updateMapper; + @Mock EntityOperations entityOperations; + @Mock PropertyOperations propertyOperations; + @Mock MongoDatabaseFactory mongoDbFactory; + @Mock MongoMappingContext mappingContext; + + QueryOperations queryOperations; + + @BeforeEach + void beforeEach() { + + when(queryMapper.getMappingContext()).thenReturn((MappingContext) mappingContext); + + queryOperations = new QueryOperations(queryMapper, updateMapper, entityOperations, propertyOperations, + mongoDbFactory); + } + + @Test // GH-3542 + void createAggregationContextUsesRelaxedOneForUntypedAggregationsWhenNoInputTypeProvided() { + + Aggregation aggregation = Aggregation.newAggregation(Aggregation.project("name")); + AggregationDefinition def = queryOperations.createAggregation(aggregation, (Class) null); + TypeBasedAggregationOperationContext ctx = (TypeBasedAggregationOperationContext) def + .getAggregationOperationContext(); + + assertThat(ReflectionTestUtils.getField(ctx, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + } + + @Test // GH-3542 + void createAggregationContextUsesRelaxedOneForTypedAggregationsWhenNoInputTypeProvided() { + + Aggregation aggregation = Aggregation.newAggregation(Person.class, Aggregation.project("name")); + AggregationDefinition def = queryOperations.createAggregation(aggregation, Person.class); + TypeBasedAggregationOperationContext ctx = (TypeBasedAggregationOperationContext) def + .getAggregationOperationContext(); + + assertThat(ReflectionTestUtils.getField(ctx, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + } + + @Test // GH-3542 + void createAggregationContextUsesRelaxedOneForUntypedAggregationsWhenInputTypeProvided() { + + Aggregation aggregation = Aggregation.newAggregation(Aggregation.project("name")); + AggregationDefinition def = queryOperations.createAggregation(aggregation, Person.class); + TypeBasedAggregationOperationContext ctx = (TypeBasedAggregationOperationContext) def + .getAggregationOperationContext(); + + assertThat(ReflectionTestUtils.getField(ctx, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + } + + @Test // GH-3542 + void createAggregationContextUsesDefaultIfNoMappingDesired() { + + Aggregation aggregation = Aggregation.newAggregation(Aggregation.project("name")).withOptions(NO_MAPPING); + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, Person.class); + + assertThat(ctx.getAggregationOperationContext()).isEqualTo(Aggregation.DEFAULT_CONTEXT); + } + + @Test // GH-3542 + void createAggregationContextUsesStrictlyTypedContextForTypedAggregationsWhenRequested() { + + Aggregation aggregation = Aggregation.newAggregation(Person.class, Aggregation.project("name")) + .withOptions(STRICT_MAPPING); + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, (Class) null); + + assertThat(ctx.getAggregationOperationContext()).isInstanceOf(TypeBasedAggregationOperationContext.class); + } + + @Test // GH-4026 + void insertContextDoesNotAddIdIfNoPersistentEntityCanBeFound() { + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + 
assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4026 + void insertContextDoesNotAddIdIfNoIdPropertyCanBeFound() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + when(entity.getIdProperty()).thenReturn(null); + when(mappingContext.getPersistentEntity(eq(Person.class))).thenReturn((MongoPersistentEntity) entity); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4026 + void insertContextDoesNotAddConvertedIdForNonExplicitFieldTypes() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(false); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4026 + void insertContextAddsConvertedIdForExplicitFieldTypes() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(true); + doReturn(String.class).when(property).getFieldType(); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + when(queryMapper.convertId(any(), eq(String.class))).thenReturn("☮"); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one").append("_id", "☮")); + }); + } + + @Test // GH-4026 + void insertContextAddsConvertedIdForMongoIdTypes() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(false); + when(property.isAnnotationPresent(eq(MongoId.class))).thenReturn(true); + doReturn(String.class).when(property).getFieldType(); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + when(queryMapper.convertId(any(), eq(String.class))).thenReturn("☮"); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one").append("_id", "☮")); + }); + } + + @Test // GH-4026 + void insertContextDoesNotAddConvertedIdForMongoIdTypesTargetingObjectId() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(false); + when(property.isAnnotationPresent(eq(MongoId.class))).thenReturn(true); + doReturn(ObjectId.class).when(property).getFieldType(); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + assertThat(queryOperations.createInsertContext(new Document("value", 
"one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4184 + void insertContextDoesNotOverrideExistingId() { + + assertThat(queryOperations.createInsertContext(new Document("_id", "abc")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("_id", "abc")); + }); + } + + static class Person { + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java new file mode 100644 index 0000000000..9d4ed339b5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java @@ -0,0 +1,112 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; + +/** + * Unit tests for {@link ReactiveAggregationOperationSupport}. 
+ * + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveAggregationOperationSupportUnitTests { + + @Mock ReactiveMongoTemplate template; + private ReactiveAggregationOperationSupport opSupport; + + @BeforeEach + void setUp() { + opSupport = new ReactiveAggregationOperationSupport(template); + } + + @Test // DATAMONGO-1719 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(null)); + } + + @Test // DATAMONGO-1719 + void throwsExceptionOnNullCollectionWhenUsed() { + assertThatIllegalArgumentException() + .isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1719 + void throwsExceptionOnEmptyCollectionWhenUsed() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection("")); + } + + @Test // DATAMONGO-1719 + void throwsExceptionOnNullAggregation() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).by(null)); + } + + @Test // DATAMONGO-1719 + void aggregateWithUntypedAggregationAndExplicitCollection() { + + opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all(); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); + verify(template).aggregate(any(Aggregation.class), eq("star-wars"), captor.capture()); + assertThat(captor.getValue()).isEqualTo(Person.class); + } + + @Test // DATAMONGO-1719 + void aggregateWithUntypedAggregation() { + + when(template.getCollectionName(any(Class.class))).thenReturn("person"); + + opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).all(); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + + assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); + } + + @Test // DATAMONGO-1719 + void aggregateWithTypeAggregation() { + + when(template.getCollectionName(any(Class.class))).thenReturn("person"); + + opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).all(); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + + assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); + } + + static class Person {} + + static class Jedi {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportTests.java new file mode 100644 index 0000000000..23d1d03b43 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportTests.java @@ -0,0 +1,169 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
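For reference, a sketch of the fluent aggregation chain these unit tests verify, as it would be issued against a live ReactiveMongoTemplate (template wiring assumed).

```java
import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

class FluentAggregationSketch {

	static Flux<Jedi> aggregate(ReactiveMongoTemplate template) {
		return template.aggregateAndReturn(Jedi.class) //
				.inCollection("star-wars") // optional; otherwise derived from the input type
				.by(newAggregation(project("foo"))) //
				.all();
	}

	static class Jedi {}
}
```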
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReplSetClient; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Tests for {@link ReactiveChangeStreamOperation}. + * + * @author Christoph Strobl + * @currentRead Dawn Cook - The Decoy Princess + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +public class ReactiveChangeStreamOperationSupportTests { + + static final String DATABASE_NAME = "rx-change-stream"; + static @ReplSetClient MongoClient mongoClient; + + ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + + template = new ReactiveMongoTemplate(mongoClient, DATABASE_NAME); + + MongoTestUtils.createOrReplaceCollectionNow(DATABASE_NAME, "person", mongoClient); + } + + @AfterEach + public void tearDown() { + MongoTestUtils.dropCollectionNow(DATABASE_NAME, "person", mongoClient); + } + + @Test // DATAMONGO-2089 + public void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedException { + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + + Disposable disposable = template.changeStream(Document.class) // + .watchCollection("person") // + .listen() // + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 39); + Person person3 = new Person("MongoDB", 37); + + Flux.merge(template.insert(person1).delayElement(Duration.ofMillis(2)), + template.insert(person2).delayElement(Duration.ofMillis(2)), + template.insert(person3).delayElement(Duration.ofMillis(2))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).hasSize(3) + .allMatch(Document.class::isInstance); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedException { + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + + Disposable disposable = template.changeStream(Person.class).listen() // + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 39); + Person person3 = new Person("MongoDB", 37); + + Flux.merge(template.insert(person1).delayElement(Duration.ofMillis(2)), + template.insert(person2).delayElement(Duration.ofMillis(2)), + template.insert(person3).delayElement(Duration.ofMillis(2))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).containsOnly(person1, + person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + public void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedException { + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + + Disposable disposable = template.changeStream(Person.class) // + .watchCollection(Person.class) // + .filter(where("age").gte(38)) // + .listen() // + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + Flux.merge(template.save(person1), template.save(person2).delayElement(Duration.ofMillis(50)), + template.save(person3).delayElement(Duration.ofMillis(100))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).containsOnly(person1, + person3); + } finally { + disposable.dispose(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportUnitTests.java new file mode 100644 index 0000000000..46838d6da9 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportUnitTests.java @@ -0,0 +1,165 @@ +/* + * Copyright 2019-2025 the original author or authors.
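For reference, the subscription pattern the change stream integration tests above follow, reduced to a sketch (template wiring assumed). A change stream only emits writes made after the cursor has opened, which is what the Thread.sleep calls in the tests wait for.

```java
import reactor.core.Disposable;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import static org.springframework.data.mongodb.core.query.Criteria.*;

class ChangeStreamSketch {

	static Disposable watchAdults(ReactiveMongoTemplate template) {
		return template.changeStream(Person.class) //
				.watchCollection("person") //
				.filter(where("age").gte(38)) // filters the event stream server-side
				.listen() //
				.doOnNext(event -> System.out.println(event.getBody())) //
				.subscribe();
	}

	static class Person {}
}
```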
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.query.Criteria; + +/** + * Unit tests for {@link ReactiveChangeStreamOperationSupport}. + * + * @author Christoph Strobl + * @currentRead Dawn Cook - The Decoy Princess + */ +@ExtendWith(MockitoExtension.class) +class ReactiveChangeStreamOperationSupportUnitTests { + + @Mock ReactiveMongoTemplate template; + private ReactiveChangeStreamOperationSupport changeStreamSupport; + + @BeforeEach + void setUp() { + when(template.changeStream(any(), any(), any())).thenReturn(Flux.empty()); + changeStreamSupport = new ReactiveChangeStreamOperationSupport(template); + } + + @Test // DATAMONGO-2089 + void listenWithoutDomainTypeUsesDocumentAsDefault() { + + changeStreamSupport.changeStream(Document.class).listen().subscribe(); + + verify(template).changeStream(isNull(), eq(ChangeStreamOptions.empty()), eq(Document.class)); + } + + @Test // DATAMONGO-2089 + void listenWithDomainTypeUsesSourceAsTarget() { + + changeStreamSupport.changeStream(Person.class).listen().subscribe(); + + verify(template).changeStream(isNull(), eq(ChangeStreamOptions.empty()), eq(Person.class)); + } + + @Test // DATAMONGO-2089 + void collectionNameIsPassedOnCorrectly() { + + changeStreamSupport.changeStream(Person.class).watchCollection("star-wars").listen().subscribe(); + + verify(template).changeStream(eq("star-wars"), eq(ChangeStreamOptions.empty()), eq(Person.class)); + } + + @Test // DATAMONGO-2089 + void listenWithDomainTypeCreatesTypedAggregation() { + + Criteria criteria = where("operationType").is("insert"); + changeStreamSupport.changeStream(Person.class).filter(criteria).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Person.class)); + + assertThat(optionsArgumentCaptor.getValue().getFilter()).hasValueSatisfying(it -> { + + assertThat(it).isInstanceOf(TypedAggregation.class); + TypedAggregation aggregation = (TypedAggregation) it; + + assertThat(aggregation.getInputType()).isEqualTo(Person.class); + assertThat(extractPipeline(aggregation)) + 
.containsExactly(new Document("$match", new Document("operationType", "insert"))); + }); + } + + @Test // DATAMONGO-2089 + void listenWithoutDomainTypeCreatesUntypedAggregation() { + + Criteria criteria = where("operationType").is("insert"); + changeStreamSupport.changeStream(Document.class).filter(criteria).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Document.class)); + + assertThat(optionsArgumentCaptor.getValue().getFilter()).hasValueSatisfying(it -> { + + assertThat(it).isInstanceOf(Aggregation.class); + assertThat(it).isNotInstanceOf(TypedAggregation.class); + + Aggregation aggregation = (Aggregation) it; + + assertThat(extractPipeline(aggregation)) + .containsExactly(new Document("$match", new Document("operationType", "insert"))); + }); + } + + @Test // DATAMONGO-2089 + void optionsShouldBePassedOnCorrectly() { + + Document filter = new Document("$match", new Document("operationType", "insert")); + + changeStreamSupport.changeStream(Document.class).withOptions(options -> { + options.filter(filter); + }).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Document.class)); + + assertThat(optionsArgumentCaptor.getValue()).satisfies(it -> { + assertThat(it.getFilter().get()).isEqualTo(Collections.singletonList(filter)); + }); + } + + @Test // DATAMONGO-2089 + void optionsShouldBeCombinedCorrectly() { + + Document filter = new Document("$match", new Document("operationType", "insert")); + Instant resumeTimestamp = Instant.now(); + + changeStreamSupport.changeStream(Document.class).withOptions(options -> { + options.filter(filter); + }).resumeAt(resumeTimestamp).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Document.class)); + + assertThat(optionsArgumentCaptor.getValue()).satisfies(it -> { + + assertThat(it.getFilter().get()).isEqualTo(Collections.singletonList(filter)); + assertThat(it.getResumeTimestamp()).contains(resumeTimestamp); + }); + } + + private static List extractPipeline(Aggregation aggregation) { + return aggregation.toDocument("person", Aggregation.DEFAULT_CONTEXT).get("pipeline", ArrayList.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveClientSessionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveClientSessionTests.java new file mode 100644 index 0000000000..9c49a3a743 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveClientSessionTests.java @@ -0,0 +1,190 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Beyond the Shadows - Brent Weeks + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +public class ReactiveClientSessionTests { + + static final String DATABASE_NAME = "reflective-client-session-tests"; + static final String COLLECTION_NAME = "test"; + + static @Client MongoClient client; + + ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + + template = new ReactiveMongoTemplate(client, DATABASE_NAME); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, COLLECTION_NAME, client) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.insert(new Document("_id", "id-1").append("value", "spring"), COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1880 + public void shouldApplyClientSession() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + + assertThat(session.getOperationTime()).isNull(); + + template.withSession(() -> session) // + .execute(action -> action.findAll(Document.class, COLLECTION_NAME)) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + + assertThat(session.getOperationTime()).isNotNull(); + assertThat(session.getServerSession().isClosed()).isFalse(); + + session.close(); + } + + @Test // DATAMONGO-1880 + public void useMonoInCallback() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + + assertThat(session.getOperationTime()).isNull(); + + template.withSession(() -> session).execute(action -> action.findOne(new Query(), Document.class, COLLECTION_NAME)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(session.getOperationTime()).isNotNull(); + assertThat(session.getServerSession().isClosed()).isFalse(); + + session.close(); + } + + @Test // DATAMONGO-1880 + public void reusesClientSessionInSessionScopedCallback() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + CountingSessionSupplier sessionSupplier = new CountingSessionSupplier(session); + + ReactiveSessionScoped sessionScoped = 
template.withSession(sessionSupplier); + + sessionScoped.execute(action -> action.findOne(new Query(), Document.class, COLLECTION_NAME)).blockFirst(); + assertThat(sessionSupplier.getInvocationCount()).isEqualTo(1); + + sessionScoped.execute(action -> action.findOne(new Query(), Document.class, COLLECTION_NAME)).blockFirst(); + assertThat(sessionSupplier.getInvocationCount()).isEqualTo(1); + } + + @Test // DATAMONGO-1970 + public void addsClientSessionToContext() { + + template.withSession(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())) + .execute(action -> ReactiveMongoContext.getSession()) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-2001 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void countInTransactionShouldReturnCount() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + + template.withSession(() -> session).execute(action -> { + + session.startTransaction(); + + return action.insert(new Document("_id", "id-2").append("value", "in transaction"), COLLECTION_NAME) // + .then(action.count(query(where("value").is("in transaction")), Document.class, COLLECTION_NAME)) // + .flatMap(it -> Mono.from(session.commitTransaction()).then(Mono.just(it))); + + }).as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + template.withSession(() -> session).execute(action -> { + + session.startTransaction(); + + return action.insert(new Document("value", "in transaction"), COLLECTION_NAME) // + .then(action.count(query(where("value").is("foo")), Document.class, COLLECTION_NAME)) // + .flatMap(it -> Mono.from(session.commitTransaction()).then(Mono.just(it))); + + }).as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + static class CountingSessionSupplier implements Supplier { + + AtomicInteger invocationCount = new AtomicInteger(0); + final ClientSession session; + + public CountingSessionSupplier(ClientSession session) { + this.session = session; + } + + @Override + public ClientSession get() { + + invocationCount.incrementAndGet(); + return session; + } + + int getInvocationCount() { + return invocationCount.get(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java new file mode 100644 index 0000000000..f23e973202 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java @@ -0,0 +1,901 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
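For reference, a sketch of session-scoped execution as exercised by the client session tests above: the session supplier is resolved once and the resulting session is reused for every operation inside the scope (client and template wiring assumed).

```java
import reactor.core.publisher.Flux;

import org.bson.Document;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import com.mongodb.ClientSessionOptions;
import com.mongodb.reactivestreams.client.MongoClient;

class SessionScopedSketch {

	static Flux<Document> findAllInSession(ReactiveMongoTemplate template, MongoClient client) {
		return template
				.withSession(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()))
				.execute(action -> action.findAll(Document.class, "test"));
	}
}
```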
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; + +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import java.util.Date; +import java.util.Objects; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import org.bson.BsonString; +import org.bson.BsonValue; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link ReactiveFindOperationSupport}. 
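+ * <p>
+ * Note: fixture data is arranged with the blocking {@link MongoTemplate} while the reactive template under test
+ * is verified through {@code StepVerifier}.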
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Juergen Zimmermann + */ +@ExtendWith({ MongoClientExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class ReactiveFindOperationSupportTests implements StateFunctions { + + private static final String STAR_WARS = "star-wars"; + private MongoTemplate blocking; + private ReactiveMongoTemplate template; + + private static @Client MongoClient client; + private static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + private Person han; + private Person luke; + + void setUp() { + blocking = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "ExecutableFindOperationSupportTests")); + template = new ReactiveMongoTemplate(reactiveClient, "ExecutableFindOperationSupportTests"); + } + + @Override + public void clear() { + if (blocking == null) { + setUp(); + } + recreateCollection(STAR_WARS, false); + } + + @Override + public void setupState() { + if (blocking == null) { + setUp(); + } + insertObjects(); + } + + void insertObjects() { + + han = new Person(); + han.firstname = "han"; + han.lastname = "solo"; + han.id = "id-1"; + + luke = new Person(); + luke.firstname = "luke"; + luke.lastname = "skywalker"; + luke.id = "id-2"; + + blocking.save(han); + blocking.save(luke); + } + + void recreateCollection(String collectionName, boolean capped) { + + blocking.dropCollection(STAR_WARS); + + CollectionOptions options = CollectionOptions.empty(); + if (capped) { + options = options.capped().size(1024 * 1024); + } + + blocking.createCollection(STAR_WARS, options); + } + + @Test // DATAMONGO-1719 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(null)); + } + + @Test // DATAMONGO-1719 + void returnTypeIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).as(null)); + } + + @Test // DATAMONGO-1719 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1719 + void findAll() { + + template.query(Person.class).all().collectList().as(StepVerifier::create).consumeNextWith(actual -> { + assertThat(actual).containsExactlyInAnyOrder(han, luke); + }).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllWithCollection() { + template.query(Human.class).inCollection(STAR_WARS).all().as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); + } + + @Test // DATAMONGO-2323 + void findAllAsDocumentDocument() { + template.query(Document.class).inCollection(STAR_WARS).all().as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllWithProjection() { + + template.query(Person.class).as(Jedi.class).all().map(it -> it.getClass().getName()).as(StepVerifier::create) // + .expectNext(Jedi.class.getName(), Jedi.class.getName()) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllBy() { + + template.query(Person.class).matching(query(where("firstname").is("luke"))).all().as(StepVerifier::create) // + .expectNext(luke) // + .verifyComplete(); + } + + @Test // DATAMONGO-2416 + void findAllByCriteria() { + + template.query(Person.class).matching(where("firstname").is("luke")).all().as(StepVerifier::create) // + .expectNext(luke) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllByWithCollectionUsingMappingInformation() { + + 
template.query(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllByWithCollection() { + + template.query(Human.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllByWithProjection() { + + template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllByWithClosedInterfaceProjection() { + + template.query(Person.class).as(PersonProjection.class).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).isInstanceOf(PersonProjection.class); + assertThat(it.getFirstname()).isEqualTo("luke"); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findAllByWithOpenInterfaceProjection() { + + template.query(Person.class).as(PersonSpELProjection.class).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).isInstanceOf(PersonSpELProjection.class); + assertThat(it.getName()).isEqualTo("luke"); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findBy() { + + template.query(Person.class).matching(query(where("firstname").is("luke"))).one().as(StepVerifier::create) + .expectNext(luke) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findByNoMatch() { + + template.query(Person.class).matching(query(where("firstname").is("spock"))).one().as(StepVerifier::create) + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void findByTooManyResults() { + + template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one().as(StepVerifier::create) + .expectError(IncorrectResultSizeDataAccessException.class) // + .verify(); + } + + @Test // DATAMONGO-1719 + @DirtiesState + void findAllNearBy() { + + blocking.indexOps(Planet.class).ensureIndex( + new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); + + Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538)); + Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193)); + + blocking.save(alderan); + blocking.save(dantooine); + + template.query(Planet.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)).all().as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual.getDistance()).isNotNull(); + }) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + @DirtiesState + void findAllNearByWithCollectionAndProjection() { + + blocking.indexOps(Planet.class).ensureIndex( + new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); + + Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538)); + Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193)); + + blocking.save(alderan); + blocking.save(dantooine); + + template.query(Object.class).inCollection(STAR_WARS).as(Human.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all().as(StepVerifier::create) + .consumeNextWith(actual -> { + 
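+ // near(...) emits GeoResult values: a server-computed distance plus the content mapped to the requested type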
assertThat(actual.getDistance()).isNotNull(); + assertThat(actual.getContent()).isInstanceOf(Human.class); + assertThat(actual.getContent().getId()).isEqualTo("alderan"); + }) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + @DirtiesState + void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { + + blocking.indexOps(Planet.class).ensureIndex( + new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); + + Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538)); + Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193)); + + blocking.save(alderan); + blocking.save(dantooine); + + template.query(Planet.class).as(PlanetProjection.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)).all() + .as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it.getDistance()).isNotNull(); + assertThat(it.getContent()).isInstanceOf(PlanetProjection.class); + assertThat(it.getContent().getName()).isEqualTo("alderan"); + }) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + @DirtiesState + void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { + + blocking.indexOps(Planet.class).ensureIndex( + new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); + + Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538)); + Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193)); + + blocking.save(alderan); + blocking.save(dantooine); + + template.query(Planet.class).as(PlanetSpELProjection.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)) + .all().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it.getDistance()).isNotNull(); + assertThat(it.getContent()).isInstanceOf(PlanetSpELProjection.class); + assertThat(it.getContent().getId()).isEqualTo("alderan"); + }) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tail() throws InterruptedException { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + BlockingQueue<Person> collector = new LinkedBlockingQueue<>(); + Flux<Person> tail = template.query(Person.class) + .matching(query(new Criteria().orOperator(where("firstname").is("chewbacca"), where("firstname").is("luke")))) + .tail().doOnNext(collector::add); + + Disposable subscription = tail.subscribe(); + + assertThat(collector.poll(1, TimeUnit.SECONDS)).isEqualTo(luke); + assertThat(collector).isEmpty(); + + Person chewbacca = new Person(); + chewbacca.firstname = "chewbacca"; + chewbacca.lastname = "chewie"; + chewbacca.id = "id-3"; + + blocking.save(chewbacca); + + assertThat(collector.poll(1, TimeUnit.SECONDS)).isEqualTo(chewbacca); + + subscription.dispose(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tailWithProjection() { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).tail() + .as(StepVerifier::create) // + .consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // + .thenCancel() // + .verify(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tailWithClosedInterfaceProjection() { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + template.query(Person.class).as(PersonProjection.class).matching(query(where("firstname").is("luke"))).tail() + .as(StepVerifier::create) // + .consumeNextWith(it -> { + +
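+ // a tailable cursor on the capped collection never completes by itself, hence thenCancel() instead of
+ // awaiting completion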
assertThat(it).isInstanceOf(PersonProjection.class); + assertThat(it.getFirstname()).isEqualTo("luke"); + }) // + .thenCancel() // + .verify(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tailWithOpenInterfaceProjection() { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + template.query(Person.class).as(PersonSpELProjection.class).matching(query(where("firstname").is("luke"))).tail() + .as(StepVerifier::create) // + .consumeNextWith(it -> { + + assertThat(it).isInstanceOf(PersonSpELProjection.class); + assertThat(it.getName()).isEqualTo("luke"); + }) // + .thenCancel() // + .verify(); + } + + @Test // DATAMONGO-1719 + void firstShouldReturnFirstEntryInCollection() { + template.query(Person.class).first().as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void countShouldReturnNrOfElementsInCollectionWhenNoQueryPresent() { + template.query(Person.class).count().as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void countShouldReturnNrOfElementsMatchingQuery() { + + template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).count() + .as(StepVerifier::create).expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void existsShouldReturnTrueIfAtLeastOneElementExistsInCollection() { + template.query(Person.class).exists().as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1719 + @DirtiesState + void existsShouldReturnFalseIfNoElementExistsInCollection() { + + blocking.remove(new BasicQuery("{}"), STAR_WARS); + + template.query(Person.class).exists().as(StepVerifier::create).expectNext(false).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void existsShouldReturnTrueIfAtLeastOneElementMatchesQuery() { + + template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).exists() + .as(StepVerifier::create).expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void existsShouldReturnFalseWhenNoElementMatchesQuery() { + + template.query(Person.class).matching(query(where("firstname").is("spock"))).exists().as(StepVerifier::create) + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + void distinctReturnsEmptyListIfNoMatchFound() { + + template.query(Person.class).distinct("actually-not-property-in-use").as(String.class).all() + .as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTypeSpecifiedThatCanBeConvertedDirectlyByACodec() { + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.lastname = luke.lastname; + + blocking.save(anakin); + + template.query(Person.class).distinct("lastname").as(String.class).all().as(StepVerifier::create) + .assertNext(in("solo", "skywalker")).assertNext(in("solo", "skywalker")) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsSimpleFieldValuesCorrectly() { + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = "dark-lord"; + + Person padme = new Person(); + padme.firstname = "padme"; + padme.ability = 42L; + + Person jaja = new Person(); + jaja.firstname = "jaja"; + jaja.ability = new Date(); + + blocking.save(anakin); + blocking.save(padme); + blocking.save(jaja); + + Consumer<Object> containedInAbilities = in(anakin.ability, padme.ability, jaja.ability); + +
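+ // distinct does not guarantee any ordering, so each emission is only checked for membership in the
+ // expected value set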
template.query(Person.class).distinct("ability").all().as(StepVerifier::create) // + .assertNext(containedInAbilities) // + .assertNext(containedInAbilities) // + .assertNext(containedInAbilities) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsComplexValuesCorrectly() { + + Sith sith = new Sith(); + sith.rank = "lord"; + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = sith; + + blocking.save(anakin); + + template.query(Person.class).distinct("ability").all().as(StepVerifier::create) // + .expectNext(anakin.ability) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { + + Sith sith = new Sith(); + sith.rank = "lord"; + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = sith; + + blocking.save(anakin); + + template.query(Person.class).distinct("ability").as(Sith.class).all().as(StepVerifier::create) // + .expectNext(sith) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsComplexValuesCorrectlyReturnTypeDocumentSpecified() { + + Sith sith = new Sith(); + sith.rank = "lord"; + + Person anakin = new Person(); + anakin.firstname = "anakin"; + anakin.ability = sith; + + blocking.save(anakin); + + template.query(Person.class).distinct("ability").as(Document.class).all().as(StepVerifier::create) + .expectNext(new Document("rank", "lord").append("_class", Sith.class.getName())) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + void distinctMapsFieldNameCorrectly() { + + template.query(Jedi.class).inCollection(STAR_WARS).distinct("name").as(String.class).all().as(StepVerifier::create) + .assertNext(in("han", "luke")).assertNext(in("han", "luke")) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + void distinctReturnsRawValuesIfReturnTypeIsBsonValue() { + + Consumer<BsonValue> inValues = in(new BsonString("solo"), new BsonString("skywalker")); + template.query(Person.class).distinct("lastname").as(BsonValue.class).all().as(StepVerifier::create) + .assertNext(inValues) // + .assertNext(inValues) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefinedByTheDomainType() { + + blocking.save(new Document("darth", "vader"), STAR_WARS); + + template.query(Person.class).distinct("darth").all().as(StepVerifier::create) // + .expectNext("vader") // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsMappedDomainTypeForProjections() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + blocking.save(luke); + + template.query(Person.class).distinct("father").as(Jedi.class).all().as(StepVerifier::create) + .expectNext(new Jedi("anakin")) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctAllowsQueryUsingObjectSourceType() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + blocking.save(luke); + + template.query(Object.class).inCollection(STAR_WARS).distinct("father").as(Jedi.class).all() + .as(StepVerifier::create).expectNext(new Jedi("anakin")) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + @DirtiesState + void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTypePresent() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + blocking.save(luke); + + Person expected = new Person(); + expected.firstname =
luke.father.firstname; + + template.query(Person.class).distinct("father").all().as(StepVerifier::create) // + .expectNext(expected) // + .verifyComplete(); + } + + @Test // DATAMONGO-1761 + void distinctThrowsExceptionWhenExplicitMappingTypeCannotBeApplied() { + + template.query(Person.class).distinct("firstname").as(Long.class).all().as(StepVerifier::create) + .expectError(InvalidDataAccessApiUsageException.class) // + .verify(); + } + + @Test // DATAMONGO-2507 + void distinctAppliesFilterQuery() { + + template.query(Person.class).inCollection(STAR_WARS).distinct("firstname") // + .matching(where("lastname").is(luke.lastname)) // + .as(String.class) // + .all() // + .as(StepVerifier::create).consumeNextWith(it -> assertThat(it).isEqualTo("luke")) // + .verifyComplete(); + } + + interface Contact {} + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person implements Contact { + + @Id String id; + String firstname; + String lastname; + Object ability; + Person father; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(ability, person.ability) + && Objects.equals(father, person.father); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, ability, father); + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + this.getFather() + + ")"; + } + } + + interface PersonProjection { + String getFirstname(); + } + + public interface PersonSpELProjection { + + @Value("#{target.firstname}") + String getName(); + } + + static class Human { + + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Human(id=" + this.getId() + ")"; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Jedi(name=" + 
this.getName() + ")"; + } + } + + static class Sith { + + String rank; + + public String getRank() { + return this.rank; + } + + public void setRank(String rank) { + this.rank = rank; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sith sith = (Sith) o; + return Objects.equals(rank, sith.rank); + } + + @Override + public int hashCode() { + return Objects.hash(rank); + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Sith(rank=" + this.getRank() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Planet { + + @Id String name; + Point coordinates; + + public Planet(String name, Point coordinates) { + this.name = name; + this.coordinates = coordinates; + } + + public String getName() { + return this.name; + } + + public Point getCoordinates() { + return this.coordinates; + } + + public void setName(String name) { + this.name = name; + } + + public void setCoordinates(Point coordinates) { + this.coordinates = coordinates; + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Planet(name=" + this.getName() + ", coordinates=" + + this.getCoordinates() + ")"; + } + } + + interface PlanetProjection { + String getName(); + } + + interface PlanetSpELProjection { + + @Value("#{target.name}") + String getId(); + } + + static Consumer in(T... values) { + return (val) -> { + assertThat(values).contains(val); + }; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java new file mode 100644 index 0000000000..b417430934 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java @@ -0,0 +1,147 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.Arrays; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; + +/** + * Unit tests for {@link ExecutableInsertOperationSupport}. 
+ * + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveInsertOperationSupportUnitTests { + + private static final String STAR_WARS = "star-wars"; + + @Mock ReactiveMongoTemplate template; + + private ReactiveInsertOperationSupport ops; + + private Person luke, han; + + @BeforeEach + void setUp() { + + ops = new ReactiveInsertOperationSupport(template); + + luke = new Person(); + luke.id = "id-1"; + luke.firstname = "luke"; + + han = new Person(); + han.firstname = "han"; + han.id = "id-2"; + } + + @Test // DATAMONGO-1719 + void nullCollectionShouldThrowException() { + assertThatIllegalArgumentException().isThrownBy(() -> ops.insert(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1719 + void insertShouldUseDerivedCollectionName() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + + ops.insert(Person.class).one(luke); + + ArgumentCaptor<Class> captor = ArgumentCaptor.forClass(Class.class); + + verify(template).getCollectionName(captor.capture()); + verify(template).insert(eq(luke), eq(STAR_WARS)); + + assertThat(captor.getAllValues()).containsExactly(Person.class); + } + + @Test // DATAMONGO-1719 + void insertShouldUseExplicitCollectionName() { + + ops.insert(Person.class).inCollection(STAR_WARS).one(luke); + + verify(template, never()).getCollectionName(any(Class.class)); + verify(template).insert(eq(luke), eq(STAR_WARS)); + } + + @Test // DATAMONGO-1719 + void insertCollectionShouldDelegateCorrectly() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + + ops.insert(Person.class).all(Arrays.asList(luke, han)); + + verify(template).getCollectionName(any(Class.class)); + verify(template).insert(anyList(), eq(STAR_WARS)); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person { + + @Id String id; + String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveInsertOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupportUnitTests.java new file mode 100644 index 0000000000..609a456912 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupportUnitTests.java @@ -0,0 +1,237 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link ReactiveMapReduceOperationSupport}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Beyond the Shadows - Brent Weeks + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveMapReduceOperationSupportUnitTests { + + private static final String STAR_WARS = "star-wars"; + private static final String MAP_FUNCTION = "function() { emit(this.id, this.firstname) }"; + private static final String REDUCE_FUNCTION = "function(id, name) { return sum(id, name); }"; + + @Mock ReactiveMongoTemplate template; + + private ReactiveMapReduceOperationSupport mapReduceOpsSupport; + + @BeforeEach + void setUp() { + mapReduceOpsSupport = new ReactiveMapReduceOperationSupport(template); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullTemplate() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReactiveMapReduceOperationSupport(null)); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> mapReduceOpsSupport.mapReduce(null)); + } + + @Test // DATAMONGO-1929 + void usesExtractedCollectionName() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-1929 + void usesExplicitCollectionName() { + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .inCollection("the-night-angel").all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq("the-night-angel"), eq(Person.class), + eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-1929 + void usesMapReduceOptionsWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + MapReduceOptions options = MapReduceOptions.options(); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).with(options).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), eq(options)); +
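+ // the very MapReduceOptions instance passed to with(...) is expected to reach the template unchanged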
} + + @Test // DATAMONGO-1929 + void usesQueryWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + Query query = new BasicQuery("{ 'lastname' : 'skywalker' }"); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).matching(query).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-2416 + void usesCriteriaWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + Query query = Query.query(where("lastname").is("skywalker")); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .matching(where("lastname").is("skywalker")).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-1929 + void usesProjectionWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).as(Jedi.class).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(Jedi.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + interface Contact {} + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person implements Contact { + + @Id String id; + String firstname; + String lastname; + Object ability; + Person father; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(ability, person.ability) + && Objects.equals(father, person.father); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, ability, father); + } + + public String toString() { + return "ReactiveMapReduceOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + + this.getFather() + ")"; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ReactiveMapReduceOperationSupportUnitTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java new file mode 100644 index 0000000000..effdc931df --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Mark Paluch + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@ContextConfiguration +public class ReactiveMongoTemplateCollationTests { + + public static final String COLLECTION_NAME = "collation-1"; + static @Client MongoClient mongoClient; + + @Configuration + static class Config extends AbstractReactiveMongoConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "collation-tests"; + } + + @Override + protected Set<Class<?>> getInitialEntitySet() { + return Collections.emptySet(); + } + } + + @Autowired ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + template.dropCollection(COLLECTION_NAME).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1693 + public void createCollectionWithCollation() { + + template.createCollection(COLLECTION_NAME, CollectionOptions.just(Collation.of("en_US"))).as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Mono<Document> collation = getCollationInfo(COLLECTION_NAME); + collation.as(StepVerifier::create) // + .consumeNextWith(document -> assertThat(document.get("locale")).isEqualTo("en_US")) // + .verifyComplete(); + + } + + private Mono<Document> getCollationInfo(String collectionName) { + + return getCollectionInfo(collectionName) // + .map(it -> it.get("options",
Document.class)) // + .map(it -> it.get("collation", Document.class)); + } + + @SuppressWarnings("unchecked") + private Mono<Document> getCollectionInfo(String collectionName) { + + return template.execute(db -> { + + return Flux.from(db.runCommand(new Document() // + .append("listCollections", 1) // + .append("filter", new Document("name", collectionName)))) // + .map(it -> it.get("cursor", Document.class)) + .flatMapIterable(it -> (List<Document>) it.get("firstBatch", List.class)); + }).next(); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java new file mode 100644 index 0000000000..3bf9035a44 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java @@ -0,0 +1,194 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.data.Offset.offset; +import static org.junit.Assume.*; + +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import org.bson.Document; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.util.Version; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.MongoException; +import com.mongodb.ReadPreference; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Integration test for {@link ReactiveMongoTemplate} execute methods.
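+ * <p>
+ * Note: {@code setUp()} issues a {@code buildInfo} command once to capture the server version so that
+ * version-dependent tests can be skipped on older servers.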
+ * + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class ReactiveMongoTemplateExecuteTests { + + private static final Version THREE = Version.parse("3.0"); + + @Autowired SimpleReactiveMongoDatabaseFactory factory; + @Autowired ReactiveMongoOperations operations; + + Version mongoVersion; + + @Before + public void setUp() { + + Flux<Void> cleanup = operations.dropCollection("person") // + .mergeWith(operations.dropCollection("execute_test")) // + .mergeWith(operations.dropCollection("execute_test1")) // + .mergeWith(operations.dropCollection("execute_test2")); + + cleanup.as(StepVerifier::create).verifyComplete(); + + if (mongoVersion == null) { + mongoVersion = operations.executeCommand("{ buildInfo: 1 }") // + .map(it -> it.get("version").toString())// + .map(Version::parse) // + .block(); + } + } + + @Test // DATAMONGO-1444 + public void executeCommandJsonCommandShouldReturnSingleResponse() { + + operations.executeCommand("{ buildInfo: 1 }").as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual).containsKey("version"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void executeCommandDocumentCommandShouldReturnSingleResponse() { + + operations.executeCommand(new Document("buildInfo", 1)).as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual).containsKey("version"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void executeCommandJsonCommandShouldReturnMultipleResponses() { + + assumeTrue(mongoVersion.isGreaterThan(THREE)); + + operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").as(StepVerifier::create) + .expectNextCount(1).verifyComplete(); + + operations.executeCommand("{ find: 'execute_test'}").as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.get("ok", Double.class)).isCloseTo(1D, offset(0D)); + assertThat(actual).containsKey("cursor"); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void executeCommandJsonCommandShouldTranslateExceptions() { + + operations.executeCommand("{ unknown: 1 }").as(StepVerifier::create) // + .expectError(InvalidDataAccessApiUsageException.class) // + .verify(); + } + + @Test // DATAMONGO-1444 + public void executeCommandDocumentCommandShouldTranslateExceptions() { + + operations.executeCommand(new Document("unknown", 1)).as(StepVerifier::create) // + .expectError(InvalidDataAccessApiUsageException.class) // + .verify(); + + } + + @Test // DATAMONGO-1444 + public void executeCommandWithReadPreferenceCommandShouldTranslateExceptions() { + + operations.executeCommand(new Document("unknown", 1), ReadPreference.nearest()).as(StepVerifier::create) // + .expectError(InvalidDataAccessApiUsageException.class) // + .verify(); + } + + @Test // DATAMONGO-1444 + public void executeOnDatabaseShouldExecuteCommand() { + + Flux<Document> documentFlux = operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}") + .mergeWith(operations.executeCommand("{ insert: 'execute_test1', documents: [{},{},{}]}")) + .mergeWith(operations.executeCommand("{ insert: 'execute_test2', documents: [{},{},{}]}")); + + documentFlux.as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + Flux<Document> execute = operations.execute(MongoDatabase::listCollections); + + execute.filter(document -> document.getString("name").startsWith("execute_test")).as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 +
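+ // execute(...) merely assembles a deferred Flux; the callback does not run until subscription, so the
+ // exception thrown inside the callback must not surface here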
public void executeOnDatabaseShouldDeferExecution() { + + operations.execute(db -> { + throw new MongoException(50, "hi there"); + }); + + // the assertion here is that the exception is not thrown + } + + @Test // DATAMONGO-1444 + public void executeOnDatabaseShouldTranslateExceptions() { + + Flux<Document> execute = operations.execute(db -> { + throw new MongoException(50, "hi there"); + }); + + execute.as(StepVerifier::create).expectError(UncategorizedMongoDbException.class).verify(); + } + + @Test // DATAMONGO-1444 + public void executeOnCollectionWithTypeShouldReturnFindResults() { + + operations.executeCommand("{ insert: 'person', documents: [{},{},{}]}").as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + operations.execute(Person.class, MongoCollection::find).as(StepVerifier::create).expectNextCount(3) + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void executeOnCollectionWithNameShouldReturnFindResults() { + + operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + operations.execute("execute_test", MongoCollection::find).as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java new file mode 100644 index 0000000000..75b38390cb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java @@ -0,0 +1,326 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.data.Index.atIndex; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.RepeatFailedTest; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.index.IndexField; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.Indexed; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Integration test for index creation via {@link ReactiveMongoTemplate}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MongoClientExtension.class) +public class ReactiveMongoTemplateIndexTests { + + private static @Client MongoClient client; + + private SimpleReactiveMongoDatabaseFactory factory; + private ReactiveMongoTemplate template; + + @BeforeEach + void setUp() { + + factory = new SimpleReactiveMongoDatabaseFactory(client, "reactive-template-index-tests"); + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setAutoIndexCreation(true); + template = new ReactiveMongoTemplate(factory, new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + + MongoTestUtils.dropCollectionNow("reactive-template-index-tests", "person", client); + MongoTestUtils.dropCollectionNow("reactive-template-index-tests", "indexfail", client); + MongoTestUtils.dropCollectionNow("reactive-template-index-tests", "indexedSample", client); + } + + @AfterEach + void cleanUp() {} + + @RepeatFailedTest(3) // DATAMONGO-1444 + void testEnsureIndexShouldCreateIndex() { + + Person p1 = new Person("Oliver"); + p1.setAge(25); + template.insert(p1); + Person p2 = new Person("Sven"); + p2.setAge(40); + template.insert(p2); + + template.indexOps(Person.class) // + .ensureIndex(new Index().on("age", Direction.DESC).unique()) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.getCollection(template.getCollectionName(Person.class)).flatMapMany(MongoCollection::listIndexes) + .collectList() // + .as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + + assertThat(indexInfo).hasSize(2); + Object indexKey = null; + boolean unique = false; + for (Document ix : indexInfo) { + + if ("age_-1".equals(ix.get("name"))) { + indexKey = ix.get("key"); + unique = (Boolean) 
ix.get("unique"); + } + } + assertThat((Document) indexKey).containsEntry("age", -1); + assertThat(unique).isTrue(); + }).verifyComplete(); + } + + @RepeatFailedTest(3) // DATAMONGO-1444 + void getIndexInfoShouldReturnCorrectIndex() { + + Person p1 = new Person("Oliver"); + p1.setAge(25); + template.insert(p1) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.indexOps(Person.class) // + .ensureIndex(new Index().on("age", Direction.DESC).unique()) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.indexOps(Person.class).getIndexInfo().collectList() // + .as(StepVerifier::create) // + .consumeNextWith(indexInfos -> { + + assertThat(indexInfos).hasSize(2); + + IndexInfo ii = indexInfos.get(1); + assertThat(ii.isUnique()).isTrue(); + assertThat(ii.isSparse()).isFalse(); + + assertThat(ii.getIndexFields()).contains(IndexField.create("age", Direction.DESC), atIndex(0)); + }).verifyComplete(); + } + + @RepeatFailedTest(3) // DATAMONGO-1444, DATAMONGO-2264 + void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() { + + template.indexOps(Person.class).dropAllIndexes() // + .as(StepVerifier::create) // + .verifyComplete(); + + template.indexOps(Person.class).getIndexInfo() // + .as(StepVerifier::create) // + .verifyComplete(); + + factory.getMongoDatabase() // + .flatMapMany(db -> db.getCollection(template.getCollectionName(Person.class)) + .createIndex(new Document("age", -1), new IndexOptions().unique(true).sparse(true))) + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.getCollection(template.getCollectionName(Person.class)).flatMapMany(MongoCollection::listIndexes) + .collectList() // + .as(StepVerifier::create) // + .consumeNextWith(indexInfos -> { + + Document indexKey = null; + boolean unique = false; + + for (Document document : indexInfos) { + + if ("age_-1".equals(document.get("name"))) { + indexKey = (org.bson.Document) document.get("key"); + unique = (Boolean) document.get("unique"); + } + } + + assertThat(indexKey).containsEntry("age", -1); + assertThat(unique).isTrue(); + }).verifyComplete(); + + Flux.from(template.indexOps(Person.class).getIndexInfo().collectList()) // + .as(StepVerifier::create) // + .consumeNextWith(indexInfos -> { + + IndexInfo info = indexInfos.get(1); + assertThat(info.isUnique()).isTrue(); + assertThat(info.isSparse()).isTrue(); + + assertThat(info.getIndexFields()).contains(IndexField.create("age", Direction.DESC), atIndex(0)); + }).verifyComplete(); + } + + @RepeatFailedTest(3) // DATAMONGO-1928 + void shouldCreateIndexOnAccess() { + + template.getCollection("indexedSample").flatMapMany(it -> it.listIndexes(Document.class)) // + .as(StepVerifier::create) // + .expectNextCount(0) // + .verifyComplete(); + + template.findAll(IndexedSample.class).defaultIfEmpty(new IndexedSample()) // + .delayElements(Duration.ofMillis(500)) // TODO: check if 4.2.0 server GA still requires this timeout + .then() + .as(StepVerifier::create) // + .verifyComplete(); + + template.getCollection("indexedSample").flatMapMany(it -> it.listIndexes(Document.class)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @RepeatFailedTest(3) // DATAMONGO-1928, DATAMONGO-2264 + void indexCreationShouldFail() throws InterruptedException { + + factory.getMongoDatabase() // + .flatMapMany(db -> db.getCollection("indexfail") // + .createIndex(new Document("field", 1), new IndexOptions().name("foo").unique(true).sparse(true))) + 
.as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + BlockingQueue<Throwable> queue = new LinkedBlockingQueue<>(); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory, this.template.getConverter(), queue::add); + + template.findAll(IndexCreationShouldFail.class).subscribe(); + + Throwable failure = queue.poll(10, TimeUnit.SECONDS); + + assertThat(failure).isNotNull().isInstanceOf(DataIntegrityViolationException.class); + } + + static class Sample { + + @Id String id; + String field; + + public Sample() {} + + public Sample(String id, String field) { + this.id = id; + this.field = field; + } + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + public String toString() { + return "ReactiveMongoTemplateIndexTests.Sample(id=" + this.getId() + ", field=" + this.getField() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document + static class IndexedSample { + + @Id String id; + @Indexed String field; + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + public String toString() { + return "ReactiveMongoTemplateIndexTests.IndexedSample(id=" + this.getId() + ", field=" + this.getField() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document("indexfail") + static class IndexCreationShouldFail { + + @Id String id; + @Indexed(name = "foo") String field; + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + public String toString() { + return "ReactiveMongoTemplateIndexTests.IndexCreationShouldFail(id=" + this.getId() + ", field=" + this.getField() + + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateReplaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateReplaceTests.java new file mode 100644 index 0000000000..86433ab338 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateReplaceTests.java @@ -0,0 +1,329 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.ReplaceOptions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.bson.BsonInt64; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.reactivestreams.Publisher; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.model.Filters; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class ReactiveMongoTemplateReplaceTests { + + static final String DB_NAME = "mongo-template-replace-tests"; + static final String RESTAURANT_COLLECTION = "restaurant"; + + static @Client MongoClient client; + private ReactiveMongoTemplate template; + + @BeforeEach + void beforeEach() { + + template = new ReactiveMongoTemplate(client, DB_NAME); + template.setEntityLifecycleEventsEnabled(false); + + initTestData(); + } + + @AfterEach() + void afterEach() { + clearTestData(); + } + + @Test // GH-4462 + void replacesExistingDocument() { + + Mono<UpdateResult> result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant("Central Pork Cafe", "Manhattan")); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesFirstOnMoreThanOneMatch() { + + Mono<UpdateResult> result = template.replace(query(where("violations").exists(true)), + new Restaurant("Central Pork Cafe", "Manhattan")); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDoc() { + + Mono<UpdateResult> result = template.replace(query(where("r-name").is("Central Perk Cafe")), + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), + template.getCollectionName(Restaurant.class)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDocMappingQueryAgainstDomainType() { + + Mono<UpdateResult> result = template.replace(query(where("name").is("Central Perk Cafe")), Restaurant.class, + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), ReplaceOptions.none(), template.getCollectionName(Restaurant.class)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithMatchingId() { + + Mono<UpdateResult> result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(1L, "Central Pork Cafe", "Manhattan", 0)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithNewIdThrowsDataIntegrityViolationException() { + + template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(4L, "Central Pork Cafe", "Manhattan", 0)) + .as(StepVerifier::create) + .expectError(DataIntegrityViolationException.class) + .verify(); + } + + @Test // GH-4462 + void doesNothingIfNoMatchFoundAndUpsertSetToFalse/* by default */() { + + Mono<UpdateResult> result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(null, "Pizza Rat's Pizzaria", "Manhattan", 8)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(0); + assertThat(it.getModifiedCount()).isEqualTo(0); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("r-name", "Pizza Rat's Pizzaria")).first()) + .as(StepVerifier::create).verifyComplete(); + } + + @Test // GH-4462 + void insertsIfNoMatchFoundAndUpsertSetToTrue() { + + Mono<UpdateResult> result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(4L, "Pizza Rat's Pizzaria", "Manhattan", 8), replaceOptions().upsert()); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(0); + assertThat(it.getModifiedCount()).isEqualTo(0); + assertThat(it.getUpsertedId()).isEqualTo(new BsonInt64(4L)); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 4)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Pizza Rat's Pizzaria"); + }) + .verifyComplete(); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + void replaceConsidersSort() { + + template.replace(new Query().with(Sort.by(Direction.DESC, "name")), new Restaurant("resist", "Manhattan")) // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getModifiedCount()).isOne()) // + .verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "resist"); + }).verifyComplete(); + } + + void initTestData() { + + List<Document> testData = Stream.of( // + "{ '_id' : 1, 'r-name' : 'Central Perk Cafe', 'Borough' : 'Manhattan' }", + "{ '_id' : 2, 'r-name' : 'Rock A Feller Bar and Grill', 'Borough' : 'Queens', 'violations' : 2 }", + "{ '_id' : 3, 'r-name' : 'Empire State Pub', 'Borough' : 'Brooklyn', 'violations' : 0 }") // + .map(Document::parse).collect(Collectors.toList()); + + doInCollection(collection -> collection.insertMany(testData)); + } + + void clearTestData() { + doInCollection(collection -> collection.deleteMany(new Document())); + } + + void doInCollection(Function<MongoCollection<Document>, Publisher<?>> fkt) { + retrieve(collection -> Mono.from(fkt.apply(collection))).then().as(StepVerifier::create).verifyComplete(); + } + + <T> Mono<T> retrieve(Function<MongoCollection<Document>, Publisher<T>> fkt) { + return Mono.from(fkt.apply(client.getDatabase(DB_NAME).getCollection(RESTAURANT_COLLECTION))); + } + + @org.springframework.data.mongodb.core.mapping.Document(RESTAURANT_COLLECTION) + static class Restaurant { + + Long id; + + @Field("r-name") String name; + String borough; + Integer violations; + + Restaurant() {} + + Restaurant(String name, String borough) { + + this.name = name; + this.borough = borough; + } + + Restaurant(Long id, String name, String borough, Integer violations) { + + this.id = id; + this.name = name; + this.borough = borough; + this.violations = violations; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getRName() { + return name; + } + + public void setRName(String rName) { + this.name = rName; + } + + public String getBorough() { + return borough; + } + + public void setBorough(String borough) { + this.borough = borough; + } + + public int getViolations() { + return violations; + } + + public void setViolations(int violations) { + this.violations = violations; + } + + @Override + public String toString() { + return "Restaurant{" + "id=" + id + ", name='" + name + '\'' + ", borough='" + borough + '\'' + ", violations=" + + violations + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Restaurant that = (Restaurant) o; + return Objects.equals(violations, that.violations) && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && Objects.equals(borough, that.borough); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, borough, violations); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateScrollTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateScrollTests.java new file mode 100644 index 0000000000..0e6e94bdf7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateScrollTests.java @@ -0,0 +1,255 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link Window} queries. 
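+ * <p> + * The parameterized tests below page through results with both keyset- and offset-based {@link org.springframework.data.domain.ScrollPosition}s, deriving each follow-up request from {@code Window#positionAt(int)}.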
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +class ReactiveMongoTemplateScrollTests { + + static @Client MongoClient client; + + public static final String DB_NAME = "mongo-template-scroll-tests"; + + ConfigurableApplicationContext context = new GenericApplicationContext(); + + private ReactiveMongoTestTemplate template = new ReactiveMongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + }); + }); + + @BeforeEach + void setUp() { + + template.remove(Person.class).all() // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.remove(WithRenamedField.class).all() // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @ParameterizedTest // GH-4308 + @MethodSource("positions") + public <T> void shouldApplyCursoringCorrectly(ScrollPosition scrollPosition, Class<T> resultType, + Function<Person, T> assertionConverter) { + + Person john20 = new Person("John", 20); + Person john40_1 = new Person("John", 40); + Person john40_2 = new Person("John", 40); + Person jane_20 = new Person("Jane", 20); + Person jane_40 = new Person("Jane", 40); + Person jane_42 = new Person("Jane", 42); + + template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42)) // + .as(StepVerifier::create) // + .expectNextCount(6) // + .verifyComplete(); + + Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age")).limit(2); + q.with(scrollPosition); + + Window<T> window = template.scroll(q, resultType, "person").block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertThat(window).containsOnly(assertionConverter.apply(jane_20), assertionConverter.apply(jane_40)); + + window = template.scroll(q.limit(3).with(window.positionAt(window.size() - 1)), resultType, "person") + .block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(3); + assertThat(window).contains(assertionConverter.apply(jane_42), assertionConverter.apply(john20)); + assertThat(window).containsAnyOf(assertionConverter.apply(john40_1), assertionConverter.apply(john40_2)); + + window = template.scroll(q.limit(1).with(window.positionAt(window.size() - 1)), resultType, "person") + .block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertThat(window).containsAnyOf(assertionConverter.apply(john40_1), assertionConverter.apply(john40_2)); + } + + @ParameterizedTest // GH-4308 + @MethodSource("renamedFieldProjectTargets") + <T> void scrollThroughResultsWithRenamedField(Class<T> resultType, Function<WithRenamedField, T> assertionConverter) { + + WithRenamedField one = new WithRenamedField("id-1", "v1", null); + WithRenamedField two = new WithRenamedField("id-2", "v2", null); + WithRenamedField three = new WithRenamedField("id-3", "v3", null); + + template.insertAll(Arrays.asList(one, two, three)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + Query q = new Query(where("value").regex("v.*")).with(Sort.by(Sort.Direction.DESC, "value")).limit(2); + q.with(ScrollPosition.keyset()); + + Window<T> window = template.query(WithRenamedField.class).as(resultType).matching(q) + .scroll(ScrollPosition.keyset()).block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertThat(window).containsOnly(assertionConverter.apply(three), assertionConverter.apply(two)); + + window = template.query(WithRenamedField.class).as(resultType).matching(q) + .scroll(window.positionAt(window.size() - 1)).block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertThat(window).containsOnly(assertionConverter.apply(one)); + } + + static Stream<Arguments> positions() { + + return Stream.of(args(ScrollPosition.keyset(), Person.class, Function.identity()), // + args(ScrollPosition.keyset(), Document.class, ReactiveMongoTemplateScrollTests::toDocument), // + args(ScrollPosition.offset(), Person.class, Function.identity())); + } + + static Stream<Arguments> renamedFieldProjectTargets() { + return Stream.of(Arguments.of(WithRenamedField.class, Function.identity()), + Arguments.of(Document.class, new Function<WithRenamedField, Document>() { + @Override + public Document apply(WithRenamedField withRenamedField) { + return new Document("_id", withRenamedField.getId()).append("_val", withRenamedField.getValue()) + .append("_class", WithRenamedField.class.getName()); + } + })); + } + + private static <T> Arguments args(ScrollPosition scrollPosition, Class<T> resultType, + Function<Person, T> assertionConverter) { + return Arguments.of(scrollPosition, resultType, assertionConverter); + } + + static Document toDocument(Person person) { + return new Document("_class", person.getClass().getName()).append("_id", person.getId()).append("active", true) + .append("firstName", person.getFirstName()).append("age", person.getAge()); + } + + static class WithRenamedField { + + String id; + + @Field("_val") String value; + + WithRenamedField nested; + + public WithRenamedField() {} + + public WithRenamedField(String id, String value, WithRenamedField nested) { + this.id = id; + this.value = value; + this.nested = nested; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public WithRenamedField getNested() { + return this.nested; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setNested(WithRenamedField nested) { + this.nested = nested; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithRenamedField that = (WithRenamedField) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value) && Objects.equals(nested, that.nested); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, nested); + } + + public String toString() { + return "ReactiveMongoTemplateScrollTests.WithRenamedField(id=" + this.getId() + ", value=" + this.getValue() + + ", nested=" + this.getNested() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java new file mode 100644 index 0000000000..f87227cdde --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java @@ -0,0 +1,2073 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.bson.BsonDocument; +import org.bson.BsonTimestamp; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.geo.Metrics; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoTemplateTests.Address; +import org.springframework.data.mongodb.core.MongoTemplateTests.PersonWithConvertedId; +import org.springframework.data.mongodb.core.MongoTemplateTests.VersionedPerson; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.index.IndexOperationsAdapter; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import 
org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoServerCondition; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; + +import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Integration test for {@link MongoTemplate}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, MongoServerCondition.class }) +public class ReactiveMongoTemplateTests { + + private static final String DB_NAME = "reactive-mongo-template-tests"; + private static @Client MongoClient client; + + private ConfigurableApplicationContext context = new GenericApplicationContext(); + private ReactiveMongoTestTemplate template = new ReactiveMongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + }); + }); + + @BeforeEach + void setUp() { + + template + .flush(Person.class, MyPerson.class, Sample.class, Venue.class, PersonWithVersionPropertyOfTypeInteger.class, + RawStringId.class) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.flush("people", "collection", "personX", "unique_person").as(StepVerifier::create).verifyComplete(); + } + + private ReactiveMongoDatabaseFactory factory = template.getDatabaseFactory(); + + @Test // DATAMONGO-1444 + void insertSetsId() { + + PersonWithAList person = new PersonWithAList(); + assert person.getId() == null; + + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.getId()).isNotNull(); + } + + @Test // DATAMONGO-1444 + void insertAllSetsId() { + + PersonWithAList person = new PersonWithAList(); + + template.insertAll(Collections.singleton(person)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.getId()).isNotNull(); + } + + @Test // DATAMONGO-1444 + void insertCollectionSetsId() { + + PersonWithAList person = new PersonWithAList(); + + template.insert(Collections.singleton(person), PersonWithAList.class) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.getId()).isNotNull(); + } + + @Test // GH-4944 + void insertAllShouldConvertIdToTargetTypeBeforeSave() { + + RawStringId walter = new RawStringId(); + walter.value = "walter"; + + RawStringId returned = template.insertAll(List.of(walter)).blockLast(); + template.execute(RawStringId.class, MongoCollection::find) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> assertThat(returned.id).isEqualTo(actual.get("_id"))) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void saveSetsId() { + + PersonWithAList person = new PersonWithAList(); + assert person.getId() == null; + + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.getId()).isNotNull(); + } + + @Test // GH-4026 + void saveShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.save(source).then().as(StepVerifier::create).verifyComplete(); + + template.execute(RawStringId.class, collection -> { + return collection.find(new org.bson.Document()).first(); + }) // + .map(it -> it.get("_id")) // + .as(StepVerifier::create) // + .consumeNextWith(id -> { + assertThat(id).isInstanceOf(String.class); + }).verifyComplete(); + } + + @Test // GH-4026 + void insertShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.insert(source).then().as(StepVerifier::create).verifyComplete(); + + template.execute(RawStringId.class, collection -> { + return collection.find(new org.bson.Document()).first(); + }) // + .map(it -> it.get("_id")) // + .as(StepVerifier::create) // + .consumeNextWith(id -> { + assertThat(id).isInstanceOf(String.class); + }).verifyComplete(); + } + + @Test // GH-4184 + void insertHonorsExistingRawId() { + + MongoTemplateTests.RawStringId source = new MongoTemplateTests.RawStringId(); + source.id = "abc"; + source.value = "new value"; + + template.insert(source) + .then(template.execute(db -> Flux.from( + db.getCollection(template.getCollectionName(MongoTemplateTests.RawStringId.class)).find().limit(1).first())) + .next()) + .as(StepVerifier::create).consumeNextWith(result -> { + assertThat(result).isNotNull(); + assertThat(result.get("_id")).isEqualTo("abc"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void insertsSimpleEntityCorrectly() { + + Person person = new Person("Mark"); + person.setAge(35); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.find(new Query(where("_id").is(person.getId())), Person.class) // + .as(StepVerifier::create) // + .expectNext(person) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void simpleInsertDoesNotAllowArrays() { + + Person person = new Person("Mark"); + person.setAge(35); + + assertThatIllegalArgumentException().isThrownBy(() -> template.insert(new Person[] { person })); + } + + @Test // DATAMONGO-1444 + void simpleInsertDoesNotAllowCollections() { + + Person person = new Person("Mark"); + person.setAge(35); + + assertThatIllegalArgumentException().isThrownBy(() -> template.insert(Collections.singletonList(person))); + } + + @Test // DATAMONGO-1444 + void insertsSimpleEntityWithSuppliedCollectionNameCorrectly() { + + Person person = new Person("Homer"); + person.setAge(35); + template.insert(person, "people") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.find(new Query(where("_id").is(person.getId())), Person.class, "people") // + .as(StepVerifier::create) // + .expectNext(person) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void insertBatchCorrectly() { + + List<Person> people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + template.insertAll(people) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.find(new Query().with(Sort.by("firstname")), Person.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void insertBatchWithSuppliedCollectionNameCorrectly() { + + List<Person> people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + template.insert(people, "people") // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.find(new Query().with(Sort.by("firstname")), Person.class, "people") // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void insertBatchWithSuppliedEntityTypeCorrectly() { + + List<Person> people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + template.insert(people, Person.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.find(new Query().with(Sort.by("firstname")), Person.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void testAddingToList() { + + PersonWithAList person = createPersonWithAList("Sven", 22); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Query query = new Query(where("id").is(person.getId())); + + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getWishList()).isEmpty(); + }).verifyComplete(); + + person.addToWishList("please work"); + + template.save(person).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getWishList()).hasSize(1); + }).verifyComplete(); + + Friend friend = new Friend(); + person.setFirstName("Erik"); + person.setAge(21); + + person.addFriend(friend); + template.save(person).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getWishList()).hasSize(1); + assertThat(actual.getFriends()).hasSize(1); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void testFindOneWithSort() { + + PersonWithAList sven = createPersonWithAList("Sven", 22); + PersonWithAList erik = createPersonWithAList("Erik", 21); + PersonWithAList mark = createPersonWithAList("Mark", 40); + + template.insertAll(Arrays.asList(sven, erik, mark)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + // test query with a sort + Query query = new Query(where("age").gt(10)); + query.with(Sort.by(Direction.DESC, "age")); + + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getFirstName()).isEqualTo("Mark"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void bogusUpdateDoesNotTriggerException() { + + ReactiveMongoTemplate mongoTemplate = new ReactiveMongoTemplate(factory); + mongoTemplate.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + Person oliver = new Person("Oliver2", 25); + template.insert(oliver) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Query q = new Query(where("BOGUS").gt(22)); + Update u = new Update().set("firstName", "Sven"); + + mongoTemplate.updateFirst(q, u, Person.class) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void updateFirstByEntityTypeShouldUpdateObject() { + + Person person = new Person("Oliver2", 25); + template.insert(person) // + .then(template.updateFirst(new Query(where("age").is(25)), new Update().set("firstName", "Sven"), Person.class)) // + .flatMapMany(p -> template.find(new Query(where("age").is(25)), Person.class)) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getFirstName()).isEqualTo("Sven"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void updateFirstByCollectionNameShouldUpdateObjects() { + + Person person = new Person("Oliver2", 25); + template.insert(person, "people") // + .then(template.updateFirst(new Query(where("age").is(25)), new Update().set("firstName", "Sven"), "people")) // + .flatMapMany(p -> template.find(new Query(where("age").is(25)), Person.class, "people")) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getFirstName()).isEqualTo("Sven"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void updateMultiByEntityTypeShouldUpdateObjects() { + + Query query = new Query( + new Criteria().orOperator(where("firstName").is("Walter Jr"), where("firstName").is("Walter"))); + + template + .insertAll( + Mono.just(Arrays.asList(new Person("Walter", 50), new Person("Skyler", 43), new Person("Walter Jr", 16)))) // + .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class)) // + .thenMany(template.find(new Query(where("firstName").is("Walt")), Person.class)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void updateMultiByCollectionNameShouldUpdateObject() { + + Query query = new Query( + new Criteria().orOperator(where("firstName").is("Walter Jr"), where("firstName").is("Walter"))); + + List<Person> people = Arrays.asList(new Person("Walter", 50), // + new Person("Skyler", 43), // + new Person("Walter Jr", 16)); + + Flux<Person> personFlux = template.insertAll(Mono.just(people), "people") // + .collectList() // + .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class, "people")) // + .flatMapMany(p -> template.find(new Query(where("firstName").is("Walt")), Person.class, "people")); + + personFlux // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void throwsExceptionForDuplicateIds() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + Person person = new Person(new ObjectId(), "Amol"); + person.setAge(28); + + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.insert(person) // + .as(StepVerifier::create) // + .expectError(DataIntegrityViolationException.class) // + .verify(); + } + + @Test // DATAMONGO-1444 + void throwsExceptionForUpdateWithInvalidPushOperator() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + ObjectId id = new ObjectId(); + Person person = new Person(id, "Amol"); + person.setAge(28); + + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Query query = new Query(where("firstName").is("Amol")); + Update upd = new Update().push("age", 29); + + template.updateFirst(query, upd, Person.class) // + .as(StepVerifier::create) // + .verifyError(DataIntegrityViolationException.class); + } + + @Test // DATAMONGO-1444 + void rejectsDuplicateIdInInsertAll() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + ObjectId id = new ObjectId(); + Person person = new Person(id, "Amol"); + person.setAge(28); + + template.insertAll(Arrays.asList(person, person)) // + 
.as(StepVerifier::create) // + .verifyError(DataIntegrityViolationException.class); + } + + @Test // DATAMONGO-1444 + void testFindAndUpdate() { + + template.insertAll(Arrays.asList(new Person("Tom", 21), new Person("Dick", 22), new Person("Harry", 23))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Query query = new Query(where("firstName").is("Harry")); + Update update = new Update().inc("age", 1); + + Person p = template.findAndModify(query, update, Person.class).block(); // return old + assertThat(p.getFirstName()).isEqualTo("Harry"); + assertThat(p.getAge()).isEqualTo(23); + p = template.findOne(query, Person.class).block(); + assertThat(p.getAge()).isEqualTo(24); + + p = template.findAndModify(query, update, Person.class, "person").block(); + assertThat(p.getAge()).isEqualTo(24); + p = template.findOne(query, Person.class).block(); + assertThat(p.getAge()).isEqualTo(25); + + p = template.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class).block(); + assertThat(p.getAge()).isEqualTo(26); + + p = template.findAndModify(query, update, FindAndModifyOptions.none(), Person.class, "person").block(); + assertThat(p.getAge()).isEqualTo(26); + p = template.findOne(query, Person.class).block(); + assertThat(p.getAge()).isEqualTo(27); + + Query query2 = new Query(where("firstName").is("Mary")); + p = template.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class) + .block(); + assertThat(p.getFirstName()).isEqualTo("Mary"); + assertThat(p.getAge()).isEqualTo(1); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldReplaceDocument() { + + org.bson.Document doc = new org.bson.Document("foo", "bar"); + template.save(doc, "findandreplace").as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + org.bson.Document replacement = new org.bson.Document("foo", "baz"); + template + .findAndReplace(query(where("foo").is("bar")), replacement, FindAndReplaceOptions.options(), + org.bson.Document.class, "findandreplace") // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).containsEntry("foo", "bar"); + }).verifyComplete(); + + template.findOne(query(where("foo").is("baz")), org.bson.Document.class, "findandreplace") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldErrorOnIdPresent() { + + template.save(new MyPerson("Walter")).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + MyPerson replacement = new MyPerson("Heisenberg"); + replacement.id = "invalid-id"; + + template.findAndReplace(query(where("name").is("Walter")), replacement) // + .as(StepVerifier::create) // + .expectError(InvalidDataAccessApiUsageException.class) // + .verify(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldErrorOnSkip() { + + assertThatIllegalArgumentException().isThrownBy(() -> template + .findAndReplace(query(where("name").is("Walter")).skip(10), new MyPerson("Heisenberg")).subscribe()); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldErrorOnLimit() { + + assertThatIllegalArgumentException().isThrownBy(() -> template + .findAndReplace(query(where("name").is("Walter")).limit(10), new MyPerson("Heisenberg")).subscribe()); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldConsiderSortAndUpdateFirstIfMultipleFound() { + + MyPerson walter1 = new MyPerson("Walter 1"); + MyPerson walter2 = new MyPerson("Walter 2"); + + 
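+ // Both entities match the "Walter.*" regex; the descending sort on "name" makes walter2 the first match, so the replace below swaps walter2 while walter1 remains, as the assertions verify. + 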
template.save(walter1).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + template.save(walter2).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + MyPerson replacement = new MyPerson("Heisenberg"); + + template.findAndReplace(query(where("name").regex("Walter.*")).with(Sort.by(Direction.DESC, "name")), replacement) + .as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.findAll(MyPerson.class).buffer(10).as(StepVerifier::create) + .consumeNextWith(it -> assertThat(it).hasSize(2).contains(walter1).doesNotContain(walter2)).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldReplaceObject() { + + MyPerson person = new MyPerson("Walter"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getName()).isEqualTo("Walter"); + }).verifyComplete(); + + template.findOne(query(where("name").is("Heisenberg")), MyPerson.class) // + .as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldConsiderFields() { + + MyPerson person = new MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Query query = query(where("name").is("Walter")); + query.fields().include("address"); + + template.findAndReplace(query, new MyPerson("Heisenberg")) // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + + assertThat(it.getName()).isNull(); + assertThat(it.getAddress()).isEqualTo(person.address); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceNonExistingWithUpsertFalse() { + + template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.findAll(MyPerson.class).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceNonExistingWithUpsertTrue() { + + template + .findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().upsert()) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.findAll(MyPerson.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldProjectReturnedObjectCorrectly() { + + MyPerson person = new MyPerson("Walter"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template + .findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), FindAndReplaceOptions.empty(), + MyPerson.class, MyPersonProjection.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getName()).isEqualTo("Walter"); + }).verifyComplete(); + } + + @Test // GH-4300 + public void findAndReplaceShouldAllowNativeDomainTypesAndReturnAProjection() { + + MongoTemplateTests.MyPerson person = new MongoTemplateTests.MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template + .findAndReplace(query(where("name").is("Walter")), new org.bson.Document("name", "Heisenberg"), + FindAndReplaceOptions.options(), 
org.bson.Document.class, "myPerson", MongoTemplateTests.MyPerson.class) + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getAddress()).isEqualTo(person.address); + }).verifyComplete(); + + template.execute(MongoTemplateTests.MyPerson.class, collection -> { + return collection.find(new org.bson.Document("name", "Heisenberg")).first(); + }).as(StepVerifier::create) // + .consumeNextWith(loaded -> { + assertThat(loaded.get("_id")).isEqualTo(new ObjectId(person.id)); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldReplaceObjectReturningNew() { + + MyPerson person = new MyPerson("Walter"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template + .findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().returnNew()) + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getName()).isEqualTo("Heisenberg"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { + + Sample spring = new Sample("100", "spring"); + Sample data = new Sample("200", "data"); + Sample mongodb = new Sample("300", "mongodb"); + + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Query qry = query(where("field").in("spring", "mongodb")); + + template.findAllAndRemove(qry, Sample.class) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + template.findOne(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(data) // + .verifyComplete(); + } + + @Test // DATAMONGO-2219 + void testFindAllAndRemoveReturnsEmptyWithoutMatches() { + + Query qry = query(where("field").in("spring", "mongodb")); + template.findAllAndRemove(qry, Sample.class) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.count(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(0L).verifyComplete(); + } + + @Test // DATAMONGO-1774 + void testFindAllAndRemoveByCollectionReturnsAndRemovesDocuments() { + + Sample spring = new Sample("100", "spring"); + Sample data = new Sample("200", "data"); + Sample mongodb = new Sample("300", "mongodb"); + + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Query qry = query(where("field").in("spring", "mongodb")); + + template.findAllAndRemove(qry, "sample") // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + template.findOne(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(data) // + .verifyComplete(); + } + + @Test // DATAMONGO-1774 + void removeWithNullShouldThrowError() { + assertThatIllegalArgumentException().isThrownBy(() -> template.remove((Object) null).subscribe()); + } + + @Test // DATAMONGO-1774 + void removeWithEmptyMonoShouldDoNothing() { + + Sample spring = new Sample("100", "spring"); + Sample data = new Sample("200", "data"); + Sample mongodb = new Sample("300", "mongodb"); + + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.remove(Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.count(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(3L) // + 
.verifyComplete(); + } + + @Test // DATAMONGO-1774 + void removeWithMonoShouldDeleteElement() { + + Sample spring = new Sample("100", "spring"); + Sample data = new Sample("200", "data"); + Sample mongodb = new Sample("300", "mongodb"); + + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.remove(Mono.just(spring)).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + template.count(new Query(), Sample.class).as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // DATAMONGO-1774 + void removeWithMonoAndCollectionShouldDeleteElement() { + + Sample spring = new Sample("100", "spring"); + Sample data = new Sample("200", "data"); + Sample mongodb = new Sample("300", "mongodb"); + + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.remove(Mono.just(spring), template.getCollectionName(Sample.class)) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + template.count(new Query(), Sample.class).as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // DATAMONGO-2195 + void removeVersionedEntityConsidersVersion() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); + + template.update(PersonWithVersionPropertyOfTypeInteger.class).matching(query(where("id").is(person.id))) + .apply(new Update().set("firstName", "Walter")).first() // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.remove(person).as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.wasAcknowledged()).isTrue(); + assertThat(actual.getDeletedCount()).isZero(); + }).verifyComplete(); + template.count(new Query(), PersonWithVersionPropertyOfTypeInteger.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void optimisticLockingHandling() { + + // Init version + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.age = 29; + person.firstName = "Patryk"; + + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findAll(PersonWithVersionPropertyOfTypeInteger.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.version).isZero(); + }).verifyComplete(); + + template.findAll(PersonWithVersionPropertyOfTypeInteger.class).flatMap(p -> { + + // Version change + person.firstName = "Patryk2"; + return template.save(person); + }) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + + assertThat(person.version).isOne(); + + template.findAll(PersonWithVersionPropertyOfTypeInteger.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.version).isOne(); + }).verifyComplete(); + + // Optimistic lock exception + person.version = 0; + person.firstName = "Patryk3"; + + template.save(person).as(StepVerifier::create).expectError(OptimisticLockingFailureException.class).verify(); + } + + @Test // DATAMONGO-1444 + void doesNotFailOnVersionInitForUnversionedEntity() { + + Document dbObject = new Document(); + 
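+ // The raw Document below bypasses entity mapping, so inserting it into the versioned entity's collection must not attempt @Version initialization. + 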
dbObject.put("firstName", "Oliver"); + + template.insert(dbObject, // + template.getCollectionName(PersonWithVersionPropertyOfTypeInteger.class)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void removesObjectFromExplicitCollection() { + + String collectionName = "explicit"; + template.remove(new Query(), collectionName).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + PersonWithConvertedId person = new PersonWithConvertedId(); + person.name = "Dave"; + + template.save(person, collectionName) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findAll(PersonWithConvertedId.class, collectionName) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + + template.remove(person, collectionName).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.findAll(PersonWithConvertedId.class, collectionName).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void savesMapCorrectly() { + + Map<String, String> map = new HashMap<>(); + map.put("key", "value"); + + template.save(map, "maps") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test + // DATAMONGO-1444, DATAMONGO-1730, DATAMONGO-2150 + void savesMongoPrimitiveObjectCorrectly() { + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save(new Object(), "collection")); + } + + @Test // DATAMONGO-1444 + void savesPlainDbObjectCorrectly() { + + Document dbObject = new Document("foo", "bar"); + + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(dbObject.containsKey("_id")).isTrue(); + } + + @Test // DATAMONGO-1444, DATAMONGO-1730 + void rejectsPlainObjectWithOutExplicitCollection() { + + Document dbObject = new Document("foo", "bar"); + + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> template.findById(dbObject.get("_id"), Document.class)); + } + + @Test // DATAMONGO-1444 + void readsPlainDbObjectById() { + + Document dbObject = new Document("foo", "bar"); + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findById(dbObject.get("_id"), Document.class, "collection") // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.get("foo")).isEqualTo(dbObject.get("foo")); + assertThat(actual.get("_id")).isEqualTo(dbObject.get("_id")); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void geoNear() { + + List<Venue> venues = Arrays.asList(TestEntities.geolocation().pennStation(), // + TestEntities.geolocation().tenGenOffice(), // + TestEntities.geolocation().flatironBuilding(), // + TestEntities.geolocation().maplewoodNJ()); + + template.insertAll(venues) // + .as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); + + IndexOperationsAdapter.blocking(template.indexOps(Venue.class)) + .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D)); + + NearQuery geoFar = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).maxDistance(150, Metrics.KILOMETERS); + + template.geoNear(geoFar, Venue.class) // + .as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); + + NearQuery geoNear = NearQuery.near(-73, 40, 
Metrics.KILOMETERS).limit(10).maxDistance(120, Metrics.KILOMETERS); + + template.geoNear(geoNear, Venue.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void writesPlainString() { + + template.save("{ 'foo' : 'bar' }", "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444, DATAMONGO-2150 + void rejectsNonJsonStringForSave() { + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save("Foobar", "collection")); + } + + @Test // DATAMONGO-1444 + void initializesVersionOnInsert() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.version).isZero(); + } + + @Test // DATAMONGO-1444 + void initializesVersionOnBatchInsert() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insertAll(Collections.singleton(person)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.version).isZero(); + } + + @Test // DATAMONGO-1992 + void initializesIdAndVersionAndOfImmutableObject() { + + ImmutableVersioned versioned = new ImmutableVersioned(); + + template.insert(versioned) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual).isNotSameAs(versioned); + assertThat(versioned.id).isNull(); + assertThat(versioned.version).isNull(); + + assertThat(actual.id).isNotNull(); + assertThat(actual.version).isEqualTo(0); + + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void queryCanBeNull() { + + template.findAll(PersonWithIdPropertyOfTypeObjectId.class) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.find(null, PersonWithIdPropertyOfTypeObjectId.class) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void versionsObjectIntoDedicatedCollection() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.save(person, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); + + template.save(person, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isOne(); + } + + @Test // DATAMONGO-1444 + void correctlySetsLongVersionProperty() { + + PersonWithVersionPropertyOfTypeLong person = new PersonWithVersionPropertyOfTypeLong(); + person.firstName = "Dave"; + + template.save(person, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); + } + + @Test // DATAMONGO-1444 + void throwsExceptionForIndexViolationIfConfigured() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + template.indexOps("unique_person") // + .ensureIndex(new Index().on("firstName", Direction.DESC).unique()) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Person person = new Person(new ObjectId(), "Amol"); + person.setAge(28); + + template.save(person, "unique_person") // + .as(StepVerifier::create) // + .expectNextCount(1) // + 
.verifyComplete(); + + person = new Person(new ObjectId(), "Amol"); + person.setAge(28); + + template.save(person, "unique_person") // + .as(StepVerifier::create) // + .verifyError(DataIntegrityViolationException.class); + + // safeguard to clean up previous state + template.dropCollection(Person.class).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void preventsDuplicateInsert() { + + template.setWriteConcern(WriteConcern.MAJORITY); + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); + + person.version = null; + template.save(person) // + .as(StepVerifier::create) // + .verifyError(DuplicateKeyException.class); + } + + @Test // DATAMONGO-1444 + void countAndFindWithoutTypeInformation() { + + Person person = new Person(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Query query = query(where("_id").is(person.getId())); + String collectionName = template.getCollectionName(Person.class); + + template.find(query, HashMap.class, collectionName) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.count(query, collectionName) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void nullsPropertiesForVersionObjectUpdates() { + + VersionedPerson person = new VersionedPerson(); + person.firstname = "Dave"; + person.lastname = "Matthews"; + + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.id).isNotNull(); + + person.lastname = null; + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findOne(query(where("id").is(person.id)), VersionedPerson.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.lastname).isNull(); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void nullsValuesForUpdatesOfUnversionedEntity() { + + Person person = new Person("Dave"); + template.save(person). 
// + as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + person.setFirstName(null); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findOne(query(where("id").is(person.getId())), Person.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getFirstName()).isNull(); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void savesJsonStringCorrectly() { + + Document dbObject = new Document().append("first", "first").append("second", "second"); + + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findAll(Document.class, "collection") // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.containsKey("first")).isTrue(); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void executesExistsCorrectly() { + + Sample sample = new Sample(); + template.save(sample).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + Query query = query(where("id").is(sample.id)); + + template.exists(query, Sample.class) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class)) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + template.exists(query, Sample.class, template.getCollectionName(Sample.class)) // + .as(StepVerifier::create).expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void tailStreamsData() throws InterruptedException { + + template.dropCollection("capped").then(template.createCollection("capped", // + CollectionOptions.empty().size(1000).maxDocuments(10).capped())) + .then(template.insert(new Document("random", Math.random()).append("key", "value"), // + "capped")) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + + BlockingQueue<Document> documents = new LinkedBlockingQueue<>(1000); + + Flux<Document> capped = template.tail(null, Document.class, "capped"); + + Disposable disposable = capped.doOnNext(documents::add).subscribe(); + + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + assertThat(documents).isEmpty(); + + disposable.dispose(); + } + + @Test // DATAMONGO-1444 + void tailStreamsDataUntilCancellation() throws InterruptedException { + + template.dropCollection("capped").then(template.createCollection("capped", // + CollectionOptions.empty().size(1000).maxDocuments(10).capped())) + .then(template.insert(new Document("random", Math.random()).append("key", "value"), // + "capped")) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + + BlockingQueue<Document> documents = new LinkedBlockingQueue<>(1000); + + Flux<Document> capped = template.tail(null, Document.class, "capped"); + + Disposable disposable = capped.doOnNext(documents::add).subscribe(); + + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + assertThat(documents).isEmpty(); + + template.insert(new Document("random", Math.random()).append("key", "value"), "capped") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + + disposable.dispose(); + + template.insert(new Document("random", Math.random()).append("key", "value"), "capped") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(documents.poll(1,
TimeUnit.SECONDS)).isNull(); + } + + @Test // DATAMONGO-1761 + void testDistinct() { + + Person person1 = new Person("Christoph", 38); + Person person2 = new Person("Christine", 39); + Person person3 = new Person("Christoph", 37); + + template.insertAll(Arrays.asList(person1, person2, person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.findDistinct("firstName", Person.class, String.class) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // DATAMONGO-1803 + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedException { + + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Document>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Document.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 39); + Person person3 = new Person("MongoDB", 37); + + Flux.merge(template.insert(person1), template.insert(person2), template.insert(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).hasSize(3) + .allMatch(val -> val instanceof Document); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedException { + + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection.
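+ // The inserts below should come back as change stream events whose bodies are converted to Person by the template's configured converter.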
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 39); + Person person3 = new Person("MongoDB", 37); + + Flux.merge(template.insert(person1), template.insert(person2), template.insert(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person1, person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedException { + + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", + ChangeStreamOptions.builder().filter(newAggregation(Person.class, match(where("age").gte(38)))).build(), + Person.class).doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + Flux.merge(template.save(person1), template.save(person2), template.save(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person1, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + @EnableIfReplicaSetAvailable + void mapsReservedWordsCorrectly() throws InterruptedException { + + template.dropCollection(Person.class).onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template + .changeStream("person", + ChangeStreamOptions.builder() + .filter(newAggregation(Person.class, match(where("operationType").is("replace")))).build(), + Person.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection.
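+ // "operationType" is a reserved change stream field, so the filter above matches the event's operation type ("replace") rather than a document property.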
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + + Flux.merge(template.insert(person1), template.insert(person2)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + Person replacement = new Person(person2.getId(), "BDognoM"); + replacement.setAge(person2.getAge()); + + template.save(replacement) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(replacement); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedException { + + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + Flux.merge(template.insert(person1), template.insert(person2), template.insert(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + disposable.dispose(); + + BsonDocument resumeToken = documents.take().getRaw().getResumeToken(); + + BlockingQueue<ChangeStreamEvent<Person>> resumeDocuments = new LinkedBlockingQueue<>(100); + template.changeStream("person", ChangeStreamOptions.builder().resumeToken(resumeToken).build(), Person.class) + .doOnNext(resumeDocuments::add).subscribe(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person2, person3); + } finally { + disposable.dispose(); + } + + } + + @Test // DATAMONGO-1870 + void removeShouldConsiderLimit() { + + List<Sample> samples = IntStream.range(0, 100) // + .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? "stark" : "lannister")) // + .collect(Collectors.toList()); + + template.insertAll(samples) // + .as(StepVerifier::create) // + .expectNextCount(100) // + .verifyComplete(); + + template.remove(query(where("field").is("lannister")).limit(25), Sample.class) // + .as(StepVerifier::create) // + .assertNext(wr -> assertThat(wr.getDeletedCount()).isEqualTo(25L)).verifyComplete(); + } + + @Test // DATAMONGO-1870 + void removeShouldConsiderSkipAndSort() { + + List<Sample> samples = IntStream.range(0, 100) // + .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ?
"stark" : "lannister")) // + .collect(Collectors.toList()); + + template.insertAll(samples).as(StepVerifier::create).expectNextCount(100).verifyComplete(); + + template.remove(new Query().skip(25).with(Sort.by("field")), Sample.class) // + .as(StepVerifier::create) // + .assertNext(wr -> assertThat(wr.getDeletedCount()).isEqualTo(75L)).verifyComplete(); + + template.count(query(where("field").is("lannister")), Sample.class).as(StepVerifier::create).expectNext(25L) + .verifyComplete(); + template.count(query(where("field").is("stark")), Sample.class).as(StepVerifier::create).expectNext(0L) + .verifyComplete(); + } + + @Test // DATAMONGO-2189 + void afterSaveEventContainsSavedObjectUsingInsert() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insert(source) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(saved.get()).isNotNull().isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + } + + @Test // DATAMONGO-2189 + void afterSaveEventContainsSavedObjectUsingInsertAll() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insertAll(Collections.singleton(new ImmutableVersioned())) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(saved.get()).isNotNull().isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + } + + @Test // GH-4107 + void afterSaveEventCanBeDisabled() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.setEntityLifecycleEventsEnabled(false); + template.insertAll(Collections.singleton(new ImmutableVersioned())) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(saved).hasValue(null); + } + + @Test // DATAMONGO-2012 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @EnableIfReplicaSetAvailable + void watchesDatabaseCorrectly() throws InterruptedException { + + template.dropCollection(Person.class).onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.dropCollection("personX").onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + template.createCollection("personX").as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream(ChangeStreamOptions.empty(), Person.class).doOnNext(documents::add) + .subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + template.save(person1) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + template.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + template.save(person3, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person1, person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-2012, DATAMONGO-2113 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @EnableIfReplicaSetAvailable + void resumesAtTimestampCorrectly() throws InterruptedException { + + template.dropCollection(Person.class).onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + template.save(person1).delayElement(Duration.ofSeconds(1)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + template.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to receive all events + + disposable.dispose(); + + documents.take(); // skip first + Instant resumeAt = documents.take().getTimestamp(); // take 2nd + + template.save(person3).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> resumeDocuments = new LinkedBlockingQueue<>(100); + template.changeStream("person", ChangeStreamOptions.builder().resumeAt(resumeAt).build(), Person.class) + .doOnNext(resumeDocuments::add).subscribe(); + + Thread.sleep(500); // just give it some time to receive all events + + try { + assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-2115 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @EnableIfReplicaSetAvailable + void resumesAtBsonTimestampCorrectly() throws InterruptedException { + + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue<ChangeStreamEvent<Person>> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection.
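+ // Resuming at the BsonTimestamp taken from the second event below should replay that event (person2) and all subsequent ones, per the assertion at the end of this test.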
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + template.save(person1).delayElement(Duration.ofSeconds(1)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + template.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + documents.take(); // skip first + BsonTimestamp resumeAt = documents.take().getBsonTimestamp(); // take 2nd + + disposable.dispose(); + + template.save(person3).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.changeStream("person", ChangeStreamOptions.builder().resumeAt(resumeAt).build(), Person.class) + .map(ChangeStreamEvent::getBody) // + .buffer(2) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).containsExactly(person2, person3); + }).thenCancel() // + .verify(); + } + + private PersonWithAList createPersonWithAList(String firstname, int age) { + + PersonWithAList p = new PersonWithAList(); + p.setFirstName(firstname); + p.setAge(age); + + return p; + } + + private AtomicReference<ImmutableVersioned> createAfterSaveReference() { + + AtomicReference<ImmutableVersioned> saved = new AtomicReference<>(); + context.addApplicationListener(new AbstractMongoEventListener<ImmutableVersioned>() { + + @Override + public void onAfterSave(AfterSaveEvent<ImmutableVersioned> event) { + saved.set(event.getSource()); + } + }); + + return saved; + } + + static class ImmutableVersioned { + + final @Id String id; + final @Version Long version; + + ImmutableVersioned() { + id = null; + version = null; + } + + public ImmutableVersioned(String id, Long version) { + this.id = id; + this.version = version; + } + + public ImmutableVersioned withId(String id) { + return this.id == id ? this : new ImmutableVersioned(id, this.version); + } + + public ImmutableVersioned withVersion(Long version) { + return this.version == version ?
this : new ImmutableVersioned(this.id, version); + } + } + + static class Sample { + + @Id String id; + String field; + + Sample() {} + + Sample(String id, String field) { + this.id = id; + this.field = field; + } + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sample sample = (Sample) o; + return Objects.equals(id, sample.id) && Objects.equals(field, sample.field); + } + + @Override + public int hashCode() { + return Objects.hash(id, field); + } + + public String toString() { + return "ReactiveMongoTemplateTests.Sample(id=" + this.getId() + ", field=" + this.getField() + ")"; + } + } + + public static class MyPerson { + + String id; + String name; + Address address; + + public MyPerson() {} + + MyPerson(String name) { + this.name = name; + } + + public MyPerson(String id, String name, Address address) { + this.id = id; + this.name = name; + this.address = address; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Address getAddress() { + return this.address; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setAddress(Address address) { + this.address = address; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MyPerson myPerson = (MyPerson) o; + return Objects.equals(id, myPerson.id) && Objects.equals(name, myPerson.name) + && Objects.equals(address, myPerson.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, address); + } + + public String toString() { + return "ReactiveMongoTemplateTests.MyPerson(id=" + this.getId() + ", name=" + this.getName() + ", address=" + + this.getAddress() + ")"; + } + } + + interface MyPersonProjection { + String getName(); + } + + static class RawStringId { + + @MongoId String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RawStringId that = (RawStringId) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "ReactiveMongoTemplateTests.RawStringId(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTransactionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTransactionTests.java new file mode 100644 index 0000000000..5a7271e2b4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTransactionTests.java @@ -0,0 +1,317 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.ReactiveMongoTransactionManager; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.transaction.ReactiveTransaction; +import org.springframework.transaction.reactive.TransactionCallback; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for Mongo Transactions using {@link ReactiveMongoTemplate}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Mathieu Ouellet + * @currentRead The Core - Peter V. 
Brett + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +public class ReactiveMongoTemplateTransactionTests { + + static final String DATABASE_NAME = "reactive-template-tx-tests"; + static final String COLLECTION_NAME = "test"; + static final Document DOCUMENT = new Document("_id", "id-1").append("value", "spring"); + static final Query ID_QUERY = query(where("_id").is("id-1")); + + static final Person AHMANN = new Person("ahmann", 32); + static final Person ARLEN = new Person("arlen", 24); + static final Person LEESHA = new Person("leesha", 22); + static final Person RENNA = new Person("renna", 22); + + static @Client MongoClient client; + ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + + template = new ReactiveMongoTemplate(client, DATABASE_NAME); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, COLLECTION_NAME, client).as(StepVerifier::create) // + .verifyComplete(); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, "person", client).as(StepVerifier::create).verifyComplete(); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, "personWithVersionPropertyOfTypeInteger", client) + .as(StepVerifier::create) // + .verifyComplete(); + + template.insert(DOCUMENT, COLLECTION_NAME).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.insertAll(Arrays.asList(AHMANN, ARLEN, LEESHA, RENNA)) // + .as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void reactiveTransactionWithExplicitTransactionStart() { + + Publisher sessionPublisher = client + .startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + ClientSession clientSession = Mono.from(sessionPublisher).block(); + + template.withSession(Mono.just(clientSession)) + .execute(action -> ReactiveMongoContext.getSession().flatMap(session -> { + + session.startTransaction(); + return action.remove(ID_QUERY, Document.class, COLLECTION_NAME); + + })).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + assertThat(clientSession.hasActiveTransaction()).isTrue(); + StepVerifier.create(clientSession.commitTransaction()).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void reactiveTransactionsCommitOnComplete() { + + initTx().transactional(template.remove(ID_QUERY, Document.class, COLLECTION_NAME)).as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void reactiveTransactionsAbortOnError() { + + initTx().transactional( + template.remove(ID_QUERY, Document.class, COLLECTION_NAME).flatMap(result -> Mono.fromSupplier(() -> { + throw new RuntimeException("¯\\_(ツ)_/¯"); + }))).as(StepVerifier::create) // + .expectError() // + .verify(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void withSessionDoesNotManageTransactions() { + + Mono.from(client.startSession()).flatMap(session -> { + + session.startTransaction(); + return 
template.withSession(session).remove(ID_QUERY, Document.class, COLLECTION_NAME); + }).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void changesNotVisibleOutsideTransaction() { + + initTx().execute(new TransactionCallback<>() { + @Override + public Publisher doInTransaction(ReactiveTransaction status) { + return template.remove(ID_QUERY, Document.class, COLLECTION_NAME).flatMapMany(val -> { + + // once we use the collection directly we're no longer participating in the tx + return client.getDatabase(DATABASE_NAME).getCollection(COLLECTION_NAME).find(ID_QUERY.getQueryObject()) + .first(); + }); + } + }).as(StepVerifier::create).expectNext(DOCUMENT).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void executeCreatesNewTransaction() { + + ReactiveSessionScoped sessionScoped = template.withSession(client.startSession()); + + sessionScoped.execute(action -> { + return action.remove(ID_QUERY, Document.class, COLLECTION_NAME); + }) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + + sessionScoped.execute(action -> { + return action.insert(DOCUMENT, COLLECTION_NAME); + }) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void takeDoesNotAbortTransaction() { + + initTx() + .transactional(template.find(query(where("age").exists(true)).with(Sort.by("age")), Person.class).take(3) + .flatMap(template::remove)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.count(query(where("age").exists(true)), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void errorInFlowOutsideTransactionDoesNotAbortIt() { + + initTx().execute(new TransactionCallback<>() { + @Override + public Publisher doInTransaction(ReactiveTransaction status) { + return template.find(query(where("age").is(22)).with(Sort.by("age")), Person.class).buffer(2) + .flatMap(values -> { + + return template + .remove(query(where("id").in(values.stream().map(Person::getId).collect(Collectors.toList()))), + Person.class) + .then(Mono.just(values)); + }); + } + }).collectList() // completes the above computation + .flatMap(deleted -> { + throw new RuntimeException("error outside the transaction does not influence it."); + }).as(StepVerifier::create) // + .verifyError(); + + template.count(query(where("age").exists(true)), Person.class) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteWithMatchingVersion() { + + PersonWithVersionPropertyOfTypeInteger rojer = new PersonWithVersionPropertyOfTypeInteger(); + rojer.firstName = "rojer"; + + PersonWithVersionPropertyOfTypeInteger saved = template.insert(rojer).block(); + + initTx().transactional(template.remove(saved)) // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getDeletedCount()).isOne()) // + .verifyComplete(); 
+ } + + @Test // DATAMONGO-2195 + public void deleteWithVersionMismatch() { + + PersonWithVersionPropertyOfTypeInteger rojer = new PersonWithVersionPropertyOfTypeInteger(); + rojer.firstName = "rojer"; + + PersonWithVersionPropertyOfTypeInteger saved = template.insert(rojer).block(); + saved.version = 5; + + initTx().transactional(template.remove(saved)) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.wasAcknowledged()).isTrue(); + assertThat(actual.getDeletedCount()).isZero(); + }).verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteNonExistingWithVersion() { + + PersonWithVersionPropertyOfTypeInteger rojer = new PersonWithVersionPropertyOfTypeInteger(); + rojer.id = "deceased"; + rojer.firstName = "rojer"; + rojer.version = 5; + + initTx().transactional(template.remove(rojer)) // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getDeletedCount()).isZero()) // + .verifyComplete(); + } + + TransactionalOperator initTx() { + + ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory()); + return TransactionalOperator.create(txmgr, new DefaultTransactionDefinition()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java new file mode 100644 index 0000000000..f89b2fa8c1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -0,0 +1,2055 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; + +import org.assertj.core.api.Assertions; +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationListener; +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.MongoTemplateUnitTests.AutogenerateableId; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators.Gte; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; +import org.springframework.data.mongodb.core.aggregation.Fields; +import org.springframework.data.mongodb.core.aggregation.SetOperation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import 
org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.lang.Nullable; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.CollectionUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.InsertManyResult; +import com.mongodb.client.result.InsertOneResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ChangeStreamPublisher; +import com.mongodb.reactivestreams.client.DistinctPublisher; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Unit tests for {@link ReactiveMongoTemplate}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Roman Puchkovskiy + * @author Mathieu Ouellet + * @author Yadhukrishna S Pai + * @author Ben Foster + */ +@SuppressWarnings({ "unchecked", "rawtypes" }) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class ReactiveMongoTemplateUnitTests { + + private ReactiveMongoTemplate template; + + @Mock SimpleReactiveMongoDatabaseFactory factory; + @Mock MongoClient mongoClient; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + @Mock FindPublisher findPublisher; + @Mock AggregatePublisher aggregatePublisher; + @Mock Publisher runCommandPublisher; + @Mock Publisher updateResultPublisher; + @Mock Publisher findAndUpdatePublisher; + @Mock Publisher successPublisher; + @Mock DistinctPublisher distinctPublisher; + @Mock Publisher deletePublisher; + @Mock MapReducePublisher mapReducePublisher; + @Mock ChangeStreamPublisher changeStreamPublisher; + + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + + @BeforeEach + void beforeEach() { + + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); + when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + when(factory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(db.getCollection(any())).thenReturn(collection); + when(db.getCollection(any(), any())).thenReturn(collection); + when(db.runCommand(any(), any(Class.class))).thenReturn(runCommandPublisher); + when(db.createCollection(any(), any(CreateCollectionOptions.class))).thenReturn(runCommandPublisher); + when(collection.withReadPreference(any())).thenReturn(collection); + when(collection.withReadConcern(any())).thenReturn(collection); + when(collection.find(any(Class.class))).thenReturn(findPublisher); + when(collection.find(any(Document.class), any(Class.class))).thenReturn(findPublisher); + when(collection.aggregate(anyList())).thenReturn(aggregatePublisher); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(aggregatePublisher); + when(collection.countDocuments(any(), any(CountOptions.class))).thenReturn(Mono.just(0L)); + when(collection.estimatedDocumentCount(any())).thenReturn(Mono.just(0L)); + when(collection.updateOne(any(), any(Bson.class), any(UpdateOptions.class))).thenReturn(updateResultPublisher); + when(collection.updateMany(any(Bson.class), any(Bson.class), any())).thenReturn(updateResultPublisher); + when(collection.updateOne(any(), anyList(), any())).thenReturn(updateResultPublisher); + when(collection.updateMany(any(), anyList(), any())).thenReturn(updateResultPublisher); + when(collection.findOneAndUpdate(any(), any(Bson.class), any(FindOneAndUpdateOptions.class))) + .thenReturn(findAndUpdatePublisher); + when(collection.findOneAndReplace(any(Bson.class), any(), any())).thenReturn(findPublisher); + when(collection.findOneAndDelete(any(), any(FindOneAndDeleteOptions.class))).thenReturn(findPublisher); + when(collection.distinct(anyString(), any(Document.class), any())).thenReturn(distinctPublisher); + when(collection.deleteMany(any(Bson.class), any())).thenReturn(deletePublisher); + when(collection.findOneAndUpdate(any(), any(Bson.class), any(FindOneAndUpdateOptions.class))) + .thenReturn(findAndUpdatePublisher); + when(collection.mapReduce(anyString(), anyString(), any())).thenReturn(mapReducePublisher); + when(collection.replaceOne(any(Bson.class), any(), 
any(ReplaceOptions.class))).thenReturn(updateResultPublisher); + when(collection.insertOne(any(Bson.class))).thenReturn(successPublisher); + when(collection.insertMany(anyList())).thenReturn(successPublisher); + when(findPublisher.projection(any())).thenReturn(findPublisher); + when(findPublisher.limit(anyInt())).thenReturn(findPublisher); + when(findPublisher.collation(any())).thenReturn(findPublisher); + when(findPublisher.first()).thenReturn(findPublisher); + when(findPublisher.allowDiskUse(anyBoolean())).thenReturn(findPublisher); + when(aggregatePublisher.allowDiskUse(anyBoolean())).thenReturn(aggregatePublisher); + when(aggregatePublisher.collation(any())).thenReturn(aggregatePublisher); + when(aggregatePublisher.maxTime(anyLong(), any())).thenReturn(aggregatePublisher); + when(aggregatePublisher.first()).thenReturn(findPublisher); + + this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + this.template = new ReactiveMongoTemplate(factory, converter); + } + + @Test // DATAMONGO-1444 + void rejectsNullDatabaseName() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReactiveMongoTemplate(mongoClient, null)); + } + + @Test // DATAMONGO-1444 + void rejectsNullMongo() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReactiveMongoTemplate(null, "database")); + } + + @Test // DATAMONGO-1444 + void defaultsConverterToMappingMongoConverter() throws Exception { + ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "database"); + assertThat(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter).isTrue(); + } + + @Test // DATAMONGO-1912 + void autogeneratesIdForMap() { + + ReactiveMongoTemplate template = spy(this.template); + doReturn(Mono.just(new ObjectId())).when(template).saveDocument(any(String.class), any(Document.class), + any(Class.class)); + + Map entity = new LinkedHashMap<>(); + template.save(entity, "foo").as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(entity).containsKey("_id"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1311 + void executeQueryShouldUseBatchSizeWhenPresent() { + + when(findPublisher.batchSize(anyInt())).thenReturn(findPublisher); + + Query query = new Query().cursorBatchSize(1234); + template.find(query, Person.class).subscribe(); + + verify(findPublisher).batchSize(1234); + } + + @Test // DATAMONGO-2659 + void executeQueryShouldUseAllowDiskSizeWhenPresent() { + + when(findPublisher.batchSize(anyInt())).thenReturn(findPublisher); + + Query query = new Query().allowDiskUse(true); + template.find(query, Person.class).subscribe(); + + verify(findPublisher).allowDiskUse(true); + } + + @Test // DATAMONGO-1518 + void findShouldUseCollationWhenPresent() { + + template.find(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + // + @Test // DATAMONGO-1518 + void findOneShouldUseCollationWhenPresent() { + + template.findOne(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518 + void existsShouldUseCollationWhenPresent() { + + 
template.exists(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518 + void findAndModifyShouldUseCollationWhenPresent() { + + when(collection.findOneAndUpdate(any(Bson.class), any(Bson.class), any())).thenReturn(Mono.empty()); + + template.findAndModify(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class) + .subscribe(); + + ArgumentCaptor<FindOneAndUpdateOptions> options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1518 + void findAndRemoveShouldUseCollationWhenPresent() { + + when(collection.findOneAndDelete(any(Bson.class), any())).thenReturn(Mono.empty()); + + template.findAndRemove(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); + + ArgumentCaptor<FindOneAndDeleteOptions> options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); + verify(collection).findOneAndDelete(any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1518 + void findAndRemoveManyShouldUseCollationWhenPresent() { + + when(collection.deleteMany(any(Bson.class), any())).thenReturn(Mono.empty()); + + template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class) + .subscribe(); + + ArgumentCaptor<DeleteOptions> options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1518 + void updateOneShouldUseCollationWhenPresent() { + + when(collection.updateOne(any(Bson.class), any(Bson.class), any())).thenReturn(Mono.empty()); + + template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"), + AutogenerateableId.class).subscribe(); + + ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1518 + void updateManyShouldUseCollationWhenPresent() { + + when(collection.updateMany(any(Bson.class), any(Bson.class), any())).thenReturn(Mono.empty()); + + template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"), + AutogenerateableId.class).subscribe(); + + ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // GH-3218 + void updateUsesHintStringFromQuery() { + + template.updateFirst(new Query().withHint("index-1"), new Update().set("spring", "data"), Person.class).subscribe(); + + ArgumentCaptor<UpdateOptions> options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("index-1"); + } + + @Test // GH-3218 + void updateUsesHintDocumentFromQuery() { + + template.updateFirst(new Query().withHint("{ firstname : 1 }"), new Update().set("spring", "data"), Person.class) +
.subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture()); + + assertThat(options.getValue().getHint()).isEqualTo(new Document("firstname", 1)); + } + + @Test // DATAMONGO-1518 + void replaceOneShouldUseCollationWhenPresent() { + + when(collection.replaceOne(any(Bson.class), any(), any(ReplaceOptions.class))).thenReturn(Mono.empty()); + + template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1518, DATAMONGO-2257 + void mapReduceShouldUseCollationWhenPresent() { + + template.mapReduce(new BasicQuery("{}"), AutogenerateableId.class, AutogenerateableId.class, "", "", + MapReduceOptions.options().collation(Collation.of("fr"))).subscribe(); + + verify(mapReducePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1518, DATAMONGO-2264 + void geoNearShouldUseCollationWhenPresent() { + + NearQuery query = NearQuery.near(0D, 0D).query(new BasicQuery("{}").collation(Collation.of("fr"))); + template.geoNear(query, AutogenerateableId.class).subscribe(); + + verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // GH-4277 + void geoNearShouldHonorReadPreferenceFromQuery() { + + NearQuery query = NearQuery.near(new Point(1, 1)); + query.withReadPreference(ReadPreference.secondary()); + + template.geoNear(query, Wrapper.class).subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.secondary())); + } + + @Test // GH-4277 + void geoNearShouldHonorReadConcernFromQuery() { + + NearQuery query = NearQuery.near(new Point(1, 1)); + query.withReadConcern(ReadConcern.SNAPSHOT); + + template.geoNear(query, Wrapper.class).subscribe(); + + verify(collection).withReadConcern(eq(ReadConcern.SNAPSHOT)); + } + + @Test // DATAMONGO-1719 + void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + verify(findPublisher).projection(eq(new Document("firstname", 1))); + } + + @Test // DATAMONGO-1719 + void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class, + PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + verify(findPublisher).projection(eq(new Document("bar", 1))); + } + + @Test // DATAMONGO-1719 + void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + PersonSpELProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + verify(findPublisher, never()).projection(any()); + } + + @Test // DATAMONGO-1719, DATAMONGO-2041 + void appliesFieldsToDtoProjection() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + 
verify(findPublisher).projection(eq(new Document("firstname", 1))); + } + + @Test // DATAMONGO-1719 + void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class, + Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + verify(findPublisher).projection(eq(new Document("bar", 1))); + } + + @Test // DATAMONGO-1719 + void doesNotApplyFieldsWhenTargetIsNotAProjection() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + Person.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + verify(findPublisher, never()).projection(any()); + } + + @Test // DATAMONGO-1719 + void doesNotApplyFieldsWhenTargetExtendsDomainType() { + + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + PersonExtended.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + + verify(findPublisher, never()).projection(any()); + } + + @Test // DATAMONGO-1783 + void countShouldUseSkipFromQuery() { + + template.count(new Query().skip(10), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getSkip()).isEqualTo(10); + } + + @Test // DATAMONGO-1783 + void countShouldUseLimitFromQuery() { + + template.count(new Query().limit(100), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getLimit()).isEqualTo(100); + } + + @Test // DATAMONGO-2360 + void countShouldApplyQueryHintIfPresent() { + + Document queryHint = new Document("age", 1); + template.count(new Query().withHint(queryHint), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getHint()).isEqualTo(queryHint); + } + + @Test // DATAMONGO-2365 + void countShouldApplyQueryHintAsIndexNameIfPresent() { + + template.count(new Query().withHint("idx-1"), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("idx-1"); + } + + @Test // DATAMONGO-2215 + void updateShouldApplyArrayFilters() { + + template.updateFirst(new BasicQuery("{}"), + new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)), + EntityWithListOfSimple.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + Assertions.assertThat((List) options.getValue().getArrayFilters()) + .contains(new org.bson.Document("element", new Document("$gte", 100))); + } + + @Test // DATAMONGO-2215 + void findAndModifyShouldApplyArrayFilters() { + + template.findAndModify(new BasicQuery("{}"), + new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)), + EntityWithListOfSimple.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), 
options.capture()); + + Assertions.assertThat((List) options.getValue().getArrayFilters()) + .contains(new org.bson.Document("element", new Document("$gte", 100))); + } + + @Test // DATAMONGO-1854 + void findShouldNotUseCollationWhenNoDefaultPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}"), Jedi.class).subscribe(); + + verify(findPublisher, never()).collation(any()); + } + + @Test // DATAMONGO-1854 + void findShouldUseDefaultCollationWhenPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}"), Sith.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findOneShouldUseDefaultCollationWhenPresent() { + + template.findOne(new BasicQuery("{'foo' : 'bar'}"), Sith.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void existsShouldUseDefaultCollationWhenPresent() { + + template.exists(new BasicQuery("{}"), Sith.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findAndModifyShouldUseDefaultCollationWhenPresent() { + + template.findAndModify(new BasicQuery("{}"), new Update(), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void findAndRemoveShouldUseDefaultCollationWhenPresent() { + + template.findAndRemove(new BasicQuery("{}"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); + verify(collection).findOneAndDelete(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldNotUseCollationIfNotPresent() { + + template.createCollection(AutogenerateableId.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + Assertions.assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldApplyDefaultCollation() { + + template.createCollection(Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldFavorExplicitOptionsOverDefaultCollation() { + + template.createCollection(Sith.class, CollectionOptions.just(Collation.of("en_US"))).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldUseDefaultCollationIfCollectionOptionsAreNull() { + +
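// passing null options should still fall back to the default collation declared on the Sith mapping +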
template.createCollection(Sith.class, null).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void aggregateShouldUseDefaultCollationIfPresent() { + + template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class).subscribe(); + + verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // GH-4277 + void aggregateShouldUseReadConcern() { + + AggregationOptions options = AggregationOptions.builder().readConcern(ReadConcern.SNAPSHOT).build(); + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(collection).withReadConcern(ReadConcern.SNAPSHOT); + } + + @Test // GH-4286 + void aggregateShouldUseReadPreference() { + + AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.primaryPreferred()).build(); + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(collection).withReadPreference(ReadPreference.primaryPreferred()); + } + + @Test // GH-4543 + void aggregateDoesNotLimitBackpressure() { + + reset(collection); + + AtomicLong request = new AtomicLong(); + Publisher realPublisher = Flux.just(new Document()).doOnRequest(request::addAndGet); + + doAnswer(invocation -> { + Subscriber subscriber = invocation.getArgument(0); + realPublisher.subscribe(subscriber); + return null; + }).when(aggregatePublisher).subscribe(any()); + + when(collection.aggregate(anyList())).thenReturn(aggregatePublisher); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(aggregatePublisher); + + template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class).subscribe(); + + assertThat(request).hasValueGreaterThan(128); + } + + @Test // DATAMONGO-1854 + void aggregateShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() { + + template + .aggregate( + newAggregation(Sith.class, project("id")) + .withOptions(newAggregationOptions().collation(Collation.of("fr")).build()), + AutogenerateableId.class, Document.class) + .subscribe(); + + verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-2153 + void aggregateShouldHonorOptionsComment() { + + AggregationOptions options = AggregationOptions.builder().comment("expensive").build(); + + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(aggregatePublisher).comment("expensive"); + } + + @Test // DATAMONGO-1836 + void aggregateShouldHonorOptionsHint() { + + Document hint = new Document("dummyHint", 1); + AggregationOptions options = AggregationOptions.builder().hint(hint).build(); + + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(aggregatePublisher).hint(hint); + } + + @Test // GH-4238 + void aggregateShouldHonorOptionsHintString() { + + AggregationOptions options = AggregationOptions.builder().hint("index-1").build(); + +
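// a String hint should be forwarded via hintString(..) rather than hint(..) +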
template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(aggregatePublisher).hintString("index-1"); + } + + @Test // DATAMONGO-2390 + void aggregateShouldNotApplyZeroOrNegativeMaxTime() { + + template + .aggregate(newAggregation(MongoTemplateUnitTests.Sith.class, project("id")).withOptions( + newAggregationOptions().maxTime(Duration.ZERO).build()), AutogenerateableId.class, Document.class) + .subscribe(); + template + .aggregate( + newAggregation(MongoTemplateUnitTests.Sith.class, project("id")) + .withOptions(newAggregationOptions().maxTime(Duration.ofSeconds(-1)).build()), + AutogenerateableId.class, Document.class) + .subscribe(); + + verify(aggregatePublisher, never()).maxTime(anyLong(), any()); + } + + @Test // DATAMONGO-2390 + void aggregateShouldApplyMaxTimeIfSet() { + + template + .aggregate( + newAggregation(MongoTemplateUnitTests.Sith.class, project("id")) + .withOptions(newAggregationOptions().maxTime(Duration.ofSeconds(10)).build()), + AutogenerateableId.class, Document.class) + .subscribe(); + + verify(aggregatePublisher).maxTime(eq(10000L), eq(TimeUnit.MILLISECONDS)); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseCollationWhenPresent() { + + template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new Jedi()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseDefaultCollationWhenPresent() { + + template.findAndReplace(new BasicQuery("{}"), new Sith()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseCollationEvenIfDefaultCollationIsPresent() { + + template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new MongoTemplateUnitTests.Sith()) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1854 + void findDistinctShouldUseDefaultCollationWhenPresent() { + + template.findDistinct(new BasicQuery("{}"), "name", Sith.class, String.class).subscribe(); + + verify(distinctPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findDistinctPreferCollationFromQueryOverDefaultCollation() { + + template.findDistinct(new BasicQuery("{}").collation(Collation.of("fr")), "name", Sith.class, String.class) + .subscribe(); + + verify(distinctPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1854 + void updateFirstShouldUseDefaultCollationWhenPresent() { + + template.updateFirst(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + +
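// Sith declares collation 'de_AT' on its @Document mapping, so it should surface in the update options +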
assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void updateFirstShouldPreferExplicitCollationOverDefaultCollation() { + + template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void updateMultiShouldUseDefaultCollationWhenPresent() { + + template.updateMulti(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void updateMultiShouldPreferExplicitCollationOverDefaultCollation() { + + template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void removeShouldUseDefaultCollationWhenPresent() { + + template.remove(new BasicQuery("{}"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void removeShouldPreferExplicitCollationOverDefaultCollation() { + + template.remove(new BasicQuery("{}").collation(Collation.of("fr")), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-2261 + void saveShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.save(entity).subscribe(); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + 
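// register the capturing callbacks up front so the subsequent insert runs through them +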
template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.insert(entity).subscribe(); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertAllShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity2.id = "2"; + entity2.firstname = "luke"; + + template.insertAll(Arrays.asList(entity1, entity2)).subscribe(); + + verify(beforeConvertCallback, times(2)).onBeforeConvert(any(), anyString()); + verify(beforeSaveCallback, times(2)).onBeforeSave(any(), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void findAndReplaceShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.findAndReplace(new Query(), entity).subscribe(); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void entityCallbacksAreNotSetByDefault() { + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNull(); + } + + @Test // DATAMONGO-2261 + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNotNull(); + } + + @Test // DATAMONGO-2261 + void setterForEntityCallbackOverridesContextInitializedOnes() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + ReactiveEntityCallbacks callbacks = ReactiveEntityCallbacks.create(); + template.setEntityCallbacks(callbacks); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2261 + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + + ReactiveEntityCallbacks callbacks = ReactiveEntityCallbacks.create(); + ApplicationContext ctx = new StaticApplicationContext(); + + template.setEntityCallbacks(callbacks); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFind() { + + template.find(new Query().allowSecondaryReads(), AutogenerateableId.class).subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344,
DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindOne() { + + template.findOne(new Query().allowSecondaryReads(), AutogenerateableId.class).subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindDistinct() { + + template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class) + .subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update().set("total") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { total : { $sum : [ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowMultipleAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update() // + .set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) // + .set("grade").toValue(ConditionalOperators.switchCases( // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D") // + ) // + .defaultTo("F"));// + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).containsExactly(Document.parse("{ $set: { average : { $avg: \"$tests\" } } }"), + Document.parse("{ $set: { grade: { $switch: {\n" + " branches: [\n" + + " { case: { $gte: [ \"$average\", 90 ] }, then: \"A\" },\n" + + " { case: { $gte: [ \"$average\", 80 ] }, then: \"B\" },\n" + + " { case: { $gte: [ \"$average\", 70 ] }, then: \"C\" },\n" + + " { case: { $gte: [ \"$average\", 60 ] }, then: \"D\" }\n" + + " ],\n" + " default: \"F\"\n" + " } } } }")); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationExpressionToDomainType() { + + AggregationUpdate update = AggregationUpdate.update().set("name") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { firstname : { $sum:[ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldPassOnUnsetCorrectly() { + + SetOperation setOperation = SetOperation.builder().set("status").toValue("Modified").and().set("comments") + 
.toValue(Fields.fields("misc1").and("misc2").asList()); + AggregationUpdate update = AggregationUpdate.update(); + update.set(setOperation); + update.unset("misc1", "misc2"); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Arrays.asList(Document.parse("{ $set: { status: \"Modified\", comments: [ \"$misc1\", \"$misc2\" ] } }"), + Document.parse("{ $unset: [ \"misc1\", \"misc2\" ] }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationUnsetToDomainType() { + + AggregationUpdate update = AggregationUpdate.update(); + update.unset("name"); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo(Collections.singletonList(Document.parse("{ $unset : \"firstname\" }"))); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyIfNotPresentInFilter() { + + when(findPublisher.first()).thenReturn(Mono.empty()); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromGivenDocumentIfShardKeyIsImmutable() { + + template.save(new ShardedEntityWithNonDefaultImmutableShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + + verifyNoInteractions(findPublisher); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyToVersionedEntityIfNotPresentInFilter() { + + when(collection.replaceOne(any(Bson.class), any(Document.class), any(ReplaceOptions.class))) + .thenReturn(Mono.just(UpdateResult.acknowledged(1, 1L, null))); + when(findPublisher.first()).thenReturn(Mono.empty()); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("version", 1L).append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromExistingDocumentIfNotPresentInFilter() { + + when(findPublisher.first()) + .thenReturn(Mono.just(new Document("_id", "id-1").append("country", "US").append("userid", 4230))); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor 
replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendDefaultShardKeyIfNotPresentInFilter() { + + template.save(new ShardedEntityWithDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1")); + } + + @Test // DATAMONGO-2341 + void saveShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(findPublisher.first()) + .thenReturn(Mono.just(new Document("_id", "id-1").append("country", "US").append("userid", 4230))); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + verify(findPublisher).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // DATAMONGO-2341 + void saveVersionedShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(collection.replaceOne(any(Bson.class), any(Document.class), any(ReplaceOptions.class))) + .thenReturn(Mono.just(UpdateResult.acknowledged(1, 1L, null))); + when(findPublisher.first()).thenReturn(Mono.empty()); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)).subscribe(); + + verify(findPublisher).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // GH-3648 + void shouldThrowExceptionIfEntityReaderReturnsNull() { + + MappingMongoConverter converter = mock(MappingMongoConverter.class); + when(converter.getMappingContext()).thenReturn((MappingContext) mappingContext); + when(converter.getProjectionFactory()).thenReturn(new SpelAwareProxyProjectionFactory()); + template = new ReactiveMongoTemplate(factory, converter); + + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(new Document()); + + template.find(new Query(), Person.class).as(StepVerifier::create).verifyError(MappingException.class); + } + + @Test // DATAMONGO-2479 + void findShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(document); + + List results = template.find(new Query(), Person.class).timeout(Duration.ofSeconds(1)).toStream() + .collect(Collectors.toList()); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(results.get(0).id).isEqualTo("after-convert"); + } + + @Test // GH-4543 + void findShouldNotLimitBackpressure() { + + AtomicLong request = new AtomicLong(); + stubFindSubscribe(new Document(), request); + + template.find(new Query(), Person.class).subscribe(); + + assertThat(request).hasValueGreaterThan(128); + } + + @Test // DATAMONGO-2479 + void findByIdShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + 
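// the after-convert callback swaps in an entity with id 'after-convert', which the assertion below relies on +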
template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(any(Bson.class), eq(Document.class))).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findById("init", Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findOneShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(any(Bson.class), eq(Document.class))).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findOne(new Query(), Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAllShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(document); + + List results = template.findAll(Person.class).timeout(Duration.ofSeconds(1)).toStream() + .collect(Collectors.toList()); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(results.get(0).id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAndModifyShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndUpdate(any(Bson.class), any(Bson.class), any())).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findAndModify(new Query(), new Update(), Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAndRemoveShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndDelete(any(Bson.class), any())).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findAndRemove(new Query(), Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test 
// DATAMONGO-2479 + void findAllAndRemoveShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(document); + when(collection.deleteMany(any(Bson.class), any(DeleteOptions.class))) + .thenReturn(Mono.just(spy(DeleteResult.class))); + + List results = template.findAllAndRemove(new Query(), Person.class).timeout(Duration.ofSeconds(1)) + .toStream().collect(Collectors.toList()); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(results.get(0).id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(findPublisher); + stubFindSubscribe(new Document("_id", "init").append("firstname", "luke")); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + Person saved = template.findAndReplace(new Query(), entity).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void saveShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + when(collection.replaceOne(any(Bson.class), any(Document.class), any(ReplaceOptions.class))) + .thenReturn(Mono.just(mock(UpdateResult.class))); + + Person entity = new Person("init", "luke"); + + Person saved = template.save(entity).block(Duration.ofSeconds(1)); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + when(collection.insertOne(any())).thenReturn(Mono.just(mock(InsertOneResult.class))); + + Person entity = new Person("init", "luke"); + + Person saved = template.insert(entity).block(Duration.ofSeconds(1)); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertAllShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity2.id = "2"; + entity2.firstname = "luke"; + + when(collection.insertMany(anyList())).then(invocation -> { + List list = invocation.getArgument(0); + return Flux.fromIterable(list).map(i -> mock(InsertManyResult.class)); + }); +
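// insertMany is stubbed to emit one result per inserted document so both entities pass through the callback +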
+ List saved = template.insertAll(Arrays.asList(entity1, entity2)).timeout(Duration.ofSeconds(1)).toStream() + .collect(Collectors.toList()); + + verify(afterSaveCallback, times(2)).onAfterSave(any(), any(), anyString()); + assertThat(saved.get(0).id).isEqualTo("after-save"); + assertThat(saved.get(1).id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(findPublisher); + stubFindSubscribe(new Document("_id", "init").append("firstname", "luke")); + + Person entity = new Person("init", "luke"); + + Person saved = template.findAndReplace(new Query(), entity).block(Duration.ofSeconds(1)); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldEmitAfterSaveEvent() { + + AbstractMongoEventListener eventListener = new AbstractMongoEventListener() { + + @Override + public void onAfterSave(AfterSaveEvent event) { + + assertThat(event.getSource().id).isEqualTo("init"); + event.getSource().id = "after-save-event"; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(ApplicationListener.class, () -> eventListener); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(Mono.just(document)); + + Person saved = template.findAndReplace(new Query(), entity).block(Duration.ofSeconds(1)); + + assertThat(saved.id).isEqualTo("after-save-event"); + } + + @Test // DATAMONGO-2556 + void estimatedCountShouldBeDelegatedCorrectly() { + + template.estimatedCount(Person.class).subscribe(); + + verify(db).getCollection("star-wars", Document.class); + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-3522 + void usesCountDocumentsForEmptyQueryByDefault() { + + template.count(new Query(), Person.class).subscribe(); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-3522 + void delegatesToEstimatedCountForEmptyQueryIfEnabled() { + + template.useEstimatedCount(true); + + template.count(new Query(), Person.class).subscribe(); + + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-3522 + void stillUsesCountDocumentsForNonEmptyQueryEvenIfEstimationEnabled() { + + template.useEstimatedCount(true); + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }"), Person.class).subscribe(); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-4374 + void countConsidersMaxTimeMs() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").maxTimeMsec(5000), Person.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getMaxTime(TimeUnit.MILLISECONDS)).isEqualTo(5000); + } + + @Test // GH-4374 + void countPassesOnComment() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").comment("rocks!"), Person.class).subscribe(); + +
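// the query comment should be propagated into CountOptions as a BsonValue +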
ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getComment()).isEqualTo(BsonUtils.simpleToBsonValue("rocks!")); + } + + @Test // GH-2911 + void insertErrorsOnPublisher() { + + Publisher publisher = Mono.just("data"); + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.insert(publisher)); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); + } + + @Test // GH-4167 + void changeStreamOptionStartAfterShouldBeApplied() { + + when(factory.getMongoDatabase(anyString())).thenReturn(Mono.just(db)); + + when(collection.watch(any(Class.class))).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.batchSize(anyInt())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.startAfter(any())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.fullDocument(any())).thenReturn(changeStreamPublisher); + + BsonDocument token = new BsonDocument("token", new BsonString("id")); + template + .changeStream("database", "collection", ChangeStreamOptions.builder().startAfter(token).build(), Object.class) + .subscribe(); + + verify(changeStreamPublisher).startAfter(eq(token)); + } + + @Test // GH-4495 + void changeStreamOptionFullDocumentBeforeChangeShouldBeApplied() { + + when(factory.getMongoDatabase(anyString())).thenReturn(Mono.just(db)); + + when(collection.watch(any(Class.class))).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.batchSize(anyInt())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.startAfter(any())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.fullDocument(any())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.fullDocumentBeforeChange(any())).thenReturn(changeStreamPublisher); + + ChangeStreamOptions options = ChangeStreamOptions.builder() + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED).build(); + template.changeStream("database", "collection", options, Object.class).subscribe(); + + verify(changeStreamPublisher).fullDocumentBeforeChange(FullDocumentBeforeChange.REQUIRED); + + } + + @Test // GH-4462 + void replaceShouldUseCollationWhenPresent() { + + template.replace(new BasicQuery("{}").collation(Collation.of("fr")), new Jedi()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); +
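// replace(..) should not upsert unless explicitly requested; the collation stems from the query +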
assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // GH-4462 + void replaceShouldNotUpsertByDefault() { + + template.replace(new BasicQuery("{}"), new MongoTemplateUnitTests.Sith()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + } + + @Test // GH-4462 + void replaceShouldUpsert() { + + template.replace(new BasicQuery("{}"), new MongoTemplateUnitTests.Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions().upsert()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isTrue(); + } + + @Test // GH-4462 + void replaceShouldUseDefaultCollationWhenPresent() { + + template.replace(new BasicQuery("{}"), new MongoTemplateUnitTests.Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // GH-4462 + void replaceShouldUseHintIfPresent() { + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new MongoTemplateUnitTests.Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions().upsert()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("index-to-use"); + } + + @Test // GH-4462 + void replaceShouldApplyWriteConcern() { + + template.setWriteConcernResolver(new WriteConcernResolver() { + public WriteConcern resolve(MongoAction action) { + + assertThat(action.getMongoActionOperation()).isEqualTo(MongoActionOperation.REPLACE); + return WriteConcern.UNACKNOWLEDGED; + } + }); + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions().upsert()).subscribe(); + + verify(collection).withWriteConcern(eq(WriteConcern.UNACKNOWLEDGED)); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromString() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsPlainString.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.MINUTES)) + .isEqualTo(10); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromIso8601String() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsIso8601Style.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.DAYS)) + .isEqualTo(1); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpression() { + + 
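// the SpEL expression "#{10 + 1 + 's'}" should evaluate to the duration string "11s" +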
template.createCollection(TimeSeriesTypeWithExpireAfterAsExpression.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(11); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpressionReturningDuration() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(100); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithInvalidTimeoutExpiration() { + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> + template.createCollection(TimeSeriesTypeWithInvalidExpireAfter.class).subscribe() + ); + } + + private void stubFindSubscribe(Document document) { + stubFindSubscribe(document, new AtomicLong()); + } + + private void stubFindSubscribe(Document document, AtomicLong request) { + + Publisher realPublisher = Flux.just(document).doOnRequest(request::addAndGet); + + doAnswer(invocation -> { + Subscriber subscriber = invocation.getArgument(0); + realPublisher.subscribe(subscriber); + return null; + }).when(findPublisher).subscribe(any()); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "star-wars") + static class Person { + + @Id String id; + String firstname; + + public Person() {} + + public Person(String id, String firstname) { + this.id = id; + this.firstname = firstname; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveMongoTemplateUnitTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + ")"; + } + } + + class Wrapper { + + AutogenerateableId foo; + } + + static class PersonExtended extends Person { + + String lastname; + } + + interface PersonProjection { + String getFirstname(); + } + + public interface PersonSpELProjection { + + @Value("#{target.firstname}") + String getName(); + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveMongoTemplateUnitTests.Jedi(name=" + this.getName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + + 
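// test fixture declaring a 'de_AT' default collation via its @Document mapping +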
@Field("firstname") String name; + } + + static class EntityWithListOfSimple { + List grades; + } + + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") Instant timestamp; + Object meta; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "10m") + static class TimeSeriesTypeWithExpireAfterAsPlainString { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "P1D") + static class TimeSeriesTypeWithExpireAfterAsIso8601Style { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{10 + 1 + 's'}") + static class TimeSeriesTypeWithExpireAfterAsExpression { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{T(java.time.Duration).ofSeconds(100)}") + static class TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "123ops") + static class TimeSeriesTypeWithInvalidExpireAfter { + + String id; + Instant timestamp; + } + + static class ValueCapturingEntityCallback { + + private final List values = new ArrayList<>(1); + + protected void capture(T value) { + values.add(value); + } + + public List getValues() { + return values; + } + + @Nullable + public T getValue() { + return CollectionUtils.lastElement(values); + } + } + + static class ValueCapturingBeforeConvertCallback extends ValueCapturingEntityCallback + implements ReactiveBeforeConvertCallback { + + @Override + public Mono onBeforeConvert(Person entity, String collection) { + + capture(entity); + return Mono.just(entity); + } + } + + static class ValueCapturingBeforeSaveCallback extends ValueCapturingEntityCallback + implements ReactiveBeforeSaveCallback { + + @Override + public Mono onBeforeSave(Person entity, Document document, String collection) { + + capture(entity); + return Mono.just(entity); + } + } + + static class ValueCapturingAfterConvertCallback extends ValueCapturingEntityCallback + implements ReactiveAfterConvertCallback { + + @Override + public Mono onAfterConvert(Person entity, Document document, String collection) { + + capture(entity); + return Mono.just(new Person() { + { + id = "after-convert"; + firstname = entity.firstname; + } + }); + } + } + + static class ValueCapturingAfterSaveCallback extends ValueCapturingEntityCallback + implements ReactiveAfterSaveCallback { + + @Override + public Mono onAfterSave(Person entity, Document document, String collection) { + + capture(entity); + return Mono.just(new Person() { + { + id = "after-save"; + firstname = entity.firstname; + } + }); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUpdateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUpdateTests.java new file mode 100644 index 0000000000..35c27815ff --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUpdateTests.java @@ -0,0 +1,416 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators; +import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation; +import org.springframework.data.mongodb.core.aggregation.SetOperation; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") +public class ReactiveMongoTemplateUpdateTests { + + static final String DB_NAME = "reactive-update-test"; + + static @Client MongoClient client; + ReactiveMongoTemplate template; + + @BeforeEach + void beforeEach() { + + template = new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(client, DB_NAME)); + + MongoTestUtils.createOrReplaceCollection(DB_NAME, template.getCollectionName(Score.class), client).then() + .as(StepVerifier::create).verifyComplete(); + MongoTestUtils.createOrReplaceCollection(DB_NAME, template.getCollectionName(Versioned.class), client).then() + .as(StepVerifier::create).verifyComplete(); + MongoTestUtils.createOrReplaceCollection(DB_NAME, template.getCollectionName(Book.class), client).then() + .as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateWithSet() { + + Score score1 = new Score(1, "Maya", Arrays.asList(10, 5, 10), Arrays.asList(10, 8), 0); + Score score2 = new Score(2, "Ryan", Arrays.asList(5, 6, 5), 
Arrays.asList(8, 8), 8); + + template.insertAll(Arrays.asList(score1, score2)).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update().set(SetOperation.builder() // + .set("totalHomework").toValueOf(ArithmeticOperators.valueOf("homework").sum()).and() // + .set("totalQuiz").toValueOf(ArithmeticOperators.valueOf("quiz").sum())) // + .set(SetOperation.builder() // + .set("totalScore") + .toValueOf(ArithmeticOperators.valueOf("totalHomework").add("totalQuiz").add("extraCredit"))); + + template.update(Score.class).apply(update).all().then().as(StepVerifier::create).verifyComplete(); + + Flux.from(collection(Score.class).find(new org.bson.Document())).collectList().as(StepVerifier::create) + .consumeNextWith(it -> { + + assertThat(it).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{\"_id\" : 1, \"student\" : \"Maya\", \"homework\" : [ 10, 5, 10 ], \"quiz\" : [ 10, 8 ], \"extraCredit\" : 0, \"totalHomework\" : 25, \"totalQuiz\" : 18, \"totalScore\" : 43, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Score\"}"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"student\" : \"Ryan\", \"homework\" : [ 5, 6, 5 ], \"quiz\" : [ 8, 8 ], \"extraCredit\" : 8, \"totalHomework\" : 16, \"totalQuiz\" : 16, \"totalScore\" : 40, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Score\"}")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void versionedAggregateUpdateWithSet() { + + Versioned source = new Versioned("id-1", "value-0"); + template.insert(Versioned.class).one(source).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update().set("value").toValue("changed"); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first() + .then().as(StepVerifier::create).verifyComplete(); + + Flux.from(collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1)).collectList() + .as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it).containsExactly( + new org.bson.Document("_id", source.id).append("version", 1L).append("value", "changed").append("_class", + "org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Versioned")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void versionedAggregateUpdateTouchingVersionProperty() { + + Versioned source = new Versioned("id-1", "value-0"); + template.insert(Versioned.class).one(source).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update() + .set(SetOperation.builder().set("value").toValue("changed").and().set("version").toValue(10L)); + + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first() + .then().as(StepVerifier::create).verifyComplete(); + + Flux.from(collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1)).collectList() + .as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it).containsExactly( + new org.bson.Document("_id", source.id).append("version", 10L).append("value", "changed").append("_class", + "org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Versioned")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateWithUnset() { + + Book antelopeAntics = new Book(); + antelopeAntics.id = 1; + antelopeAntics.title = "Antelope Antics"; + 
antelopeAntics.isbn = "0001122223334"; + antelopeAntics.author = new Author("Auntie", "An"); + antelopeAntics.stock = new ArrayList<>(); + antelopeAntics.stock.add(new Warehouse("A", 5)); + antelopeAntics.stock.add(new Warehouse("B", 15)); + + Book beesBabble = new Book(); + beesBabble.id = 2; + beesBabble.title = "Bees Babble"; + beesBabble.isbn = "999999999333"; + beesBabble.author = new Author("Bee", "Bumble"); + beesBabble.stock = new ArrayList<>(); + beesBabble.stock.add(new Warehouse("A", 2)); + beesBabble.stock.add(new Warehouse("B", 5)); + + template.insertAll(Arrays.asList(antelopeAntics, beesBabble)).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update().unset("isbn", "stock"); + template.update(Book.class).apply(update).all().then().as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{ \"_id\" : 1, \"title\" : \"Antelope Antics\", \"author\" : { \"last\" : \"An\", \"first\" : \"Auntie\" }, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\" }"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"title\" : \"Bees Babble\", \"author\" : { \"last\" : \"Bumble\", \"first\" : \"Bee\" }, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\" }")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateWithReplaceWith() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update() + .replaceWith(ReplaceWithOperation.replaceWithValueOf("author")); + + template.update(Book.class).apply(update).all().then().as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).containsExactlyInAnyOrder( + org.bson.Document.parse("{\"_id\" : 1, \"first\" : \"John\", \"last\" : \"Backus\"}"), + org.bson.Document.parse("{\"_id\" : 2, \"first\" : \"Grace\", \"last\" : \"Hopper\"}")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregationUpdateUpsertsCorrectly() { + + AggregationUpdate update = AggregationUpdate.update().set("title").toValue("The Burning White"); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(1))).apply(update).upsert().then() + .as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it).containsExactly(org.bson.Document.parse("{\"_id\" : 1, \"title\" : \"The Burning White\" }")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateFirstMatch() { + + Book one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete(); + + template.update(Book.class).apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")).first() + .then().as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + 
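+					// updateFirst without a sort modifies only the first document in natural order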
assertThat(it).containsExactly(org.bson.Document.parse(
+					"{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}"),
+					org.bson.Document.parse(
+							"{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}"));
+		}).verifyComplete();
+	}
+
+	@Test // DATAMONGO-2331
+	public void findAndModifyAppliesAggregationUpdateCorrectly() {
+
+		Book one = new Book();
+		one.id = 1;
+		one.title = "The Blood Mirror";
+
+		Book two = new Book();
+		two.id = 2;
+		two.title = "The Broken Eye";
+
+		template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete();
+
+		template.update(Book.class) //
+				.matching(Query.query(Criteria.where("id").is(one.id))) //
+				.apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")) //
+				.findAndModify() //
+				.as(StepVerifier::create) //
+				.expectNext(one) //
+				.verifyComplete();
+
+		all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> {
+
+			assertThat(it).containsExactly(org.bson.Document.parse(
+					"{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}"),
+					org.bson.Document.parse(
+							"{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}"));
+		}).verifyComplete();
+
+	}
+
+	@ParameterizedTest // GH-4797
+	@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0")
+	@MethodSource("sortedUpdateBookArgs")
+	void updateFirstWithSort(Class<?> domainType, Sort sort, UpdateDefinition update) {
+
+		Book one = new Book();
+		one.id = 1;
+		one.isbn = "001 001 300";
+		one.title = "News isn't fake";
+		one.author = new Author("John", "Backus");
+
+		Book two = new Book();
+		two.id = 2;
+		two.title = "love is love";
+		two.isbn = "001 001 100";
+		two.author = new Author("Grace", "Hopper");
+
+		template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete();
+
+		template.update(domainType) //
+				.inCollection(template.getCollectionName(Book.class))//
+				.matching(new Query().with(sort)).apply(update) //
+				.first().as(StepVerifier::create) //
+				.assertNext(result -> assertThat(result.getModifiedCount()).isOne()) //
+				.verifyComplete();
+
+		Mono.from(collection(Book.class).find(new org.bson.Document("_id", two.id)).first()) //
+				.as(StepVerifier::create) //
+				.assertNext(document -> assertThat(document).containsEntry("title", "Science is real!")) //
+				.verifyComplete();
+	}
+
+
+	private Flux<org.bson.Document> all(Class<?> type) {
+		return Flux.from(collection(type).find(new org.bson.Document()));
+	}
+
+	private MongoCollection<org.bson.Document> collection(Class<?> type) {
+		return client.getDatabase(DB_NAME).getCollection(template.getCollectionName(type));
+	}
+
+	private static Stream<Arguments> sortedUpdateBookArgs() {
+
+		Update update = new Update().set("title", "Science is real!");
+		AggregationUpdate aggUpdate = AggregationUpdate.update().set("title").toValue("Science is real!");
+
+		return Stream.of( //
+				Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), update), // typed, no field mapping
+				Arguments.of(Book.class, Sort.by(Direction.DESC, "author.lastname"), update), // typed, map `lastname`
+				Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), update), // typed, raw field name
+				Arguments.of(Object.class, Sort.by(Direction.ASC, "isbn"), update), // untyped, requires raw field name
+				Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), aggUpdate), // aggregation, no field mapping
+				Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), aggUpdate) // aggregation, raw field name
+		);
+	}
+
+	@Document("scores")
+	static class Score {
+
+		Integer id;
+		String student;
+		List<Integer> homework;
+		List<Integer> quiz;
+		Integer extraCredit;
+
+		public Score(Integer id, String student, List<Integer> homework, List<Integer> quiz, Integer extraCredit) {
+
+			this.id = id;
+			this.student = student;
+			this.homework = homework;
+			this.quiz = quiz;
+			this.extraCredit = extraCredit;
+		}
+	}
+
+	static class Versioned {
+
+		String id;
+		@Version Long version;
+		String value;
+
+		public Versioned(String id, String value) {
+			this.id = id;
+			this.value = value;
+		}
+	}
+
+	static class Book {
+
+		@Id Integer id;
+		String title;
+		String isbn;
+		Author author;
+		@Field("copies") Collection<Warehouse> stock;
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			Book book = (Book) o;
+			return Objects.equals(id, book.id) && Objects.equals(title, book.title) && Objects.equals(isbn, book.isbn)
+					&& Objects.equals(author, book.author) && Objects.equals(stock, book.stock);
+		}
+
+		@Override
+		public int hashCode() {
+			return Objects.hash(id, title, isbn, author, stock);
+		}
+	}
+
+	static class Author {
+
+		@Field("first") String firstname;
+		@Field("last") String lastname;
+
+		public Author(String firstname, String lastname) {
+			this.firstname = firstname;
+			this.lastname = lastname;
+		}
+	}
+
+	static class Warehouse {
+
+		public Warehouse(String location, Integer qty) {
+			this.location = location;
+			this.qty = qty;
+		}
+
+		@Field("warehouse") String location;
+		Integer qty;
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateViewTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateViewTests.java
new file mode 100644
index 0000000000..0841ddc37f
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateViewTests.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+import static org.springframework.data.mongodb.core.query.Criteria.*;
+
+import reactor.test.StepVerifier;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+import org.bson.Document;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
+import org.springframework.data.mongodb.core.mapping.Field;
+import org.springframework.data.mongodb.core.query.Collation;
+import org.springframework.data.mongodb.test.util.Client;
+import org.springframework.data.mongodb.test.util.CollectionInfo;
+import org.springframework.data.mongodb.test.util.MongoClientExtension;
+import org.springframework.data.mongodb.test.util.MongoTestUtils;
+
+import com.mongodb.reactivestreams.client.MongoClient;
+
+/**
+ * @author Christoph Strobl
+ */
+@ExtendWith(MongoClientExtension.class)
+public class ReactiveMongoTemplateViewTests {
+
+	static @Client com.mongodb.client.MongoClient client;
+	static @Client MongoClient reactiveClient;
+	static final String DB_NAME = "reactive-mongo-template-view-tests";
+
+	private ReactiveMongoTemplate template;
+
+	Student alex = new Student(22001L, "Alex", 1, 4.0D);
+	Student bernie = new Student(21001L, "bernie", 2, 3.7D);
+	Student chris = new Student(20010L, "Chris", 3, 2.5D);
+	Student drew = new Student(22021L, "Drew", 1, 3.2D);
+	Student harley1 = new Student(17301L, "harley", 6, 3.1D);
+	Student farmer = new Student(21022L, "Farmer", 1, 2.2D);
+	Student george = new Student(20020L, "george", 3, 2.8D);
+	Student harley2 = new Student(18020L, "Harley", 5, 2.8D);
+
+	List<Student> students = Arrays.asList(alex, bernie, chris, drew, harley1, farmer, george, harley2);
+
+	@BeforeEach
+	void beforeEach() {
+		template = new ReactiveMongoTemplate(reactiveClient, DB_NAME);
+	}
+
+	@AfterEach
+	void afterEach() {
+		client.getDatabase(DB_NAME).drop();
+	}
+
+	@Test // GH-2594
+	void createsViewFromPipeline() {
+
+		template.insertAll(students).then().as(StepVerifier::create).verifyComplete();
+
+		template.createView("firstYears", Student.class, match(where("year").is(1))).then().as(StepVerifier::create)
+				.verifyComplete();
+
+		CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears");
+		assertThat(collectionInfo.isView()).isTrue();
+		assertThat(collectionInfo.getViewTarget()).isEqualTo("student");
+		assertThat(collectionInfo.getViewPipeline()).containsExactly(new Document("$match", new Document("year", 1)));
+	}
+
+	@Test // GH-2594
+	void mapsPipelineAgainstDomainObject() {
+
+		template.insertAll(students).then().as(StepVerifier::create).verifyComplete();
+
+		template.createView("fakeStudents", Student.class, match(where("studentID").gte("22"))).then()
+				.as(StepVerifier::create).verifyComplete();
+
+		CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents");
+		assertThat(collectionInfo.isView()).isTrue();
+		assertThat(collectionInfo.getViewPipeline())
+				.containsExactly(new Document("$match", new Document("sID", new Document("$gte", "22"))));
+	}
+
+	@Test // GH-2594
+	void takesPipelineAsIsIfNoTypeDefined() {
+
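+		// no domain type given: pipeline field names like "studentID" are passed to the driver as-is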
template.insertAll(students).then().as(StepVerifier::create).verifyComplete();
+
+		template.createView("fakeStudents", "student", AggregationPipeline.of(match(where("studentID").gte("22"))),
+				ViewOptions.none()).then().as(StepVerifier::create).verifyComplete();
+
+		CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents");
+		assertThat(collectionInfo.isView()).isTrue();
+		assertThat(collectionInfo.getViewPipeline())
+				.containsExactly(new Document("$match", new Document("studentID", new Document("$gte", "22"))));
+	}
+
+	@Test // GH-2594
+	void readsFromView() {
+
+		template.insertAll(students).then().as(StepVerifier::create).verifyComplete();
+		client.getDatabase(DB_NAME).createView("firstYears", "student",
+				Arrays.asList(new Document("$match", new Document("year", 1))));
+
+		template.query(Student.class).inCollection("firstYears").all().collectList().as(StepVerifier::create)
+				.consumeNextWith(it -> assertThat(it).containsExactlyInAnyOrder(alex, drew, farmer))
+				.verifyComplete();
+	}
+
+	@Test // GH-2594
+	void appliesCollationToView() {
+
+		template.insertAll(students).then().as(StepVerifier::create).verifyComplete();
+
+		template.createView("firstYears", Student.class, AggregationPipeline.of(match(where("year").is(1))),
+				new ViewOptions().collation(Collation.of("en_US"))).then().as(StepVerifier::create).verifyComplete();
+
+		CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears");
+		assertThat(collectionInfo.isView()).isTrue();
+		assertThat(collectionInfo.getCollation().getLocale()).isEqualTo("en_US");
+	}
+
+	private static class Student {
+
+		@Field("sID") Long studentID;
+
+		int year;
+
+		double score;
+
+		String name;
+
+		public Student() {}
+
+		public Student(long studentID, String name, int year, double score) {
+			this.studentID = studentID;
+			this.name = name;
+			this.year = year;
+			this.score = score;
+		}
+
+		public Long getStudentID() {
+			return this.studentID;
+		}
+
+		public int getYear() {
+			return this.year;
+		}
+
+		public double getScore() {
+			return this.score;
+		}
+
+		public String getName() {
+			return this.name;
+		}
+
+		public void setStudentID(Long studentID) {
+			this.studentID = studentID;
+		}
+
+		public void setYear(int year) {
+			this.year = year;
+		}
+
+		public void setScore(double score) {
+			this.score = score;
+		}
+
+		public void setName(String name) {
+			this.name = name;
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			Student student = (Student) o;
+			return year == student.year && Double.compare(student.score, score) == 0
+					&& Objects.equals(studentID, student.studentID) && Objects.equals(name, student.name);
+		}
+
+		@Override
+		public int hashCode() {
+			return Objects.hash(studentID, year, score, name);
+		}
+
+		public String toString() {
+			return "ReactiveMongoTemplateViewTests.Student(studentID=" + this.getStudentID() + ", year=" + this.getYear()
+					+ ", score=" + this.getScore() + ", name=" + this.getName() + ")";
+		}
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java
new file mode 100644
index 0000000000..5659869705
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java
@@ -0,0 +1,189 @@
+/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.test.StepVerifier; + +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link ReactiveRemoveOperationSupport}. + * + * @author Mark Paluch + */ +@ExtendWith(MongoClientExtension.class) +class ReactiveRemoveOperationSupportTests { + + private static final String STAR_WARS = "star-wars"; + private static @Client MongoClient client; + private static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + private MongoTemplate blocking; + private ReactiveMongoTemplate template; + + private Person han; + private Person luke; + + @BeforeEach + void setUp() { + + blocking = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "ExecutableRemoveOperationSupportTests")); + blocking.dropCollection(STAR_WARS); + + han = new Person(); + han.firstname = "han"; + han.id = "id-1"; + + luke = new Person(); + luke.firstname = "luke"; + luke.id = "id-2"; + + blocking.save(han); + blocking.save(luke); + + template = new ReactiveMongoTemplate(reactiveClient, "ExecutableRemoveOperationSupportTests"); + } + + @Test // DATAMONGO-1719 + void removeAll() { + + template.remove(Person.class).all().as(StepVerifier::create).consumeNextWith(actual -> { + assertThat(actual.getDeletedCount()).isEqualTo(2L); + }).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void removeAllMatching() { + + template.remove(Person.class).matching(query(where("firstname").is("han"))).all().as(StepVerifier::create) + .consumeNextWith(actual -> assertThat(actual.getDeletedCount()).isEqualTo(1L)).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void removeAllMatchingCriteria() { + + template.remove(Person.class).matching(where("firstname").is("han")).all().as(StepVerifier::create) + .consumeNextWith(actual -> assertThat(actual.getDeletedCount()).isEqualTo(1L)).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void removeAllMatchingWithAlternateDomainTypeAndCollection() { + + template.remove(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(actual -> assertThat(actual.getDeletedCount()).isEqualTo(1L)) + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void removeAndReturnAllMatching() { + + 
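+		// findAndRemove() emits the removed documents instead of a DeleteResult summary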
template.remove(Person.class).matching(query(where("firstname").is("han"))).findAndRemove().as(StepVerifier::create) + .expectNext(han).verifyComplete(); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person { + + @Id String id; + String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveRemoveOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveRemoveOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveSessionBoundMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveSessionBoundMongoTemplateUnitTests.java new file mode 100644 index 0000000000..73970d2ad3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveSessionBoundMongoTemplateUnitTests.java @@ -0,0 +1,329 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Proxy; + +import org.bson.Document; +import org.bson.codecs.BsonValueCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.reactivestreams.Publisher; + +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate.ReactiveSessionBoundMongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.DistinctPublisher; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; + +/** + * Unit tests for {@link ReactiveSessionBoundMongoTemplate}. 
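+ * <p>
+ * Each operation is expected to delegate to the {@link ClientSession} accepting overloads of the driver API
+ * instead of the session-free variants.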
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @author Mathieu Ouellet
+ */
+@SuppressWarnings("unchecked")
+@RunWith(MockitoJUnitRunner.Silent.class)
+public class ReactiveSessionBoundMongoTemplateUnitTests {
+
+	private static final String COLLECTION_NAME = "collection-1";
+
+	ReactiveSessionBoundMongoTemplate template;
+	MongoMappingContext mappingContext;
+	MappingMongoConverter converter;
+
+	ReactiveMongoDatabaseFactory factory;
+
+	@Mock MongoCollection<Document> collection;
+	@Mock MongoDatabase database;
+	@Mock ClientSession clientSession;
+	@Mock FindPublisher<Document> findPublisher;
+	Publisher<String> collectionNamesPublisher;
+	@Mock AggregatePublisher<Document> aggregatePublisher;
+	@Mock DistinctPublisher<Document> distinctPublisher;
+	@Mock Publisher<Document> resultPublisher;
+	@Mock MapReducePublisher<Document> mapReducePublisher;
+	@Mock MongoClient client;
+	@Mock CodecRegistry codecRegistry;
+
+	@Before
+	public void setUp() {
+
+		collectionNamesPublisher = (Publisher<String>) mock(
+				MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(database).collectionNamePublisherType());
+		when(client.getDatabase(anyString())).thenReturn(database);
+		when(codecRegistry.get(any(Class.class))).thenReturn(new BsonValueCodec());
+		when(database.getCodecRegistry()).thenReturn(codecRegistry);
+		when(database.getCollection(anyString())).thenReturn(collection);
+		when(database.getCollection(anyString(), any())).thenReturn(collection);
+		doReturn(collectionNamesPublisher).when(database).listCollectionNames(any(ClientSession.class));
+		when(database.createCollection(any(ClientSession.class), any(), any())).thenReturn(resultPublisher);
+		when(database.runCommand(any(ClientSession.class), any(), any(Class.class))).thenReturn(resultPublisher);
+		when(collection.find(any(ClientSession.class))).thenReturn(findPublisher);
+		when(collection.find(any(ClientSession.class), any(Document.class))).thenReturn(findPublisher);
+		when(collection.find(any(ClientSession.class), any(Class.class))).thenReturn(findPublisher);
+		when(collection.find(any(ClientSession.class), any(), any())).thenReturn(findPublisher);
+		when(collection.deleteMany(any(ClientSession.class), any(), any())).thenReturn(resultPublisher);
+		when(collection.insertOne(any(ClientSession.class), any(Document.class))).thenReturn(resultPublisher);
+		when(collection.aggregate(any(ClientSession.class), anyList(), any(Class.class))).thenReturn(aggregatePublisher);
+		when(collection.countDocuments(any(ClientSession.class), any(), any(CountOptions.class)))
+				.thenReturn(resultPublisher);
+		when(collection.drop(any(ClientSession.class))).thenReturn(resultPublisher);
+		when(collection.findOneAndUpdate(any(ClientSession.class), any(), any(Bson.class), any()))
+				.thenReturn(resultPublisher);
+		when(collection.distinct(any(ClientSession.class), any(), any(Bson.class), any())).thenReturn(distinctPublisher);
+		when(collection.updateOne(any(ClientSession.class), any(), any(Bson.class), any(UpdateOptions.class)))
+				.thenReturn(resultPublisher);
+		when(collection.updateMany(any(ClientSession.class), any(), any(Bson.class), any(UpdateOptions.class)))
+				.thenReturn(resultPublisher);
+		when(collection.dropIndex(any(ClientSession.class), anyString())).thenReturn(resultPublisher);
+		when(collection.mapReduce(any(ClientSession.class), any(), any(), any())).thenReturn(mapReducePublisher);
+		when(findPublisher.projection(any())).thenReturn(findPublisher);
+		when(findPublisher.limit(anyInt())).thenReturn(findPublisher);
+		when(findPublisher.collation(any())).thenReturn(findPublisher);
+		when(findPublisher.first()).thenReturn(resultPublisher);
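+		// the aggregate publisher returns itself so fluent chaining keeps working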
when(aggregatePublisher.allowDiskUse(anyBoolean())).thenReturn(aggregatePublisher); + + factory = new SimpleReactiveMongoDatabaseFactory(client, "foo"); + + this.mappingContext = new MongoMappingContext(); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + this.template = new ReactiveSessionBoundMongoTemplate(clientSession, new ReactiveMongoTemplate(factory, converter)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedCollectionInCallback() { + + template.execute("collection", MongoCollection::find).subscribe(); + + verify(collection, never()).find(); + verify(collection).find(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedDatabaseInCallback() { + + template.execute(MongoDatabase::listCollectionNames).subscribe(); + + verify(database, never()).listCollectionNames(); + verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findOneUsesProxiedCollection() { + + template.findOne(new Query(), Person.class).subscribe(); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void findShouldUseProxiedCollection() { + + template.find(new Query(), Person.class).subscribe(); + + verify(collection).find(eq(clientSession), any(Class.class)); + } + + @Test // DATAMONGO-1880 + public void findAllShouldUseProxiedCollection() { + + template.findAll(Person.class).subscribe(); + + verify(collection).find(eq(clientSession), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void executeCommandShouldUseProxiedDatabase() { + + template.executeCommand("{}").subscribe(); + + verify(database).runCommand(eq(clientSession), any(), any(Class.class)); + } + + @Test // DATAMONGO-1880 + public void removeShouldUseProxiedCollection() { + + template.remove(new Query(), Person.class).subscribe(); + + verify(collection).deleteMany(eq(clientSession), any(), any(DeleteOptions.class)); + } + + @Test // DATAMONGO-1880 + public void insertShouldUseProxiedCollection() { + + template.insert(new Person()).subscribe(); + + verify(collection).insertOne(eq(clientSession), any(Document.class)); + } + + @Test // DATAMONGO-1880 + public void aggregateShouldUseProxiedCollection() { + + template.aggregate(Aggregation.newAggregation(Aggregation.project("foo")), COLLECTION_NAME, Person.class) + .subscribe(); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void collectionExistsShouldUseProxiedDatabase() { + + template.collectionExists(Person.class).subscribe(); + + verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880, GH-3522 + public void countShouldUseProxiedCollection() { + + template.count(new Query(), Person.class).subscribe(); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // GH-3522 + public void countShouldDelegateToExactCountNoMatterWhat() { + + template.useEstimatedCount(true); + template.count(new Query(), Person.class).subscribe(); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // DATAMONGO-1880 + public void createCollectionShouldUseProxiedDatabase() { + + template.createCollection(Person.class).subscribe(); + + verify(database).createCollection(eq(clientSession), anyString(), any()); + } + + @Test // DATAMONGO-1880 + public void dropShouldUseProxiedCollection() { + + 
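+		// collection drop must run through the session-bound proxy as well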
template.dropCollection(Person.class).subscribe();
+
+		verify(collection).drop(eq(clientSession));
+	}
+
+	@Test // DATAMONGO-1880
+	public void findAndModifyShouldUseProxiedCollection() {
+
+		template.findAndModify(new Query(), new Update().set("foo", "bar"), Person.class).subscribe();
+
+		verify(collection).findOneAndUpdate(eq(clientSession), any(), any(Bson.class), any(FindOneAndUpdateOptions.class));
+	}
+
+	@Test // DATAMONGO-1880
+	public void findDistinctShouldUseProxiedCollection() {
+
+		template.findDistinct(new Query(), "firstName", Person.class, String.class).subscribe();
+
+		verify(collection).distinct(eq(clientSession), anyString(), any(), any());
+	}
+
+	@Test // DATAMONGO-1880, DATAMONGO-2264
+	public void geoNearShouldUseProxiedDatabase() {
+
+		template.geoNear(NearQuery.near(new Point(0, 0), Metrics.NEUTRAL), Person.class).subscribe();
+
+		verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class));
+	}
+
+	@Test // DATAMONGO-1880, DATAMONGO-1890, DATAMONGO-257
+	public void mapReduceShouldUseProxiedCollection() {
+
+		template.mapReduce(new BasicQuery("{}"), Person.class, COLLECTION_NAME, Person.class, "foo", "bar",
+				MapReduceOptions.options()).subscribe();
+
+		verify(collection).mapReduce(eq(clientSession), anyString(), anyString(), eq(Document.class));
+	}
+
+	@Test // DATAMONGO-1880
+	public void updateFirstShouldUseProxiedCollection() {
+
+		template.updateFirst(new Query(), Update.update("foo", "bar"), Person.class).subscribe();
+
+		verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class));
+	}
+
+	@Test // DATAMONGO-1880
+	public void updateMultiShouldUseProxiedCollection() {
+
+		template.updateMulti(new Query(), Update.update("foo", "bar"), Person.class).subscribe();
+
+		verify(collection).updateMany(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class));
+	}
+
+	@Test // DATAMONGO-1880
+	public void upsertShouldUseProxiedCollection() {
+
+		template.upsert(new Query(), Update.update("foo", "bar"), Person.class).subscribe();
+
+		verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class));
+	}
+
+	@Test // DATAMONGO-1880
+	public void getCollectionShouldJustReturnTheCollection/*No ClientSession binding*/() {
+		assertThat(template.getCollection(COLLECTION_NAME).block()).isNotInstanceOf(Proxy.class)
+				.isInstanceOf(MongoCollection.class);
+	}
+
+	@Test // DATAMONGO-1880
+	public void getDbShouldJustReturnTheDatabase/*No ClientSession binding*/() {
+		assertThat(template.getMongoDatabase().block()).isNotInstanceOf(Proxy.class).isInstanceOf(MongoDatabase.class);
+	}
+
+	@Test // DATAMONGO-1880
+	public void indexOpsShouldUseProxiedCollection() {
+
+		template.indexOps(COLLECTION_NAME).dropIndex("index-name").subscribe();
+
+		verify(collection).dropIndex(eq(clientSession), eq("index-name"));
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java
new file mode 100644
index 0000000000..3ac99c2b6d
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java
@@ -0,0 +1,357 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.test.StepVerifier; + +import java.util.Objects; + +import org.bson.BsonString; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link ReactiveUpdateOperationSupport}. + * + * @author Mark Paluch + */ +@ExtendWith(MongoClientExtension.class) +class ReactiveUpdateOperationSupportTests { + + private static final String STAR_WARS = "star-wars"; + private static @Client MongoClient client; + private static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + private MongoTemplate blocking; + private ReactiveMongoTemplate template; + + private Person han; + private Person luke; + + @BeforeEach + void setUp() { + + blocking = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "ExecutableUpdateOperationSupportTests")); + blocking.dropCollection(STAR_WARS); + + han = new Person(); + han.firstname = "han"; + han.id = "id-1"; + + luke = new Person(); + luke.firstname = "luke"; + luke.id = "id-2"; + + blocking.save(han); + blocking.save(luke); + + template = new ReactiveMongoTemplate(reactiveClient, "ExecutableUpdateOperationSupportTests"); + } + + @Test // DATAMONGO-1719 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(null)); + } + + @Test // DATAMONGO-1719 + void updateIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).apply(null)); + } + + @Test // DATAMONGO-1719 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).inCollection(null)); + } + + @Test // DATAMONGO-1719 + void findAndModifyOptionsAreRequiredOnSet() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.update(Person.class).apply(new Update()).withOptions(null)); + } + + @Test // DATAMONGO-1719 + void updateFirst() { + + template.update(Person.class).apply(new Update().set("firstname", "Han")).first().as(StepVerifier::create) + .consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(1L); + assertThat(actual.getUpsertedId()).isNull(); + }).verifyComplete(); + + } + + @Test // DATAMONGO-1719 + void updateAll() { + + template.update(Person.class).apply(new Update().set("firstname", "Han")).all().as(StepVerifier::create) + .consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(2L); + 
assertThat(actual.getUpsertedId()).isNull(); + }).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void updateAllMatching() { + + template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")).all() + .as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(1L); + assertThat(actual.getUpsertedId()).isNull(); + }).verifyComplete(); + } + + @Test // DATAMONGO-2416 + void updateAllMatchingCriteria() { + + template.update(Person.class).matching(where("id").is(han.getId())).apply(new Update().set("firstname", "Han")) + .all().as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(1L); + assertThat(actual.getUpsertedId()).isNull(); + }).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void updateWithDifferentDomainClassAndCollection() { + + template.update(Jedi.class).inCollection(STAR_WARS).matching(query(where("_id").is(han.getId()))) + .apply(new Update().set("name", "Han")).all().as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(1L); + assertThat(actual.getUpsertedId()).isNull(); + }).verifyComplete(); + + assertThat(blocking.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1719 + void findAndModify() { + + template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")).findAndModify() + .as(StepVerifier::create).expectNext(han).verifyComplete(); + + assertThat(blocking.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1719 + void findAndModifyWithDifferentDomainTypeAndCollection() { + + template.update(Jedi.class).inCollection(STAR_WARS).matching(query(where("_id").is(han.getId()))) + .apply(new Update().set("name", "Han")).findAndModify().as(StepVerifier::create) + .consumeNextWith(actual -> assertThat(actual.getName()).isEqualTo("han")).verifyComplete(); + + assertThat(blocking.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Han"); + } + + @Test // DATAMONGO-1719 + void findAndModifyWithOptions() { + + template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) + .withOptions(FindAndModifyOptions.options().returnNew(true)).findAndModify().as(StepVerifier::create) + .consumeNextWith(actual -> { + + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Han"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1719 + void upsert() { + + template.update(Person.class).matching(query(where("id").is("id-3"))) + .apply(new Update().set("firstname", "Chewbacca")).upsert().as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(0L); + assertThat(actual.getUpsertedId()).isEqualTo(new BsonString("id-3")); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplace() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).matching(queryHan()).replaceWith(luke).findAndReplace() // + .as(StepVerifier::create).expectNext(han).verifyComplete(); + + template.findOne(queryHan(), Person.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void 
findAndReplaceWithProjection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).matching(queryHan()).replaceWith(luke).as(Jedi.class).findAndReplace() // + .as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getName()).isEqualTo(han.firstname); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithCollection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).inCollection(STAR_WARS).matching(queryHan()).replaceWith(luke).findAndReplace() // + .as(StepVerifier::create).expectNext(han).verifyComplete(); + + template.findOne(queryHan(), Person.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithOptions() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).matching(queryHan()).replaceWith(luke) + .withOptions(FindAndReplaceOptions.options().returnNew()).findAndReplace() // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + }).verifyComplete(); + } + + private Query queryHan() { + return query(where("id").is(han.getId())); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person { + + @Id String id; + String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveUpdateOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } + } + + static class Human { + + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ReactiveUpdateOperationSupportTests.Human(id=" + this.getId() + ")"; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveUpdateOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReflectiveMongoOptionsInvokerTestUtil.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReflectiveMongoOptionsInvokerTestUtil.java deleted file mode 100644 index 017963996a..0000000000 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReflectiveMongoOptionsInvokerTestUtil.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import com.mongodb.MongoOptions; - -/** - * Helper class allowing to keep {@link ReflectiveMongoOptionsInvoker} within default visibility while using it publicly - * across tests. - * - * @author Christoph Strobl - */ -@SuppressWarnings("deprecation") -public class ReflectiveMongoOptionsInvokerTestUtil { - - public static void setAutoConnectRetry(MongoOptions options, boolean autoConnectRetry) { - ReflectiveMongoOptionsInvoker.setAutoConnectRetry(options, autoConnectRetry); - } - - public static void setMaxAutoConnectRetryTime(MongoOptions options, long maxAutoConnectRetryTime) { - ReflectiveMongoOptionsInvoker.setMaxAutoConnectRetryTime(options, maxAutoConnectRetryTime); - } - - public static void setSlaveOk(MongoOptions options, boolean slaveOk) { - ReflectiveMongoOptionsInvoker.setSlaveOk(options, slaveOk); - } - - public static boolean getSlaveOk(MongoOptions options) { - return ReflectiveMongoOptionsInvoker.getSlaveOk(options); - } - - public static boolean getAutoConnectRetry(MongoOptions options) { - return ReflectiveMongoOptionsInvoker.getAutoConnectRetry(options); - } - - public static long getMaxAutoConnectRetryTime(MongoOptions options) { - return ReflectiveMongoOptionsInvoker.getMaxAutoConnectRetryTime(options); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ScrollUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ScrollUtilsUnitTests.java new file mode 100644 index 0000000000..ed0010242c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ScrollUtilsUnitTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.AssertionsForClassTypes.*; +import static org.mockito.Mockito.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.EntityOperations.Entity; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link ScrollUtils}. + * + * @author Mark Paluch + */ +class ScrollUtilsUnitTests { + + @Test // GH-4413 + void positionShouldRetainScrollDirection() { + + Query query = new Query(); + query.with(ScrollPosition.keyset().backward()); + EntityOperations entityOperationsMock = mock(EntityOperations.class); + Entity entityMock = mock(Entity.class); + + when(entityOperationsMock.forEntity(any())).thenReturn(entityMock); + when(entityMock.extractKeys(any(), any())).thenReturn(Map.of("k", "v")); + + Window window = ScrollUtils.createWindow(query, new ArrayList<>(List.of(1, 2, 3)), Integer.class, + entityOperationsMock); + + assertThat(window.positionAt(0)).isInstanceOf(KeysetScrollPosition.class); + assertThat(((KeysetScrollPosition) window.positionAt(0)).scrollsBackward()).isTrue(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java index 0b42b27c03..1bf9114967 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2016 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,121 +15,106 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.SerializationUtils.*; import java.util.Arrays; import java.util.Map; -import org.hamcrest.Matcher; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.query.SerializationUtils; import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; /** * Unit tests for {@link SerializationUtils}. 
- * + * * @author Oliver Gierke * @author Christoph Strobl */ public class SerializationUtilsUnitTests { @Test - public void writesSimpleDBObject() { + public void writesSimpleDocument() { - DBObject dbObject = new BasicDBObject("foo", "bar"); - assertThat(serializeToJsonSafely(dbObject), is("{ \"foo\" : \"bar\"}")); + Document document = new Document("foo", "bar"); + assertThat(serializeToJsonSafely(document)).isEqualTo("{ \"foo\" : \"bar\"}"); } @Test public void writesComplexObjectAsPlainToString() { - DBObject dbObject = new BasicDBObject("foo", new Complex()); - assertThat(serializeToJsonSafely(dbObject), - startsWith("{ \"foo\" : { $java : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex")); + Document document = new Document("foo", new Complex()); + assertThat(serializeToJsonSafely(document).startsWith( + "{ \"foo\" : { \"$java\" : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex")); } @Test public void writesCollection() { - DBObject dbObject = new BasicDBObject("foo", Arrays.asList("bar", new Complex())); - Matcher expectedOutput = allOf( - startsWith("{ \"foo\" : [ \"bar\", { $java : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex"), - endsWith(" } ] }")); - assertThat(serializeToJsonSafely(dbObject), is(expectedOutput)); + Document document = new Document("foo", Arrays.asList("bar", new Complex())); + assertThat(serializeToJsonSafely(document)).startsWith( + "{ \"foo\" : [ \"bar\", { \"$java\" : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex") + .endsWith(" } ] }"); } - /** - * @see DATAMONGO-1245 - */ - @Test + @Test // DATAMONGO-1245 public void flattenMapShouldFlatOutNestedStructureCorrectly() { - DBObject dbo = new BasicDBObjectBuilder().add("_id", 1).add("nested", new BasicDBObject("value", "conflux")).get(); + Document document = new Document(); + document.put("_id", 1); + document.put("nested", new Document("value", "conflux")); - assertThat(flattenMap(dbo), hasEntry("_id", (Object) 1)); - assertThat(flattenMap(dbo), hasEntry("nested.value", (Object) "conflux")); + assertThat(flattenMap(document)).containsEntry("_id", 1).containsEntry("nested.value", "conflux"); } - /** - * @see DATAMONGO-1245 - */ - @Test + @Test // DATAMONGO-1245 public void flattenMapShouldFlatOutNestedStructureWithListCorrectly() { BasicDBList dbl = new BasicDBList(); dbl.addAll(Arrays.asList("nightwielder", "calamity")); - DBObject dbo = new BasicDBObjectBuilder().add("_id", 1).add("nested", new BasicDBObject("value", dbl)).get(); + Document document = new Document(); + document.put("_id", 1); + document.put("nested", new Document("value", dbl)); - assertThat(flattenMap(dbo), hasEntry("_id", (Object) 1)); - assertThat(flattenMap(dbo), hasEntry("nested.value", (Object) dbl)); + assertThat(flattenMap(document)).containsEntry("_id", 1).containsEntry("nested.value", dbl); } - /** - * @see DATAMONGO-1245 - */ - @Test + @Test // DATAMONGO-1245 public void flattenMapShouldLeaveKeywordsUntouched() { - DBObject dbo = new BasicDBObjectBuilder().add("_id", 1).add("nested", new BasicDBObject("$regex", "^conflux$")) - .get(); + Document document = new Document(); + document.put("_id", 1); + document.put("nested", new Document("$regex", "^conflux$")); - Map map = flattenMap(dbo); + Map map = flattenMap(document); - assertThat(map, hasEntry("_id", (Object) 1)); - assertThat(map.get("nested"), notNullValue()); - assertThat(((Map) map.get("nested")).get("$regex"), is((Object) "^conflux$")); + 
assertThat(map).containsEntry("_id", 1).containsKey("nested"); + assertThat(((Map) map.get("nested")).get("$regex")).isEqualTo("^conflux$"); } - /** - * @see DATAMONGO-1245 - */ - @Test + @Test // DATAMONGO-1245 public void flattenMapShouldAppendCommandsCorrectly() { - DBObject dbo = new BasicDBObjectBuilder().add("_id", 1) - .add("nested", new BasicDBObjectBuilder().add("$regex", "^conflux$").add("$options", "i").get()).get(); + Document document = new Document(); + Document nested = new Document(); + nested.put("$regex", "^conflux$"); + nested.put("$options", "i"); + document.put("_id", 1); + document.put("nested", nested); - Map map = flattenMap(dbo); + Map map = flattenMap(document); - assertThat(map, hasEntry("_id", (Object) 1)); - assertThat(map.get("nested"), notNullValue()); - assertThat(((Map) map.get("nested")).get("$regex"), is((Object) "^conflux$")); - assertThat(((Map) map.get("nested")).get("$options"), is((Object) "i")); + assertThat(map).containsEntry("_id", 1).containsKey("nested"); + assertThat(((Map) map.get("nested")).get("$regex")).isEqualTo("^conflux$"); + assertThat(((Map) map.get("nested")).get("$options")).isEqualTo("i"); } - /** - * @see DATAMONGO-1245 - */ - @Test + @Test // DATAMONGO-1245 public void flattenMapShouldReturnEmptyMapWhenSourceIsNull() { - assertThat(flattenMap(null).isEmpty(), is(true)); + assertThat(flattenMap(null)).isEmpty(); } static class Complex { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateTests.java new file mode 100644 index 0000000000..8769656537 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateTests.java @@ -0,0 +1,444 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Proxy; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.aopalliance.aop.Advice; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; +import org.springframework.aop.Advisor; +import org.springframework.aop.framework.Advised; +import org.springframework.dao.DataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.LazyLoadingException; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor; +import org.springframework.data.mongodb.core.MongoTemplate.SessionBoundMongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoVersion; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Integration tests for {@link SessionBoundMongoTemplate} operating on an active {@link ClientSession}.
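+ * <p>
+ * Every {@link MongoCollection} and {@link MongoDatabase} handed out by the template is wrapped in a Mockito spy so
+ * that the session-bound method variants can be verified.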
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +public class SessionBoundMongoTemplateTests { + + static @ReplSetClient MongoClient client; + + MongoTemplate template; + SessionBoundMongoTemplate sessionBoundTemplate; + ClientSession session; + volatile List<MongoCollection<Document>> spiedCollections = new ArrayList<>(); + volatile List<MongoDatabase> spiedDatabases = new ArrayList<>(); + + @BeforeEach + public void setUp() { + + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, "session-bound-mongo-template-tests") { + + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + + MongoDatabase spiedDatabase = Mockito.spy(super.getMongoDatabase()); + spiedDatabases.add(spiedDatabase); + return spiedDatabase; + } + }; + + session = client.startSession(ClientSessionOptions.builder().build()); + + this.template = new MongoTemplate(factory); + + this.sessionBoundTemplate = new SessionBoundMongoTemplate(session, + new MongoTemplate(factory, getDefaultMongoConverter(factory))) { + + @Override + protected MongoCollection<Document> prepareCollection(MongoCollection<Document> collection) { + + injectCollectionSpy(collection); + + return super.prepareCollection(collection); + } + + @SuppressWarnings({ "ConstantConditions", "unchecked" }) + private void injectCollectionSpy(MongoCollection<Document> collection) { + + InvocationHandler handler = Proxy.getInvocationHandler(collection); + + Advised advised = (Advised) ReflectionTestUtils.getField(handler, "advised"); + + for (Advisor advisor : advised.getAdvisors()) { + Advice advice = advisor.getAdvice(); + if (advice instanceof SessionAwareMethodInterceptor) { + + MongoCollection<Document> spiedCollection = Mockito + .spy((MongoCollection<Document>) ReflectionTestUtils.getField(advice, "target")); + spiedCollections.add(spiedCollection); + + ReflectionTestUtils.setField(advice, "target", spiedCollection); + } + } + } + }; + } + + @AfterEach + public void tearDown() { + + session.close(); + } + + @Test // DATAMONGO-1880 + public void findDelegatesToMethodWithSession() { + + sessionBoundTemplate.find(new Query(), Person.class); + + verify(operation(0)).find(eq(session), any(), any()); + } + + @Test // DATAMONGO-1880 + public void fluentFindDelegatesToMethodWithSession() { + + sessionBoundTemplate.query(Person.class).all(); + + verify(operation(0)).find(eq(session), any(), any()); + } + + @Test // DATAMONGO-1880 + public void aggregateDelegatesToMethodWithSession() { + + sessionBoundTemplate.aggregate(Aggregation.newAggregation(Aggregation.project("firstName")), Person.class, + Person.class); + + verify(operation(0)).aggregate(eq(session), any(), any()); + } + + @Test // DATAMONGO-1880 + public void collectionExistsDelegatesToMethodWithSession() { + + sessionBoundTemplate.collectionExists(Person.class); + + verify(command(0)).listCollectionNames(eq(session)); + } + + @Test // DATAMONGO-1880 + public void shouldLoadDbRefWhenSessionIsActive() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithDbRef wdr = new WithDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + WithDbRef result = sessionBoundTemplate.findById(wdr.id, WithDbRef.class); + + assertThat(result.personRef).isEqualTo(person); + } + + @Test // DATAMONGO-1880 + public void shouldErrorOnLoadDbRefWhenSessionIsClosed() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithDbRef wdr = new WithDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + 
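// closing the session up front means the DBRef resolution below must fail +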
session.close(); + + assertThatExceptionOfType(ClientSessionException.class) + .isThrownBy(() -> sessionBoundTemplate.findById(wdr.id, WithDbRef.class)); + } + + @Test // DATAMONGO-1880 + public void shouldLoadLazyDbRefWhenSessionIsActive() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithLazyDbRef wdr = new WithLazyDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + WithLazyDbRef result = sessionBoundTemplate.findById(wdr.id, WithLazyDbRef.class); + + assertThat(result.getPersonRef()).isEqualTo(person); + } + + @Test // DATAMONGO-1880 + public void shouldErrorOnLoadLazyDbRefWhenSessionIsClosed() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithLazyDbRef wdr = new WithLazyDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + WithLazyDbRef result = sessionBoundTemplate.findById(wdr.id, WithLazyDbRef.class); + + session.close(); // now close the session + + assertThatExceptionOfType(LazyLoadingException.class).isThrownBy(() -> result.getPersonRef().toString()); + } + + @Test // DATAMONGO-2001 + @MongoVersion(asOf = "4.0") + public void countShouldWorkInTransactions() { + + if (!template.collectionExists(Person.class)) { + template.createCollection(Person.class); + } else { + template.remove(Person.class).all(); + } + + ClientSession session = client.startSession(); + session.startTransaction(); + + MongoTemplate sessionBound = template.withSession(session); + + sessionBound.save(new Person("Kylar Stern")); + + assertThat(sessionBound.query(Person.class).matching(query(where("firstName").is("foobar"))).count()).isZero(); + assertThat(sessionBound.query(Person.class).matching(query(where("firstName").is("Kylar Stern"))).count()).isOne(); + assertThat(sessionBound.query(Person.class).count()).isOne(); + + session.commitTransaction(); + session.close(); + } + + @Test // DATAMONGO-2012 + @MongoVersion(asOf = "4.0") + public void countWithGeoInTransaction() { + + if (!template.collectionExists(Person.class)) { + template.createCollection(Person.class); + template.indexOps(Person.class).ensureIndex(new GeospatialIndex("location")); + } else { + template.remove(Person.class).all(); + } + + ClientSession session = client.startSession(); + session.startTransaction(); + + MongoTemplate sessionBound = template.withSession(session); + + sessionBound.save(new Person("Kylar Stern")); + + assertThat(sessionBound.query(Person.class).matching(query(where("location").near(new Point(1, 0)))).count()) + .isZero(); + + session.commitTransaction(); + session.close(); + } + + @Test // DATAMONGO-2001 + @MongoVersion(asOf = "4.0") + public void countShouldReturnIsolatedCount() throws InterruptedException { + + if (!template.collectionExists(Person.class)) { + template.createCollection(Person.class); + } else { + template.remove(Person.class).all(); + } + + int nrThreads = 2; + CountDownLatch savedInTransaction = new CountDownLatch(nrThreads); + CountDownLatch beforeCommit = new CountDownLatch(nrThreads); + List resultList = new CopyOnWriteArrayList<>(); + + Runnable runnable = () -> { + + ClientSession session = client.startSession(); + session.startTransaction(); + + try { + MongoTemplate sessionBound = template.withSession(session); + + try { + sessionBound.save(new Person("Kylar Stern")); + } finally { + savedInTransaction.countDown(); + } + + savedInTransaction.await(1, TimeUnit.SECONDS); + + try { + resultList.add(sessionBound.query(Person.class).count()); + } finally { + 
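// signal that the isolated count has been captured before either transaction commits +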
beforeCommit.countDown(); + } + + beforeCommit.await(1, TimeUnit.SECONDS); + } catch (Exception e) { + resultList.add(e); + } + + session.commitTransaction(); + session.close(); + }; + + List threads = IntStream.range(0, nrThreads) // + .mapToObj(i -> new Thread(runnable)) // + .peek(Thread::start) // + .collect(Collectors.toList()); + + for (Thread thread : threads) { + thread.join(); + } + + assertThat(template.query(Person.class).count()).isEqualTo(2L); + assertThat(resultList).hasSize(nrThreads).allMatch(it -> it.equals(1L)); + } + + static class WithDbRef { + + @Id String id; + @DBRef Person personRef; + + public String getId() { + return this.id; + } + + public Person getPersonRef() { + return this.personRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setPersonRef(Person personRef) { + this.personRef = personRef; + } + + public String toString() { + return "SessionBoundMongoTemplateTests.WithDbRef(id=" + this.getId() + ", personRef=" + this.getPersonRef() + ")"; + } + } + + static class WithLazyDbRef { + + @Id String id; + @DBRef(lazy = true) Person personRef; + + public Person getPersonRef() { + return personRef; + } + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public void setPersonRef(Person personRef) { + this.personRef = personRef; + } + + public String toString() { + return "SessionBoundMongoTemplateTests.WithLazyDbRef(id=" + this.getId() + ", personRef=" + this.getPersonRef() + + ")"; + } + } + + // --> Just some helpers for testing + + MongoCollection operation(int index) { + return spiedCollections.get(index); + } + + MongoDatabase command(int index) { + return spiedDatabases.get(index); + } + + private MongoConverter getDefaultMongoConverter(MongoDatabaseFactory factory) { + + DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); + MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(factory); + converter.afterPropertiesSet(); + + return converter; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateUnitTests.java new file mode 100644 index 0000000000..dfa4b00515 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateUnitTests.java @@ -0,0 +1,353 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.lang.reflect.Proxy; +import java.util.Collections; + +import com.mongodb.client.*; +import org.bson.Document; +import org.bson.codecs.BsonValueCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.MongoTemplate.SessionBoundMongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.UpdateOptions; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; + +/** + * Unit tests for {@link SessionBoundMongoTemplate} making sure a proxied {@link MongoCollection} and + * {@link MongoDatabase} are used for executing high level commands like {@link MongoOperations#find(Query, Class)} + * provided by Spring Data. Commands that simply hand over MongoDB base types, such as obtaining a + * {@link MongoCollection} via {@link MongoOperations#getCollection(String)}, shall not be proxied, as the user can + * control the behavior by using the methods dedicated to {@link ClientSession} directly.
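+ * <p>
+ * For example, {@code template.find(new Query(), Person.class)} is expected to end up in
+ * {@code collection.find(clientSession, ...)} on the proxied collection.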
+ * + * @author Christoph Strobl + * @author Jens Schauder + */ +@SuppressWarnings("unchecked") +@RunWith(MockitoJUnitRunner.Silent.class) +public class SessionBoundMongoTemplateUnitTests { + + private static final String COLLECTION_NAME = "collection-1"; + + SessionBoundMongoTemplate template; + + MongoDatabaseFactory factory; + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection collection; + @Mock MongoDatabase database; + @Mock MongoClient client; + @Mock ClientSession clientSession; + @Mock FindIterable findIterable; + MongoIterable collectionNamesIterable; + @Mock MongoIterable mongoIterable; + @Mock DistinctIterable distinctIterable; + @Mock AggregateIterable aggregateIterable; + @Mock MapReduceIterable mapReduceIterable; + @Mock MongoCursor cursor; + @Mock CodecRegistry codecRegistry; + + MappingMongoConverter converter; + MongoMappingContext mappingContext; + + @Before + public void setUp() { + + collectionNamesIterable = mock(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(database).collectionNameIterableType()); + when(client.getDatabase(anyString())).thenReturn(database); + when(codecRegistry.get(any(Class.class))).thenReturn(new BsonValueCodec()); + when(database.getCodecRegistry()).thenReturn(codecRegistry); + when(database.getCollection(anyString(), any())).thenReturn(collection); + doReturn(collectionNamesIterable).when(database).listCollectionNames(any(ClientSession.class)); + when(collection.find(any(ClientSession.class), any(), any())).thenReturn(findIterable); + when(collection.aggregate(any(ClientSession.class), anyList(), any())).thenReturn(aggregateIterable); + when(collection.distinct(any(ClientSession.class), any(), any(), any())).thenReturn(distinctIterable); + when(collection.mapReduce(any(ClientSession.class), any(), any(), any())).thenReturn(mapReduceIterable); + when(findIterable.iterator()).thenReturn(cursor); + when(aggregateIterable.collation(any())).thenReturn(aggregateIterable); + when(aggregateIterable.allowDiskUse(anyBoolean())).thenReturn(aggregateIterable); + when(aggregateIterable.batchSize(anyInt())).thenReturn(aggregateIterable); + when(aggregateIterable.map(any())).thenReturn(aggregateIterable); + when(aggregateIterable.into(any())).thenReturn(Collections.emptyList()); + when(mongoIterable.iterator()).thenReturn(cursor); + when(collectionNamesIterable.iterator()).thenReturn(cursor); + when(distinctIterable.map(any())).thenReturn(distinctIterable); + when(distinctIterable.into(any())).thenReturn(Collections.emptyList()); + when(mapReduceIterable.sort(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.filter(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.map(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.iterator()).thenReturn(cursor); + when(cursor.hasNext()).thenReturn(false); + when(findIterable.projection(any())).thenReturn(findIterable); + + factory = new SimpleMongoClientDatabaseFactory(client, "foo"); + + this.mappingContext = new MongoMappingContext(); + this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext); + this.template = new SessionBoundMongoTemplate(clientSession, new MongoTemplate(factory, converter)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedCollectionInCallback() { + + template.execute("collection", MongoCollection::find); + + verify(collection, never()).find(); + verify(collection).find(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedDatabaseInCallback() { + + 
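// the callback argument should be the session-bound database proxy, not the raw database +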
template.execute(MongoDatabase::listCollectionNames); + + verify(database, never()).listCollectionNames(); + verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findOneUsesProxiedCollection() { + + template.findOne(new Query(), Person.class); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void findShouldUseProxiedCollection() { + + template.find(new Query(), Person.class); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void findAllShouldUseProxiedCollection() { + + template.findAll(Person.class); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void executeCommandShouldUseProxiedDatabase() { + + template.executeCommand("{}"); + + verify(database).runCommand(eq(clientSession), any(), any(Class.class)); + } + + @Test // DATAMONGO-1880 + public void removeShouldUseProxiedCollection() { + + template.remove(new Query(), Person.class); + + verify(collection).deleteMany(eq(clientSession), any(), any(DeleteOptions.class)); + } + + @Test // DATAMONGO-1880 + public void insertShouldUseProxiedCollection() { + + template.insert(new Person()); + + verify(collection).insertOne(eq(clientSession), any(Document.class)); + } + + @Test // DATAMONGO-1880 + public void aggregateShouldUseProxiedCollection() { + + template.aggregate(Aggregation.newAggregation(Aggregation.project("foo")), COLLECTION_NAME, Person.class); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void aggregateStreamShouldUseProxiedCollection() { + + template.aggregateStream(Aggregation.newAggregation(Aggregation.project("foo")), COLLECTION_NAME, Person.class); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void collectionExistsShouldUseProxiedDatabase() { + + template.collectionExists(Person.class); + + verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880, GH-3522 + public void countShouldUseProxiedCollection() { + + template.count(new Query(), Person.class); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // DATAMONGO-1880, GH-3522 + public void countShouldDelegateToExactCountNoMatterWhat() { + + template.useEstimatedCount(true); + template.count(new Query(), Person.class); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // DATAMONGO-1880 + public void createCollectionShouldUseProxiedDatabase() { + + template.createCollection(Person.class); + + verify(database).createCollection(eq(clientSession), anyString(), any()); + } + + @Test // DATAMONGO-1880 + public void dropShouldUseProxiedCollection() { + + template.dropCollection(Person.class); + + verify(collection).drop(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findAndModifyShouldUseProxiedCollection() { + + template.findAndModify(new Query(), new Update().set("foo", "bar"), Person.class); + + verify(collection).findOneAndUpdate(eq(clientSession), any(), any(Bson.class), any(FindOneAndUpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void findDistinctShouldUseProxiedCollection() { + + template.findDistinct(new Query(), "firstName", Person.class, String.class); + + verify(collection).distinct(eq(clientSession), anyString(), any(), any()); + } + + 
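// geoNear is executed through the aggregation framework, hence the aggregate verification below +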
@Test // DATAMONGO-1880, DATAMONGO-2264 + public void geoNearShouldUseProxiedDatabase() { + + when(database.runCommand(any(ClientSession.class), any(), eq(Document.class))) + .thenReturn(new Document("results", Collections.emptyList())); + template.geoNear(NearQuery.near(new Point(0, 0), Metrics.NEUTRAL), Person.class); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void mapReduceShouldUseProxiedCollection() { + + template.mapReduce(COLLECTION_NAME, "foo", "bar", Person.class); + + verify(collection).mapReduce(eq(clientSession), anyString(), anyString(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void streamShouldUseProxiedCollection() { + + template.stream(new Query(), Person.class); + + verify(collection).find(eq(clientSession), any(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void updateFirstShouldUseProxiedCollection() { + + template.updateFirst(new Query(), Update.update("foo", "bar"), Person.class); + + verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void updateMultiShouldUseProxiedCollection() { + + template.updateMulti(new Query(), Update.update("foo", "bar"), Person.class); + + verify(collection).updateMany(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void upsertShouldUseProxiedCollection() { + + template.upsert(new Query(), Update.update("foo", "bar"), Person.class); + + verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void getCollectionShouldJustReturnTheCollection/*No ClientSession binding*/() { + assertThat(template.getCollection(COLLECTION_NAME)).isNotInstanceOf(Proxy.class); + } + + @Test // DATAMONGO-1880 + public void getDbShouldJustReturnTheDatabase/*No ClientSession binding*/() { + assertThat(template.getDb()).isNotInstanceOf(Proxy.class); + } + + @Test // DATAMONGO-1880 + public void indexOpsShouldUseProxiedCollection() { + + template.indexOps(COLLECTION_NAME).dropIndex("index-name"); + + verify(collection).dropIndex(eq(clientSession), eq("index-name")); + } + + @Test // DATAMONGO-1880 + public void bulkOpsShouldUseProxiedCollection() { + + BulkOperations bulkOps = template.bulkOps(BulkMode.ORDERED, COLLECTION_NAME); + bulkOps.insert(new Document()); + + bulkOps.execute(); + + verify(collection).bulkWrite(eq(clientSession), anyList(), any()); + } + + @Test // DATAMONGO-1880 + public void scriptOpsShouldUseProxiedDatabase() { + + when(database.runCommand(eq(clientSession), any())).thenReturn(new Document("retval", new Object())); + template.scriptOps().call("W-O-P-R"); + + verify(database).runCommand(eq(clientSession), any()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithDefaultShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithDefaultShardKey.java new file mode 100644 index 0000000000..20debee659 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithDefaultShardKey.java @@ -0,0 +1,89 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Sharded; + +/** + * @author Christoph Strobl + */ +@Sharded +public class ShardedEntityWithDefaultShardKey { + + private @Id String id; + + private String country; + + @Field("userid") // + private Integer userId; + + public ShardedEntityWithDefaultShardKey(String id, String country, Integer userId) { + + this.id = id; + this.country = country; + this.userId = userId; + } + + public String getId() { + return this.id; + } + + public String getCountry() { + return this.country; + } + + public Integer getUserId() { + return this.userId; + } + + public void setId(String id) { + this.id = id; + } + + public void setCountry(String country) { + this.country = country; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ShardedEntityWithDefaultShardKey that = (ShardedEntityWithDefaultShardKey) o; + return Objects.equals(id, that.id) && Objects.equals(country, that.country) && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(id, country, userId); + } + + public String toString() { + return "ShardedEntityWithDefaultShardKey(id=" + this.getId() + ", country=" + this.getCountry() + ", userId=" + + this.getUserId() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultImmutableShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultImmutableShardKey.java new file mode 100644 index 0000000000..a69ad39359 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultImmutableShardKey.java @@ -0,0 +1,89 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Sharded; + +/** + * @author Christoph Strobl + */ +@Sharded(shardKey = { "country", "userId" }, immutableKey = true) +public class ShardedEntityWithNonDefaultImmutableShardKey { + + private @Id String id; + + private String country; + + @Field("userid") // + private Integer userId; + + public ShardedEntityWithNonDefaultImmutableShardKey(String id, String country, Integer userId) { + + this.id = id; + this.country = country; + this.userId = userId; + } + + public String getId() { + return this.id; + } + + public String getCountry() { + return this.country; + } + + public Integer getUserId() { + return this.userId; + } + + public void setId(String id) { + this.id = id; + } + + public void setCountry(String country) { + this.country = country; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ShardedEntityWithNonDefaultImmutableShardKey that = (ShardedEntityWithNonDefaultImmutableShardKey) o; + return Objects.equals(id, that.id) && Objects.equals(country, that.country) && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(id, country, userId); + } + + public String toString() { + return "ShardedEntityWithNonDefaultImmutableShardKey(id=" + this.getId() + ", country=" + this.getCountry() + + ", userId=" + this.getUserId() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultShardKey.java new file mode 100644 index 0000000000..3dc130a89d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultShardKey.java @@ -0,0 +1,90 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Sharded; + +/** + * @author Christoph Strobl + */ +@Sharded(shardKey = { "country", "userId" }) +public class ShardedEntityWithNonDefaultShardKey { + + private @Id String id; + + private String country; + + @Field("userid") // + private Integer userId; + + public ShardedEntityWithNonDefaultShardKey(String id, String country, Integer userId) { + + this.id = id; + this.country = country; + this.userId = userId; + } + + public String getId() { + return this.id; + } + + public String getCountry() { + return this.country; + } + + public Integer getUserId() { + return this.userId; + } + + public void setId(String id) { + this.id = id; + } + + public void setCountry(String country) { + this.country = country; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ShardedEntityWithNonDefaultShardKey that = (ShardedEntityWithNonDefaultShardKey) o; + return Objects.equals(id, that.id) && Objects.equals(country, that.country) && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(id, country, userId); + } + + public String toString() { + return "ShardedEntityWithNonDefaultShardKey(id=" + this.getId() + ", country=" + this.getCountry() + ", userId=" + + this.getUserId() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedVersionedEntityWithNonDefaultShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedVersionedEntityWithNonDefaultShardKey.java new file mode 100644 index 0000000000..fc22a40103 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedVersionedEntityWithNonDefaultShardKey.java @@ -0,0 +1,102 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; + +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Sharded; + +/** + * @author Christoph Strobl + */ +@Sharded(shardKey = { "country", "userId" }) +public class ShardedVersionedEntityWithNonDefaultShardKey { + + private @Id String id; + + private @Version Long version; + + private String country; + + @Field("userid") // + private Integer userId; + + public ShardedVersionedEntityWithNonDefaultShardKey(String id, Long version, String country, Integer userId) { + + this.id = id; + this.version = version; + this.country = country; + this.userId = userId; + } + + public String getId() { + return this.id; + } + + public Long getVersion() { + return this.version; + } + + public String getCountry() { + return this.country; + } + + public Integer getUserId() { + return this.userId; + } + + public void setId(String id) { + this.id = id; + } + + public void setVersion(Long version) { + this.version = version; + } + + public void setCountry(String country) { + this.country = country; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ShardedVersionedEntityWithNonDefaultShardKey that = (ShardedVersionedEntityWithNonDefaultShardKey) o; + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(country, that.country) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, country, userId); + } + + public String toString() { + return "ShardedVersionedEntityWithNonDefaultShardKey(id=" + this.getId() + ", version=" + this.getVersion() + + ", country=" + this.getCountry() + ", userId=" + this.getUserId() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactoryUnitTests.java new file mode 100644 index 0000000000..5e64eed4fd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactoryUnitTests.java @@ -0,0 +1,110 @@ +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Proxy; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.aop.framework.AopProxyUtils; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ConnectionString; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; + +/** + * Unit tests for {@link SimpleMongoClientDatabaseFactory}. + * + * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +class SimpleMongoClientDatabaseFactoryUnitTests { + + @Mock MongoClient mongo; + @Mock ClientSession clientSession; + @Mock MongoDatabase database; + + @Test // DATADOC-254, DATAMONGO-1903 + void rejectsIllegalDatabaseNames() { + + rejectsDatabaseName("foo.bar"); + rejectsDatabaseName("foo$bar"); + rejectsDatabaseName("foo\\bar"); + rejectsDatabaseName("foo//bar"); + rejectsDatabaseName("foo bar"); + rejectsDatabaseName("foo\"bar"); + } + + @Test // DATADOC-254 + void allowsDatabaseNames() { + new SimpleMongoClientDatabaseFactory(mongo, "foo-bar"); + new SimpleMongoClientDatabaseFactory(mongo, "foo_bar"); + new SimpleMongoClientDatabaseFactory(mongo, "foo01231bar"); + } + + @Test // DATADOC-295 + void mongoUriConstructor() { + + ConnectionString mongoURI = new ConnectionString( + "mongodb://myUsername:myPassword@localhost/myDatabase.myCollection"); + MongoDatabaseFactory mongoDbFactory = new SimpleMongoClientDatabaseFactory(mongoURI); + + assertThat(mongoDbFactory).hasFieldOrPropertyWithValue("databaseName", "myDatabase"); + } + + @Test // DATAMONGO-1158 + void constructsMongoClientAccordingToMongoUri() { + + ConnectionString uri = new ConnectionString( + "mongodb://myUserName:myPassWord@127.0.0.1:27017/myDataBase.myCollection"); + SimpleMongoClientDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(uri); + + assertThat(factory).hasFieldOrPropertyWithValue("databaseName", "myDataBase"); + } + + @Test // DATAMONGO-1880 + void cascadedWithSessionUsesRootFactory() { + + when(mongo.getDatabase("foo")).thenReturn(database); + + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(mongo, "foo"); + MongoDatabaseFactory wrapped = factory.withSession(clientSession).withSession(clientSession); + + InvocationHandler invocationHandler = Proxy.getInvocationHandler(wrapped.getMongoDatabase()); + + Object singletonTarget = AopProxyUtils + .getSingletonTarget(ReflectionTestUtils.getField(invocationHandler, "advised")); + + assertThat(singletonTarget).isSameAs(database); + } + + private void rejectsDatabaseName(String databaseName) { + assertThatThrownBy(() -> new SimpleMongoClientDatabaseFactory(mongo, databaseName)) + .isInstanceOf(IllegalArgumentException.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoDbFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoDbFactoryUnitTests.java deleted file mode 100644 index 9967de75ec..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoDbFactoryUnitTests.java +++ 
/dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; -import static org.springframework.test.util.ReflectionTestUtils.*; - -import java.net.UnknownHostException; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.MongoDbFactory; - -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; - -/** - * Unit tests for {@link SimpleMongoDbFactory}. - * - * @author Oliver Gierke - * @author Christoph Strobl - */ -@RunWith(MockitoJUnitRunner.class) -public class SimpleMongoDbFactoryUnitTests { - - public @Rule ExpectedException expectedException = ExpectedException.none(); - @Mock Mongo mongo; - - /** - * @see DATADOC-254 - */ - @Test - public void rejectsIllegalDatabaseNames() { - rejectsDatabaseName("foo.bar"); - rejectsDatabaseName("foo!bar"); - } - - /** - * @see DATADOC-254 - */ - @Test - @SuppressWarnings("deprecation") - public void allowsDatabaseNames() { - new SimpleMongoDbFactory(mongo, "foo-bar"); - new SimpleMongoDbFactory(mongo, "foo_bar"); - new SimpleMongoDbFactory(mongo, "foo01231bar"); - } - - /** - * @see DATADOC-295 - * @throws UnknownHostException - */ - @Test - @SuppressWarnings("deprecation") - public void mongoUriConstructor() throws UnknownHostException { - - MongoURI mongoURI = new MongoURI("mongodb://myUsername:myPassword@localhost/myDatabase.myCollection"); - MongoDbFactory mongoDbFactory = new SimpleMongoDbFactory(mongoURI); - - assertThat(getField(mongoDbFactory, "credentials"), is((Object) new UserCredentials("myUsername", "myPassword"))); - assertThat(getField(mongoDbFactory, "databaseName").toString(), is("myDatabase")); - } - - /** - * @see DATAMONGO-789 - */ - @Test - @SuppressWarnings("deprecation") - public void defaultsAuthenticationDatabaseToDatabase() { - - SimpleMongoDbFactory factory = new SimpleMongoDbFactory(mongo, "foo"); - assertThat(getField(factory, "authenticationDatabaseName"), is((Object) "foo")); - } - - /** - * @see DATAMONGO-1158 - */ - @Test - public void constructsMongoClientAccordingToMongoUri() throws UnknownHostException { - - MongoClientURI uri = new MongoClientURI("mongodb://myUserName:myPassWord@127.0.0.1:27017/myDataBase.myCollection"); - SimpleMongoDbFactory factory = new SimpleMongoDbFactory(uri); - - assertThat(getField(factory, "databaseName").toString(), is("myDataBase")); - } - - /** - * @see DATAMONGO-1158 - */ - @Test - public void 
shouldDefaultAuthenticationDbNameToDbNameWhenUsingMongoClient() throws UnknownHostException { - - MongoClient clientMock = mock(MongoClient.class); - SimpleMongoDbFactory factory = new SimpleMongoDbFactory(clientMock, "FooBar"); - - assertThat(getField(factory, "authenticationDatabaseName").toString(), is("FooBar")); - } - - /** - * @see DATAMONGO-1260 - */ - @Test - public void rejectsMongoClientWithUserCredentials() { - - expectedException.expect(InvalidDataAccessApiUsageException.class); - expectedException.expectMessage("use 'MongoCredential' for 'MongoClient'"); - - new SimpleMongoDbFactory(mock(MongoClient.class), "cairhienin", new UserCredentials("moiraine", "sedai")); - } - - /** - * @see DATAMONGO-1260 - */ - @Test - public void rejectsMongoClientWithUserCredentialsAndAuthDb() { - - expectedException.expect(InvalidDataAccessApiUsageException.class); - expectedException.expectMessage("use 'MongoCredential' for 'MongoClient'"); - - new SimpleMongoDbFactory(mock(MongoClient.class), "malkieri", new UserCredentials("lan", "mandragoran"), "authdb"); - } - - /** - * @see DATAMONGO-1260 - */ - @Test - public void shouldNotRejectMongoClientWithNoCredentials() { - new SimpleMongoDbFactory(mock(MongoClient.class), "andoran", UserCredentials.NO_CREDENTIALS); - } - - /** - * @see DATAMONGO-1260 - */ - @Test - public void shouldNotRejectMongoClientWithEmptyUserCredentials() { - new SimpleMongoDbFactory(mock(MongoClient.class), "shangtai", new UserCredentials("", "")); - } - - @SuppressWarnings("deprecation") - private void rejectsDatabaseName(String databaseName) { - - try { - new SimpleMongoDbFactory(mongo, databaseName); - fail("Expected database name " + databaseName + " to be rejected!"); - } catch (IllegalArgumentException ex) { - - } - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactoryUnitTests.java new file mode 100644 index 0000000000..3882acb4a4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactoryUnitTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Proxy; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.aop.framework.AopProxyUtils; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Unit tests for {@link SimpleReactiveMongoDatabaseFactory}. + * + * @author Mark Paluch + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +class SimpleReactiveMongoDatabaseFactoryUnitTests { + + @Mock MongoClient mongoClient; + @Mock ClientSession clientSession; + @Mock MongoDatabase database; + + @Test // DATAMONGO-1880 + void cascadedWithSessionUsesRootFactory() { + + when(mongoClient.getDatabase("foo")).thenReturn(database); + + ReactiveMongoDatabaseFactory factory = new SimpleReactiveMongoDatabaseFactory(mongoClient, "foo"); + ReactiveMongoDatabaseFactory wrapped = factory.withSession(clientSession).withSession(clientSession); + + InvocationHandler invocationHandler = Proxy.getInvocationHandler(wrapped.getMongoDatabase().block()); + + Object singletonTarget = AopProxyUtils + .getSingletonTarget(ReflectionTestUtils.getField(invocationHandler, "advised")); + + assertThat(singletonTarget).isSameAs(database); + } + + @Test // DATAMONGO-1903 + void rejectsIllegalDatabaseNames() { + + rejectsDatabaseName("foo.bar"); + rejectsDatabaseName("foo$bar"); + rejectsDatabaseName("foo\\bar"); + rejectsDatabaseName("foo//bar"); + rejectsDatabaseName("foo bar"); + rejectsDatabaseName("foo\"bar"); + } + + private void rejectsDatabaseName(String databaseName) { + assertThatThrownBy(() -> new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName)) + .isInstanceOf(IllegalArgumentException.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestEntities.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestEntities.java new file mode 100644 index 0000000000..45cb804eca --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestEntities.java @@ -0,0 +1,105 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; + +/** + * A simple collection of grouped test entities used throughout the test suite. 
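+ * <p>
+ * Besides a set of Manhattan venues, {@link GeoEntities#newYork()} also contains two far-away entries
+ * (Maplewood, NJ and Ulaanbaatar, Mongolia).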
+ * + * @author Christoph Strobl + */ +public class TestEntities { + + private static final GeoEntities GEO = new GeoEntities(); + + public static GeoEntities geolocation() { + return GEO; + } + + public static class GeoEntities { + + /** + *
+		 * <pre>
+		 * X: -73.99408
+		 * Y: 40.75057
+		 * </pre>
                    + * + * @return new {@link Venue} + */ + public Venue pennStation() { + return new Venue("Penn Station", -73.99408, 40.75057); + } + + /** + *
+		 * <pre>
+		 * X: -73.99171
+		 * Y: 40.738868
+		 * </pre>
                    + * + * @return new {@link Venue} + */ + + public Venue tenGenOffice() { + return new Venue("10gen Office", -73.99171, 40.738868); + } + + /** + *
+		 * <pre>
+		 * X: -73.988135
+		 * Y: 40.741404
+		 * </pre>
                    + * + * @return new {@link Venue} + */ + public Venue flatironBuilding() { + return new Venue("Flatiron Building", -73.988135, 40.741404); + } + + /** + *
+		 * <pre>
+		 * X: -74.2713
+		 * Y: 40.73137
+		 * </pre>
+ * + * @return new {@link Venue} + */ + public Venue maplewoodNJ() { + return new Venue("Maplewood, NJ", -74.2713, 40.73137); + } + + public List<Venue> newYork() { + + List<Venue> venues = new ArrayList<>(); + + venues.add(pennStation()); + venues.add(tenGenOffice()); + venues.add(flatironBuilding()); + venues.add(new Venue("Players Club", -73.997812, 40.739128)); + venues.add(new Venue("City Bakery ", -73.992491, 40.738673)); + venues.add(new Venue("Splash Bar", -73.992491, 40.738673)); + venues.add(new Venue("Momofuku Milk Bar", -73.985839, 40.731698)); + venues.add(new Venue("Shake Shack", -73.98820, 40.74164)); + venues.add(new Venue("Penn Station", -73.99408, 40.75057)); + venues.add(new Venue("Empire State Building", -73.98602, 40.74894)); + venues.add(new Venue("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); + venues.add(maplewoodNJ()); + + return venues; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java index 34ee5dcba6..216d4447d0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java @@ -1,41 +1,75 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */
 package org.springframework.data.mongodb.core;

 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
+import java.util.Set;

 import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Primary;
 import org.springframework.core.convert.converter.Converter;
-import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
-import org.springframework.data.mongodb.core.convert.CustomConversions;
+import org.springframework.data.mongodb.MongoDatabaseFactory;
+import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
+import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
 import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+import org.springframework.data.mongodb.test.util.MongoTestUtils;

-import com.mongodb.Mongo;
-import com.mongodb.MongoClient;
+import com.mongodb.client.MongoClient;

-public class TestMongoConfiguration extends AbstractMongoConfiguration {
+public class TestMongoConfiguration extends AbstractMongoClientConfiguration {

 	@Override
 	public String getDatabaseName() {
 		return "database";
 	}

+	@Primary
+	@Bean
+	@Override
+	public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
+			MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
+		return super.mappingMongoConverter(databaseFactory, customConversions, mappingContext);
+	}
+
 	@Override
 	@Bean
-	public Mongo mongo() throws Exception {
-		return new MongoClient("127.0.0.1", 27017);
+	public MongoClient mongoClient() {
+		return MongoTestUtils.client();
 	}

 	@Override
-	public String getMappingBasePackage() {
-		return MongoMappingContext.class.getPackage().getName();
+	protected Collection<String> getMappingBasePackages() {
+		return Collections.singleton(MongoMappingContext.class.getPackage().getName());
 	}

 	@Override
-	public CustomConversions customConversions() {
+	public MongoCustomConversions customConversions() {

-		List<Converter<?, ?>> converters = new ArrayList<Converter<?, ?>>();
+		List<Converter<?, ?>> converters = new ArrayList<>(2);
 		converters.add(new org.springframework.data.mongodb.core.PersonReadConverter());
 		converters.add(new org.springframework.data.mongodb.core.PersonWriteConverter());
-		return new CustomConversions(converters);
+		return new MongoCustomConversions(converters);
+	}
+
+	@Override
+	protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+		return Collections.emptySet();
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java
index 85bf0ec840..d673c8ad95 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java
@@ -1,60 +1,60 @@
-/*
- * Copyright 2010-2011 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -public class Trade { - - private String ticker; - - private long quantity; - - private double price; - - private String orderType; - - public String getOrderType() { - return orderType; - } - - public void setOrderType(String orderType) { - this.orderType = orderType; - } - - public double getPrice() { - return price; - } - - public void setPrice(double price) { - this.price = price; - } - - public long getQuantity() { - return quantity; - } - - public void setQuantity(long quantity) { - this.quantity = quantity; - } - - public String getTicker() { - return ticker; - } - - public void setTicker(String ticker) { - this.ticker = ticker; - } - -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +public class Trade { + + private String ticker; + + private long quantity; + + private double price; + + private String orderType; + + public String getOrderType() { + return orderType; + } + + public void setOrderType(String orderType) { + this.orderType = orderType; + } + + public double getPrice() { + return price; + } + + public void setPrice(double price) { + this.price = price; + } + + public long getQuantity() { + return quantity; + } + + public void setQuantity(long quantity) { + this.quantity = quantity; + } + + public String getTicker() { + return ticker; + } + + public void setTicker(String ticker) { + this.ticker = ticker; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TransactionOptionsTestService.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TransactionOptionsTestService.java new file mode 100644 index 0000000000..8968f53a74 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TransactionOptionsTestService.java @@ -0,0 +1,107 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.data.mongodb.core;
+
+import java.util.function.Function;
+import java.util.function.UnaryOperator;
+
+import org.springframework.lang.Nullable;
+import org.springframework.transaction.annotation.Transactional;
+
+/**
+ * Helper class for integration tests of {@link Transactional#label()} MongoDB options in a non-reactive context.
+ *
+ * @param <T> root document type
+ * @author Yan Kardziyaka
+ * @see org.springframework.data.mongodb.ReactiveTransactionOptionsTestService
+ */
+public class TransactionOptionsTestService<T> {
+
+	private final Function<Object, T> findByIdFunction;
+	private final UnaryOperator<T> saveFunction;
+
+	public TransactionOptionsTestService(MongoOperations operations, Class<T> entityClass) {
+		this.findByIdFunction = id -> operations.findById(id, entityClass);
+		this.saveFunction = operations::save;
+	}
+
+	@Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=-PT6H3M" })
+	public T saveWithInvalidMaxCommitTime(T entity) {
+		return saveFunction.apply(entity);
+	}
+
+	@Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=PT1M" })
+	public T saveWithinMaxCommitTime(T entity) {
+		return saveFunction.apply(entity);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readConcern=available" })
+	public T availableReadConcernFind(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readConcern=invalid" })
+	public T invalidReadConcernFind(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readConcern=${tx.read.concern}" })
+	public T environmentReadConcernFind(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readConcern=majority" })
+	public T majorityReadConcernFind(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primaryPreferred" })
+	public T findFromPrimaryPreferredReplica(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readPreference=invalid" })
+	public T findFromInvalidReplica(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Nullable
+	@Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primary" })
+	public T findFromPrimaryReplica(Object id) {
+		return findByIdFunction.apply(id);
+	}
+
+	@Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=unacknowledged" })
+	public T unacknowledgedWriteConcernSave(T entity) {
+		return saveFunction.apply(entity);
+	}
+
+	@Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=invalid" })
+	public T invalidWriteConcernSave(T entity) {
+		return saveFunction.apply(entity);
+	}
+
+	@Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=acknowledged" })
+	public T acknowledgedWriteConcernSave(T entity) {
+		return saveFunction.apply(entity);
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDbObjectCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDbObjectCallbackUnitTests.java
deleted file mode 100644
index 9e8dceb6a7..0000000000
---
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDbObjectCallbackUnitTests.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoTemplate.UnwrapAndReadDbObjectCallback; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; - -import com.mongodb.BasicDBObject; - -/** - * Unit tests for {@link UnwrapAndReadDbObjectCallback}. - * - * @author Oliver Gierke - */ -@RunWith(MockitoJUnitRunner.class) -public class UnwrapAndReadDbObjectCallbackUnitTests { - - @Mock MongoDbFactory factory; - - UnwrapAndReadDbObjectCallback callback; - - @Before - public void setUp() { - - MongoTemplate template = new MongoTemplate(factory); - MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), - new MongoMappingContext()); - - this.callback = template.new UnwrapAndReadDbObjectCallback(converter, Target.class, "collection-1"); - } - - @Test - public void usesFirstLevelValues() { - - Target target = callback.doWith(new BasicDBObject("foo", "bar")); - - assertThat(target.id, is(nullValue())); - assertThat(target.foo, is("bar")); - } - - @Test - public void unwrapsUnderscoreIdIfBasicDBObject() { - - Target target = callback.doWith(new BasicDBObject("_id", new BasicDBObject("foo", "bar"))); - - assertThat(target.id, is(nullValue())); - assertThat(target.foo, is("bar")); - } - - @Test - public void firstLevelPropertiesTrumpNestedOnes() { - - Target target = callback.doWith(new BasicDBObject("_id", new BasicDBObject("foo", "bar")).append("foo", "foobar")); - - assertThat(target.id, is(nullValue())); - assertThat(target.foo, is("foobar")); - } - - @Test - public void keepsUnderscoreIdIfScalarValue() { - - Target target = callback.doWith(new BasicDBObject("_id", "bar").append("foo", "foo")); - - assertThat(target.id, is("bar")); - assertThat(target.foo, is("foo")); - } - - static class Target { - - String id; - String foo; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UpdateOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UpdateOperationsUnitTests.java new file mode 100644 index 0000000000..d4c2f37f63 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UpdateOperationsUnitTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2020-2025 the original author or 
authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.lang.NonNull; +import org.springframework.lang.Nullable; + +import com.mongodb.MongoClientSettings; + +/** + * Unit test for {@link com.mongodb.internal.operation.UpdateOperation}. + * + * @author Christoph Strobl + */ +class UpdateOperationsUnitTests { + + static final Document SHARD_KEY = new Document("country", "AT").append("userid", "4230"); + static final Document SOURCE_DOC = appendShardKey(new Document("_id", "id-1")); + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + QueryMapper queryMapper = new QueryMapper(mongoConverter); + UpdateMapper updateMapper = new UpdateMapper(mongoConverter); + EntityOperations entityOperations = new EntityOperations(mongoConverter, this.queryMapper); + PropertyOperations propertyOperations = new PropertyOperations(mongoConverter.getMappingContext()); + + ExtendedQueryOperations queryOperations = new ExtendedQueryOperations(queryMapper, updateMapper, entityOperations, propertyOperations, + MongoClientSettings::getDefaultCodecRegistry); + + @Test // DATAMONGO-2341 + void appliesShardKeyToFilter() { + + Document sourceFilter = new Document("name", "kaladin"); + assertThat(shardedFilter(sourceFilter, ShardedEntityWithNonDefaultShardKey.class, null)) + .isEqualTo(appendShardKey(sourceFilter)); + } + + @Test + void applyShardKeyDoesNotAlterSourceFilter() { + + Document sourceFilter = new Document("name", "kaladin"); + shardedFilter(sourceFilter, ShardedEntityWithNonDefaultShardKey.class, null); + assertThat(sourceFilter).isEqualTo(new Document("name", "kaladin")); + } + + @Test // DATAMONGO-2341 + void appliesExistingShardKeyToFilter() { + + Document sourceFilter = new Document("name", "kaladin"); + Document existing = new Document("country", "GB").append("userid", "007"); + + assertThat(shardedFilter(sourceFilter, ShardedEntityWithNonDefaultShardKey.class, existing)) + .isEqualTo(new Document(existing).append("name", "kaladin")); + } + + @Test // DATAMONGO-2341 + void recognizesExistingShardKeyInFilter() { + + Document sourceFilter = appendShardKey(new Document("name", "kaladin")); + + 
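+		// the filter above already carries every mapped shard key field, so the update
+		// context must not ask for additional shard key information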
assertThat(queryOperations.replaceSingleContextFor(SOURCE_DOC).requiresShardKey(sourceFilter, + entityOf(ShardedEntityWithNonDefaultShardKey.class))).isFalse(); + } + + @Test // DATAMONGO-2341 + void recognizesIdPropertyAsShardKey() { + + Document sourceFilter = new Document("_id", "id-1"); + + assertThat(queryOperations.replaceSingleContextFor(SOURCE_DOC).requiresShardKey(sourceFilter, + entityOf(ShardedEntityWithDefaultShardKey.class))).isFalse(); + } + + @Test // DATAMONGO-2341 + void returnsMappedShardKey() { + + queryOperations.replaceSingleContextFor(SOURCE_DOC) + .getMappedShardKeyFields(entityOf(ShardedEntityWithDefaultShardKey.class)) + .containsAll(Arrays.asList("country", "userid")); + } + + @NonNull + private Document shardedFilter(Document sourceFilter, Class entity, Document existing) { + return queryOperations.replaceSingleContextFor(SOURCE_DOC).applyShardKey(entity, sourceFilter, existing); + } + + private static Document appendShardKey(Document source) { + + Document target = new Document(source); + target.putAll(SHARD_KEY); + return target; + } + + MongoPersistentEntity entityOf(Class type) { + return mappingContext.getPersistentEntity(type); + } + + class ExtendedQueryOperations extends QueryOperations { + + ExtendedQueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations, PropertyOperations propertyOperations, + CodecRegistryProvider codecRegistryProvider) { + super(queryMapper, updateMapper, entityOperations, propertyOperations, codecRegistryProvider); + } + + @NonNull + private ExtendedUpdateContext replaceSingleContextFor(Document source) { + return new ExtendedUpdateContext(MappedDocument.of(source), true); + } + + MongoPersistentEntity entityOf(Class type) { + return mappingContext.getPersistentEntity(type); + } + + class ExtendedUpdateContext extends UpdateContext { + + ExtendedUpdateContext(MappedDocument update, boolean upsert) { + super(update, upsert); + } + + Document applyShardKey(Class domainType, Document filter, @Nullable Document existing) { + return applyShardKey(entityOf(domainType), filter, existing); + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java index 7e1b0cb521..25fbbbcb83 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java @@ -1,71 +1,73 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -public class User { - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((accountName == null) ? 0 : accountName.hashCode()); - result = prime * result + ((userName == null) ? 
0 : userName.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - User other = (User) obj; - if (accountName == null) { - if (other.accountName != null) - return false; - } else if (!accountName.equals(other.accountName)) - return false; - if (userName == null) { - if (other.userName != null) - return false; - } else if (!userName.equals(other.userName)) - return false; - return true; - } - - private String accountName; - - private String userName; - - public String getAccountName() { - return accountName; - } - - public void setAccountName(String accountName) { - this.accountName = accountName; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +public class User { + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((accountName == null) ? 0 : accountName.hashCode()); + result = prime * result + ((userName == null) ? 0 : userName.hashCode()); + return result; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + User other = (User) obj; + if (accountName == null) { + if (other.accountName != null) + return false; + } else if (!accountName.equals(other.accountName)) + return false; + if (userName == null) { + if (other.userName != null) + return false; + } else if (!userName.equals(other.userName)) + return false; + return true; + } + + private String accountName; + + private String userName; + + public String getAccountName() { + return accountName; + } + + public void setAccountName(String accountName) { + this.accountName = accountName; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java index 0759c0b181..09a0605ed7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java @@ -1,66 +1,66 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import java.util.Arrays; - -import org.joda.time.LocalDate; -import org.springframework.data.annotation.Id; -import org.springframework.data.annotation.PersistenceConstructor; -import org.springframework.data.mongodb.core.mapping.Document; - -@Document(collection = "newyork") -public class Venue { - - @Id private String id; - private String name; - private double[] location; - private LocalDate openingDate; - - @PersistenceConstructor - Venue(String name, double[] location) { - super(); - this.name = name; - this.location = location; - } - - public Venue(String name, double x, double y) { - super(); - this.name = name; - this.location = new double[] { x, y }; - } - - public String getName() { - return name; - } - - public double[] getLocation() { - return location; - } - - public LocalDate getOpeningDate() { - return openingDate; - } - - public void setOpeningDate(LocalDate openingDate) { - this.openingDate = openingDate; - } - - @Override - public String toString() { - return "Venue [id=" + id + ", name=" + name + ", location=" + Arrays.toString(location) + "]"; - } -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.Date; + +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.mongodb.core.mapping.Document; + +@Document("newyork") +public class Venue { + + @Id private String id; + private String name; + private double[] location; + private Date openingDate; + + @PersistenceConstructor + Venue(String name, double[] location) { + super(); + this.name = name; + this.location = location; + } + + public Venue(String name, double x, double y) { + super(); + this.name = name; + this.location = new double[] { x, y }; + } + + public String getName() { + return name; + } + + public double[] getLocation() { + return location; + } + + public Date getOpeningDate() { + return openingDate; + } + + public void setOpeningDate(Date openingDate) { + this.openingDate = openingDate; + } + + @Override + public String toString() { + return "Venue [id=" + id + ", name=" + name + ", location=" + Arrays.toString(location) + "]"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java new file mode 100644 index 0000000000..5b24d85c3b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -0,0 +1,173 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.*; + +import java.util.Arrays; +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; + +/** + * Unit tests for {@link AccumulatorOperators}. 
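+ * Each case renders an operator expression via {@code toDocument(..)} and compares the result with the expected
+ * MongoDB operator syntax.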
+ * + * @author Christoph Strobl + * @author Julia Lee + */ +class AccumulatorOperatorsUnitTests { + + @Test // GH-3712 + void rendersCovariancePopWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovariancePopWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithNumberOfHistoricDocuments() { + + assertThat(valueOf("price").expMovingAvg().historicalDocuments(2).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", N: 2 } }")); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithAlpha() { + + assertThat(valueOf("price").expMovingAvg().alpha(0.75).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", alpha: 0.75 } }")); + } + + @Test // GH-4139 + void rendersMax() { + + assertThat(valueOf("price").max().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $max: \"$price\" }")); + } + + @Test // GH-4139 + void rendersMaxN() { + + assertThat(valueOf("price").max(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $maxN: { n: 3, input : \"$price\" } }")); + } + + @Test // GH-4139 + void rendersMin() { + + assertThat(valueOf("price").min().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $min: \"$price\" }")); + } + + @Test // GH-4139 + void rendersMinN() { + + assertThat(valueOf("price").min(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $minN: { n: 3, input : \"$price\" } }")); + } + + @Test // GH-4473 + void rendersPercentileWithFieldReference() { + + assertThat(valueOf("score").percentile(0.2).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: \"$score\", method: \"approximate\", p: [0.2] } }")); + + assertThat(valueOf("score").percentile(0.3, 0.9).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: \"$score\", method: \"approximate\", p: [0.3, 0.9] } }")); + + assertThat(valueOf("score").percentile(0.3, 0.9).and("scoreTwo").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\", p: [0.3, 0.9] } }")); + } + + @Test // GH-4473 + void rendersPercentileWithExpression() { + + 
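+		// $percentile also accepts aggregation expressions (here $sum) as input, not only plain field references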
assertThat(valueOf(Sum.sumOf("score")).percentile(0.1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: {\"$sum\": \"$score\"}, method: \"approximate\", p: [0.1] } }")); + + assertThat(valueOf("scoreOne").percentile(0.1, 0.2).and(Sum.sumOf("scoreTwo")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: [\"$scoreOne\", {\"$sum\": \"$scoreTwo\"}], method: \"approximate\", p: [0.1, 0.2] } }")); + } + + @Test // GH-4472 + void rendersMedianWithFieldReference() { + + assertThat(valueOf("score").median().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: \"$score\", method: \"approximate\" } }")); + + assertThat(valueOf("score").median().and("scoreTwo").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\" } }")); + } + + @Test // GH-4472 + void rendersMedianWithExpression() { + + assertThat(valueOf(Sum.sumOf("score")).median().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: {\"$sum\": \"$score\"}, method: \"approximate\" } }")); + + assertThat(valueOf("scoreOne").median().and(Sum.sumOf("scoreTwo")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: [\"$scoreOne\", {\"$sum\": \"$scoreTwo\"}], method: \"approximate\" } }")); + } + + @Test // GH-4472 + void rendersMedianCorrectlyWithTypedAggregationContext() { + + assertThat(valueOf("midichlorianCount").median() + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(Document.parse("{ $median: { input: \"$force\", method: \"approximate\" } }")); + } + + static class Jedi { + + String name; + + Date birthdate; + + @Field("force") Integer midichlorianCount; + + Integer balance; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperationUnitTests.java new file mode 100644 index 0000000000..32c6d43220 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperationUnitTests.java @@ -0,0 +1,161 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link AddFieldsOperation}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +class AddFieldsOperationUnitTests { + + @Test // DATAMONGO-2363 + void raisesErrorOnNullField() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new AddFieldsOperation(null, "value")); + } + + @Test // DATAMONGO-2363 + void rendersFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("name", "value").toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersMultipleEntriesCorrectly() { + + assertThat(new AddFieldsOperation("name", "value").addField("field-2", "value2") + .toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"value\", \"field-2\":\"value2\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersMappedFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("student", "value").toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersNestedMappedFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("scoresWithMappedField.student", "value") + .toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"scoresWithMappedField.student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersTargetValueFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("name", Fields.field("value")).toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"$value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersMappedTargetValueFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("student", Fields.field("homework")) + .toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"student_name\":\"$home_work\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersNestedMappedTargetValueFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("scoresWithMappedField.student", Fields.field("scoresWithMappedField.homework")) + .toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document.parse( + "{\"$addFields\" : {\"scoresWithMappedField.student_name\":\"$scoresWithMappedField.home_work\"}}")); + } + + @Test // DATAMONGO-2363 + void appliesSpelExpressionCorrectly() { + + AddFieldsOperation operation = AddFieldsOperation.builder().addField("totalHomework") + .withValueOfExpression("sum(homework) * [0]", 2) // + .build(); + + assertThat(operation.toPipelineStages(contextFor(ScoresWrapper.class))).contains( + Document.parse("{\"$addFields\" : {\"totalHomework\": { $multiply : [{ \"$sum\" : [\"$homework\"] }, 2] }}}")); + } + 
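+	// Usage sketch: an AddFieldsOperation built via the builder plugs into a pipeline like any
+	// other stage. The MongoTemplate instance "template" and the collection name "scores" are
+	// assumed here for illustration only.
+	//
+	//   AddFieldsOperation addFields = AddFieldsOperation.builder().addField("totalHomework")
+	//   		.withValueOf(ArithmeticOperators.valueOf("homework").sum()).build();
+	//   AggregationResults<Document> results = template.aggregate(
+	//   		Aggregation.newAggregation(addFields), "scores", Document.class);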
+
+	@Test // DATAMONGO-2363
+	void rendersTargetValueExpressionCorrectly() {
+
+		assertThat(AddFieldsOperation.builder().addField("totalHomework")
+				.withValueOf(ArithmeticOperators.valueOf("homework").sum()).build().toPipelineStages(contextFor(Scores.class)))
+				.containsExactly(Document.parse("{\"$addFields\" : {\"totalHomework\": { \"$sum\" : \"$homework\" }}}"));
+	}
+
+	@Test // DATAMONGO-2363
+	void exposesFieldsCorrectly() {
+
+		ExposedFields fields = AddFieldsOperation.builder().addField("totalHomework").withValue("A+") //
+				.addField("totalQuiz").withValue("B-") //
+				.addField("computed").withValueOfExpression("totalHomework").build().getFields();
+
+		assertThat(fields.getField("totalHomework")).isNotNull();
+		assertThat(fields.getField("totalQuiz")).isNotNull();
+		assertThat(fields.getField("computed")).isNotNull();
+		assertThat(fields.getField("does-not-exist")).isNull();
+	}
+
+	private static AggregationOperationContext contextFor(@Nullable Class<?> type) {
+
+		if (type == null) {
+			return Aggregation.DEFAULT_CONTEXT;
+		}
+
+		MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE,
+				new MongoMappingContext());
+		mongoConverter.afterPropertiesSet();
+
+		return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(),
+				new QueryMapper(mongoConverter)).continueOnMissingFieldReference();
+	}
+
+	static class Scores {
+
+		String student;
+		List<Object> homework;
+	}
+
+	static class ScoresWithMappedField {
+
+		@Field("student_name") String student;
+		@Field("home_work") List<Object> homework;
+	}
+
+	static class ScoresWrapper {
+
+		Scores scores;
+		ScoresWithMappedField scoresWithMappedField;
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRendererUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRendererUnitTests.java
new file mode 100644
index 0000000000..5cc93ee5b9
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRendererUnitTests.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2023-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.mockito.Mockito.*; +import static org.springframework.data.domain.Sort.Direction.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import java.time.ZonedDateTime; +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.ConverterBuilder; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.CustomConversions.StoreConversions; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; + +/** + * @author Christoph Strobl + */ +public class AggregationOperationRendererUnitTests { + + @Test // GH-4443 + void nonFieldsExposingAggregationOperationContinuesWithSameContextForNextStage() { + + AggregationOperationContext rootContext = mock(AggregationOperationContext.class); + AggregationOperation stage1 = mock(AggregationOperation.class); + AggregationOperation stage2 = mock(AggregationOperation.class); + + AggregationOperationRenderer.toDocument(List.of(stage1, stage2), rootContext); + + verify(stage1).toPipelineStages(eq(rootContext)); + verify(stage2).toPipelineStages(eq(rootContext)); + } + + @Test // GH-4722 + void contextShouldCarryOnRelaxedFieldMapping() { + + MongoTestMappingContext ctx = new MongoTestMappingContext(cfg -> { + cfg.initialEntitySet(TestRecord.class); + }); + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, ctx); + + Aggregation agg = Aggregation.newAggregation(Aggregation.unwind("layerOne.layerTwo"), + project().and("layerOne.layerTwo.layerThree").as("layerOne.layerThree"), + sort(DESC, "layerOne.layerThree.fieldA")); + + AggregationOperationRenderer.toDocument(agg.getPipeline().getOperations(), + new RelaxedTypeBasedAggregationOperationContext(TestRecord.class, ctx, new QueryMapper(mongoConverter))); + } + + @Test // GH-4722 + void appliesConversionToValuesUsedInAggregation() { + + MongoTestMappingContext ctx = new MongoTestMappingContext(cfg -> { + cfg.initialEntitySet(TestRecord.class); + }); + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, ctx); + mongoConverter.setCustomConversions(new CustomConversions(StoreConversions.NONE, + Set.copyOf(ConverterBuilder.writing(ZonedDateTime.class, String.class, ZonedDateTime::toString) + .andReading(it -> ZonedDateTime.parse(it)).getConverters()))); + mongoConverter.afterPropertiesSet(); + + var agg = Aggregation.newAggregation(Aggregation.sort(Direction.DESC, "version"), + Aggregation.group("entityId").first(Aggregation.ROOT).as("value"), Aggregation.replaceRoot("value"), + Aggregation.match(Criteria.where("createdDate").lt(ZonedDateTime.now())) // here is the problem + ); + + List document = 
AggregationOperationRenderer.toDocument(agg.getPipeline().getOperations(), + new RelaxedTypeBasedAggregationOperationContext(TestRecord.class, ctx, new QueryMapper(mongoConverter))); + Assertions.assertThat(document).last() + .extracting(it -> it.getEmbedded(List.of("$match", "createdDate", "$lt"), Object.class)) + .isInstanceOf(String.class); + } + + @ParameterizedTest // GH-4722 + @MethodSource("studentAggregationContexts") + void mapsOperationThatDoesNotExposeDedicatedFieldsCorrectly(AggregationOperationContext aggregationContext) { + + var agg = newAggregation(Student.class, Aggregation.unwind("grades"), Aggregation.replaceRoot("grades"), + Aggregation.project("grades")); + + List mappedPipeline = AggregationOperationRenderer.toDocument(agg.getPipeline().getOperations(), + aggregationContext); + + Assertions.assertThat(mappedPipeline).last().isEqualTo(Document.parse("{\"$project\": {\"grades\": 1}}")); + } + + private static Stream studentAggregationContexts() { + + MongoTestMappingContext ctx = new MongoTestMappingContext(cfg -> { + cfg.initialEntitySet(Student.class); + }); + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, ctx); + mongoConverter.afterPropertiesSet(); + + QueryMapper queryMapper = new QueryMapper(mongoConverter); + + return Stream.of( + Arguments + .of(new TypeBasedAggregationOperationContext(Student.class, ctx, queryMapper, FieldLookupPolicy.strict())), + Arguments.of( + new TypeBasedAggregationOperationContext(Student.class, ctx, queryMapper, FieldLookupPolicy.relaxed()))); + } + + record TestRecord(@Id String field1, String field2, LayerOne layerOne) { + record LayerOne(List layerTwo) { + } + + record LayerTwo(LayerThree layerThree) { + } + + record LayerThree(int fieldA, int fieldB) { + } + } + + static class Student { + + @Field("mark") List grades; + + } + + static class Grade { + + int points; + String grades; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java index ea3f556d4b..ab65236f7c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,50 +15,106 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import org.junit.Before; -import org.junit.Test; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link AggregationOptions}. 
- * + * * @author Thomas Darimont + * @author Mark Paluch + * @author Christoph Strobl + * @author Yadhukrishna S Pai * @since 1.6 */ -public class AggregationOptionsTests { +class AggregationOptionsTests { + private final Document dummyHint = new Document("dummyField", 1); AggregationOptions aggregationOptions; - @Before - public void setup() { - aggregationOptions = newAggregationOptions().explain(true).cursor(new BasicDBObject("foo", 1)).allowDiskUse(true) + @BeforeEach + void setup() { + aggregationOptions = newAggregationOptions().explain(true) // + .cursorBatchSize(1) // + .allowDiskUse(true) // + .comment("hola") // + .hint(dummyHint) // .build(); + } + + @Test // DATAMONGO-960, DATAMONGO-1836 + void aggregationOptionsBuilderShouldSetOptionsAccordingly() { + + assertThat(aggregationOptions.isAllowDiskUse()).isTrue(); + assertThat(aggregationOptions.isExplain()).isTrue(); + assertThat(aggregationOptions.getCursor()).contains(new Document("batchSize", 1)); + assertThat(aggregationOptions.getHint()).contains(dummyHint); + assertThat(aggregationOptions.getHintObject()).contains(dummyHint); + } + + @Test // DATAMONGO-1637, DATAMONGO-2153, DATAMONGO-1836 + void shouldInitializeFromDocument() { + + Document document = new Document(); + document.put("cursor", new Document("batchSize", 1)); + document.put("explain", true); + document.put("allowDiskUse", true); + document.put("comment", "hola"); + document.put("hint", dummyHint); + + aggregationOptions = AggregationOptions.fromDocument(document); + + assertThat(aggregationOptions.isAllowDiskUse()).isTrue(); + assertThat(aggregationOptions.isExplain()).isTrue(); + assertThat(aggregationOptions.getCursor()).contains(new Document("batchSize", 1)); + assertThat(aggregationOptions.getCursorBatchSize()).isEqualTo(1); + assertThat(aggregationOptions.getComment()).contains("hola"); + assertThat(aggregationOptions.getHint()).contains(dummyHint); + assertThat(aggregationOptions.getHintObject()).contains(dummyHint); + } + @Test // GH-4664 + void omitsAllowDiskUseByDefault() { + + aggregationOptions = AggregationOptions.fromDocument(new Document()); + + assertThat(aggregationOptions.isAllowDiskUse()).isFalse(); + assertThat(aggregationOptions.isAllowDiskUseSet()).isFalse(); + + assertThat(aggregationOptions.toDocument()).doesNotContainKey("allowDiskUse"); } - /** - * @see DATAMONGO-960 - */ - @Test - public void aggregationOptionsBuilderShouldSetOptionsAccordingly() { + @Test // GH-4664 + void applyOptionsDoesNotChangeAllowDiskUseDefault() { - assertThat(aggregationOptions.isAllowDiskUse(), is(true)); - assertThat(aggregationOptions.isExplain(), is(true)); - assertThat(aggregationOptions.getCursor(), is((DBObject) new BasicDBObject("foo", 1))); + aggregationOptions = AggregationOptions.fromDocument(new Document()); + + Document empty = new Document(); + aggregationOptions.applyAndReturnPotentiallyChangedCommand(empty); + + assertThat(empty).doesNotContainKey("allowDiskUse"); } - /** - * @see DATAMONGO-960 - */ - @Test - public void aggregationOptionsToString() { - assertThat(aggregationOptions.toString(), - is("{ \"allowDiskUse\" : true , \"explain\" : true , \"cursor\" : { \"foo\" : 1}}")); + @Test // GH-4664 + void applyOptionsDoesNotChangeExistingAllowDiskUse() { + + aggregationOptions = AggregationOptions.fromDocument(new Document()); + + Document existing = new Document("allowDiskUse", true); + aggregationOptions.applyAndReturnPotentiallyChangedCommand(existing); + + assertThat(existing).containsEntry("allowDiskUse", true); + } + + @Test // 
DATAMONGO-960, DATAMONGO-2153, DATAMONGO-1836 + void aggregationOptionsToString() { + + assertThat(aggregationOptions.toDocument()).isEqualTo(Document + .parse("{ " + "\"allowDiskUse\" : true , " + "\"explain\" : true , " + "\"cursor\" : { \"batchSize\" : 1}, " + + "\"comment\": \"hola\", " + "\"hint\" : { \"dummyField\" : 1}" + "}")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java index 7a23b03782..99579b34a7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2016 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,182 +15,179 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; import static org.springframework.data.domain.Sort.Direction.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.io.BufferedInputStream; import java.text.ParseException; import java.text.SimpleDateFormat; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.Objects; import java.util.Scanner; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.LocalDateTime; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; +import java.util.stream.Stream; + +import org.assertj.core.data.Offset; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.core.io.ClassPathResource; -import org.springframework.dao.DataAccessException; import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.geo.Box; import org.springframework.data.geo.Metrics; -import 
org.springframework.data.mapping.model.MappingException; -import org.springframework.data.mongodb.core.CollectionCallback; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.BulkOperations; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.TestEntities; import org.springframework.data.mongodb.core.Venue; import org.springframework.data.mongodb.core.aggregation.AggregationTests.CarDescriptor.Entry; +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.Person; -import org.springframework.data.util.Version; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoVersion; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.util.ObjectUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.CommandResult; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.MongoException; -import com.mongodb.util.JSON; +import com.mongodb.client.MongoCollection; /** - * Tests for {@link MongoTemplate#aggregate(String, AggregationPipeline, Class)}. - * - * @see DATAMONGO-586 + * Tests for {@link MongoTemplate#aggregate(Aggregation, Class, Class)}. 
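+ * The tests seed the database with the zips.json sample dataset before exercising the aggregation pipelines.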
+ * * @author Tobias Trelle * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Nikolay Bogdanov + * @author Maninder Singh + * @author Sergey Shcherbakov + * @author Minsu Kim + * @author Sangyong Choi + * @author Julia Lee */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class AggregationTests { private static final String INPUT_COLLECTION = "aggregation_test_collection"; - private static final Logger LOGGER = LoggerFactory.getLogger(AggregationTests.class); - private static final Version TWO_DOT_FOUR = new Version(2, 4); - private static final Version TWO_DOT_SIX = new Version(2, 6); - private static final Version THREE_DOT_TWO = new Version(3, 2); private static boolean initialized = false; + private static List<Document> documents = parseDocuments(); - @Autowired MongoTemplate mongoTemplate; + @Template // + private static MongoTestTemplate mongoTemplate; - @Rule public ExpectedException exception = ExpectedException.none(); - private static Version mongoVersion; + @BeforeEach + void setUp() { - @Before - public void setUp() { - - queryMongoVersionIfNecessary(); cleanDb(); initSampleDataIfNecessary(); } - private void queryMongoVersionIfNecessary() { - - if (mongoVersion == null) { - CommandResult result = mongoTemplate.executeCommand("{ buildInfo: 1 }"); - mongoVersion = Version.parse(result.get("version").toString()); - } - } - - @After - public void cleanUp() { + @AfterEach + void cleanUp() { cleanDb(); } private void cleanDb() { + + mongoTemplate.flush(Product.class, UserWithLikes.class, DATAMONGO753.class, Data.class, DATAMONGO788.class, + User.class, Person.class, Reservation.class, Venue.class, MeterData.class, LineItem.class, InventoryItem.class, + Sales.class, Sales2.class, Employee.class, Art.class, Venue.class, Item.class); + mongoTemplate.dropCollection(INPUT_COLLECTION); - mongoTemplate.dropCollection(Product.class); - mongoTemplate.dropCollection(UserWithLikes.class); - mongoTemplate.dropCollection(DATAMONGO753.class); - mongoTemplate.dropCollection(Data.class); - mongoTemplate.dropCollection(DATAMONGO788.class); - mongoTemplate.dropCollection(User.class); - mongoTemplate.dropCollection(Person.class); - mongoTemplate.dropCollection(Reservation.class); - mongoTemplate.dropCollection(Venue.class); - mongoTemplate.dropCollection(MeterData.class); + mongoTemplate.dropCollection("personQueryTemp"); } /** - * Imports the sample dataset (zips.json) if necessary (e.g. if it doen't exist yet). The dataset can originally be + * Imports the sample dataset (zips.json) if necessary (e.g. if it doesn't exist yet). The dataset can originally be * found on the mongodb aggregation framework example website: - * - * @see http://docs.mongodb.org/manual/tutorial/aggregation-examples/.
+ * + * @see <a href="https://docs.mongodb.org/manual/tutorial/aggregation-examples/">MongoDB Aggregation Examples</a> */ private void initSampleDataIfNecessary() { if (!initialized) { - LOGGER.debug("Server uses MongoDB Version: {}", mongoVersion); - mongoTemplate.dropCollection(ZipInfo.class); - mongoTemplate.execute(ZipInfo.class, new CollectionCallback<Void>() { - - @Override - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { - - Scanner scanner = null; - try { - scanner = new Scanner(new BufferedInputStream(new ClassPathResource("zips.json").getInputStream())); - while (scanner.hasNextLine()) { - String zipInfoRecord = scanner.nextLine(); - collection.save((DBObject) JSON.parse(zipInfoRecord)); - } - } catch (Exception e) { - if (scanner != null) { - scanner.close(); - } - throw new RuntimeException("Could not load mongodb sample dataset!", e); - } - - return null; - } - }); + + mongoTemplate.bulkOps(BulkOperations.BulkMode.UNORDERED, ZipInfo.class).insert(documents).execute(); long count = mongoTemplate.count(new Query(), ZipInfo.class); - assertThat(count, is(29467L)); + assertThat(count).isEqualTo(29467L); initialized = true; } } - @Test(expected = IllegalArgumentException.class) - public void shouldHandleMissingInputCollection() { - mongoTemplate.aggregate(newAggregation(), (String) null, TagCount.class); + static List<Document> parseDocuments() { + + Scanner scanner = null; + List<Document> documents = new ArrayList<>(30000); + + try { + scanner = new Scanner(new BufferedInputStream(new ClassPathResource("zips.json").getInputStream())); + while (scanner.hasNextLine()) { + String zipInfoRecord = scanner.nextLine(); + documents.add(Document.parse(zipInfoRecord)); + } + } catch (Exception e) { + if (scanner != null) { + scanner.close(); + } + throw new RuntimeException("Could not load mongodb sample dataset", e); + } + + return documents; + } + + @Test // DATAMONGO-586 + void shouldHandleMissingInputCollection() { + assertThatIllegalArgumentException() + .isThrownBy(() -> mongoTemplate.aggregate(newAggregation(), (String) null, TagCount.class)); } - @Test(expected = IllegalArgumentException.class) - public void shouldHandleMissingAggregationPipeline() { - mongoTemplate.aggregate(null, INPUT_COLLECTION, TagCount.class); + @Test // DATAMONGO-586 + void shouldHandleMissingAggregationPipeline() { + assertThatIllegalArgumentException() + .isThrownBy(() -> mongoTemplate.aggregate(null, INPUT_COLLECTION, TagCount.class)); } - @Test(expected = IllegalArgumentException.class) - public void shouldHandleMissingEntityClass() { - mongoTemplate.aggregate(newAggregation(), INPUT_COLLECTION, null); + @Test // DATAMONGO-586 + void shouldHandleMissingEntityClass() { + assertThatIllegalArgumentException() + .isThrownBy(() -> mongoTemplate.aggregate(newAggregation(), INPUT_COLLECTION, null)); } - @Test - public void shouldAggregate() { + @Test // DATAMONGO-586 + void shouldAggregate() { createTagDocuments(); @@ -206,20 +203,48 @@ public void shouldAggregate() { AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, TagCount.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); List<TagCount> tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(3)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(3); assertTagCount("spring", 3, tagCount.get(0)); assertTagCount("mongodb", 2, tagCount.get(1)); assertTagCount("nosql", 1, tagCount.get(2)); } - @Test - public void shouldAggregateEmptyCollection() { + @Test // DATAMONGO-1637 + void 
shouldAggregateAndStream() { + + createTagDocuments(); + + Aggregation agg = newAggregation( // + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("n"), // + project("n") // + .and("tag").previousOperation(), // + sort(DESC, "n") // + ).withOptions(new AggregationOptions(true, false, 1)); + + try (Stream<TagCount> stream = mongoTemplate.aggregateStream(agg, INPUT_COLLECTION, TagCount.class)) { + + List<TagCount> tagCount = stream.toList(); + + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(3); + + assertTagCount("spring", 3, tagCount.get(0)); + assertTagCount("mongodb", 2, tagCount.get(1)); + assertTagCount("nosql", 1, tagCount.get(2)); + } + } + + @Test // DATAMONGO-586 + void shouldAggregateEmptyCollection() { Aggregation aggregation = newAggregation(// project("tags"), // @@ -233,16 +258,90 @@ public void shouldAggregateEmptyCollection() { AggregationResults<TagCount> results = mongoTemplate.aggregate(aggregation, INPUT_COLLECTION, TagCount.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); + + List<TagCount> tagCount = results.getMappedResults(); + + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(0); + } + + @Test // DATAMONGO-1637 + void shouldAggregateEmptyCollectionAndStream() { + + Aggregation aggregation = newAggregation(// + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("n"), // + project("n") // + .and("tag").previousOperation(), // + sort(DESC, "n") // + ); + + try (Stream<TagCount> stream = mongoTemplate.aggregateStream(aggregation, INPUT_COLLECTION, TagCount.class)) { + + List<TagCount> tagCount = stream.toList(); + + assertThat(tagCount.size()).isEqualTo(0); + } + } + + @Test // DATAMONGO-1391 + void shouldUnwindWithIndex() { + + MongoCollection<Document> coll = mongoTemplate.getCollection(INPUT_COLLECTION); + + coll.insertOne(createDocument("Doc1", "spring", "mongodb", "nosql")); + coll.insertOne(createDocument("Doc2")); + + Aggregation agg = newAggregation( // + project("tags"), // + unwind("tags", "n"), // + project("n") // + .and("tag").previousOperation(), // + sort(DESC, "n") // + ); + + AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, TagCount.class); + + assertThat(results).isNotNull(); List<TagCount> tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(0)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(3); + } + + @Test // DATAMONGO-1391 + @EnableIfMongoServerVersion(isLessThan = "6.0") // $sort does not seem to have an effect on $unwind + void shouldUnwindPreserveEmpty() { + + MongoCollection<Document> coll = mongoTemplate.getCollection(INPUT_COLLECTION); + + coll.insertOne(createDocument("Doc1", "spring", "mongodb", "nosql")); + coll.insertOne(createDocument("Doc2")); + + Aggregation agg = newAggregation( // + project("tags"), // + unwind("tags", "n", true), // + sort(DESC, "n") // + ); + + AggregationResults<Document> results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, Document.class); + + assertThat(results).isNotNull(); + + List<Document> tagCount = results.getMappedResults(); + + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(4); + assertThat(tagCount.get(0)).containsEntry("n", 2L); + assertThat(tagCount.get(3)).containsEntry("n", null); } - @Test - public void shouldDetectResultMismatch() { + @Test // DATAMONGO-586 + void shouldDetectResultMismatch() { createTagDocuments(); @@ -256,21 +355,44 @@ public void shouldDetectResultMismatch() { AggregationResults<TagCount> results = 
mongoTemplate.aggregate(aggregation, INPUT_COLLECTION, TagCount.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); List tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(2)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(2); assertTagCount(null, 0, tagCount.get(0)); assertTagCount(null, 0, tagCount.get(1)); } - @Test - public void complexAggregationFrameworkUsageLargestAndSmallestCitiesByState() { + @Test // DATAMONGO-1637 + void shouldDetectResultMismatchWhileStreaming() { + + createTagDocuments(); + + Aggregation aggregation = newAggregation( // + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("count"), // count field not present + limit(2) // + ); + + try (Stream stream = mongoTemplate.aggregateStream(aggregation, INPUT_COLLECTION, TagCount.class)) { + + List tagCount = stream.toList(); + + assertThat(tagCount.size()).isEqualTo(2); + assertTagCount(null, 0, tagCount.get(0)); + assertTagCount(null, 0, tagCount.get(1)); + } + } + + @Test // DATAMONGO-586 + void complexAggregationFrameworkUsageLargestAndSmallestCitiesByState() { /* - //complex mongodb aggregation framework example from http://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state - db.zipInfo.aggregate( + //complex mongodb aggregation framework example from https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state + db.zipInfo.aggregate( { $group: { _id: { @@ -343,52 +465,52 @@ public void complexAggregationFrameworkUsageLargestAndSmallestCitiesByState() { sort(ASC, "state") // ); - assertThat(aggregation, is(notNullValue())); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation).isNotNull(); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(51)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(51); ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); - assertThat(firstZipInfoStats, is(notNullValue())); - assertThat(firstZipInfoStats.id, is(nullValue())); - assertThat(firstZipInfoStats.state, is("AK")); - assertThat(firstZipInfoStats.smallestCity, is(notNullValue())); - assertThat(firstZipInfoStats.smallestCity.name, is("CHEVAK")); - assertThat(firstZipInfoStats.smallestCity.population, is(0)); - assertThat(firstZipInfoStats.biggestCity, is(notNullValue())); - assertThat(firstZipInfoStats.biggestCity.name, is("ANCHORAGE")); - assertThat(firstZipInfoStats.biggestCity.population, is(183987)); + assertThat(firstZipInfoStats).isNotNull(); + assertThat(firstZipInfoStats.id).isNull(); + assertThat(firstZipInfoStats.state).isEqualTo("AK"); + assertThat(firstZipInfoStats.smallestCity).isNotNull(); + assertThat(firstZipInfoStats.smallestCity.name).isEqualTo("CHEVAK"); + assertThat(firstZipInfoStats.smallestCity.population).isEqualTo(0); + assertThat(firstZipInfoStats.biggestCity).isNotNull(); + assertThat(firstZipInfoStats.biggestCity.name).isEqualTo("ANCHORAGE"); + assertThat(firstZipInfoStats.biggestCity.population).isEqualTo(183987); ZipInfoStats lastZipInfoStats = result.getMappedResults().get(50); - 
assertThat(lastZipInfoStats, is(notNullValue())); - assertThat(lastZipInfoStats.id, is(nullValue())); - assertThat(lastZipInfoStats.state, is("WY")); - assertThat(lastZipInfoStats.smallestCity, is(notNullValue())); - assertThat(lastZipInfoStats.smallestCity.name, is("LOST SPRINGS")); - assertThat(lastZipInfoStats.smallestCity.population, is(6)); - assertThat(lastZipInfoStats.biggestCity, is(notNullValue())); - assertThat(lastZipInfoStats.biggestCity.name, is("CHEYENNE")); - assertThat(lastZipInfoStats.biggestCity.population, is(70185)); - } - - @Test - public void findStatesWithPopulationOver10MillionAggregationExample() { + assertThat(lastZipInfoStats).isNotNull(); + assertThat(lastZipInfoStats.id).isNull(); + assertThat(lastZipInfoStats.state).isEqualTo("WY"); + assertThat(lastZipInfoStats.smallestCity).isNotNull(); + assertThat(lastZipInfoStats.smallestCity.name).isEqualTo("LOST SPRINGS"); + assertThat(lastZipInfoStats.smallestCity.population).isEqualTo(6); + assertThat(lastZipInfoStats.biggestCity).isNotNull(); + assertThat(lastZipInfoStats.biggestCity.name).isEqualTo("CHEYENNE"); + assertThat(lastZipInfoStats.biggestCity.population).isEqualTo(70185); + } + + @Test // DATAMONGO-586 + void findStatesWithPopulationOver10MillionAggregationExample() { /* - //complex mongodb aggregation framework example from - http://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state - - db.zipcodes.aggregate( + //complex mongodb aggregation framework example from + https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state + + db.zipcodes.aggregate( { $group: { _id:"$state", totalPop:{ $sum:"$pop"} } }, - { - $sort: { _id: 1, "totalPop": 1 } + { + $sort: { _id: 1, "totalPop": 1 } }, { $match: { @@ -405,38 +527,317 @@ public void findStatesWithPopulationOver10MillionAggregationExample() { match(where("totalPop").gte(10 * 1000 * 1000)) // ); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(7)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(7); StateStats stateStats = result.getMappedResults().get(0); - assertThat(stateStats, is(notNullValue())); - assertThat(stateStats.id, is("CA")); - assertThat(stateStats.state, is(nullValue())); - assertThat(stateStats.totalPopulation, is(29760021)); + assertThat(stateStats).isNotNull(); + assertThat(stateStats.id).isEqualTo("CA"); + assertThat(stateStats.state).isNull(); + assertThat(stateStats.totalPopulation).isEqualTo(29760021); + } + + /** + * @see MongoDB Aggregation + * Framework: $cond + */ + @Test // DATAMONGO-861 + void aggregationUsingConditionalProjectionToCalculateDiscount() { + + /* + db.inventory.aggregate( + [ + { + $project: + { + item: 1, + discount: + { + $cond: { if: { $gte: [ "$qty", 250 ] }, then: 30, else: 20 } + } + } + } + ] + ) + */ + + mongoTemplate.insert(new InventoryItem(1, "abc1", 300)); + mongoTemplate.insert(new InventoryItem(2, "abc2", 200)); + mongoTemplate.insert(new InventoryItem(3, "xyz1", 250)); + + TypedAggregation aggregation = newAggregation(InventoryItem.class, // + project("item") // + .and("discount")// + 
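// a sketch, assuming the builder mirrors the shell pipeline quoted above (not asserted by this test):
+ // discount: { $cond: { if: { $gte: [ "$qty", 250 ] }, then: 30, else: 20 } }
+ 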
.applyCondition(ConditionalOperators.Cond.newBuilder().when(Criteria.where("qty").gte(250)) // + .then(30) // + .otherwise(20))); + + assertThat(aggregation.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(3); + + Document first = result.getMappedResults().get(0); + assertThat(first.get("_id")).isEqualTo((Object) 1); + assertThat(first.get("discount")).isEqualTo((Object) 30); + + Document second = result.getMappedResults().get(1); + assertThat(second.get("_id")).isEqualTo((Object) 2); + assertThat(second.get("discount")).isEqualTo((Object) 20); + + Document third = result.getMappedResults().get(2); + assertThat(third.get("_id")).isEqualTo((Object) 3); + assertThat(third.get("discount")).isEqualTo((Object) 30); + } + + /** + * @see MongoDB Aggregation + * Framework: $ifNull + */ + @Test // DATAMONGO-861 + void aggregationUsingIfNullToProjectSaneDefaults() { + + /* + db.inventory.aggregate( + [ + { + $project: { + item: 1, + description: { $ifNull: [ "$description", "Unspecified" ] } + } + } + ] + ) + */ + + mongoTemplate.insert(new InventoryItem(1, "abc1", "product 1", 300)); + mongoTemplate.insert(new InventoryItem(2, "abc2", 200)); + mongoTemplate.insert(new InventoryItem(3, "xyz1", 250)); + + TypedAggregation aggregation = newAggregation(InventoryItem.class, // + project("item") // + .and(ConditionalOperators.ifNull("description").then("Unspecified")) // + .as("description")// + ); + + assertThat(aggregation.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(3); + + Document first = result.getMappedResults().get(0); + assertThat(first.get("_id")).isEqualTo((Object) 1); + assertThat(first.get("description")).isEqualTo((Object) "product 1"); + + Document second = result.getMappedResults().get(1); + assertThat(second.get("_id")).isEqualTo((Object) 2); + assertThat(second.get("description")).isEqualTo((Object) "Unspecified"); + } + + @Test // DATAMONGO-861 + void aggregationUsingConditionalProjection() { + + TypedAggregation aggregation = newAggregation(ZipInfo.class, // + project() // + .and("largePopulation")// + .applyCondition(ConditionalOperators.when(Criteria.where("population").gte(20000)) // + .then(true) // + .otherwise(false)) // + .and("population").as("population")); + + assertThat(aggregation).isNotNull(); + assertThat(aggregation.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(29467); + + Document firstZipInfoStats = result.getMappedResults().get(0); + assertThat(firstZipInfoStats.get("largePopulation")).isEqualTo((Object) false); + assertThat(firstZipInfoStats.get("population")).isEqualTo((Object) 6055); + } + + @Test // DATAMONGO-861 + void aggregationUsingNestedConditionalProjection() { + + TypedAggregation aggregation = newAggregation(ZipInfo.class, // + project() // + .and("size")// + .applyCondition(ConditionalOperators.when(Criteria.where("population").gte(20000)) // + .then( + ConditionalOperators.when(Criteria.where("population").gte(200000)).then("huge").otherwise("small")) // + .otherwise("small")) // + .and("population").as("population")); + + assertThat(aggregation).isNotNull(); + assertThat(aggregation.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + 
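// a sketch of the nested condition above, assuming standard $cond rendering:
+ // size = population >= 20000 ? (population >= 200000 ? "huge" : "small") : "small"
+ 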
assertThat(result.getMappedResults().size()).isEqualTo(29467); + + Document firstZipInfoStats = result.getMappedResults().get(0); + assertThat(firstZipInfoStats.get("size")).isEqualTo((Object) "small"); + assertThat(firstZipInfoStats.get("population")).isEqualTo((Object) 6055); + } + + @Test // DATAMONGO-861 + void aggregationUsingIfNullProjection() { + + mongoTemplate.insert(new LineItem("id", "caption", 0)); + mongoTemplate.insert(new LineItem("idonly", null, 0)); + + TypedAggregation aggregation = newAggregation(LineItem.class, // + project("id") // + .and("caption")// + .applyCondition(ConditionalOperators.ifNull("caption").then("unknown")), + sort(ASC, "id")); + + assertThat(aggregation.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(2); + + Document id = result.getMappedResults().get(0); + assertThat((String) id.get("caption")).isEqualTo("caption"); + + Document idonly = result.getMappedResults().get(1); + assertThat((String) idonly.get("caption")).isEqualTo("unknown"); + } + + @Test // DATAMONGO-861 + void aggregationUsingIfNullReplaceWithFieldReferenceProjection() { + + mongoTemplate.insert(new LineItem("id", "caption", 0)); + mongoTemplate.insert(new LineItem("idonly", null, 0)); + + TypedAggregation aggregation = newAggregation(LineItem.class, // + project("id") // + .and("caption")// + .applyCondition(ConditionalOperators.ifNull("caption").thenValueOf("id")), + sort(ASC, "id")); + + assertThat(aggregation.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(2); + + Document id = result.getMappedResults().get(0); + assertThat((String) id.get("caption")).isEqualTo("caption"); + + Document idonly = result.getMappedResults().get(1); + assertThat((String) idonly.get("caption")).isEqualTo("idonly"); + } + + @Test // DATAMONGO-861 + void shouldAllowGroupingUsingConditionalExpressions() { + + mongoTemplate.dropCollection(CarPerson.class); + + CarPerson person1 = new CarPerson("first1", "last1", new CarDescriptor.Entry("MAKE1", "MODEL1", 2000), + new CarDescriptor.Entry("MAKE1", "MODEL2", 2001)); + + CarPerson person2 = new CarPerson("first2", "last2", new CarDescriptor.Entry("MAKE3", "MODEL4", 2014)); + CarPerson person3 = new CarPerson("first3", "last3", new CarDescriptor.Entry("MAKE2", "MODEL5", 2015)); + + mongoTemplate.save(person1); + mongoTemplate.save(person2); + mongoTemplate.save(person3); + + TypedAggregation agg = Aggregation.newAggregation(CarPerson.class, + unwind("descriptors.carDescriptor.entries"), // + project() // + .and(ConditionalOperators // + .when(Criteria.where("descriptors.carDescriptor.entries.make").is("MAKE1")).then("good") + .otherwise("meh")) + .as("make") // + .and("descriptors.carDescriptor.entries.model").as("model") // + .and("descriptors.carDescriptor.entries.year").as("year"), // + group("make").avg(ConditionalOperators // + .when(Criteria.where("year").gte(2012)) // + .then(1) // + .otherwise(9000)) // + .as("score"), + sort(ASC, "score")); + + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + + assertThat(result.getMappedResults()).hasSize(2); + + Document meh = result.getMappedResults().get(0); + assertThat((String) meh.get("_id")).isEqualTo("meh"); + assertThat(((Number) meh.get("score")).longValue()).isEqualTo(1L); + + Document good = result.getMappedResults().get(1); + assertThat((String) 
good.get("_id")).isEqualTo("good"); + assertThat(((Number) good.get("score")).longValue()).isEqualTo(9000L); + } + + @Test // DATAMONGO-1784, DATAMONGO-2264 + void shouldAllowSumUsingConditionalExpressions() { + + mongoTemplate.dropCollection(CarPerson.class); + + CarPerson person1 = new CarPerson("first1", "last1", new CarDescriptor.Entry("MAKE1", "MODEL1", 2000), + new CarDescriptor.Entry("MAKE1", "MODEL2", 2001)); + + CarPerson person2 = new CarPerson("first2", "last2", new CarDescriptor.Entry("MAKE3", "MODEL4", 2014)); + CarPerson person3 = new CarPerson("first3", "last3", new CarDescriptor.Entry("MAKE2", "MODEL5", 2015)); + + mongoTemplate.save(person1); + mongoTemplate.save(person2); + mongoTemplate.save(person3); + + TypedAggregation agg = Aggregation.newAggregation(CarPerson.class, + unwind("descriptors.carDescriptor.entries"), // + project() // + .and(ConditionalOperators // + .when(Criteria.where("descriptors.carDescriptor.entries.make").is("MAKE1")).then("good") + .otherwise("meh")) + .as("make") // + .and("descriptors.carDescriptor.entries.model").as("model") // + .and("descriptors.carDescriptor.entries.year").as("year"), // + group("make").sum(ConditionalOperators // + .when(Criteria.where("year").gte(2012)) // + .then(1) // + .otherwise(9000)).as("score"), + sort(ASC, "score")); + + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + + assertThat(result.getMappedResults()).hasSize(2); + + Document meh = result.getMappedResults().get(0); + assertThat(meh.get("_id")).isEqualTo("meh"); + assertThat(((Number) meh.get("score")).longValue()).isEqualTo(2L); + + Document good = result.getMappedResults().get(1); + assertThat(good.get("_id")).isEqualTo("good"); + assertThat(((Number) good.get("score")).longValue()).isEqualTo(18000L); } /** - * @see http://docs.mongodb.org/manual/tutorial/aggregation-examples/#return-the-five-most-common-likes + * @see Return + * the Five Most Common “Likes” */ - @Test - public void returnFiveMostCommonLikesAggregationFrameworkExample() { + @Test // DATAMONGO-586 + void returnFiveMostCommonLikesAggregationFrameworkExample() { createUserWithLikesDocuments(); TypedAggregation agg = createUsersWithCommonLikesAggregation(); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(5)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(5); assertLikeStats(result.getMappedResults().get(0), "a", 4); assertLikeStats(result.getMappedResults().get(1), "b", 2); @@ -445,7 +846,7 @@ public void returnFiveMostCommonLikesAggregationFrameworkExample() { assertLikeStats(result.getMappedResults().get(4), "e", 3); } - protected TypedAggregation createUsersWithCommonLikesAggregation() { + TypedAggregation createUsersWithCommonLikesAggregation() { return newAggregation(UserWithLikes.class, // unwind("likes"), // group("likes").count().as("number"), // @@ -455,8 +856,8 @@ protected TypedAggregation createUsersWithCommonLikesAggregation( ); } - @Test - public void arithmenticOperatorsInProjectionExample() { + @Test // DATAMONGO-586 + void arithmenticOperatorsInProjectionExample() { Product product = new Product("P1", "A", 1.99, 3, 
0.05, 0.19); mongoTemplate.insert(product); @@ -475,32 +876,31 @@ public void arithmenticOperatorsInProjectionExample() { .and("spaceUnits").mod("spaceUnits").as("spaceUnitsModSpaceUnits") // ); - AggregationResults result = mongoTemplate.aggregate(agg, DBObject.class); - List resultList = result.getMappedResults(); - - assertThat(resultList, is(notNullValue())); - assertThat((String) resultList.get(0).get("_id"), is(product.id)); - assertThat((String) resultList.get(0).get("name"), is(product.name)); - assertThat((Double) resultList.get(0).get("netPricePlus1"), is(product.netPrice + 1)); - assertThat((Double) resultList.get(0).get("netPriceMinus1"), is(product.netPrice - 1)); - assertThat((Double) resultList.get(0).get("netPriceMul2"), is(product.netPrice * 2)); - assertThat((Double) resultList.get(0).get("netPriceDiv119"), is(product.netPrice / 1.19)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMod2"), is(product.spaceUnits % 2)); - assertThat((Integer) resultList.get(0).get("spaceUnitsPlusSpaceUnits"), is(product.spaceUnits + product.spaceUnits)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMinusSpaceUnits"), - is(product.spaceUnits - product.spaceUnits)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMultiplySpaceUnits"), is(product.spaceUnits - * product.spaceUnits)); - assertThat((Double) resultList.get(0).get("spaceUnitsDivideSpaceUnits"), - is((double) (product.spaceUnits / product.spaceUnits))); - assertThat((Integer) resultList.get(0).get("spaceUnitsModSpaceUnits"), is(product.spaceUnits % product.spaceUnits)); + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + List resultList = result.getMappedResults(); + + assertThat(resultList).isNotNull(); + assertThat((String) resultList.get(0).get("_id")).isEqualTo(product.id); + assertThat((String) resultList.get(0).get("name")).isEqualTo(product.name); + assertThat((Double) resultList.get(0).get("netPricePlus1")).isEqualTo(product.netPrice + 1); + assertThat((Double) resultList.get(0).get("netPriceMinus1")).isEqualTo(product.netPrice - 1); + assertThat((Double) resultList.get(0).get("netPriceMul2")).isEqualTo(product.netPrice * 2); + assertThat((Double) resultList.get(0).get("netPriceDiv119")).isEqualTo(product.netPrice / 1.19); + assertThat((Integer) resultList.get(0).get("spaceUnitsMod2")).isEqualTo(product.spaceUnits % 2); + assertThat((Integer) resultList.get(0).get("spaceUnitsPlusSpaceUnits")) + .isEqualTo(product.spaceUnits + product.spaceUnits); + assertThat((Integer) resultList.get(0).get("spaceUnitsMinusSpaceUnits")) + .isEqualTo(product.spaceUnits - product.spaceUnits); + assertThat((Integer) resultList.get(0).get("spaceUnitsMultiplySpaceUnits")) + .isEqualTo(product.spaceUnits * product.spaceUnits); + assertThat((Double) resultList.get(0).get("spaceUnitsDivideSpaceUnits")) + .isEqualTo((double) (product.spaceUnits / product.spaceUnits)); + assertThat((Integer) resultList.get(0).get("spaceUnitsModSpaceUnits")) + .isEqualTo(product.spaceUnits % product.spaceUnits); } - /** - * @see DATAMONGO-774 - */ - @Test - public void expressionsInProjectionExample() { + @Test // DATAMONGO-774 + void expressionsInProjectionExample() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -516,28 +916,23 @@ public void expressionsInProjectionExample() { ); - AggregationResults result = mongoTemplate.aggregate(agg, DBObject.class); - List resultList = result.getMappedResults(); - - assertThat(resultList, is(notNullValue())); - 
assertThat((String) resultList.get(0).get("_id"), is(product.id)); - assertThat((String) resultList.get(0).get("name"), is(product.name)); - assertThat((Double) resultList.get(0).get("netPricePlus1"), is(product.netPrice + 1)); - assertThat((Double) resultList.get(0).get("netPriceMinus1"), is(product.netPrice - 1)); - assertThat((Double) resultList.get(0).get("netPriceDiv2"), is(product.netPrice / 2)); - assertThat((Double) resultList.get(0).get("grossPrice"), is(product.netPrice * 1.19)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMod2"), is(product.spaceUnits % 2)); - assertThat((Double) resultList.get(0).get("grossPriceIncludingDiscountAndCharge"), - is((product.netPrice * 0.8 + 1.2) * 1.19)); + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + List resultList = result.getMappedResults(); + + assertThat(resultList).isNotNull(); + assertThat((String) resultList.get(0).get("_id")).isEqualTo(product.id); + assertThat((String) resultList.get(0).get("name")).isEqualTo(product.name); + assertThat((Double) resultList.get(0).get("netPricePlus1")).isEqualTo(product.netPrice + 1); + assertThat((Double) resultList.get(0).get("netPriceMinus1")).isEqualTo(product.netPrice - 1); + assertThat((Double) resultList.get(0).get("netPriceDiv2")).isEqualTo(product.netPrice / 2); + assertThat((Double) resultList.get(0).get("grossPrice")).isEqualTo(product.netPrice * 1.19); + assertThat((Integer) resultList.get(0).get("spaceUnitsMod2")).isEqualTo(product.spaceUnits % 2); + assertThat((Double) resultList.get(0).get("grossPriceIncludingDiscountAndCharge")) + .isEqualTo((product.netPrice * 0.8 + 1.2) * 1.19); } - /** - * @see DATAMONGO-774 - */ - @Test - public void stringExpressionsInProjectionExample() { - - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_FOUR)); + @Test // DATAMONGO-774 + void stringExpressionsInProjectionExample() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -547,20 +942,17 @@ public void stringExpressionsInProjectionExample() { .andExpression("concat(name, '_bubu')").as("name_bubu") // ); - AggregationResults result = mongoTemplate.aggregate(agg, DBObject.class); - List resultList = result.getMappedResults(); + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + List resultList = result.getMappedResults(); - assertThat(resultList, is(notNullValue())); - assertThat((String) resultList.get(0).get("_id"), is(product.id)); - assertThat((String) resultList.get(0).get("name"), is(product.name)); - assertThat((String) resultList.get(0).get("name_bubu"), is(product.name + "_bubu")); + assertThat(resultList).isNotNull(); + assertThat((String) resultList.get(0).get("_id")).isEqualTo(product.id); + assertThat((String) resultList.get(0).get("name")).isEqualTo(product.name); + assertThat((String) resultList.get(0).get("name_bubu")).isEqualTo(product.name + "_bubu"); } - /** - * @see DATAMONGO-774 - */ - @Test - public void expressionsInProjectionExampleShowcase() { + @Test // DATAMONGO-774 + void expressionsInProjectionExampleShowcase() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -572,42 +964,24 @@ public void expressionsInProjectionExampleShowcase() { .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") // ); - AggregationResults result = mongoTemplate.aggregate(agg, DBObject.class); - List resultList = result.getMappedResults(); - - assertThat(resultList, is(notNullValue())); - 
DBObject firstItem = resultList.get(0); - assertThat((String) firstItem.get("_id"), is(product.id)); - assertThat((String) firstItem.get("name"), is(product.name)); - assertThat((Double) firstItem.get("salesPrice"), is((product.netPrice * (1 - product.discountRate) + shippingCosts) - * (1 + product.taxRate))); - } - - @Test - public void shouldThrowExceptionIfUnknownFieldIsReferencedInArithmenticExpressionsInProjection() { - - exception.expect(MappingException.class); - exception.expectMessage("unknown"); - - Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); - mongoTemplate.insert(product); - - TypedAggregation agg = newAggregation(Product.class, // - project("name", "netPrice") // - .andExpression("unknown + 1").as("netPricePlus1") // - ); + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + List resultList = result.getMappedResults(); - mongoTemplate.aggregate(agg, DBObject.class); + assertThat(resultList).isNotNull(); + Document firstItem = resultList.get(0); + assertThat((String) firstItem.get("_id")).isEqualTo(product.id); + assertThat((String) firstItem.get("name")).isEqualTo(product.name); + assertThat((Double) firstItem.get("salesPrice")) + .isEqualTo((product.netPrice * (1 - product.discountRate) + shippingCosts) * (1 + product.taxRate)); } /** - * @see DATAMONGO-753 - * @see http - * ://stackoverflow.com/questions/18653574/spring-data-mongodb-aggregation-framework-invalid-reference-in-group - * -operati + * @see Spring + * Data MongoDB - Aggregation Framework - invalid reference in group Operation */ - @Test - public void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() { + @Test // DATAMONGO-753 + void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() { mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("A", 1), new PD("B", 1), new PD("C", 1))); mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("B", 1), new PD("B", 1), new PD("C", 1))); @@ -616,28 +990,28 @@ public void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() { unwind("pd"), // group("pd.pDch") // the nested field expression .sum("pd.up").as("uplift"), // - project("_id", "uplift")); - - AggregationResults result = mongoTemplate.aggregate(agg, DBObject.class); - List stats = result.getMappedResults(); - - assertThat(stats.size(), is(3)); - assertThat(stats.get(0).get("_id").toString(), is("C")); - assertThat((Integer) stats.get(0).get("uplift"), is(2)); - assertThat(stats.get(1).get("_id").toString(), is("B")); - assertThat((Integer) stats.get(1).get("uplift"), is(3)); - assertThat(stats.get(2).get("_id").toString(), is("A")); - assertThat((Integer) stats.get(2).get("uplift"), is(1)); + project("_id", "uplift"), // + sort(Sort.by("uplift"))); + + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + List stats = result.getMappedResults(); + + assertThat(stats.size()).isEqualTo(3); + assertThat(stats.get(0).get("_id").toString()).isEqualTo("A"); + assertThat((Integer) stats.get(0).get("uplift")).isEqualTo(1); + assertThat(stats.get(1).get("_id").toString()).isEqualTo("C"); + assertThat((Integer) stats.get(1).get("uplift")).isEqualTo(2); + assertThat(stats.get(2).get("_id").toString()).isEqualTo("B"); + assertThat((Integer) stats.get(2).get("uplift")).isEqualTo(3); } /** - * @see DATAMONGO-753 - * @see http - * ://stackoverflow.com/questions/18653574/spring-data-mongodb-aggregation-framework-invalid-reference-in-group - * -operati + * @see Spring + * Data MongoDB - Aggregation Framework - invalid reference in group Operation */ 
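// a sketch, assuming default field aliasing: project().and("pd.up").as("up") below renders as
// { $project: { up: "$pd.up" } }, exposing the nested value under the immediate alias "up".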
- @Test - public void aliasesNestedFieldInProjectionImmediately() { + @Test // DATAMONGO-753 + void aliasesNestedFieldInProjectionImmediately() { mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("A", 1), new PD("B", 1), new PD("C", 1))); mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("B", 1), new PD("B", 1), new PD("C", 1))); @@ -646,22 +1020,17 @@ public void aliasesNestedFieldInProjectionImmediately() { unwind("pd"), // project().and("pd.up").as("up")); - AggregationResults results = mongoTemplate.aggregate(agg, DBObject.class); - List mappedResults = results.getMappedResults(); + AggregationResults results = mongoTemplate.aggregate(agg, Document.class); + List mappedResults = results.getMappedResults(); - assertThat(mappedResults, hasSize(6)); - for (DBObject element : mappedResults) { - assertThat(element.get("up"), is((Object) 1)); + assertThat(mappedResults).hasSize(6); + for (Document element : mappedResults) { + assertThat(element.get("up")).isEqualTo((Object) 1); } } - /** - * @DATAMONGO-774 - */ - @Test - public void shouldPerformDateProjectionOperatorsCorrectly() throws ParseException { - - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_FOUR)); + @Test // DATAMONGO-774 + void shouldPerformDateProjectionOperatorsCorrectly() throws ParseException { Data data = new Data(); data.stringValue = "ABC"; @@ -675,24 +1044,19 @@ public void shouldPerformDateProjectionOperatorsCorrectly() throws ParseExceptio .andExpression("toUpper(toLower(stringValue))").as("toUpper") // ); - AggregationResults results = mongoTemplate.aggregate(agg, DBObject.class); - DBObject dbo = results.getUniqueMappedResult(); + AggregationResults results = mongoTemplate.aggregate(agg, Document.class); + Document document = results.getUniqueMappedResult(); - assertThat(dbo, is(notNullValue())); - assertThat((String) dbo.get("concat"), is("ABCDE")); - assertThat((Integer) dbo.get("strcasecmp"), is(-1)); - assertThat((String) dbo.get("substr"), is("B")); - assertThat((String) dbo.get("toLower"), is("abc")); - assertThat((String) dbo.get("toUpper"), is("ABC")); + assertThat(document).isNotNull(); + assertThat((String) document.get("concat")).isEqualTo("ABCDE"); + assertThat((Integer) document.get("strcasecmp")).isEqualTo(-1); + assertThat((String) document.get("substr")).isEqualTo("B"); + assertThat((String) document.get("toLower")).isEqualTo("abc"); + assertThat((String) document.get("toUpper")).isEqualTo("ABC"); } - /** - * @DATAMONGO-774 - */ - @Test - public void shouldPerformStringProjectionOperatorsCorrectly() throws ParseException { - - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_FOUR)); + @Test // DATAMONGO-774 + void shouldPerformStringProjectionOperatorsCorrectly() throws ParseException { Data data = new Data(); data.dateValue = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss.SSSZ").parse("29.08.1983 12:34:56.789+0000"); @@ -711,27 +1075,45 @@ public void shouldPerformStringProjectionOperatorsCorrectly() throws ParseExcept .andExpression("millisecond(dateValue)").as("millisecond") // ); - AggregationResults results = mongoTemplate.aggregate(agg, DBObject.class); - DBObject dbo = results.getUniqueMappedResult(); + AggregationResults results = mongoTemplate.aggregate(agg, Document.class); + Document document = results.getUniqueMappedResult(); + + assertThat(document).isNotNull(); + assertThat((Integer) document.get("dayOfYear")).isEqualTo(241); + assertThat((Integer) document.get("dayOfMonth")).isEqualTo(29); + assertThat((Integer) document.get("dayOfWeek")).isEqualTo(2); + 
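// 1983-08-29 was a Monday; $dayOfWeek counts from Sunday (1), hence 2 here, whereas
+ // java.time's ISO-based DayOfWeek.MONDAY.getValue() would report 1.
+ 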
assertThat((Integer) document.get("year")).isEqualTo(1983); + assertThat((Integer) document.get("month")).isEqualTo(8); + assertThat((Integer) document.get("week")).isEqualTo(35); + assertThat((Integer) document.get("hour")).isEqualTo(12); + assertThat((Integer) document.get("minute")).isEqualTo(34); + assertThat((Integer) document.get("second")).isEqualTo(56); + assertThat((Integer) document.get("millisecond")).isEqualTo(789); + } + + @Test // DATAMONGO-1550 + void shouldPerformReplaceRootOperatorCorrectly() throws ParseException { + + Data data = new Data(); + DataItem dataItem = new DataItem(); + dataItem.primitiveIntValue = 42; + data.item = dataItem; + mongoTemplate.insert(data); + + TypedAggregation agg = newAggregation(Data.class, project("item"), // + replaceRoot("item"), // + project().and("primitiveIntValue").as("my_primitiveIntValue")); + + AggregationResults results = mongoTemplate.aggregate(agg, Document.class); + Document resultDocument = results.getUniqueMappedResult(); - assertThat(dbo, is(notNullValue())); - assertThat((Integer) dbo.get("dayOfYear"), is(241)); - assertThat((Integer) dbo.get("dayOfMonth"), is(29)); - assertThat((Integer) dbo.get("dayOfWeek"), is(2)); - assertThat((Integer) dbo.get("year"), is(1983)); - assertThat((Integer) dbo.get("month"), is(8)); - assertThat((Integer) dbo.get("week"), is(35)); - assertThat((Integer) dbo.get("hour"), is(12)); - assertThat((Integer) dbo.get("minute"), is(34)); - assertThat((Integer) dbo.get("second"), is(56)); - assertThat((Integer) dbo.get("millisecond"), is(789)); + assertThat(resultDocument).isNotNull(); + assertThat((Integer) resultDocument.get("my_primitiveIntValue")).isEqualTo(42); + assertThat((Integer) resultDocument.keySet().size()).isEqualTo(1); } - /** - * @see DATAMONGO-788 - */ - @Test - public void referencesToGroupIdsShouldBeRenderedProperly() { + @Test // DATAMONGO-788, DATAMONGO-2264 + void referencesToGroupIdsShouldBeRenderedProperly() { mongoTemplate.insert(new DATAMONGO788(1, 1)); mongoTemplate.insert(new DATAMONGO788(1, 1)); @@ -744,32 +1126,29 @@ public void referencesToGroupIdsShouldBeRenderedProperly() { AggregationOperation project = Aggregation.project("xPerY", "x", "y").andExclude("_id"); TypedAggregation aggregation = Aggregation.newAggregation(DATAMONGO788.class, projectFirst, group, - project); - AggregationResults aggResults = mongoTemplate.aggregate(aggregation, DBObject.class); - List items = aggResults.getMappedResults(); - - assertThat(items.size(), is(2)); - assertThat((Integer) items.get(0).get("xPerY"), is(2)); - assertThat((Integer) items.get(0).get("x"), is(2)); - assertThat((Integer) items.get(0).get("y"), is(1)); - assertThat((Integer) items.get(1).get("xPerY"), is(3)); - assertThat((Integer) items.get(1).get("x"), is(1)); - assertThat((Integer) items.get(1).get("y"), is(1)); + project, Aggregation.sort(Sort.by("xPerY"))); + AggregationResults aggResults = mongoTemplate.aggregate(aggregation, Document.class); + List items = aggResults.getMappedResults(); + + assertThat(items.size()).isEqualTo(2); + assertThat((Integer) items.get(0).get("xPerY")).isEqualTo(2); + assertThat((Integer) items.get(0).get("x")).isEqualTo(2); + assertThat((Integer) items.get(0).get("y")).isEqualTo(1); + assertThat((Integer) items.get(1).get("xPerY")).isEqualTo(3); + assertThat((Integer) items.get(1).get("x")).isEqualTo(1); + assertThat((Integer) items.get(1).get("y")).isEqualTo(1); } - /** - * @see DATAMONGO-806 - */ - @Test - public void shouldAllowGroupByIdFields() { + @Test // DATAMONGO-806 + void 
shouldAllowGroupByIdFields() { mongoTemplate.dropCollection(User.class); - LocalDateTime now = new LocalDateTime(); + Instant now = Instant.now(); - User user1 = new User("u1", new PushMessage("1", "aaa", now.toDate())); - User user2 = new User("u2", new PushMessage("2", "bbb", now.minusDays(2).toDate())); - User user3 = new User("u3", new PushMessage("3", "ccc", now.minusDays(1).toDate())); + User user1 = new User("u1", new PushMessage("1", "aaa", now)); + User user2 = new User("u2", new PushMessage("2", "bbb", now.minus(2, ChronoUnit.DAYS))); + User user3 = new User("u3", new PushMessage("3", "ccc", now.minus(1, ChronoUnit.DAYS))); mongoTemplate.save(user1); mongoTemplate.save(user2); @@ -778,24 +1157,21 @@ public void shouldAllowGroupByIdFields() { Aggregation agg = newAggregation( // project("id", "msgs"), // unwind("msgs"), // - match(where("msgs.createDate").gt(now.minusDays(1).toDate())), // + match(where("msgs.createDate").gt(Date.from(now.minus(1, ChronoUnit.DAYS)))), // group("id").push("msgs").as("msgs") // ); - AggregationResults results = mongoTemplate.aggregate(agg, User.class, DBObject.class); + AggregationResults results = mongoTemplate.aggregate(agg, User.class, Document.class); - List mappedResults = results.getMappedResults(); + List mappedResults = results.getMappedResults(); - DBObject firstItem = mappedResults.get(0); - assertThat(firstItem.get("_id"), is(notNullValue())); - assertThat(String.valueOf(firstItem.get("_id")), is("u1")); + Document firstItem = mappedResults.get(0); + assertThat(firstItem.get("_id")).isNotNull(); + assertThat(String.valueOf(firstItem.get("_id"))).isEqualTo("u1"); } - /** - * @see DATAMONGO-840 - */ - @Test - public void shouldAggregateOrderDataToAnInvoice() { + @Test // DATAMONGO-840 + void shouldAggregateOrderDataToAnInvoice() { mongoTemplate.dropCollection(Order.class); @@ -820,22 +1196,19 @@ public void shouldAggregateOrderDataToAnInvoice() { .and("orderId").previousOperation() // .andExpression("netAmount * [0]", taxRate).as("taxAmount") // .andExpression("netAmount * (1 + [0])", taxRate).as("totalAmount") // - ), Invoice.class); + ), Invoice.class); Invoice invoice = results.getUniqueMappedResult(); - assertThat(invoice, is(notNullValue())); - assertThat(invoice.getOrderId(), is(order.getId())); - assertThat(invoice.getNetAmount(), is(closeTo(8.3, 000001))); - assertThat(invoice.getTaxAmount(), is(closeTo(1.577, 000001))); - assertThat(invoice.getTotalAmount(), is(closeTo(9.877, 000001))); + assertThat(invoice).isNotNull(); + assertThat(invoice.getOrderId()).isEqualTo(order.getId()); + assertThat(invoice.getNetAmount()).isCloseTo(8.3, Offset.offset(000001D)); + assertThat(invoice.getTaxAmount()).isCloseTo(1.577, Offset.offset(000001D)); + assertThat(invoice.getTotalAmount()).isCloseTo(9.877, Offset.offset(000001D)); } - /** - * @see DATAMONGO-924 - */ - @Test - public void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() { + @Test // DATAMONGO-924 + void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() { mongoTemplate.dropCollection(CarPerson.class); @@ -860,31 +1233,26 @@ public void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() { .and("lastName").as("lastName"), // group("make")); - AggregationResults result = mongoTemplate.aggregate(agg, DBObject.class); + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); - assertThat(result.getMappedResults(), hasSize(3)); + assertThat(result.getMappedResults()).hasSize(3); } - /** - * @see DATAMONGO-960 - */ - @Test 
- public void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabled() { - - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_SIX)); + @Test // DATAMONGO-960 + void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabled() { createUserWithLikesDocuments(); TypedAggregation agg = createUsersWithCommonLikesAggregation() // .withOptions(newAggregationOptions().allowDiskUse(true).build()); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(5)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(5); assertLikeStats(result.getMappedResults().get(0), "a", 4); assertLikeStats(result.getMappedResults().get(1), "b", 2); @@ -893,65 +1261,76 @@ public void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOp assertLikeStats(result.getMappedResults().get(4), "e", 3); } - /** - * @see DATAMONGO-960 - */ - @Test - public void returnFiveMostCommonLikesShouldReturnStageExecutionInformationWithExplainOptionEnabled() { - - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_SIX)); + @Test // DATAMONGO-1637 + void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabledWhileStreaming() { createUserWithLikesDocuments(); TypedAggregation agg = createUsersWithCommonLikesAggregation() // - .withOptions(newAggregationOptions().explain(true).build()); + .withOptions(newAggregationOptions().allowDiskUse(true).build()); - AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); - assertThat(result.getMappedResults(), is(empty())); + try (Stream stream = mongoTemplate.aggregateStream(agg, LikeStats.class)) { - DBObject rawResult = result.getRawResults(); + List result = stream.toList(); - assertThat(rawResult, is(notNullValue())); - assertThat(rawResult.containsField("stages"), is(true)); + assertThat(result.size()).isEqualTo(5); + + assertLikeStats(result.get(0), "a", 4); + assertLikeStats(result.get(1), "b", 2); + assertLikeStats(result.get(2), "c", 4); + assertLikeStats(result.get(3), "d", 2); + assertLikeStats(result.get(4), "e", 3); + } } - /** - * @see DATAMONGO-954 - */ - @Test - public void shouldSupportReturningCurrentAggregationRoot() { + @Test // DATAMONGO-960 + void returnFiveMostCommonLikesShouldReturnStageExecutionInformationWithExplainOptionEnabled() { + + createUserWithLikesDocuments(); + + TypedAggregation agg = createUsersWithCommonLikesAggregation() // + .withOptions(newAggregationOptions().explain(true).build()); + + AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); + + assertThat(result.getMappedResults()).isEmpty(); + + Document rawResult = result.getRawResults(); + + assertThat(rawResult).isNotNull(); + assertThat(rawResult.containsKey("stages")).isEqualTo(true); + } - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_SIX)); + @Test // DATAMONGO-954, DATAMONGO-2264 + void shouldSupportReturningCurrentAggregationRoot() { mongoTemplate.save(new Person("p1_first", "p1_last", 25)); mongoTemplate.save(new Person("p2_first", "p2_last", 32)); mongoTemplate.save(new 
Person("p3_first", "p3_last", 25)); mongoTemplate.save(new Person("p4_first", "p4_last", 15)); - List personsWithAge25 = mongoTemplate.find(Query.query(where("age").is(25)), DBObject.class, + List personsWithAge25 = mongoTemplate.find(Query.query(where("age").is(25)), Document.class, mongoTemplate.getCollectionName(Person.class)); - Aggregation agg = newAggregation(group("age").push(Aggregation.ROOT).as("users")); - AggregationResults result = mongoTemplate.aggregate(agg, Person.class, DBObject.class); + Aggregation agg = newAggregation(group("age").push(Aggregation.ROOT).as("users"), sort(Sort.by("_id"))); + AggregationResults result = mongoTemplate.aggregate(agg, Person.class, Document.class); - assertThat(result.getMappedResults(), hasSize(3)); - DBObject o = (DBObject) result.getMappedResults().get(2); + assertThat(result.getMappedResults()).hasSize(3); + Document o = result.getMappedResults().get(1); - assertThat(o.get("_id"), is((Object) 25)); - assertThat((List) o.get("users"), hasSize(2)); - assertThat((List) o.get("users"), is(contains(personsWithAge25.toArray()))); + assertThat(o.get("_id")).isEqualTo((Object) 25); + assertThat((List) o.get("users")).hasSize(2); + assertThat((List) o.get("users")).contains(personsWithAge25.toArray()); } /** - * @see DATAMONGO-954 - * @see http - * ://stackoverflow.com/questions/24185987/using-root-inside-spring-data-mongodb-for-retrieving-whole-document + * {@link https://stackoverflow.com/questions/24185987/using-root-inside-spring-data-mongodb-for-retrieving-whole-document} */ - @Test - public void shouldSupportReturningCurrentAggregationRootInReference() { - - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(TWO_DOT_SIX)); + @Test // DATAMONGO-954, DATAMONGO-2264 + void shouldSupportReturningCurrentAggregationRootInReference() { mongoTemplate.save(new Reservation("0123", "42", 100)); mongoTemplate.save(new Reservation("0360", "43", 200)); @@ -964,26 +1343,39 @@ public void shouldSupportReturningCurrentAggregationRootInReference() { .first("timestamp").as("timestamp") // .first(Aggregation.ROOT).as("reservationImage") // ); - AggregationResults result = mongoTemplate.aggregate(agg, Reservation.class, DBObject.class); + AggregationResults result = mongoTemplate.aggregate(agg, Reservation.class, Document.class); - assertThat(result.getMappedResults(), hasSize(2)); + assertThat(result.getMappedResults()).hasSize(2); } - /** - * @see DATAMONGO-975 - */ - @Test - public void shouldRetrieveDateTimeFragementsCorrectly() throws Exception { + @Test // DATAMONGO-1549 + void shouldApplyCountCorrectly() { + + mongoTemplate.save(new Reservation("0123", "42", 100)); + mongoTemplate.save(new Reservation("0360", "43", 200)); + mongoTemplate.save(new Reservation("0360", "44", 300)); + + Aggregation agg = newAggregation( // + count().as("documents"), // + project("documents") // + .andExpression("documents * 2").as("twice")); + AggregationResults result = mongoTemplate.aggregate(agg, Reservation.class, Document.class); + + assertThat(result.getMappedResults()).hasSize(1); + + Document document = result.getMappedResults().get(0); + assertThat(document).containsEntry("documents", 3).containsEntry("twice", 6); + } + + @Test // DATAMONGO-975 + void shouldRetrieveDateTimeFragementsCorrectly() throws Exception { mongoTemplate.dropCollection(ObjectWithDate.class); - DateTime dateTime = new DateTime() // - .withYear(2014) // - .withMonthOfYear(2) // - .withDayOfMonth(7) // - .withTime(3, 4, 5, 6).toDateTime(DateTimeZone.UTC).toDateTimeISO(); + ZonedDateTime dateTime = 
ZonedDateTime.of(LocalDateTime.of(LocalDate.of(2014, 2, 7), LocalTime.of(3, 4, 5, 6)), + ZoneId.of("UTC")); - ObjectWithDate owd = new ObjectWithDate(dateTime.toDate()); + ObjectWithDate owd = new ObjectWithDate(Date.from(dateTime.toInstant())); mongoTemplate.insert(owd); ProjectionOperation dateProjection = Aggregation.project() // @@ -1002,57 +1394,95 @@ public void shouldRetrieveDateTimeFragementsCorrectly() throws Exception { ; Aggregation agg = newAggregation(dateProjection); - AggregationResults result = mongoTemplate.aggregate(agg, ObjectWithDate.class, DBObject.class); + AggregationResults result = mongoTemplate.aggregate(agg, ObjectWithDate.class, Document.class); - assertThat(result.getMappedResults(), hasSize(1)); - DBObject dbo = result.getMappedResults().get(0); + assertThat(result.getMappedResults()).hasSize(1); + Document document = result.getMappedResults().get(0); - assertThat(dbo.get("hour"), is((Object) dateTime.getHourOfDay())); - assertThat(dbo.get("min"), is((Object) dateTime.getMinuteOfHour())); - assertThat(dbo.get("second"), is((Object) dateTime.getSecondOfMinute())); - assertThat(dbo.get("millis"), is((Object) dateTime.getMillisOfSecond())); - assertThat(dbo.get("year"), is((Object) dateTime.getYear())); - assertThat(dbo.get("month"), is((Object) dateTime.getMonthOfYear())); + assertThat(document.get("hour")).isEqualTo((Object) dateTime.getHour()); + assertThat(document.get("min")).isEqualTo((Object) dateTime.getMinute()); + assertThat(document.get("second")).isEqualTo((Object) dateTime.getSecond()); + assertThat(document.get("millis")).isEqualTo((Object) dateTime.get(ChronoField.MILLI_OF_SECOND)); + assertThat(document.get("year")).isEqualTo((Object) dateTime.getYear()); + assertThat(document.get("month")).isEqualTo((Object) dateTime.getMonthValue()); // dateTime.getWeekOfWeekyear()) returns 6 since for MongoDB the week starts on sunday and not on monday. 
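// $week counts completed Sundays: five Sundays (Jan 5, 12, 19, 26 and Feb 2) precede
// 2014-02-07, so MongoDB yields week 5 while the ISO week-of-year would be 6.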
- assertThat(dbo.get("week"), is((Object) 5)); - assertThat(dbo.get("dayOfYear"), is((Object) dateTime.getDayOfYear())); - assertThat(dbo.get("dayOfMonth"), is((Object) dateTime.getDayOfMonth())); + assertThat(document.get("week")).isEqualTo((Object) 5); + assertThat(document.get("dayOfYear")).isEqualTo((Object) dateTime.getDayOfYear()); + assertThat(document.get("dayOfMonth")).isEqualTo((Object) dateTime.getDayOfMonth()); // dateTime.getDayOfWeek() - assertThat(dbo.get("dayOfWeek"), is((Object) 6)); - assertThat(dbo.get("dayOfYearPlus1Day"), is((Object) dateTime.plusDays(1).getDayOfYear())); - assertThat(dbo.get("dayOfYearPlus1DayManually"), is((Object) dateTime.plusDays(1).getDayOfYear())); + assertThat(document.get("dayOfWeek")).isEqualTo((Object) 6); + assertThat(document.get("dayOfYearPlus1Day")).isEqualTo((Object) dateTime.plusDays(1).getDayOfYear()); + assertThat(document.get("dayOfYearPlus1DayManually")).isEqualTo((Object) dateTime.plusDays(1).getDayOfYear()); } - /** - * @see DATAMONGO-1127 - */ - @Test - public void shouldSupportGeoNearQueriesForAggregationWithDistanceField() { + @Test // DATAMONGO-1127 + void shouldSupportGeoNearQueriesForAggregationWithDistanceField() { + + mongoTemplate.insertAll(Arrays.asList(TestEntities.geolocation().pennStation(), + TestEntities.geolocation().tenGenOffice(), TestEntities.geolocation().flatironBuilding())); + + mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location")); + + NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).maxDistance(150); + + Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance")); + AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, Document.class); + + assertThat(result.getMappedResults()).hasSize(3); + + Document firstResult = result.getMappedResults().get(0); + assertThat(firstResult.containsKey("distance")).isEqualTo(true); + assertThat((Double) firstResult.get("distance")).isCloseTo(117.620092203928, Offset.offset(0.00001D)); + } + + @Test // DATAMONGO-1348 + void shouldSupportGeoJsonInGeoNearQueriesForAggregationWithDistanceField() { mongoTemplate.insert(new Venue("Penn Station", -73.99408, 40.75057)); mongoTemplate.insert(new Venue("10gen Office", -73.99171, 40.738868)); mongoTemplate.insert(new Venue("Flatiron Building", -73.988135, 40.741404)); - mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location")); + mongoTemplate.indexOps(Venue.class) + .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); - NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150); + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance")); - AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, DBObject.class); + AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, Document.class); - assertThat(result.getMappedResults(), hasSize(3)); + assertThat(result.getMappedResults()).hasSize(3); - DBObject firstResult = result.getMappedResults().get(0); - assertThat(firstResult.containsField("distance"), is(true)); - assertThat((Double) firstResult.get("distance"), closeTo(117.620092203928, 0.00001)); + Document firstResult = result.getMappedResults().get(0); + assertThat(firstResult.containsKey("distance")).isEqualTo(true); + assertThat((Double) firstResult.get("distance")).isCloseTo(117.61940988193759, 
+ }

- /**
- * @see DATAMONGO-1133
- */
- @Test
- public void shouldHonorFieldAliasesForFieldReferences() {
+ @Test // DATAMONGO-1348
+ void shouldSupportGeoJsonInGeoNearQueriesForAggregationWithDistanceFieldInMiles() {
+
+ mongoTemplate.insert(new Venue("Penn Station", -73.99408, 40.75057));
+ mongoTemplate.insert(new Venue("10gen Office", -73.99171, 40.738868));
+ mongoTemplate.insert(new Venue("Flatiron Building", -73.988135, 40.741404));
+
+ mongoTemplate.indexOps(Venue.class)
+ .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE));
+
+ NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150)
+ .inMiles();
+
+ Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance"));
+ AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, Document.class);
+
+ assertThat(result.getMappedResults()).hasSize(3);
+
+ Document firstResult = result.getMappedResults().get(0);
+ assertThat(firstResult.containsKey("distance")).isEqualTo(true);
+ assertThat((Double) firstResult.get("distance")).isCloseTo(73.08517, Offset.offset(0.00001D));
+ }
+
+ @Test // DATAMONGO-1133
+ void shouldHonorFieldAliasesForFieldReferences() {

mongoTemplate.insert(new MeterData("m1", "counter1", 42));
mongoTemplate.insert(new MeterData("m1", "counter1", 13));
@@ -1062,22 +1492,17 @@ public void shouldHonorFieldAliasesForFieldReferences() {
match(where("resourceId").is("m1")), //
group("counterName").sum("counterVolume").as("totalValue"));

- AggregationResults results = mongoTemplate.aggregate(agg, DBObject.class);
+ AggregationResults results = mongoTemplate.aggregate(agg, Document.class);

- assertThat(results.getMappedResults(), hasSize(1));
- DBObject result = results.getMappedResults().get(0);
+ assertThat(results.getMappedResults()).hasSize(1);
+ Document result = results.getMappedResults().get(0);

- assertThat(result.get("_id"), is(equalTo((Object) "counter1")));
- assertThat(result.get("totalValue"), is(equalTo((Object) 100.0)));
+ assertThat(result.get("_id")).isEqualTo("counter1");
+ assertThat(result.get("totalValue")).isEqualTo(100.0);
}

- /**
- * @see DATAMONGO-1326
- */
- @Test
- public void shouldLookupPeopleCorectly() {
-
- assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO));
+ @Test // DATAMONGO-1326
+ void shouldLookupPeopleCorrectly() {

createUsersWithReferencedPersons();

@@ -1085,23 +1510,57 @@ public void shouldLookupPeopleCorectly() {
lookup("person", "_id", "firstname", "linkedPerson"), //
sort(ASC, "id"));

- AggregationResults results = mongoTemplate.aggregate(agg, User.class, DBObject.class);
+ AggregationResults results = mongoTemplate.aggregate(agg, User.class, Document.class);

- List mappedResults = results.getMappedResults();
+ List mappedResults = results.getMappedResults();

- DBObject firstItem = mappedResults.get(0);
+ Document firstItem = mappedResults.get(0);

- assertThat(firstItem, isBsonObject().containing("_id", "u1"));
- assertThat(firstItem, isBsonObject().containing("linkedPerson.[0].firstname", "u1"));
+ assertThat(firstItem).containsEntry("_id", "u1");
+ assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1");
}

- /**
- * @see DATAMONGO-1326
- */
- @Test
- public void shouldGroupByAndLookupPeopleCorectly() {
+ @Test // GH-3322
+ @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0")
+ void shouldLookupPeopleCorrectlyWithPipeline() {
+ createUsersWithReferencedPersons();
+
+ TypedAggregation agg = newAggregation(User.class, //
lookup().from("person").localField("_id").foreignField("firstname").pipeline(match(where("firstname").is("u1"))).as("linkedPerson"), // + sort(ASC, "id")); + + AggregationResults results = mongoTemplate.aggregate(agg, User.class, Document.class); + + List mappedResults = results.getMappedResults(); + + Document firstItem = mappedResults.get(0); + + assertThat(firstItem).containsEntry("_id", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); + } + + @Test // GH-3322 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void shouldLookupPeopleCorrectlyWithPipelineAndLet() { + createUsersWithReferencedPersons(); + + TypedAggregation agg = newAggregation(User.class, // + lookup().from("person").localField("_id").foreignField("firstname").let(Let.ExpressionVariable.newVariable("the_id").forField("_id")).pipeline( + match(ctx -> new Document("$expr", new Document("$eq", List.of("$$the_id", "u1"))))).as("linkedPerson"), + sort(ASC, "id")); + + AggregationResults results = mongoTemplate.aggregate(agg, User.class, Document.class); + + List mappedResults = results.getMappedResults(); + + Document firstItem = mappedResults.get(0); + + assertThat(firstItem).containsEntry("_id", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); + } - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO)); + @Test // DATAMONGO-1326 + void shouldGroupByAndLookupPeopleCorrectly() { createUsersWithReferencedPersons(); @@ -1110,14 +1569,508 @@ public void shouldGroupByAndLookupPeopleCorectly() { lookup("person", "foreignKey", "firstname", "linkedPerson"), // sort(ASC, "foreignKey", "linkedPerson.firstname")); - AggregationResults results = mongoTemplate.aggregate(agg, User.class, DBObject.class); + AggregationResults results = mongoTemplate.aggregate(agg, User.class, Document.class); + + List mappedResults = results.getMappedResults(); + + Document firstItem = mappedResults.get(0); + + assertThat(firstItem).containsEntry("foreignKey", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); + } + + @Test // DATAMONGO-1418, DATAMONGO-1824 + @MongoVersion(asOf = "2.6") + void shouldCreateOutputCollection() { + + createPersonDocuments(); + + String tempOutCollection = "personQueryTemp"; + TypedAggregation agg = newAggregation(Person.class, // + group("sex").count().as("count"), // + sort(DESC, "count"), // + out(tempOutCollection)); + + AggregationResults results = mongoTemplate.aggregate(agg, Document.class); + + assertThat(results.getMappedResults()).hasSize(2); + + List list = mongoTemplate.findAll(Document.class, tempOutCollection); + + assertThat(list).hasSize(2); + assertThat(list.get(0)).containsEntry("_id", "MALE").containsEntry("count", 3); + assertThat(list.get(1)).containsEntry("_id", "FEMALE").containsEntry("count", 2); + + mongoTemplate.dropCollection(tempOutCollection); + } + + @Test // DATAMONGO-1637 + void shouldCreateOutputCollectionWhileStreaming() { + + createPersonDocuments(); + + String tempOutCollection = "personQueryTemp"; + TypedAggregation agg = newAggregation(Person.class, // + group("sex").count().as("count"), // + sort(DESC, "count"), // + out(tempOutCollection)); + + mongoTemplate.aggregateStream(agg, Document.class).close(); + + List list = mongoTemplate.findAll(Document.class, tempOutCollection); + + assertThat(list).hasSize(2); + assertThat(list.get(0)).containsEntry("_id", "MALE").containsEntry("count", 3); + assertThat(list.get(1)).containsEntry("_id", "FEMALE").containsEntry("count", 2); + + 
mongoTemplate.dropCollection(tempOutCollection); + } + + @Test // DATAMONGO-1637 + void shouldReturnDocumentsWithOutputCollectionWhileStreaming() { + + createPersonDocuments(); + + String tempOutCollection = "personQueryTemp"; + TypedAggregation agg = newAggregation(Person.class, // + group("sex").count().as("count"), // + sort(DESC, "count"), // + out(tempOutCollection)); + + try (Stream stream = mongoTemplate.aggregateStream(agg, Document.class)) { + + List result = stream.toList(); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).containsEntry("_id", "MALE").containsEntry("count", 3); + assertThat(result.get(1)).containsEntry("_id", "FEMALE").containsEntry("count", 2); + } + + mongoTemplate.dropCollection(tempOutCollection); + } + + private void createPersonDocuments() { + + mongoTemplate.save(new Person("Anna", "Ivanova", 21, Person.Sex.FEMALE)); + mongoTemplate.save(new Person("Pavel", "Sidorov", 36, Person.Sex.MALE)); + mongoTemplate.save(new Person("Anastasia", "Volochkova", 29, Person.Sex.FEMALE)); + mongoTemplate.save(new Person("Igor", "Stepanov", 31, Person.Sex.MALE)); + mongoTemplate.save(new Person("Leoniv", "Yakubov", 55, Person.Sex.MALE)); + } + + @Test // DATAMONGO-1418, DATAMONGO-2536 + void outShouldOutBeTheLastOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(match(new Criteria()), // + group("field1").count().as("totalCount"), // + out("collection1"), // + skip(100L)).toPipeline(DEFAULT_CONTEXT)); + } + + @Test // DATAMONGO-1325 + void shouldApplySampleCorrectly() { + + createUserWithLikesDocuments(); + + TypedAggregation agg = newAggregation(UserWithLikes.class, // + unwind("likes"), // + sample(3) // + ); + + assertThat(agg.toString()).isNotNull(); + + AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); + assertThat(result.getMappedResults().size()).isEqualTo(3); + } + + @Test // DATAMONGO-1457 + @MongoVersion(asOf = "3.2") + void sliceShouldBeAppliedCorrectly() { + + createUserWithLikesDocuments(); + + TypedAggregation agg = newAggregation(UserWithLikes.class, match(new Criteria()), + project().and("likes").slice(2)); + + AggregationResults result = mongoTemplate.aggregate(agg, UserWithLikes.class); + + assertThat(result.getMappedResults()).hasSize(9); + for (UserWithLikes user : result) { + assertThat(user.likes.size() <= 2).isEqualTo(true); + } + } + + @Test // DATAMONGO-1491 + void filterShouldBeAppliedCorrectly() { + + Item item43 = Item.builder().itemId("43").quantity(2).price(2L).build(); + Item item2 = Item.builder().itemId("2").quantity(1).price(240L).build(); + Sales sales1 = Sales.builder().id("0").items(Arrays.asList( // + item43, item2)) // + .build(); + + Item item23 = Item.builder().itemId("23").quantity(3).price(110L).build(); + Item item103 = Item.builder().itemId("103").quantity(4).price(5L).build(); + Item item38 = Item.builder().itemId("38").quantity(1).price(300L).build(); + Sales sales2 = Sales.builder().id("1").items(Arrays.asList( // + item23, item103, item38)).build(); + + Item item4 = Item.builder().itemId("4").quantity(1).price(23L).build(); + Sales sales3 = Sales.builder().id("2").items(Arrays.asList( // + item4)).build(); + + mongoTemplate.insert(Arrays.asList(sales1, sales2, sales3), Sales.class); + + TypedAggregation agg = newAggregation(Sales.class, project().and("items") + .filter("item", ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100)).as("items")); + + assertThat(mongoTemplate.aggregate(agg, Sales.class).getMappedResults()).contains( + 
Sales.builder().id("0").items(Collections.singletonList(item2)).build(), + Sales.builder().id("1").items(Arrays.asList(item23, item38)).build(), + Sales.builder().id("2").items(Collections. emptyList()).build()); + } + + @Test // DATAMONGO-1538 + void letShouldBeAppliedCorrectly() { + + Sales2 sales1 = Sales2.builder().id("1").price(10).tax(0.5F).applyDiscount(true).build(); + Sales2 sales2 = Sales2.builder().id("2").price(10).tax(0.25F).applyDiscount(false).build(); + + mongoTemplate.insert(Arrays.asList(sales1, sales2), Sales2.class); + + ExpressionVariable total = ExpressionVariable.newVariable("total") + .forExpression(ArithmeticOperators.valueOf("price").sum().and("tax")); + ExpressionVariable discounted = ExpressionVariable.newVariable("discounted") + .forExpression(ConditionalOperators.Cond.when("applyDiscount").then(0.9D).otherwise(1.0D)); + + TypedAggregation agg = Aggregation.newAggregation(Sales2.class, + Aggregation.project().and(VariableOperators.Let.define(total, discounted) + .andApply(ArithmeticOperators.valueOf("total").multiplyBy("discounted"))).as("finalTotal")); + + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + assertThat(result.getMappedResults()).contains(new Document("_id", "1").append("finalTotal", 9.450000000000001D), + new Document("_id", "2").append("finalTotal", 10.25D)); + } + + @Test // DATAMONGO-1551, DATAMONGO-2264 + void graphLookupShouldBeAppliedCorrectly() { + + Employee em1 = Employee.builder().id(1).name("Dev").build(); + Employee em2 = Employee.builder().id(2).name("Eliot").reportsTo("Dev").build(); + Employee em4 = Employee.builder().id(4).name("Andrew").reportsTo("Eliot").build(); + + mongoTemplate.insert(Arrays.asList(em1, em2, em4), Employee.class); + + TypedAggregation agg = Aggregation.newAggregation(Employee.class, + match(Criteria.where("name").is("Andrew")), // + Aggregation.graphLookup("employee") // + .startWith("reportsTo") // + .connectFrom("reportsTo") // + .connectTo("name") // + .depthField("depth") // + .maxDepth(5) // + .as("reportingHierarchy"), // + project("id", "depth", "name", "reportsTo", "reportingHierarchy")); + + AggregationResults result = mongoTemplate.aggregate(agg, Document.class); + + Document object = result.getUniqueMappedResult(); + List list = (List) object.get("reportingHierarchy"); + + assertThat(object).containsEntry("name", "Andrew").containsEntry("reportsTo", "Eliot"); + assertThat(list).containsOnly( + new Document("_id", 2).append("name", "Eliot").append("reportsTo", "Dev").append("depth", 0L).append("_class", + Employee.class.getName()), + new Document("_id", 1).append("name", "Dev").append("depth", 1L).append("_class", Employee.class.getName())); + } + + @Test // DATAMONGO-1552 + void bucketShouldCollectDocumentsIntoABucket() { + + Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); + Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); + Art a3 = Art.builder().id(3).title("Dancer").artist("Miro").year(1925).price(76.04).build(); + Art a4 = Art.builder().id(4).title("The Great Wave off Kanagawa").artist("Hokusai").price(167.30).build(); + + mongoTemplate.insert(Arrays.asList(a1, a2, a3, a4), Art.class); + + TypedAggregation aggregation = newAggregation(Art.class, // + bucket("price") // + .withBoundaries(0, 100, 200) // + .withDefaultBucket("other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles") // + .andOutputExpression("price * 
10").sum().as("sum")); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(3); + + // { "_id" : 0 , "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} + Document bound0 = result.getMappedResults().get(0); + assertThat(bound0).containsEntry("count", 1).containsEntry("titles.[0]", "Dancer"); + assertThat((Double) bound0.get("sum")).isCloseTo(760.40, Offset.offset(0.1D)); + + // { "_id" : 100 , "count" : 2 , "titles" : [ "The Pillars of Society" , "The Great Wave off Kanagawa"] , "sum" : + // 3672.9} + Document bound100 = result.getMappedResults().get(1); + assertThat(bound100).containsEntry("count", 2).containsEntry("_id", 100); + assertThat((List) bound100.get("titles")).contains("The Pillars of Society", "The Great Wave off Kanagawa"); + assertThat((Double) bound100.get("sum")).isCloseTo(3672.9, Offset.offset(0.1D)); + } + + @Test // DATAMONGO-1552, DATAMONGO-2437 + void bucketAutoShouldCollectDocumentsIntoABucket() { + + Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); + Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); + Art a3 = Art.builder().id(3).title("Dancer").artist("Miro").year(1925).price(76.04).build(); + Art a4 = Art.builder().id(4).title("The Great Wave off Kanagawa").artist("Hokusai").price(167.30).build(); + + mongoTemplate.insert(Arrays.asList(a1, a2, a3, a4), Art.class); + + TypedAggregation aggregation = newAggregation(Art.class, // + bucketAuto(ArithmeticOperators.Multiply.valueOf("price").multiplyBy(10), 3) // + .withGranularity(Granularities.E12) // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles") // + .andOutputExpression("price * 10").sum().as("sum")); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(3); + + // { "_id" : { "min" : 680.0 , "max" : 820.0 }, "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} + Document bound0 = result.getMappedResults().get(0); + assertThat(bound0).containsEntry("count", 1).containsEntry("titles.[0]", "Dancer").containsEntry("_id.min", 680.0) + .containsKey("_id.max"); + + // { "_id" : { "min" : 820.0 , "max" : 1800.0 }, "count" : 1 , "titles" : [ "The Great Wave off Kanagawa"] , "sum" : + // 1673.0} + Document bound1 = result.getMappedResults().get(1); + assertThat(bound1).containsEntry("count", 1).containsEntry("_id.min", 820.0); + assertThat((List) bound1.get("titles")).contains("The Great Wave off Kanagawa"); + assertThat((Double) bound1.get("sum")).isCloseTo(1673.0, Offset.offset(0.1D)); + } + + @Test // DATAMONGO-1552 + void facetShouldCreateFacets() { + + Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); + Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); + Art a3 = Art.builder().id(3).title("Dancer").artist("Miro").year(1925).price(76.04).build(); + Art a4 = Art.builder().id(4).title("The Great Wave off Kanagawa").artist("Hokusai").price(167.30).build(); + + mongoTemplate.insert(Arrays.asList(a1, a2, a3, a4), Art.class); + + BucketAutoOperation bucketPrice = bucketAuto(ArithmeticOperators.Multiply.valueOf("price").multiplyBy(10), 3) // + .withGranularity(Granularities.E12) // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles") // + 
.andOutputExpression("price * 10") // + .sum().as("sum"); + + TypedAggregation aggregation = newAggregation(Art.class, // + project("title", "artist", "year", "price"), // + facet(bucketPrice).as("categorizeByPrice") // + .and(bucketAuto("year", 3)).as("categorizeByYear")); + + AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); + assertThat(result.getMappedResults().size()).isEqualTo(1); + + Document mappedResult = result.getUniqueMappedResult(); + + // [ { "_id" : { "min" : 680.0 , "max" : 820.0} , "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} + // , + // { "_id" : { "min" : 820.0 , "max" : 1800.0} , "count" : 1 , "titles" : [ "The Great Wave off Kanagawa"] , "sum" : + // 1673.0} , + // { "_id" : { "min" : 1800.0 , "max" : 3300.0} , "count" : 2 , "titles" : [ "The Pillars of Society" , "Melancholy + // III"] , "sum" : 4799.9}] + List categorizeByPrice = (List) mappedResult.get("categorizeByPrice"); + assertThat(categorizeByPrice).hasSize(3); + + // [ { "_id" : { "min" : null , "max" : 1902} , "count" : 1} , + // { "_id" : { "min" : 1902-2018 , "max" : 1925} , "count" : 1} , + // { "_id" : { "min" : 1925-2018 , "max" : 1926} , "count" : 2}] + List categorizeByYear = (List) mappedResult.get("categorizeByYear"); + assertThat(categorizeByYear).hasSize(3); + } + + @Test // GH-4473 + @EnableIfMongoServerVersion(isGreaterThanEqual = "7.0") + void percentileShouldBeAppliedCorrectly() { + + DATAMONGO788 objectToSave = new DATAMONGO788(62, 81, 80); + DATAMONGO788 objectToSave2 = new DATAMONGO788(60, 83, 79); + + mongoTemplate.insert(objectToSave); + mongoTemplate.insert(objectToSave2); + + Aggregation agg = Aggregation.newAggregation( + project().and(ArithmeticOperators.valueOf("x").percentile(0.9, 0.4).and("y").and("xField")) + .as("percentileValues")); + + AggregationResults result = mongoTemplate.aggregate(agg, DATAMONGO788.class, Document.class); + + // MongoDB server returns $percentile as an array of doubles + List rawResults = (List) result.getRawResults().get("results"); + assertThat((List) rawResults.get(0).get("percentileValues")).containsExactly(81.0, 80.0); + assertThat((List) rawResults.get(1).get("percentileValues")).containsExactly(83.0, 79.0); + } + + @Test // GH-4472 + @EnableIfMongoServerVersion(isGreaterThanEqual = "7.0") + void medianShouldBeAppliedCorrectly() { + + DATAMONGO788 objectToSave = new DATAMONGO788(62, 81, 80); + DATAMONGO788 objectToSave2 = new DATAMONGO788(60, 83, 79); + + mongoTemplate.insert(objectToSave); + mongoTemplate.insert(objectToSave2); + + Aggregation agg = Aggregation.newAggregation( + project().and(ArithmeticOperators.valueOf("x").median().and("y").and("xField")) + .as("medianValue")); + + AggregationResults result = mongoTemplate.aggregate(agg, DATAMONGO788.class, Document.class); + + // MongoDB server returns $median a Double + List rawResults = (List) result.getRawResults().get("results"); + assertThat(rawResults.get(0).get("medianValue")).isEqualTo(80.0); + assertThat(rawResults.get(1).get("medianValue")).isEqualTo(79.0); + } + + @Test // DATAMONGO-1986 + void runMatchOperationCriteriaThroughQueryMapperForTypedAggregation() { + + mongoTemplate.insertAll(TestEntities.geolocation().newYork()); + + Aggregation aggregation = newAggregation(Venue.class, + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + AggregationResults groupResults = mongoTemplate.aggregate(aggregation, "newyork", 
Document.class); + + assertThat(groupResults.getMappedResults().size()).isEqualTo(4); + } + + @Test // DATAMONGO-1986 + void runMatchOperationCriteriaThroughQueryMapperForUntypedAggregation() { + + mongoTemplate.insertAll(TestEntities.geolocation().newYork()); + + Aggregation aggregation = newAggregation( + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + AggregationResults groupResults = mongoTemplate.aggregate(aggregation, "newyork", Document.class); + + assertThat(groupResults.getMappedResults().size()).isEqualTo(4); + } + + @Test // DATAMONGO-2437 + void shouldReadComplexIdValueCorrectly() { + + WithComplexId source = new WithComplexId(); + source.id = new ComplexId(); + source.id.p1 = "v1"; + source.id.p2 = "v2"; + + mongoTemplate.save(source); - List mappedResults = results.getMappedResults(); + AggregationResults result = mongoTemplate.aggregate(newAggregation(project("id")), + WithComplexId.class, WithComplexId.class); + assertThat(result.getMappedResults()).containsOnly(source); + } + + @Test // DATAMONGO-2536 + void skipOutputDoesNotReadBackAggregationResults() { + + createTagDocuments(); + + Aggregation agg = newAggregation( // + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("n"), // + project("n") // + .and("tag").previousOperation(), // + sort(DESC, "n") // + ).withOptions(AggregationOptions.builder().skipOutput().build()); + + AggregationResults results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, TagCount.class); + + assertThat(results.getMappedResults()).isEmpty(); + assertThat(results.getRawResults()).isEmpty(); + } + + @Test // DATAMONGO-2635 + void mapsEnumsInMatchClauseUsingInCriteriaCorrectly() { + + WithEnum source = new WithEnum(); + source.enumValue = MyEnum.TWO; + source.id = "id-1"; + + mongoTemplate.save(source); + + Aggregation agg = newAggregation(match(where("enumValue").in(Collections.singletonList(MyEnum.TWO)))); + + AggregationResults results = mongoTemplate.aggregate(agg, mongoTemplate.getCollectionName(WithEnum.class), + Document.class); + assertThat(results.getMappedResults()).hasSize(1); + } + + @Test // GH-4043 + void considersMongoIdWithinTypedCollections() { + + UserRef userRef = new UserRef(); + userRef.id = "4ee921aca44fd11b3254e001"; + userRef.name = "u-1"; + + Widget widget = new Widget(); + widget.id = "w-1"; + widget.users = List.of(userRef); + + mongoTemplate.save(widget); + + Criteria criteria = Criteria.where("users").elemMatch(Criteria.where("id").is("4ee921aca44fd11b3254e001")); + AggregationResults aggregate = mongoTemplate.aggregate(newAggregation(match(criteria)), Widget.class, Widget.class); + assertThat(aggregate.getMappedResults()).contains(widget); + } + + @Test // GH-4443 + void shouldHonorFieldAliasesForFieldReferencesUsingFieldExposingOperation() { + + Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build(); + Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build(); + mongoTemplate.insert(Arrays.asList(item1, item2), Item.class); + + TypedAggregation aggregation = newAggregation(Item.class, + match(where("itemId").is("1")), + unwind("tags"), + match(where("itemId").is("1").and("tags").is("c"))); + AggregationResults results = mongoTemplate.aggregate(aggregation, Document.class); + List mappedResults = results.getMappedResults(); + assertThat(mappedResults).hasSize(1); + assertThat(mappedResults.get(0)).containsEntry("item_id", "1"); + } + + 
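+ // roughly, the aggregation above renders as the following pipeline (sketch; the @Field alias
+ // "item_id" is applied by the query mapper):
+ // [ { "$match" : { "item_id" : "1" } },
+ //   { "$unwind" : "$tags" },
+ //   { "$match" : { "item_id" : "1", "tags" : "c" } } ]
+ // $unwind exposes the original fields unchanged, so the alias still applies in the second $match;
+ // the next test shows how an intermediate $project resets that context.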
@Test // GH-4443 + void projectShouldResetContextToAvoidMappingFieldsAgainstANoLongerExistingTarget() { - DBObject firstItem = mappedResults.get(0); + Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build(); + Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build(); + mongoTemplate.insert(Arrays.asList(item1, item2), Item.class); - assertThat(firstItem, isBsonObject().containing("foreignKey", "u1")); - assertThat(firstItem, isBsonObject().containing("linkedPerson.[0].firstname", "u1")); + TypedAggregation aggregation = newAggregation(Item.class, + match(where("itemId").is("1")), + unwind("tags"), + project().and("itemId").as("itemId").and("tags").as("tags"), + match(where("itemId").is("1").and("tags").is("c"))); + + AggregationResults results = mongoTemplate.aggregate(aggregation, Document.class); + List mappedResults = results.getMappedResults(); + assertThat(mappedResults).hasSize(1); + assertThat(mappedResults.get(0)).containsEntry("itemId", "1"); } private void createUsersWithReferencedPersons() { @@ -1143,9 +2096,9 @@ private void createUsersWithReferencedPersons() { private void assertLikeStats(LikeStats like, String id, long count) { - assertThat(like, is(notNullValue())); - assertThat(like.id, is(id)); - assertThat(like.count, is(count)); + assertThat(like).isNotNull(); + assertThat(like.id).isEqualTo(id); + assertThat(like.count).isEqualTo(count); } private void createUserWithLikesDocuments() { @@ -1162,16 +2115,16 @@ private void createUserWithLikesDocuments() { private void createTagDocuments() { - DBCollection coll = mongoTemplate.getCollection(INPUT_COLLECTION); + MongoCollection coll = mongoTemplate.getCollection(INPUT_COLLECTION); - coll.insert(createDocument("Doc1", "spring", "mongodb", "nosql")); - coll.insert(createDocument("Doc2", "spring", "mongodb")); - coll.insert(createDocument("Doc3", "spring")); + coll.insertOne(createDocument("Doc1", "spring", "mongodb", "nosql")); + coll.insertOne(createDocument("Doc2", "spring", "mongodb")); + coll.insertOne(createDocument("Doc3", "spring")); } - private static DBObject createDocument(String title, String... tags) { + private static Document createDocument(String title, String... tags) { - DBObject doc = new BasicDBObject("title", title); + Document doc = new Document("title", title); List tagList = new ArrayList(); for (String tag : tags) { @@ -1184,11 +2137,12 @@ private static DBObject createDocument(String title, String... tags) { private static void assertTagCount(String tag, int n, TagCount tagCount) { - assertThat(tagCount.getTag(), is(tag)); - assertThat(tagCount.getN(), is(n)); + assertThat(tagCount.getTag()).isEqualTo(tag); + assertThat(tagCount.getN()).isEqualTo(n); } static class DATAMONGO753 { + PD[] pd; DATAMONGO753 withPDs(PD... pds) { @@ -1198,10 +2152,11 @@ DATAMONGO753 withPDs(PD... 
pds) { } static class PD { + String pDch; @org.springframework.data.mongodb.core.mapping.Field("alias") int up; - public PD(String pDch, int up) { + PD(String pDch, int up) { this.pDch = pDch; this.up = up; } @@ -1216,17 +2171,21 @@ static class DATAMONGO788 { public DATAMONGO788() {} - public DATAMONGO788(int x, int y) { + DATAMONGO788(int x, int y) { this.x = x; this.xField = x; this.y = y; this.yField = y; } + + public DATAMONGO788(int x, int y, int xField) { + this.x = x; + this.y = y; + this.xField = xField; + } } - /** - * @see DATAMONGO-806 - */ + // DATAMONGO-806 static class User { @Id String id; @@ -1234,15 +2193,13 @@ static class User { public User() {} - public User(String id, PushMessage... msgs) { + User(String id, PushMessage... msgs) { this.id = id; this.msgs = Arrays.asList(msgs); } } - /** - * @see DATAMONGO-806 - */ + // DATAMONGO-806 static class PushMessage { @Id String id; @@ -1251,7 +2208,11 @@ static class PushMessage { public PushMessage() {} - public PushMessage(String id, String content, Date createDate) { + PushMessage(String id, String content, Instant createDate) { + this(id, content, Date.from(createDate)); + } + + PushMessage(String id, String content, Date createDate) { this.id = id; this.content = content; this.createDate = createDate; @@ -1266,7 +2227,7 @@ static class CarPerson { private String lastName; private Descriptors descriptors; - public CarPerson(String firstname, String lastname, Entry... entries) { + CarPerson(String firstname, String lastname, Entry... entries) { this.firstName = firstname; this.lastName = lastname; @@ -1278,6 +2239,7 @@ public CarPerson(String firstname, String lastname, Entry... entries) { @SuppressWarnings("unused") static class Descriptors { + private CarDescriptor carDescriptor; } @@ -1285,7 +2247,7 @@ static class CarDescriptor { private List entries = new ArrayList(); - public CarDescriptor(Entry... entries) { + CarDescriptor(Entry... entries) { for (Entry entry : entries) { this.entries.add(entry); @@ -1294,13 +2256,14 @@ public CarDescriptor(Entry... 
entries) { @SuppressWarnings("unused") static class Entry { + private String make; private String model; private int year; public Entry() {} - public Entry(String make, String model, int year) { + Entry(String make, String model, int year) { this.make = make; this.model = model; this.year = year; @@ -1316,7 +2279,7 @@ static class Reservation { public Reservation() {} - public Reservation(String hotelCode, String confirmationNumber, int timestamp) { + Reservation(String hotelCode, String confirmationNumber, int timestamp) { this.hotelCode = hotelCode; this.confirmationNumber = confirmationNumber; this.timestamp = timestamp; @@ -1327,8 +2290,758 @@ static class ObjectWithDate { Date dateValue; - public ObjectWithDate(Date dateValue) { + ObjectWithDate(Date dateValue) { this.dateValue = dateValue; } } + + // DATAMONGO-861 + @org.springframework.data.mongodb.core.mapping.Document(collection = "inventory") + static class InventoryItem { + + int id; + String item; + String description; + int qty; + + public InventoryItem() {} + + InventoryItem(int id, String item, int qty) { + + this.id = id; + this.item = item; + this.qty = qty; + } + + InventoryItem(int id, String item, String description, int qty) { + + this.id = id; + this.item = item; + this.description = description; + this.qty = qty; + } + } + + // DATAMONGO-1491 + static class Sales { + + @Id String id; + List items; + + Sales(String id, List items) { + this.id = id; + this.items = items; + } + + public static SalesBuilder builder() { + return new SalesBuilder(); + } + + public String getId() { + return this.id; + } + + public List getItems() { + return this.items; + } + + public void setId(String id) { + this.id = id; + } + + public void setItems(List items) { + this.items = items; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sales sales = (Sales) o; + return Objects.equals(id, sales.id) && Objects.equals(items, sales.items); + } + + @Override + public int hashCode() { + return Objects.hash(id, items); + } + + public String toString() { + return "AggregationTests.Sales(id=" + this.getId() + ", items=" + this.getItems() + ")"; + } + + public static class SalesBuilder { + + private String id; + private List items; + + SalesBuilder() {} + + public SalesBuilder id(String id) { + this.id = id; + return this; + } + + public SalesBuilder items(List items) { + this.items = items; + return this; + } + + public Sales build() { + return new Sales(id, items); + } + + public String toString() { + return "AggregationTests.Sales.SalesBuilder(id=" + this.id + ", items=" + this.items + ")"; + } + } + } + + // DATAMONGO-1491, GH-4443 + static class Item { + + @org.springframework.data.mongodb.core.mapping.Field("item_id") // + String itemId; + Integer quantity; + Long price; + List tags = new ArrayList<>(); + + Item(String itemId, Integer quantity, Long price, List tags) { + + this.itemId = itemId; + this.quantity = quantity; + this.price = price; + this.tags = tags; + } + + public static ItemBuilder builder() { + return new ItemBuilder(); + } + + public String getItemId() { + return this.itemId; + } + + public Integer getQuantity() { + return this.quantity; + } + + public Long getPrice() { + return this.price; + } + + public void setItemId(String itemId) { + this.itemId = itemId; + } + + public void setQuantity(Integer quantity) { + this.quantity = quantity; + } + + public void setPrice(Long price) { + this.price = price; + } + + 
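+ // equals/hashCode below compare itemId, quantity and price only; tags is left out, presumably so
+ // that expected Items built without tags still match the documents mapped back from the server.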
@Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Item item = (Item) o; + return Objects.equals(itemId, item.itemId) && Objects.equals(quantity, item.quantity) + && Objects.equals(price, item.price); + } + + @Override + public int hashCode() { + return Objects.hash(itemId, quantity, price); + } + + public String toString() { + return "AggregationTests.Item(itemId=" + this.getItemId() + ", quantity=" + this.getQuantity() + ", price=" + + this.getPrice() + ")"; + } + + public static class ItemBuilder { + + private String itemId; + private Integer quantity; + private Long price; + private List tags; + + ItemBuilder() {} + + public ItemBuilder itemId(String itemId) { + this.itemId = itemId; + return this; + } + + public ItemBuilder quantity(Integer quantity) { + this.quantity = quantity; + return this; + } + + public ItemBuilder price(Long price) { + this.price = price; + return this; + } + + public ItemBuilder tags(List tags) { + this.tags = tags; + return this; + } + + public Item build() { + return new Item(itemId, quantity, price, tags); + } + + public String toString() { + return "AggregationTests.Item.ItemBuilder(itemId=" + this.itemId + ", quantity=" + this.quantity + ", price=" + + this.price + ")"; + } + } + } + + // DATAMONGO-1538 + static class Sales2 { + + String id; + Integer price; + Float tax; + boolean applyDiscount; + + Sales2(String id, Integer price, Float tax, boolean applyDiscount) { + + this.id = id; + this.price = price; + this.tax = tax; + this.applyDiscount = applyDiscount; + } + + public static Sales2Builder builder() { + return new Sales2Builder(); + } + + public String getId() { + return this.id; + } + + public Integer getPrice() { + return this.price; + } + + public Float getTax() { + return this.tax; + } + + public boolean isApplyDiscount() { + return this.applyDiscount; + } + + public void setId(String id) { + this.id = id; + } + + public void setPrice(Integer price) { + this.price = price; + } + + public void setTax(Float tax) { + this.tax = tax; + } + + public void setApplyDiscount(boolean applyDiscount) { + this.applyDiscount = applyDiscount; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sales2 sales2 = (Sales2) o; + return applyDiscount == sales2.applyDiscount && Objects.equals(id, sales2.id) + && Objects.equals(price, sales2.price) && Objects.equals(tax, sales2.tax); + } + + @Override + public int hashCode() { + return Objects.hash(id, price, tax, applyDiscount); + } + + public String toString() { + return "AggregationTests.Sales2(id=" + this.getId() + ", price=" + this.getPrice() + ", tax=" + this.getTax() + + ", applyDiscount=" + this.isApplyDiscount() + ")"; + } + + public static class Sales2Builder { + + private String id; + private Integer price; + private Float tax; + private boolean applyDiscount; + + public Sales2Builder id(String id) { + this.id = id; + return this; + } + + public Sales2Builder price(Integer price) { + this.price = price; + return this; + } + + public Sales2Builder tax(Float tax) { + this.tax = tax; + return this; + } + + public Sales2Builder applyDiscount(boolean applyDiscount) { + this.applyDiscount = applyDiscount; + return this; + } + + public Sales2 build() { + return new Sales2(id, price, tax, applyDiscount); + } + + public String toString() { + return "AggregationTests.Sales2.Sales2Builder(id=" + this.id + 
", price=" + this.price + ", tax=" + this.tax + + ", applyDiscount=" + this.applyDiscount + ")"; + } + } + } + + // DATAMONGO-1551 + static class Employee { + + int id; + String name; + String reportsTo; + + Employee(int id, String name, String reportsTo) { + + this.id = id; + this.name = name; + this.reportsTo = reportsTo; + } + + public static EmployeeBuilder builder() { + return new EmployeeBuilder(); + } + + public int getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public String getReportsTo() { + return this.reportsTo; + } + + public void setId(int id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setReportsTo(String reportsTo) { + this.reportsTo = reportsTo; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Employee employee = (Employee) o; + return id == employee.id && Objects.equals(name, employee.name) && Objects.equals(reportsTo, employee.reportsTo); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, reportsTo); + } + + public String toString() { + return "AggregationTests.Employee(id=" + this.getId() + ", name=" + this.getName() + ", reportsTo=" + + this.getReportsTo() + ")"; + } + + public static class EmployeeBuilder { + + private int id; + private String name; + private String reportsTo; + + public EmployeeBuilder id(int id) { + this.id = id; + return this; + } + + public EmployeeBuilder name(String name) { + this.name = name; + return this; + } + + public EmployeeBuilder reportsTo(String reportsTo) { + this.reportsTo = reportsTo; + return this; + } + + public Employee build() { + return new Employee(id, name, reportsTo); + } + + public String toString() { + return "AggregationTests.Employee.EmployeeBuilder(id=" + this.id + ", name=" + this.name + ", reportsTo=" + + this.reportsTo + ")"; + } + } + } + + // DATAMONGO-1552 + static class Art { + + int id; + String title; + String artist; + Integer year; + double price; + + Art(int id, String title, String artist, Integer year, double price) { + + this.id = id; + this.title = title; + this.artist = artist; + this.year = year; + this.price = price; + } + + public static ArtBuilder builder() { + return new ArtBuilder(); + } + + public int getId() { + return this.id; + } + + public String getTitle() { + return this.title; + } + + public String getArtist() { + return this.artist; + } + + public Integer getYear() { + return this.year; + } + + public double getPrice() { + return this.price; + } + + public void setId(int id) { + this.id = id; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setArtist(String artist) { + this.artist = artist; + } + + public void setYear(Integer year) { + this.year = year; + } + + public void setPrice(double price) { + this.price = price; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Art art = (Art) o; + return id == art.id && Double.compare(art.price, price) == 0 && Objects.equals(title, art.title) + && Objects.equals(artist, art.artist) && Objects.equals(year, art.year); + } + + @Override + public int hashCode() { + return Objects.hash(id, title, artist, year, price); + } + + public String toString() { + return "AggregationTests.Art(id=" + this.getId() + ", title=" + this.getTitle() + ", artist=" + this.getArtist() + + 
", year=" + this.getYear() + ", price=" + this.getPrice() + ")"; + } + + public static class ArtBuilder { + + private int id; + private String title; + private String artist; + private Integer year; + private double price; + + public ArtBuilder id(int id) { + this.id = id; + return this; + } + + public ArtBuilder title(String title) { + this.title = title; + return this; + } + + public ArtBuilder artist(String artist) { + this.artist = artist; + return this; + } + + public ArtBuilder year(Integer year) { + this.year = year; + return this; + } + + public ArtBuilder price(double price) { + this.price = price; + return this; + } + + public Art build() { + return new Art(id, title, artist, year, price); + } + + public String toString() { + return "AggregationTests.Art.ArtBuilder(id=" + this.id + ", title=" + this.title + ", artist=" + this.artist + + ", year=" + this.year + ", price=" + this.price + ")"; + } + } + } + + static class WithComplexId { + @Id ComplexId id; + + public ComplexId getId() { + return this.id; + } + + public void setId(ComplexId id) { + this.id = id; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithComplexId that = (WithComplexId) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + public String toString() { + return "AggregationTests.WithComplexId(id=" + this.getId() + ")"; + } + } + + static class ComplexId { + + String p1; + String p2; + + public String getP1() { + return this.p1; + } + + public String getP2() { + return this.p2; + } + + public void setP1(String p1) { + this.p1 = p1; + } + + public void setP2(String p2) { + this.p2 = p2; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComplexId complexId = (ComplexId) o; + return Objects.equals(p1, complexId.p1) && Objects.equals(p2, complexId.p2); + } + + @Override + public int hashCode() { + return Objects.hash(p1, p2); + } + + public String toString() { + return "AggregationTests.ComplexId(p1=" + this.getP1() + ", p2=" + this.getP2() + ")"; + } + } + + static enum MyEnum { + ONE, TWO + } + + static class WithEnum { + + @Id String id; + MyEnum enumValue; + + public WithEnum() {} + + public String getId() { + return this.id; + } + + public MyEnum getEnumValue() { + return this.enumValue; + } + + public void setId(String id) { + this.id = id; + } + + public void setEnumValue(MyEnum enumValue) { + this.enumValue = enumValue; + } + + public String toString() { + return "AggregationTests.WithEnum(id=" + this.getId() + ", enumValue=" + this.getEnumValue() + ")"; + } + } + + static class Widget { + + @Id String id; + List users; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public List getUsers() { + return users; + } + + public void setUsers(List users) { + this.users = users; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + Widget widget = (Widget) o; + + if (!ObjectUtils.nullSafeEquals(id, widget.id)) { + return false; + } + return ObjectUtils.nullSafeEquals(users, widget.users); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(id); + result = 31 * result + ObjectUtils.nullSafeHashCode(users); + return result; + } + } + + 
static class UserRef { + + @MongoId String id; + String name; + + public UserRef() {} + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UserRef userRef = (UserRef) o; + return Objects.equals(id, userRef.id) && Objects.equals(name, userRef.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "AggregationTests.UserRef(id=" + this.getId() + ", name=" + this.getName() + ")"; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java old mode 100644 new mode 100755 index 7c8d3d3dbd..066a80f82c --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,313 +15,681 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Map; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond; +import org.springframework.data.mongodb.core.aggregation.ProjectionOperationUnitTests.BookWithFieldAnnotation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Criteria; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; +import com.mongodb.client.model.Aggregates; +import 
com.mongodb.client.model.Projections; /** * Unit tests for {@link Aggregation}. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author Julia Lee */ public class AggregationUnitTests { - public @Rule ExpectedException exception = ExpectedException.none(); - - @Test(expected = IllegalArgumentException.class) - public void rejectsNullAggregationOperation() { - newAggregation((AggregationOperation[]) null); + @Test + void rejectsNullAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation((AggregationOperation[]) null)); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullTypedAggregationOperation() { - newAggregation(String.class, (AggregationOperation[]) null); + @Test + void rejectsNullTypedAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(String.class, (AggregationOperation[]) null)); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNoAggregationOperation() { - newAggregation(new AggregationOperation[0]); + @Test + void rejectsNoAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(new AggregationOperation[0])); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNoTypedAggregationOperation() { - newAggregation(String.class, new AggregationOperation[0]); + @Test + void rejectsNoTypedAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(String.class, new AggregationOperation[0])); } - /** - * @see DATAMONGO-753 - */ - @Test - public void checkForCorrectFieldScopeTransfer() { + @Test // DATAMONGO-753 + void checkForCorrectFieldScopeTransfer() { - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Invalid reference"); - exception.expectMessage("'b'"); + assertThatIllegalArgumentException().isThrownBy(() -> { + newAggregation( // + project("a", "b"), // + group("a").count().as("cnt"), // a was introduced to the context by the project operation + project("cnt", "b") // b was removed from the context by the group operation + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); // -> triggers IllegalArgumentException + }); + } + + @Test // DATAMONGO-753 + void unwindOperationShouldNotChangeAvailableFields() { newAggregation( // project("a", "b"), // - group("a").count().as("cnt"), // a was introduced to the context by the project operation - project("cnt", "b") // b was removed from the context by the group operation - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); // -> triggers IllegalArgumentException + unwind("a"), // + project("a", "b") // b should still be available + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); } - /** - * @see DATAMONGO-753 - */ - @Test - public void unwindOperationShouldNotChangeAvailableFields() { + @Test // DATAMONGO-1391 + void unwindOperationWithIndexShouldPreserveFields() { newAggregation( // project("a", "b"), // - unwind("a"), // + unwind("a", "x"), // project("a", "b") // b should still be available - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); } - /** - * @see DATAMONGO-753 - */ - @Test - public void matchOperationShouldNotChangeAvailableFields() { + @Test // DATAMONGO-1391 + void unwindOperationWithIndexShouldAddIndexField() { + + newAggregation( // + project("a", "b"), // + unwind("a", "x"), // + project("a", "x") // b should still be available + ).toDocument("foo", 
Aggregation.DEFAULT_CONTEXT); + } + + @Test // DATAMONGO-1391 + void fullUnwindOperationShouldBuildCorrectClause() { + + Document agg = newAggregation( // + unwind("a", "x", true)).toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + @SuppressWarnings("unchecked") + Document unwind = ((List) agg.get("pipeline")).get(0); + assertThat(unwind.get("$unwind", Document.class)). // + containsEntry("includeArrayIndex", "x"). // + containsEntry("preserveNullAndEmptyArrays", true); + } + + @Test // DATAMONGO-1391 + void unwindOperationWithPreserveNullShouldBuildCorrectClause() { + + Document agg = newAggregation( // + unwind("a", true)).toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + @SuppressWarnings("unchecked") + Document unwind = ((List) agg.get("pipeline")).get(0); + assertThat(unwind) // + .doesNotContainKey("$unwind.includeArrayIndex") // + .containsEntry("$unwind.preserveNullAndEmptyArrays", true); + } + + @Test // DATAMONGO-1550 + void replaceRootOperationShouldBuildCorrectClause() { + + Document agg = newAggregation( // + replaceRoot().withDocument().andValue("value").as("field")) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + @SuppressWarnings("unchecked") + Document unwind = ((List) agg.get("pipeline")).get(0); + assertThat(unwind).containsEntry("$replaceRoot.newRoot", new Document("field", "value")); + } + + @Test // DATAMONGO-753 + void matchOperationShouldNotChangeAvailableFields() { newAggregation( // project("a", "b"), // match(where("a").gte(1)), // project("a", "b") // b should still be available - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); } - /** - * @see DATAMONGO-788 - */ - @Test - public void referencesToGroupIdsShouldBeRenderedAsReferences() { + @Test // DATAMONGO-788 + void referencesToGroupIdsShouldBeRenderedAsReferences() { - DBObject agg = newAggregation( // + Document agg = newAggregation( // project("a"), // group("a").count().as("aCnt"), // project("aCnt", "a") // - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); @SuppressWarnings("unchecked") - DBObject secondProjection = ((List) agg.get("pipeline")).get(2); - DBObject fields = getAsDBObject(secondProjection, "$project"); - assertThat(fields.get("aCnt"), is((Object) 1)); - assertThat(fields.get("a"), is((Object) "$_id.a")); + Document secondProjection = ((List) agg.get("pipeline")).get(2); + Document fields = getAsDocument(secondProjection, "$project"); + assertThat(fields.get("aCnt")).isEqualTo(1); + assertThat(fields.get("a")).isEqualTo("$_id.a"); } - /** - * @see DATAMONGO-791 - */ - @Test - public void allowAggregationOperationsToBePassedAsIterable() { + @Test // DATAMONGO-791 + void allowAggregationOperationsToBePassedAsIterable() { List ops = new ArrayList(); ops.add(project("a")); ops.add(group("a").count().as("aCnt")); ops.add(project("aCnt", "a")); - DBObject agg = newAggregation(ops).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + Document agg = newAggregation(ops).toDocument("foo", Aggregation.DEFAULT_CONTEXT); @SuppressWarnings("unchecked") - DBObject secondProjection = ((List) agg.get("pipeline")).get(2); - DBObject fields = getAsDBObject(secondProjection, "$project"); - assertThat(fields.get("aCnt"), is((Object) 1)); - assertThat(fields.get("a"), is((Object) "$_id.a")); + Document secondProjection = ((List) agg.get("pipeline")).get(2); + Document fields = getAsDocument(secondProjection, "$project"); + assertThat(fields.get("aCnt")).isEqualTo(1); + 
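+ // "a" became the group id in the previous stage, so the trailing projection must render the
+ // reference as "$_id.a".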
assertThat(fields.get("a")).isEqualTo("$_id.a"); } - /** - * @see DATAMONGO-791 - */ - @Test - public void allowTypedAggregationOperationsToBePassedAsIterable() { + @Test // DATAMONGO-791 + void allowTypedAggregationOperationsToBePassedAsIterable() { - List ops = new ArrayList(); + List ops = new ArrayList<>(); ops.add(project("a")); ops.add(group("a").count().as("aCnt")); ops.add(project("aCnt", "a")); - DBObject agg = newAggregation(DBObject.class, ops).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + Document agg = newAggregation(Document.class, ops).toDocument("foo", Aggregation.DEFAULT_CONTEXT); @SuppressWarnings("unchecked") - DBObject secondProjection = ((List) agg.get("pipeline")).get(2); - DBObject fields = getAsDBObject(secondProjection, "$project"); - assertThat(fields.get("aCnt"), is((Object) 1)); - assertThat(fields.get("a"), is((Object) "$_id.a")); + Document secondProjection = ((List) agg.get("pipeline")).get(2); + Document fields = getAsDocument(secondProjection, "$project"); + assertThat(fields.get("aCnt")).isEqualTo((Object) 1); + assertThat(fields.get("a")).isEqualTo((Object) "$_id.a"); } - /** - * @see DATAMONGO-838 - */ - @Test - public void expressionBasedFieldsShouldBeReferencableInFollowingOperations() { + @Test // DATAMONGO-838 + void expressionBasedFieldsShouldBeReferencableInFollowingOperations() { - DBObject agg = newAggregation( // + Document agg = newAggregation( // project("a").andExpression("b+c").as("foo"), // group("a").sum("foo").as("foosum") // - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); @SuppressWarnings("unchecked") - DBObject secondProjection = ((List) agg.get("pipeline")).get(1); - DBObject fields = getAsDBObject(secondProjection, "$group"); - assertThat(fields.get("foosum"), is((Object) new BasicDBObject("$sum", "$foo"))); + Document secondProjection = ((List) agg.get("pipeline")).get(1); + Document fields = getAsDocument(secondProjection, "$group"); + assertThat(fields.get("foosum")).isEqualTo(new Document("$sum", "$foo")); } - /** - * @see DATAMONGO-908 - */ - @Test - public void shouldSupportReferingToNestedPropertiesInGroupOperation() { + @Test // DATAMONGO-908 + void shouldSupportReferingToNestedPropertiesInGroupOperation() { - DBObject agg = newAggregation( // + Document agg = newAggregation( // project("cmsParameterId", "rules"), // unwind("rules"), // group("cmsParameterId", "rules.ruleType").count().as("totol") // - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); - assertThat(agg, is(notNullValue())); + assertThat(agg).isNotNull(); - DBObject group = ((List) agg.get("pipeline")).get(2); - DBObject fields = getAsDBObject(group, "$group"); - DBObject id = getAsDBObject(fields, "_id"); + Document group = ((List) agg.get("pipeline")).get(2); + Document fields = getAsDocument(group, "$group"); + Document id = getAsDocument(fields, "_id"); - assertThat(id.get("ruleType"), is((Object) "$rules.ruleType")); + assertThat(id.get("ruleType")).isEqualTo("$rules.ruleType"); } - /** - * @see DATAMONGO-924 - */ - @Test - public void referencingProjectionAliasesFromPreviousStepShouldReferToTheSameFieldTarget() { + @Test // DATAMONGO-1585 + void shouldSupportSortingBySyntheticAndExposedGroupFields() { + + Document agg = newAggregation( // + group("cmsParameterId").addToSet("title").as("titles"), // + sort(Direction.ASC, "cmsParameterId", "titles") // + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isNotNull(); + + 
Document sort = ((List) agg.get("pipeline")).get(1); + + assertThat(getAsDocument(sort, "$sort")) + .isEqualTo(Document.parse("{ \"_id.cmsParameterId\" : 1 , \"titles\" : 1}")); + } + + @Test // DATAMONGO-1585 + void shouldSupportSortingByProjectedFields() { + + Document agg = newAggregation( // + project("cmsParameterId") // + .and(SystemVariable.CURRENT + ".titles").as("titles") // + .and("field").as("alias"), // + sort(Direction.ASC, "cmsParameterId", "titles", "alias") // + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); - DBObject agg = newAggregation( // + assertThat(agg).isNotNull(); + + Document sort = ((List) agg.get("pipeline")).get(1); + + assertThat(getAsDocument(sort, "$sort")).containsEntry("cmsParameterId", 1) // + .containsEntry("titles", 1) // + .containsEntry("alias", 1); + } + + @Test // DATAMONGO-924 + void referencingProjectionAliasesFromPreviousStepShouldReferToTheSameFieldTarget() { + + Document agg = newAggregation( // project().and("foo.bar").as("ba") // , project().and("ba").as("b") // - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); - DBObject projection0 = extractPipelineElement(agg, 0, "$project"); - assertThat(projection0, is((DBObject) new BasicDBObject("ba", "$foo.bar"))); + Document projection0 = extractPipelineElement(agg, 0, "$project"); + assertThat(projection0).isEqualTo(new Document("ba", "$foo.bar")); - DBObject projection1 = extractPipelineElement(agg, 1, "$project"); - assertThat(projection1, is((DBObject) new BasicDBObject("b", "$ba"))); + Document projection1 = extractPipelineElement(agg, 1, "$project"); + assertThat(projection1).isEqualTo(new Document("b", "$ba")); } - /** - * @see DATAMONGO-960 - */ - @Test - public void shouldRenderAggregationWithDefaultOptionsCorrectly() { + @Test // DATAMONGO-960 + void shouldRenderAggregationWithDefaultOptionsCorrectly() { - DBObject agg = newAggregation( // + Document agg = newAggregation( // project().and("a").as("aa") // - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); - assertThat(agg.toString(), - is("{ \"aggregate\" : \"foo\" , \"pipeline\" : [ { \"$project\" : { \"aa\" : \"$a\"}}]}")); + assertThat(agg).isEqualTo( + Document.parse("{ \"aggregate\" : \"foo\" , \"pipeline\" : [ { \"$project\" : { \"aa\" : \"$a\"}}]}")); } - /** - * @see DATAMONGO-960 - */ - @Test - public void shouldRenderAggregationWithCustomOptionsCorrectly() { + @Test // DATAMONGO-960 + void shouldRenderAggregationWithCustomOptionsCorrectly() { - AggregationOptions aggregationOptions = newAggregationOptions().explain(true).cursor(new BasicDBObject("foo", 1)) + AggregationOptions aggregationOptions = newAggregationOptions().explain(true).cursor(new Document("foo", 1)) .allowDiskUse(true).build(); - DBObject agg = newAggregation( // + Document agg = newAggregation( // project().and("a").as("aa") // ) // - .withOptions(aggregationOptions) // - .toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + .withOptions(aggregationOptions) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); - assertThat(agg.toString(), is("{ \"aggregate\" : \"foo\" , " // + assertThat(agg).isEqualTo(Document.parse("{ \"aggregate\" : \"foo\" , " // + "\"pipeline\" : [ { \"$project\" : { \"aa\" : \"$a\"}}] , " // + "\"allowDiskUse\" : true , " // + "\"explain\" : true , " // - + "\"cursor\" : { \"foo\" : 1}}" // - )); + + "\"cursor\" : { \"foo\" : 1}}") // + ); } - /** - * @see DATAMONGO-954 - */ - @Test - public void 
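The DATAMONGO-960 tests above check how `AggregationOptions` are serialized next to `aggregate` and `pipeline`. A sketch of the builder (the cursor document content is illustrative):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;

class OptionsSketch {

	Document build() {

		// Each builder setting becomes a top-level key of the rendered command.
		AggregationOptions options = newAggregationOptions() //
				.explain(true) //
				.allowDiskUse(true) //
				.cursor(new Document("batchSize", 100)) // batchSize value is illustrative
				.build();

		return newAggregation(project().and("a").as("aa")) //
				.withOptions(options) //
				.toDocument("foo", DEFAULT_CONTEXT);
	}
}
```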
shouldSupportReferencingSystemVariables() { + @Test // DATAMONGO-954, DATAMONGO-1585 + void shouldSupportReferencingSystemVariables() { - DBObject agg = newAggregation( // + Document agg = newAggregation( // project("someKey") // .and("a").as("a1") // .and(Aggregation.CURRENT + ".a").as("a2") // - , sort(Direction.DESC, "a") // + , sort(Direction.DESC, "a1") // , group("someKey").first(Aggregation.ROOT).as("doc") // - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); - DBObject projection0 = extractPipelineElement(agg, 0, "$project"); - assertThat(projection0, is((DBObject) new BasicDBObject("someKey", 1).append("a1", "$a") - .append("a2", "$$CURRENT.a"))); + Document projection0 = extractPipelineElement(agg, 0, "$project"); + assertThat(projection0).isEqualTo(new Document("someKey", 1).append("a1", "$a").append("a2", "$$CURRENT.a")); - DBObject sort = extractPipelineElement(agg, 1, "$sort"); - assertThat(sort, is((DBObject) new BasicDBObject("a", -1))); + Document sort = extractPipelineElement(agg, 1, "$sort"); + assertThat(sort).isEqualTo(new Document("a1", -1)); - DBObject group = extractPipelineElement(agg, 2, "$group"); - assertThat(group, - is((DBObject) new BasicDBObject("_id", "$someKey").append("doc", new BasicDBObject("$first", "$$ROOT")))); + Document group = extractPipelineElement(agg, 2, "$group"); + assertThat(group).isEqualTo(new Document("_id", "$someKey").append("doc", new Document("$first", "$$ROOT"))); } - /** - * @see DATAMONGO-1254 - */ - @Test - public void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThePipeline() { + @Test // DATAMONGO-1254 + void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThePipeline() { - DBObject agg = Aggregation.newAggregation(// + Document agg = Aggregation.newAggregation(// project("date") // .and("tags").minus(10).as("tags_count")// , group("date")// .sum("tags_count").as("count")// - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); - DBObject group = extractPipelineElement(agg, 1, "$group"); - assertThat(getAsDBObject(group, "count"), is(new BasicDBObjectBuilder().add("$sum", "$tags_count").get())); + Document group = extractPipelineElement(agg, 1, "$group"); + assertThat(getAsDocument(group, "count")).isEqualTo(new Document().append("$sum", "$tags_count")); } - /** - * @see DATAMONGO-1254 - */ - @Test - public void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePipelineWhenUsingSpEL() { + @Test // DATAMONGO-1254 + void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePipelineWhenUsingSpEL() { - DBObject agg = Aggregation.newAggregation(// + Document agg = Aggregation.newAggregation(// project("date") // .andExpression("tags-10")// , group("date")// .sum("tags_count").as("count")// - ).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document group = extractPipelineElement(agg, 1, "$group"); + assertThat(getAsDocument(group, "count")).isEqualTo(new Document().append("$sum", "$tags_count")); + } + + @Test // DATAMONGO-861 + void conditionExpressionBasedFieldsShouldBeReferencableInFollowingOperations() { + + Document agg = newAggregation( // + project("a", "answer"), // + group("a") + .first(Cond.newBuilder().when(Criteria.where("a").gte(42)).thenValueOf("answer").otherwise("no-answer")) + .as("foosum") // + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + @SuppressWarnings("unchecked") + Document 
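For the system-variable handling asserted above: `Aggregation.ROOT` ("$$ROOT") and `Aggregation.CURRENT` ("$$CURRENT") are rendered verbatim instead of being treated as field references. Roughly:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

class SystemVariableSketch {

	Document build() {

		return newAggregation( //
				project("someKey").and(Aggregation.CURRENT + ".a").as("a2"), //
				group("someKey").first(Aggregation.ROOT).as("doc")) //
						.toDocument("foo", DEFAULT_CONTEXT);
		// $project: { someKey: 1, a2: "$$CURRENT.a" }
		// $group:   { _id: "$someKey", doc: { $first: "$$ROOT" } }
	}
}
```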
secondProjection = ((List) agg.get("pipeline")).get(1); + Document fields = getAsDocument(secondProjection, "$group"); + assertThat(getAsDocument(fields, "foosum")).containsKey("$first"); + assertThat(getAsDocument(fields, "foosum")).containsEntry("$first.$cond.then", "$answer"); + assertThat(getAsDocument(fields, "foosum")).containsEntry("$first.$cond.else", "no-answer"); + } + + @Test // DATAMONGO-861 + void shouldRenderProjectionConditionalExpressionCorrectly() { + + Document agg = Aggregation.newAggregation(// + project().and(ConditionalOperators.Cond.newBuilder() // + .when("isYellow") // + .then("bright") // + .otherwise("dark")).as("color")) + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 0, "$project"); + Document expectedCondition = new Document() // + .append("if", "$isYellow") // + .append("then", "bright") // + .append("else", "dark"); + + assertThat(getAsDocument(project, "color")).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861 + void shouldRenderProjectionConditionalCorrectly() { + + Document agg = Aggregation.newAggregation(// + project().and("color").applyCondition(ConditionalOperators.Cond.newBuilder() // + .when("isYellow") // + .then("bright") // + .otherwise("dark"))) + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 0, "$project"); + Document expectedCondition = new Document() // + .append("if", "$isYellow") // + .append("then", "bright") // + .append("else", "dark"); + + assertThat(getAsDocument(project, "color")).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861 + void shouldRenderProjectionConditionalWithCriteriaCorrectly() { - DBObject group = extractPipelineElement(agg, 1, "$group"); - assertThat(getAsDBObject(group, "count"), is(new BasicDBObjectBuilder().add("$sum", "$tags_count").get())); + Document agg = Aggregation.newAggregation(project()// + .and("color")// + .applyCondition(ConditionalOperators.Cond.newBuilder().when(Criteria.where("key").gt(5)) // + .then("bright").otherwise("dark"))) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 0, "$project"); + Document expectedCondition = new Document() // + .append("if", new Document("$gt", Arrays. 
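A compact sketch of the `$cond` projection style these DATAMONGO-861 tests render; `applyCondition(...)` keeps the projected field name and the `Criteria` is translated into aggregation operators (field names are illustrative):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators;
import org.springframework.data.mongodb.core.query.Criteria;

class CondProjectionSketch {

	Document build() {

		return newAggregation( //
				project().and("color").applyCondition( //
						ConditionalOperators.Cond.newBuilder() //
								.when(Criteria.where("key").gt(5)) //
								.then("bright") //
								.otherwise("dark"))) //
										.toDocument("foo", DEFAULT_CONTEXT);
		// color: { $cond: { if: { $gt: ["$key", 5] }, then: "bright", else: "dark" } }
	}
}
```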
asList("$key", 5))) // + .append("then", "bright") // + .append("else", "dark"); + + assertThat(getAsDocument(project, "color")).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861, DATAMONGO-2242 + void referencingProjectionAliasesShouldRenderProjectionConditionalWithFieldReferenceCorrectly() { + + Document agg = Aggregation.newAggregation(// + project().and("color").as("chroma"), project().and("luminosity") // + .applyCondition(ConditionalOperators // + .when("chroma") // + .then("bright") // + .otherwise("dark"))) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 1, "$project"); + Document expectedCondition = new Document() // + .append("if", "$chroma") // + .append("then", "bright") // + .append("else", "dark"); + + assertThat(getAsDocument(project, "luminosity")).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861 + void referencingProjectionAliasesShouldRenderProjectionConditionalWithCriteriaReferenceCorrectly() { + + Document agg = Aggregation.newAggregation(// + project().and("color").as("chroma"), project().and("luminosity") // + .applyCondition(ConditionalOperators.Cond.newBuilder().when(Criteria.where("chroma") // + .is(100)) // + .then("bright").otherwise("dark"))) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 1, "$project"); + Document expectedCondition = new Document() // + .append("if", new Document("$eq", Arrays. asList("$chroma", 100))) // + .append("then", "bright") // + .append("else", "dark"); + + assertThat(getAsDocument(project, "luminosity")).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861 + void shouldRenderProjectionIfNullWithFieldReferenceCorrectly() { + + Document agg = Aggregation.newAggregation(// + project().and("color"), // + project().and("luminosity") // + .applyCondition(ConditionalOperators // + .ifNull("chroma") // + .then("unknown"))) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 1, "$project"); + + assertThat(getAsDocument(project, "luminosity")).containsEntry("$ifNull", Arrays.asList("$chroma", "unknown")); + } + + @Test // DATAMONGO-861 + void shouldRenderProjectionIfNullWithFallbackFieldReferenceCorrectly() { + + Document agg = Aggregation.newAggregation(// + project("fallback").and("color").as("chroma"), project().and("luminosity") // + .applyCondition(ConditionalOperators.ifNull("chroma") // + .thenValueOf("fallback"))) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 1, "$project"); + + assertThat(getAsDocument(project, "luminosity")).containsEntry("$ifNull", Arrays.asList("$chroma", "$fallback")); + } + + @Test // DATAMONGO-1552 + void shouldHonorDefaultCountField() { + + Document agg = Aggregation.newAggregation(// + bucket("year"), // + project("count")) // + .toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + Document project = extractPipelineElement(agg, 1, "$project"); + + assertThat(project).containsEntry("count", 1); + } + + @Test // DATAMONGO-1533 + void groupOperationShouldAllowUsageOfDerivedSpELAggregationExpression() { + + Document agg = newAggregation( // + project("a"), // + group("a").first(AggregationSpELExpression.expressionOf("cond(a >= 42, 'answer', 'no-answer')")).as("foosum") // + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); + + @SuppressWarnings("unchecked") + Document secondProjection = ((List) agg.get("pipeline")).get(1); + 
Document fields = getAsDocument(secondProjection, "$group"); + assertThat(getAsDocument(fields, "foosum")).containsKey("$first"); + assertThat(getAsDocument(fields, "foosum")).containsEntry("$first.$cond.if", + new Document("$gte", Arrays.asList("$a", 42))); + assertThat(getAsDocument(fields, "foosum")).containsEntry("$first.$cond.then", "answer"); + assertThat(getAsDocument(fields, "foosum")).containsEntry("$first.$cond.else", "no-answer"); + } + + @Test // DATAMONGO-1756 + void projectOperationShouldRenderNestedFieldNamesCorrectly() { + + Document agg = newAggregation(project().and("value1.value").plus("value2.value").as("val")).toDocument("collection", + Aggregation.DEFAULT_CONTEXT); + + Document expected = new Document("val", new Document("$add", Arrays.asList("$value1.value", "$value2.value"))); + assertThat(extractPipelineElement(agg, 0, "$project")).isEqualTo(expected); + } + + @Test // DATAMONGO-1871 + void providedAliasShouldAllowNestingExpressionWithAliasCorrectly() { + + Document condition = new Document("$and", + Arrays.asList(new Document("$gte", Arrays.asList("$$est.dt", "2015-12-29")), // + new Document("$lte", Arrays.asList("$$est.dt", "2017-12-29")) // + )); + + Aggregation agg = newAggregation(project("_id", "dId", "aId", "cty", "cat", "plts.plt") + .and(ArrayOperators.arrayOf("plts.ests").filter().as("est").by(condition)).as("plts.ests")); + + Document $project = extractPipelineElement(agg.toDocument("collection-1", Aggregation.DEFAULT_CONTEXT), 0, + "$project"); + + assertThat($project).containsKey("plts.ests"); + } + + @Test // DATAMONGO-2377 + void shouldAllowInternalThisAndValueReferences() { + + Document untyped = newAggregation( // + Arrays.asList( // + (group("uid", "data.sourceId") // + .push("data.attributeRecords").as("attributeRecordArrays")), // + (project() // + .and(ArrayOperators.arrayOf("attributeRecordArrays") // + .reduce(ArrayOperators.arrayOf("$$value").concat("$$this")) // + .startingWith(Collections.emptyList())) // + .as("attributeRecordArrays")) // + )).toDocument("collection-1", DEFAULT_CONTEXT); + + assertThat(extractPipelineElement(untyped, 1, "$project")).isEqualTo(Document.parse( + "{\"attributeRecordArrays\": {\"$reduce\": {\"input\": \"$attributeRecordArrays\", \"initialValue\": [], \"in\": {\"$concatArrays\": [\"$$value\", \"$$this\"]}}}}")); + } + + @Test // DATAMONGO-2644 + void projectOnIdIsAlwaysValid() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + Document target = new Aggregation(bucket("start"), project("_id")).toDocument("collection-1", + new RelaxedTypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)))); + + assertThat(extractPipelineElement(target, 1, "$project")).isEqualTo(Document.parse(" { \"_id\" : 1 }")); + } + + @Test // GH-3898 + void shouldNotConvertIncludeExcludeValuesForProjectOperation() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext( + WithRetypedIdField.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + Document document = project(WithRetypedIdField.class).toDocument(context); + assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1).append("entries", 1))); + } + + @Test // GH-4038 + void createsBasicAggregationOperationFromJsonString() 
{ + + AggregationOperation stage = stage("{ $project : { name : 1} }"); + Document target = newAggregation(stage).toDocument("col-1", DEFAULT_CONTEXT); + assertThat(extractPipelineElement(target, 0, "$project")).containsEntry("name", 1); + } + + @Test // GH-4038 + void createsBasicAggregationOperationFromBson() { + + AggregationOperation stage = stage(Aggregates.project(Projections.fields(Projections.include("name")))); + Document target = newAggregation(stage).toDocument("col-1", DEFAULT_CONTEXT); + assertThat(extractPipelineElement(target, 0, "$project")).containsKey("name"); + } + + @Test // GH-3917 + void inheritedFieldsExposingContextShouldNotFailOnUnknownFieldReferenceForRelaxedRootContext() { + + List aggregationOperations = new ArrayList<>(); + + GroupOperation groupOperation = Aggregation.group("_id", "label_name"); + aggregationOperations.add(groupOperation); + + ProjectionOperation projectionOperation = Aggregation.project("label_name").andExclude("_id"); + aggregationOperations.add(projectionOperation); + + Sort sort = Sort.by(Sort.Direction.DESC, "serial_number"); + SortOperation sortOperation = new SortOperation(sort).and(Sort.Direction.DESC, "label_name"); + aggregationOperations.add(sortOperation); + + MongoMappingContext mappingContext = new MongoMappingContext(); + QueryMapper queryMapper = new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + + List documents = newAggregation(City.class, aggregationOperations).toPipeline(new RelaxedTypeBasedAggregationOperationContext(City.class, mappingContext, queryMapper)); + assertThat(documents.get(2)).isEqualTo("{ $sort : { 'serial_number' : -1, 'label_name' : -1 } }"); } - private DBObject extractPipelineElement(DBObject agg, int index, String operation) { + @Test // GH-4443 + void fieldsExposingContextShouldUseCustomFieldNameFromRelaxedRootContext() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext( + WithRetypedIdField.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + TypedAggregation agg = newAggregation(WithRetypedIdField.class, + unwind("entries"), match(where("foo").is("value 2"))); + List pipeline = agg.toPipeline(context); + + Document fields = getAsDocument(pipeline.get(1), "$match"); + assertThat(fields.get("renamed-field")).isEqualTo("value 2"); + } + + private Document extractPipelineElement(Document agg, int index, String operation) { + + List pipeline = (List) agg.get("pipeline"); + Object value = pipeline.get(index).get(operation); + if (value instanceof Document document) { + return document; + } + if (value instanceof Map map) { + return new Document(map); + } + throw new IllegalArgumentException(); + } + + public class WithRetypedIdField { + + @Id @org.springframework.data.mongodb.core.mapping.Field private String id; + + @org.springframework.data.mongodb.core.mapping.Field("renamed-field") private String foo; + + private List entries = new ArrayList<>(); - List pipeline = (List) agg.get("pipeline"); - return (DBObject) pipeline.get(index).get(operation); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdateUnitTests.java new file mode 100644 index 0000000000..60a6437f91 --- /dev/null +++ 
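The GH-4038 `stage(...)` factory exercised above accepts either a raw JSON stage or any driver-built `Bson`, so stages without a dedicated builder can still join a type-safe pipeline. A sketch:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;

import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Projections;

class RawStageSketch {

	Document fromJson() {

		// A stage given as plain JSON is parsed and passed through as-is.
		AggregationOperation stage = stage("{ $project : { name : 1 } }");
		return newAggregation(stage).toDocument("col-1", DEFAULT_CONTEXT);
	}

	Document fromBson() {

		// The same stage built with the MongoDB driver's builders.
		AggregationOperation stage = stage(Aggregates.project(Projections.fields(Projections.include("name"))));
		return newAggregation(stage).toDocument("col-1", DEFAULT_CONTEXT);
	}
}
```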
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdateUnitTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link AggregationUpdate}. + * + * @author Christoph Strobl + */ +public class AggregationUpdateUnitTests { + + @Test // DATAMONGO-2331 + public void createPipelineWithMultipleStages() { + + assertThat(AggregationUpdate.update() // + .set("stage-1").toValue("value-1") // + .unset("stage-2") // + .set("stage-3").toValue("value-3") // + .toPipeline(Aggregation.DEFAULT_CONTEXT)) // + .containsExactly(new Document("$set", new Document("stage-1", "value-1")), + new Document("$unset", "stage-2"), new Document("$set", new Document("stage-3", "value-3"))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationVariableUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationVariableUnitTests.java new file mode 100644 index 0000000000..7ebf7c2849 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationVariableUnitTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +/** + * Unit tests for {@link AggregationVariable}. 
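`AggregationUpdate` from the new test file is an `UpdateDefinition`, so it plugs into the fluent template API. A sketch (the domain type, field names, and template wiring are assumptions, not part of the diff):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;

class AggregationUpdateSketch {

	static class Person { // hypothetical domain type
		String status;
		String draft;
	}

	void run(MongoTemplate template) {

		// Each set(...)/unset(...) becomes its own pipeline stage, exactly as the
		// DATAMONGO-2331 test asserts via toPipeline(...).
		AggregationUpdate update = AggregationUpdate.update() //
				.set("status").toValue("archived") // { $set: { status: "archived" } }
				.unset("draft"); // { $unset: "draft" }

		template.update(Person.class).matching(query(where("status").is("draft"))).apply(update).all();
	}
}
```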
+ * + * @author Christoph Strobl + */ +class AggregationVariableUnitTests { + + @Test // GH-4070 + void variableErrorsOnNullValue() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> AggregationVariable.variable(null)); + } + + @Test // GH-4070 + void createsVariable() { + + var variable = AggregationVariable.variable("$$now"); + + assertThat(variable.getTarget()).isEqualTo("$$now"); + assertThat(variable.isInternal()).isFalse(); + } + + @Test // GH-4070 + void prefixesVariableIfNeeded() { + + var variable = AggregationVariable.variable("this"); + + assertThat(variable.getTarget()).isEqualTo("$$this"); + } + + @Test // GH-4070 + void localVariableErrorsOnNullValue() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> AggregationVariable.localVariable(null)); + } + + @Test // GH-4070 + void localVariable() { + + var variable = AggregationVariable.localVariable("$$this"); + + assertThat(variable.getTarget()).isEqualTo("$$this"); + assertThat(variable.isInternal()).isTrue(); + } + + @Test // GH-4070 + void prefixesLocalVariableIfNeeded() { + + var variable = AggregationVariable.localVariable("this"); + + assertThat(variable.getTarget()).isEqualTo("$$this"); + } + + @Test // GH-4070 + void isVariableReturnsTrueForAggregationVariableTypes() { + + var variable = Mockito.mock(AggregationVariable.class); + + assertThat(AggregationVariable.isVariable(variable)).isTrue(); + } + + @Test // GH-4070 + void isVariableReturnsTrueForFieldThatTargetsVariable() { + + var variable = Fields.field("value", "$$this"); + + assertThat(AggregationVariable.isVariable(variable)).isTrue(); + } + + @Test // GH-4070 + void isVariableReturnsFalseForFieldThatDontTargetsVariable() { + + var variable = Fields.field("value", "$this"); + + assertThat(AggregationVariable.isVariable(variable)).isFalse(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java new file mode 100644 index 0000000000..381ddb45a1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -0,0 +1,208 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.Collections; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ArithmeticOperators}. 
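The variable factories asserted above normalize the `$$` prefix and differ only in the internal flag. Sketched:

```java
import org.springframework.data.mongodb.core.aggregation.AggregationVariable;

class VariableSketch {

	void demo() {

		// Both factory methods prepend "$$" when it is missing; localVariable marks
		// the variable as internal (e.g. $$this / $$value inside $reduce, $filter, $map).
		AggregationVariable now = AggregationVariable.variable("now"); // target: $$now
		AggregationVariable current = AggregationVariable.localVariable("this"); // target: $$this

		boolean internal = current.isInternal(); // true; variable(...) yields false
	}
}
```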
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Mushtaq Ahmed + * @author Divya Srivastava + */ +class ArithmeticOperatorsUnitTests { + + @Test // DATAMONGO-2370 + void roundShouldWithoutPlace() { + + assertThat(valueOf("field").round().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Collections.singletonList("$field"))); + } + + @Test // DATAMONGO-2370 + void roundShouldWithPlace() { + + assertThat(valueOf("field").roundToPlace(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Arrays.asList("$field", 3))); + } + + @Test // DATAMONGO-2370 + void roundShouldWithPlaceFromField() { + + assertThat(valueOf("field").round().placeOf("my-field").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Arrays.asList("$field", "$my-field"))); + } + + @Test // DATAMONGO-2370 + void roundShouldWithPlaceFromExpression() { + + assertThat(valueOf("field").round().placeOf((ctx -> new Document("$first", "$source"))) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Arrays.asList("$field", new Document("$first", "$source")))); + } + + @Test // GH-3716 + void rendersDerivativeCorrectly() { + + assertThat( + valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $derivative: { input: \"$miles\", unit: \"hour\" } }"); + } + + @Test // GH-3721 + void rendersIntegral() { + assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\" } }"); + } + + @Test // GH-3721 + void rendersIntegralWithUnit() { + assertThat(valueOf("kilowatts").integral(SetWindowFieldsOperation.WindowUnits.HOUR) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }"); + } + + @Test // GH-3728 + void rendersSin() { + + assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sin : \"$angle\" }"); + } + + @Test // GH-3728 + void rendersSinWithValueInDegrees() { + + assertThat(valueOf("angle").sin(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sin : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3728 + void rendersSinh() { + + assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sinh : \"$angle\" }"); + } + + @Test // GH-3728 + void rendersSinhWithValueInDegrees() { + + assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3708 + void rendersASin() { + assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asin : \"$field\" }"); + } + + @Test // GH-3708 + void rendersASinh() { + assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asinh : \"$field\" }"); + } + + @Test // GH-3710 + void rendersCos() { + + assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cos : \"$angle\" }"); + } + + @Test // GH-3710 + void rendersCosWithValueInDegrees() { + + assertThat(valueOf("angle").cos(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cos : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3710 + void rendersCosh() { + + assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cosh : \"$angle\" }"); + } + 
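A sketch of the `$round` rendering contract covered by the DATAMONGO-2370 tests above (the field name is illustrative): no place yields a single-element operand array, a place adds a second element, and the place may itself be a field reference or an expression.

```java
import static org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.valueOf;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

class RoundSketch {

	void demo() {

		Document round = valueOf("price").round().toDocument(Aggregation.DEFAULT_CONTEXT);
		// { $round: ["$price"] }

		Document roundTo = valueOf("price").roundToPlace(2).toDocument(Aggregation.DEFAULT_CONTEXT);
		// { $round: ["$price", 2] }
	}
}
```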
+ @Test // GH-3707 + void rendersACos() { + assertThat(valueOf("field").acos().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $acos : \"$field\" }"); + } + + @Test // GH-3707 + void rendersACosh() { + assertThat(valueOf("field").acosh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $acosh : \"$field\" }"); + } + + @Test // GH-3710 + void rendersCoshWithValueInDegrees() { + + assertThat(valueOf("angle").cosh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cosh : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3730 + void rendersTan() { + + assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tan : \"$angle\" }"); + } + + @Test // GH-3730 + void rendersTanWithValueInDegrees() { + + assertThat(valueOf("angle").tan(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tan : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3730 + void rendersTanh() { + + assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tanh : \"$angle\" }"); + } + + @Test // GH-3730 + void rendersTanhWithValueInDegrees() { + + assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3709 + void rendersATan() { + + assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atan : \"$field\" }"); + } + + @Test // GH-3709 + void rendersATan2() { + + assertThat(valueOf("field1").atan2("field2").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); + } + + @Test // GH-3709 + void rendersATanh() { + + assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atanh : \"$field\" }"); + } + + @Test // GH-3724 + void rendersRand() { + assertThat(rand().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rand", new Document())); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArrayOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArrayOperatorsUnitTests.java new file mode 100644 index 0000000000..0ab5545f23 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArrayOperatorsUnitTests.java @@ -0,0 +1,205 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.ArrayToObject; + +/** + * Unit tests for {@link ArrayOperators} + * + * @author Christoph Strobl + * @author Shashank Sharma + * @author Divya Srivastava + * @currentRead Royal Assassin - Robin Hobb + */ +public class ArrayOperatorsUnitTests { + + static final List VALUE_LIST = Arrays.asList(1, "2", new Document("_id", 3)); + static final String VALUE_LIST_STRING = "[1, \"2\", { \"_id\" : 3 }]"; + static final String EXPRESSION_STRING = "{ \"$stablemaster\" : \"burrich\" }"; + static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2052 + public void toArrayWithFieldReference() { + + assertThat(ArrayOperators.arrayOf("regal").toObject().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayToObject: \"$regal\" } "); + } + + @Test // DATAMONGO-2052 + public void toArrayWithExpression() { + + assertThat(ArrayOperators.arrayOf(EXPRESSION).toObject().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayToObject: " + EXPRESSION_STRING + "} "); + } + + @Test // DATAMONGO-2052 + public void toArrayWithArgumentList() { + + List> source = new ArrayList<>(); + source.add(Arrays.asList("king", "shrewd")); + source.add(Arrays.asList("prince", "verity")); + + assertThat(ArrayToObject.arrayToObject(source).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayToObject: [ [ \"king\", \"shrewd\"], [ \"prince\", \"verity\" ] ] } "); + } + + @Test // DATAMONGO-2287 + public void arrayElementAtWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).elementAt(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayElemAt: [ " + VALUE_LIST_STRING + ", 1] } "); + } + + @Test // DATAMONGO-2287 + public void concatWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).concat("field").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $concatArrays: [ " + VALUE_LIST_STRING + ", \"$field\"] } "); + } + + @Test // DATAMONGO-2287 + public void filterWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).filter().as("var").by(new Document()) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $filter: { \"input\" : " + VALUE_LIST_STRING + ", \"as\" : \"var\", \"cond\" : {} } } "); + } + + @Test // DATAMONGO-2287 + public void lengthWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).length().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $size: [ " + VALUE_LIST_STRING + "] } "); + } + + @Test // DATAMONGO-2287 + public void sliceWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).slice().itemCount(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $slice: [ " + VALUE_LIST_STRING + ", 3] } "); + } + + @Test // DATAMONGO-2287 + public void indexOfWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).indexOf("s1p").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $indexOfArray: [ " + VALUE_LIST_STRING + ", \"s1p\"] } "); + } + + @Test // DATAMONGO-2287 + public void 
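A sketch of the `$filter` shape these tests assert, using a field-backed array instead of the inlined value list (all names are illustrative):

```java
import java.util.Arrays;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.ArrayOperators;

class FilterSketch {

	Document demo() {

		// "item" is the per-element variable the condition references as "$$item".
		return ArrayOperators.arrayOf("items").filter().as("item") //
				.by(new Document("$gte", Arrays.asList("$$item.price", 100))) //
				.toDocument(Aggregation.DEFAULT_CONTEXT);
		// { $filter: { input: "$items", as: "item", cond: { $gte: ["$$item.price", 100] } } }
	}
}
```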
reverseWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).reverse().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $reverseArray: [ " + VALUE_LIST_STRING + "] } "); + } + + @Test // DATAMONGO-2287 + public void zipWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).zipWith("field").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $zip: { \"inputs\": [" + VALUE_LIST_STRING + ", \"$field\"]} } "); + } + + @Test // DATAMONGO-2287 + public void inWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).containsValue("$userName").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$in\" : [\"$userName\", " + VALUE_LIST_STRING + "] }"); + } + + @Test // GH-3694 + public void firstWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).first().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$first\" : " + VALUE_LIST_STRING + "}"); + } + + @Test // GH-3694 + public void firstWithExpression() { + + assertThat(ArrayOperators.arrayOf(EXPRESSION).first().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$first\" : " + EXPRESSION_STRING + "}"); + } + + @Test // GH-3694 + public void firstWithFieldReference() { + + assertThat(ArrayOperators.arrayOf("field").first().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $first : \"$field\" }"); + } + + @Test // GH-3694 + public void lastWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).last().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$last\" : " + VALUE_LIST_STRING + "}"); + } + + @Test // GH-3694 + public void lastWithExpression() { + + assertThat(ArrayOperators.arrayOf(EXPRESSION).last().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$last\" : " + EXPRESSION_STRING + "}"); + } + + @Test // GH-3694 + public void lastWithFieldReference() { + + assertThat(ArrayOperators.arrayOf("field").last().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $last : \"$field\" }"); + } + + @Test // GH-4139 + void sortByWithFieldRef() { + + assertThat(ArrayOperators.arrayOf("team").sort(Sort.by("name")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sortArray: { input: \"$team\", sortBy: { name: 1 } } }"); + } + + @Test // GH-4929 + public void sortArrayByValueAscending() { + + Document result = ArrayOperators.arrayOf("numbers").sort(Direction.ASC).toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(result).isEqualTo("{ $sortArray: { input: '$numbers', sortBy: 1 } }"); + } + + @Test // GH-4929 + public void sortArrayByValueDescending() { + + Document result = ArrayOperators.arrayOf("numbers").sort(Direction.DESC).toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(result).isEqualTo("{ $sortArray: { input: '$numbers', sortBy: -1 } }"); + } + + @Test // GH-4929 + void sortByWithDirection() { + + assertThat(ArrayOperators.arrayOf(List.of("a", "b", "d", "c")).sort(Direction.DESC) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sortArray: { input: [\"a\", \"b\", \"d\", \"c\"], sortBy: -1 } }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperationUnitTests.java new file mode 100644 index 0000000000..c5b73b6576 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperationUnitTests.java @@ -0,0 +1,91 @@ +/* + * Copyright 2022-2025 
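The GH-4139 and GH-4929 tests above distinguish the two `$sortArray` flavors: documents sorted by one of their fields versus plain values sorted by direction only. Sketched:

```java
import org.bson.Document;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.ArrayOperators;

class SortArraySketch {

	void demo() {

		// Sorting an array of documents by a field ...
		Document byField = ArrayOperators.arrayOf("team").sort(Sort.by("name")) //
				.toDocument(Aggregation.DEFAULT_CONTEXT);
		// { $sortArray: { input: "$team", sortBy: { name: 1 } } }

		// ... versus sorting scalar values by direction alone.
		Document byValue = ArrayOperators.arrayOf("numbers").sort(Direction.DESC) //
				.toDocument(Aggregation.DEFAULT_CONTEXT);
		// { $sortArray: { input: "$numbers", sortBy: -1 } }
	}
}
```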
the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoClientSettings; + +/** + * Unit tests for {@link BasicAggregationOperation}. + * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class BasicAggregationOperationUnitTests { + + @Mock QueryMapper queryMapper; + @Mock MongoConverter converter; + + TypeBasedAggregationOperationContext ctx; + + @BeforeEach + void beforeEach() { + + // no field mapping though having a type based context + ctx = new TypeBasedAggregationOperationContext(Person.class, new MongoMappingContext(), queryMapper); + when(queryMapper.getConverter()).thenReturn(converter); + when(converter.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + } + + @Test // GH-4038 + void usesGivenDocumentAsIs() { + + Document source = new Document("value", 1); + assertThat(new BasicAggregationOperation(source).toDocument(ctx)).isSameAs(source); + } + + @Test // GH-4038 + void parsesJson() { + + Document source = new Document("value", 1); + assertThat(new BasicAggregationOperation(source.toJson()).toDocument(ctx)).isEqualTo(source); + } + + @Test // GH-4038 + void errorsOnInvalidValue() { + + BasicAggregationOperation agg = new BasicAggregationOperation(new Object()); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> agg.toDocument(ctx)); + } + + @Test // GH-4038 + void errorsOnNonJsonSting() { + + BasicAggregationOperation agg = new BasicAggregationOperation("#005BBB #FFD500"); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> agg.toDocument(ctx)); + } + + private static class Person { + + @Field("v-a-l-u-e") Object value; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java new file mode 100644 index 0000000000..af05e7bee0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; + +/** + * Unit tests for {@link BucketAutoOperation}. + * + * @author Mark Paluch + */ +public class BucketAutoOperationUnitTests { + + @Test // DATAMONGO-1552 + public void rejectsNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new BucketAutoOperation((Field) null, 0)); + } + + @Test // DATAMONGO-1552 + public void rejectsNonPositiveIntegerNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new BucketAutoOperation(Fields.field("field"), 0)); + } + + @Test // DATAMONGO-1552 + public void shouldRenderBucketOutputExpressions() { + + BucketAutoOperation operation = Aggregation.bucketAuto("field", 5) // + .andOutputExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // + .andOutput("title").push().as("titles"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse( + "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderEmptyAggregationExpression() { + assertThatIllegalStateException().isThrownBy(() -> bucket("groupby").andOutput("field").as("alias")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderBucketOutputOperators() { + + BucketAutoOperation operation = Aggregation.bucketAuto("field", 5) // + .andOutputCount().as("titles"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $sum: 1 } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderCorrectly() { + + Document agg = bucketAuto("field", 1).withBuckets(5).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $bucketAuto: { groupBy: \"$field\", buckets: 5 } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderGranulariy() { + + Document agg = bucketAuto("field", 1) // + .withGranularity(Granularities.E24) // + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $bucketAuto: { buckets: 1, granularity: \"E24\", groupBy: \"$field\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumOperator() { + + BucketAutoOperation operation = bucketAuto("field", 5) // + .andOutput("score").sum().as("cummulated_score"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ 
cummulated_score : { $sum: \"$score\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumWithOwnOutputExpression() { + + BucketAutoOperation operation = bucketAuto("field", 5) // + .andOutputExpression("netPrice + tax").apply("$multiply", 5).as("total"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)) + .isEqualTo(Document.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }")); + } + + private static Document extractOutput(Document fromBucketClause) { + return getAsDocument(getAsDocument(fromBucketClause, "$bucketAuto"), "output"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java new file mode 100644 index 0000000000..36a943d1c1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java @@ -0,0 +1,209 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link BucketOperation}. 
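Putting the `$bucketAuto` pieces from the tests above together (the group-by field and output name are illustrative): the stage picks its own boundaries, a granularity restricts them to a preferred-number series, and output fields are declared like group fields.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities;

class BucketAutoSketch {

	Document demo() {

		return bucketAuto("price", 5) //
				.withGranularity(Granularities.E24) // boundaries snap to the E24 series
				.andOutputCount().as("count") // { count: { $sum: 1 } }
				.toDocument(DEFAULT_CONTEXT);
	}
}
```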
+ * + * @author Mark Paluch + */ +public class BucketOperationUnitTests { + + @Test // DATAMONGO-1552 + public void rejectsNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new BucketOperation((Field) null)); + } + + @Test // DATAMONGO-1552 + public void shouldRenderBucketOutputExpressions() { + + BucketOperation operation = Aggregation.bucket("field") // + .andOutputExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // + .andOutput("title").push().as("titles"); + + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(dbObject)).isEqualTo(Document.parse( + "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderEmptyAggregationExpression() { + assertThatIllegalStateException().isThrownBy(() -> bucket("groupby").andOutput("field").as("alias")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderBucketOutputOperators() { + + BucketOperation operation = Aggregation.bucket("field") // + .andOutputCount().as("titles"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $sum: 1 } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumAggregationExpression() { + + Document agg = bucket("field") // + .andOutput(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") // + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $bucket: { groupBy: \"$field\", boundaries: [], output : { quizTotal: { $sum: \"$quizzes\"} } } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderDefault() { + + Document agg = bucket("field").withDefaultBucket("default bucket").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $bucket: { groupBy: \"$field\", boundaries: [], default: \"default bucket\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderBoundaries() { + + Document agg = bucket("field") // + .withDefaultBucket("default bucket") // + .withBoundaries(0) // + .withBoundaries(10, 20).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $bucket: { boundaries: [0, 10, 20], default: \"default bucket\", groupBy: \"$field\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").sum().as("cummulated_score"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ cummulated_score : { $sum: \"$score\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumWithValueOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").sum(4).as("cummulated_score"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ cummulated_score : { $sum: 4 } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderAvgOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").avg().as("average"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ average : { $avg: \"$score\" } }")); + } + + @Test // DATAMONGO-1552 + public void 
shouldRenderFirstOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").first().as("first_title"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ first_title : { $first: \"$title\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderLastOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").last().as("last_title"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ last_title : { $last: \"$title\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderMinOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("score").min().as("min_score"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ min_score : { $min: \"$score\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderPushOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").push().as("titles"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $push: \"$title\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderAddToSetOperator() { + + BucketOperation operation = bucket("field") // + .andOutput("title").addToSet().as("titles"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $addToSet: \"$title\" } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumWithExpression() { + + BucketOperation operation = bucket("field") // + .andOutputExpression("netPrice + tax").sum().as("total"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)) + .isEqualTo(Document.parse("{ total : { $sum: { $add : [\"$netPrice\", \"$tax\"]} } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderSumWithOwnOutputExpression() { + + BucketOperation operation = bucket("field") // + .andOutputExpression("netPrice + tax").apply("$multiply", 5).as("total"); + + Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(extractOutput(agg)) + .isEqualTo(Document.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }")); + } + + @Test // DATAMONGO-1552 + public void shouldExposeDefaultCountField() { + + BucketOperation operation = bucket("field"); + + assertThat(operation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(operation.getFields().getField("count")).isNotNull(); + } + + private static Document extractOutput(Document fromBucketClause) { + return (Document) ((Document) fromBucketClause.get("$bucket")).get("output"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java index dafd837c0f..7940f9e354 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java @@ -1,11 +1,73 @@ +/* + * Copyright 2017-2025 the original author or authors. 
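And the explicit-boundary `$bucket` counterpart to the output-operator tests above (values are illustrative): boundaries [0, 10, 20] produce the buckets [0, 10) and [10, 20), while everything else falls into the default bucket.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.bson.Document;

class BucketSketch {

	Document demo() {

		return bucket("score") //
				.withBoundaries(0, 10, 20) //
				.withDefaultBucket("other") // catch-all for out-of-range values
				.andOutput("title").push().as("titles") // { titles: { $push: "$title" } }
				.toDocument(DEFAULT_CONTEXT);
	}
}
```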
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Objects; + +/** + * @author Thomas Darimont + * @author Mark Paluch + */ class City { String name; int population; + public City() {} + + public City(String name, int population) { + + this.name = name; + this.population = population; + } + public String toString() { return "City [name=" + name + ", population=" + population + "]"; } + + public String getName() { + return this.name; + } + + public int getPopulation() { + return this.population; + } + + public void setName(String name) { + this.name = name; + } + + public void setPopulation(int population) { + this.population = population; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + City city = (City) o; + return population == city.population && Objects.equals(name, city.name); + } + + @Override + public int hashCode() { + return Objects.hash(name, population); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java new file mode 100644 index 0000000000..c11313176f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java @@ -0,0 +1,156 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond; +import org.springframework.data.mongodb.core.query.Criteria; + +/** + * Unit tests for {@link Cond}. 
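+ * <p>
+ * For orientation, a minimal {@code $cond} sketch; the field name and values are illustrative and not taken
+ * from the cases below:
+ *
+ * <pre class="code">
+ * // expected to render { $cond: { if: { $gte: [ "$qty", 250 ] }, then: 30, else: 20 } }
+ * ConditionalOperators.when(Criteria.where("qty").gte(250)).then(30).otherwise(20);
+ * </pre>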
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +public class CondExpressionUnitTests { + + @Test // DATAMONGO-861 + void builderRejectsEmptyFieldName() { + assertThatIllegalArgumentException().isThrownBy(() -> newBuilder().when("")); + } + + @Test // DATAMONGO-861 + void builderRejectsNullFieldName() { + assertThatIllegalArgumentException().isThrownBy(() -> newBuilder().when((Document) null)); + } + + @Test // DATAMONGO-861 + void builderRejectsNullCriteriaName() { + assertThatIllegalArgumentException().isThrownBy(() -> newBuilder().when((Criteria) null)); + } + + @Test // DATAMONGO-861 + void builderRejectsBuilderAsThenValue() { + assertThatIllegalArgumentException().isThrownBy( + () -> newBuilder().when("isYellow").then(newBuilder().when("field").then("then-value")).otherwise("otherwise")); + } + + @Test // DATAMONGO-861, DATAMONGO-1542, DATAMONGO-2242 + void simpleBuilderShouldRenderCorrectly() { + + Cond operator = ConditionalOperators.when("isYellow").thenValueOf("bright").otherwise("dark"); + Document document = operator.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document expectedCondition = new Document() // + .append("if", "$isYellow") // + .append("then", "$bright") // + .append("else", "dark"); + + assertThat(document).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861, DATAMONGO-1542, DATAMONGO-2242 + void simpleCriteriaShouldRenderCorrectly() { + + Cond operator = ConditionalOperators.when(Criteria.where("luminosity").gte(100)).thenValueOf("bright") + .otherwise("dark"); + Document document = operator.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document expectedCondition = new Document() // + .append("if", new Document("$gte", Arrays.<Object> asList("$luminosity", 100))) // + .append("then", "$bright") // + .append("else", "dark"); + + assertThat(document).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861, DATAMONGO-2242 + void andCriteriaShouldRenderCorrectly() { + + Cond operator = ConditionalOperators.when(Criteria.where("luminosity").gte(100) // + .andOperator(Criteria.where("hue").is(50), // + Criteria.where("saturation").lt(11))) + .thenValueOf("bright").otherwiseValueOf("dark-field"); + + Document document = operator.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document luminosity = new Document("$gte", Arrays.<Object> asList("$luminosity", 100)); + Document hue = new Document("$eq", Arrays.<Object> asList("$hue", 50)); + Document saturation = new Document("$lt", Arrays.<Object> asList("$saturation", 11)); + + Document expectedCondition = new Document() // + .append("if", Arrays.<Object> asList(luminosity, new Document("$and", Arrays.asList(hue, saturation)))) // + .append("then", "$bright") // + .append("else", "$dark-field"); + + assertThat(document).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861, DATAMONGO-1542, DATAMONGO-2242 + void twoArgsCriteriaShouldRenderCorrectly() { + + Criteria criteria = Criteria.where("luminosity").gte(100) // + .and("saturation").and("chroma").is(200); + Cond operator = ConditionalOperators.when(criteria).thenValueOf("bright").otherwise("dark"); + + Document document = operator.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document gte = new Document("$gte", Arrays.<Object> asList("$luminosity", 100)); + Document is = new Document("$eq", Arrays.<Object>
asList("$chroma", 200)); + + Document expectedCondition = new Document() // + .append("if", Arrays.asList(gte, is)) // + .append("then", "$bright") // + .append("else", "dark"); + + assertThat(document).containsEntry("$cond", expectedCondition); + } + + @Test // DATAMONGO-861, DATAMONGO-1542 + void nestedCriteriaShouldRenderCorrectly() { + + Cond operator = ConditionalOperators.when(Criteria.where("luminosity").gte(100)) // + .thenValueOf(newBuilder() // + .when(Criteria.where("luminosity").gte(200)) // + .then("verybright") // + .otherwise("not-so-bright")) // + .otherwise(newBuilder() // + .when(Criteria.where("luminosity").lt(50)) // + .then("very-dark") // + .otherwise("not-so-dark")); + + Document document = operator.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document trueCondition = new Document() // + .append("if", new Document("$gte", Arrays. asList("$luminosity", 200))) // + .append("then", "verybright") // + .append("else", "not-so-bright"); + + Document falseCondition = new Document() // + .append("if", new Document("$lt", Arrays. asList("$luminosity", 50))) // + .append("then", "very-dark") // + .append("else", "not-so-dark"); + + assertThat(document).containsEntry("$cond.then.$cond", trueCondition); + assertThat(document).containsEntry("$cond.else.$cond", falseCondition); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java new file mode 100644 index 0000000000..00f44194f6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ConditionalOperators.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ConditionalOperators}. 
+ * + * @author Christoph Strobl + */ +class ConditionalOperatorsUnitTests { + + @Test // GH-3720 + void rendersIfNullWithMultipleConditionalValuesCorrectly() { + + assertThat(ifNull("description").orIfNull("quantity").then("Unspecified").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $ifNull: [ \"$description\", \"$quantity\", \"Unspecified\" ] }")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java new file mode 100644 index 0000000000..3600818a16 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java @@ -0,0 +1,232 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ConvertOperators}. + * + * @author Christoph Strobl + * @currentRead Royal Assassin - Robin Hobb + */ +public class ConvertOperatorsUnitTests { + + static final String EXPRESSION_STRING = "{ \"$molly\" : \"chandler\" }"; + static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2048 + public void convertToUsingStringIdentifier() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToUsingIntIdentifier() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : 1 } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToTypeOf("fitz").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"$fitz\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToUsingExpression() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToTypeOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnErrorValue() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onErrorReturn("foo") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onError\" : \"foo\" } } ")); + } + + @Test // 
DATAMONGO-2048 + public void convertToWithOnErrorValueOfField() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onErrorReturnValueOf("verity") + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document + .parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onError\" : \"$verity\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnErrorValueOfExpression() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onErrorReturnValueOf(EXPRESSION) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onError\" : " + + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnNullValue() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onNullReturn("foo") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onNull\" : \"foo\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnNullValueOfField() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onNullReturnValueOf("verity") + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document + .parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onNull\" : \"$verity\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnNullValueOfExpression() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onNullReturnValueOf(EXPRESSION) + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( + "{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onNull\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO-2048 + public void toBoolUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToBoolean().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toBool: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toBoolUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToBoolean().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toBool: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toDateUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToDate().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDate: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toDateUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToDate().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDate: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toDecimalUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToDecimal().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDecimal: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toDecimalUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToDecimal().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDecimal: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toDoubleUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToDouble().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDouble: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 
+ public void toDoubleUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToDouble().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDouble: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toIntUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToInt().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toInt: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toIntUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToInt().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toInt: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toLongUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToLong().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toLong: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toLongUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToLong().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toLong: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toObjectIdUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToObjectId().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toObjectId: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toObjectIdUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToObjectId().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toObjectId: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toStringUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToString().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toString: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toStringUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToString().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toString: " + EXPRESSION_STRING + " } ")); + } + + @Test // GH-3714 + void degreesToRadiansUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("angle_a").convertDegreesToRadians().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $degreesToRadians : \"$angle_a\"}")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java new file mode 100644 index 0000000000..eea05fbfaa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java @@ -0,0 +1,52 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link CountOperation}. + * + * @author Mark Paluch + */ +public class CountOperationUnitTests { + + @Test // DATAMONGO-1549 + public void rejectsEmptyFieldName() { + assertThatIllegalArgumentException().isThrownBy(() -> new CountOperation("")); + } + + @Test // DATAMONGO-1549 + public void shouldRenderCorrectly() { + + CountOperation countOperation = new CountOperation("field"); + assertThat(countOperation.toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{$count : \"field\" }")); + } + + @Test // DATAMONGO-1549 + public void countExposesFields() { + + CountOperation countOperation = new CountOperation("field"); + + assertThat(countOperation.getFields().exposesNoFields()).isFalse(); + assertThat(countOperation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(countOperation.getFields().getField("field")).isNotNull(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java index 1ea143d584..b1826fdb33 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java index 0ed5e1563e..a7d294e138 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java new file mode 100644 index 0000000000..2dd6e3beea --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -0,0 +1,147 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.time.DayOfWeek; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.TimeZone; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; + +/** + * Unit tests for {@link DateOperators}. 
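+ * <p>
+ * For orientation, a minimal sketch of the fluent API exercised below (the field name is illustrative):
+ *
+ * <pre class="code">
+ * // expected to render { $dateAdd: { startDate: "$orderDate", unit: "day", amount: 1 } }
+ * DateOperators.dateOf("orderDate").add(1, "day");
+ * </pre>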
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +class DateOperatorsUnitTests { + + @Test // GH-3713 + void rendersDateAdd() { + + assertThat(DateOperators.dateOf("purchaseDate").add(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-3713 + void rendersDateAddWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).add(3, "day") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }"); + } + + @Test // GH-4139 + void rendersDateSubtract() { + + assertThat(DateOperators.dateOf("purchaseDate").subtract(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateSubtract: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-4139 + void rendersDateSubtractWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).subtract(3, "day") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateSubtract: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }"); + } + + @Test // GH-3713 + void rendersDateDiff() { + + assertThat( + DateOperators.dateOf("purchaseDate").diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); + } + + @Test // GH-3713 + void rendersDateDiffWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")) + .diffValueOf("delivered", DateOperators.TemporalUnit.from(ChronoUnit.DAYS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(ZoneOffset.ofHoursMinutes(3, 30)).getValue()).isEqualTo("+03:30"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("-06:00"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneId() { + assertThat(DateOperators.Timezone.fromZone(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("America/Chicago"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneId() { + assertThat(DateOperators.Timezone.fromZone(ZoneId.of("America/Chicago")).getValue()).isEqualTo("America/Chicago"); + } + + @Test // GH-4139 + void rendersDateTrunc() { + + assertThat(DateOperators.dateOf("purchaseDate").truncate("week").binSize(2).startOfWeek(DayOfWeek.MONDAY).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateTrunc: { date: \"$purchaseDate\", unit: \"week\", binSize: 2, startOfWeek : \"monday\" } }"); + } + + @Test // GH-4139 + void rendersDateTruncWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).truncate("week").binSize(2).startOfWeek(DayOfWeek.MONDAY).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateTrunc: { date: \"$purchaseDate\", unit: \"week\", binSize: 2, startOfWeek : \"monday\", timezone : \"America/Chicago\" } }"); + } + + @Test // GH-4139 + void rendersTsIncrement() { + + 
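+ // $tsIncrement reads the incrementing ordinal from a BSON timestamp; it takes no timezone,
+ // which is why the zoned variants further below are expected to throw.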
assertThat(DateOperators.dateOf("saleTimestamp").tsIncrement().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $tsIncrement: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void tsIncrementErrorsOnTimezone() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).tsIncrement()); + } + + @Test // GH-4139 + void rendersTsSecond() { + + assertThat(DateOperators.dateOf("saleTimestamp").tsSecond().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $tsSecond: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void tsSecondErrorsOnTimezone() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).tsSecond()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DensifyOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DensifyOperationUnitTests.java new file mode 100644 index 0000000000..47176fd8ab --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DensifyOperationUnitTests.java @@ -0,0 +1,150 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Date; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DensifyOperation.DensifyUnits; +import org.springframework.data.mongodb.core.aggregation.DensifyOperation.Range; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link DensifyOperation}. 
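+ * <p>
+ * Note that {@code $densify} (MongoDB 5.1+) fills gaps in a sequence by generating documents at the configured
+ * step, either within explicit bounds, across the full range, or per partition, as the cases below show.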
+ * + * @author Christoph Strobl + */ +class DensifyOperationUnitTests { + + @Test // GH-4139 + void rendersFieldNamesAsIsForUntypedContext() { + + DensifyOperation densify = DensifyOperation.builder().densify("ts") + .range(Range.bounded("2021-05-18T00:00:00", "2021-05-18T08:00:00").incrementBy(1).unit(DensifyUnits.HOUR)) + .build(); + + assertThat(densify.toDocument(contextFor(null))).isEqualTo(""" + { + $densify: { + field: "ts", + range: { + step: 1, + unit: "hour", + bounds:[ "2021-05-18T00:00:00", "2021-05-18T08:00:00" ] + } + } + } + """); + } + + @Test // GH-4139 + void rendersFieldNamesCorrectly() { + + DensifyOperation densify = DensifyOperation.builder().densify("ts") + .range(Range.bounded("2021-05-18T00:00:00", "2021-05-18T08:00:00").incrementBy(1).unit(DensifyUnits.HOUR)) + .build(); + + assertThat(densify.toDocument(contextFor(Weather.class))).isEqualTo(""" + { + $densify: { + field: "timestamp", + range: { + step: 1, + unit: "hour", + bounds:[ "2021-05-18T00:00:00", "2021-05-18T08:00:00" ] + } + } + } + """); + } + + @Test // GH-4139 + void rendersPartitionNamesCorrectly() { + + DensifyOperation densify = DensifyOperation.builder().densify("alt").partitionBy("var") + .fullRange(range -> range.incrementBy(200)).build(); + + assertThat(densify.toDocument(contextFor(Coffee.class))).isEqualTo(""" + { + $densify: { + field: "altitude", + partitionByFields : [ "variety" ], + range: { + step: 200, + bounds: "full" + } + } + } + """); + } + + @Test // GH-4139 + void rendersPartitionRangeCorrectly() { + + DensifyOperation densify = DensifyOperation.builder().densify("alt").partitionBy("var") + .partitionRange(range -> range.incrementBy(200)).build(); + + assertThat(densify.toDocument(contextFor(Coffee.class))).isEqualTo(""" + { + $densify: { + field: "altitude", + partitionByFields : [ "variety" ], + range: { + step: 200, + bounds: "partition" + } + } + } + """); + } + + private static AggregationOperationContext contextFor(@Nullable Class<?> type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + class Weather { + + @Field("timestamp") Date ts; + + @Field("temp") Long temperature; + } + + class Coffee { + + @Field("altitude") Long alt; + + @Field("variety") String var; + + Float score; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java new file mode 100644 index 0000000000..c647e1c6c7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link DocumentOperators}. + * + * @author Christoph Strobl + */ +class DocumentOperatorsUnitTests { + + @Test // GH-3715 + void rendersRank() { + assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $rank: { } }"); + } + + @Test // GH-3715 + void rendersDenseRank() { + assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $denseRank: { } }"); + } + + @Test // GH-3717 + void rendersDocumentNumber() { + assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $documentNumber: { } }"); + } + + @Test // GH-3727 + void rendersShift() { + + assertThat(valueOf("quantity").shift(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(valueOf("quantity").shift(1).defaultTo("Not available").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java new file mode 100644 index 0000000000..60e6541281 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link EvaluationOperators}. 
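+ * <p>
+ * Note that {@code expr()} wraps the value in {@code $expr} so aggregation expressions can be used where query
+ * predicates are expected, while {@code locf()} renders {@code $locf} (last observation carried forward, MongoDB 5.2+).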
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +class EvaluationOperatorsUnitTests { + + @Test // GH-3790 + void shouldRenderExprCorrectly() { + + assertThat(EvaluationOperators.valueOf("foo").expr().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $expr: \"$foo\" }"); + } + + @Test // GH-4139 + void shouldRenderLocfCorrectly() { + + assertThat(EvaluationOperators.valueOf("foo").locf().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $locf: \"$foo\" }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java index 8672235a18..ee20b15291 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,42 +15,42 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; /** * Unit tests for {@link ExposedFields}. 
- * + * * @author Oliver Gierke * @author Thomas Darimont */ public class ExposedFieldsUnitTests { - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFields() { - ExposedFields.from((ExposedField) null); + assertThatIllegalArgumentException().isThrownBy(() -> ExposedFields.from((ExposedField) null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldsForSynthetics() { - ExposedFields.synthetic(null); + assertThatIllegalArgumentException().isThrownBy(() -> ExposedFields.synthetic(null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldsForNonSynthetics() { - ExposedFields.nonSynthetic(null); + assertThatIllegalArgumentException().isThrownBy(() -> ExposedFields.nonSynthetic(null)); } @Test public void exposesSingleField() { ExposedFields fields = ExposedFields.synthetic(Fields.fields("foo")); - assertThat(fields.exposesSingleFieldOnly(), is(true)); + assertThat(fields.exposesSingleFieldOnly()).isTrue(); fields = fields.and(new ExposedField("bar", true)); - assertThat(fields.exposesSingleFieldOnly(), is(false)); + assertThat(fields.exposesSingleFieldOnly()).isFalse(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java new file mode 100644 index 0000000000..2cfc941bda --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java @@ -0,0 +1,112 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.bson.Document; +import org.junit.Test; + +import org.springframework.data.mongodb.core.query.Criteria; + +/** + * Unit tests for {@link FacetOperation}. 
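+ * <p>
+ * For orientation, a minimal two-facet sketch (facet names are illustrative):
+ *
+ * <pre class="code">
+ * // runs both sub-pipelines over the same set of input documents
+ * facet(bucketAuto("price", 5)).as("byPrice").and(sortByCount("country")).as("byCountry");
+ * </pre>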
+ * + * @author Mark Paluch + * @author Jérôme Guyon + * @soundtrack Stanley Foort - You Make Me Believe In Magic (Extended Mix) + */ +public class FacetOperationUnitTests { + + @Test // DATAMONGO-1552 + public void shouldRenderCorrectly() { + + FacetOperation facetOperation = new FacetOperation().and(match(Criteria.where("price").exists(true)), // + bucket("price") // + .withBoundaries(0, 150, 200, 300, 400) // + .withDefaultBucket("Other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles")) // + .as("categorizedByPrice") // + .and(bucketAuto("year", 5)).as("categorizedByYears"); + + Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $facet: { categorizedByPrice: [" + "{ $match: { price: { $exists: true } } }, " + + "{ $bucket: { boundaries: [ 0, 150, 200, 300, 400 ], groupBy: \"$price\", default: \"Other\", " + + "output: { count: { $sum: 1 }, titles: { $push: \"$title\" } } } } ]," + + "categorizedByYears: [ { $bucketAuto: { buckets: 5, groupBy: \"$year\" } } ] } }")); + } + + @Test // DATAMONGO-1552 + public void shouldRenderEmpty() { + + FacetOperation facetOperation = facet(); + + Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $facet: { } }")); + } + + @Test(expected = IllegalArgumentException.class) // DATAMONGO-1552 + public void shouldRejectNonExistingFields() { + + FacetOperation facetOperation = new FacetOperation().and(project("price"), // + bucket("price") // + .withBoundaries(0, 150, 200, 300, 400) // + .withDefaultBucket("Other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles")) // + .as("categorizedByPrice"); + + Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $facet: { categorizedByPrice: [" + "{ $match: { price: { $exists: true } } }, " + + "{ $bucket: {boundaries: [ 0, 150, 200, 300, 400 ], groupBy: \"$price\", default: \"Other\", " + + "output: { count: { $sum: 1 }, titles: { $push: \"$title\" } } } } ]," + + "categorizedByYears: [ { $bucketAuto: { buckets: 5, groupBy: \"$year\" } } ] } }")); + } + + @Test // DATAMONGO-1552 + public void shouldHonorProjectedFields() { + + FacetOperation facetOperation = new FacetOperation().and(project("price").and("title").as("name"), // + bucketAuto("price", 5) // + .andOutput("name").push().as("titles")) // + .as("categorizedByPrice"); + + Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $facet: { categorizedByPrice: [" + + "{ $project: { price: 1, name: \"$title\" } }, " + "{ $bucketAuto: { buckets: 5, groupBy: \"$price\", " + + "output: { titles: { $push: \"$name\" } } } } ] } }")); + } + + @Test // DATAMONGO-1553 + public void shouldRenderSortByCountCorrectly() { + + FacetOperation facetOperation = new FacetOperation() // + .and(sortByCount("country")) // + .as("categorizedByCountry"); + + Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).containsEntry("$facet.categorizedByCountry.[0].$sortByCount", "$country"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java index 77cd200207..cfeb411387 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,29 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import org.hamcrest.Matchers; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.aggregation.Fields.*; /** * Unit tests for {@link Fields}. - * + * * @author Oliver Gierke * @author Thomas Darimont */ public class FieldsUnitTests { - @Rule public ExpectedException exception = ExpectedException.none(); - - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldVarArgs() { - Fields.from((Field[]) null); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.from((Field[]) null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldNameVarArgs() { - Fields.fields((String[]) null); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.fields((String[]) null)); } @Test @@ -55,19 +50,19 @@ public void createsFieldFromNameAndTarget() { verify(Fields.field("foo", "bar"), "foo", "bar"); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldName() { - Fields.field(null); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field(null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldNameIfTargetGiven() { - Fields.field(null, "foo"); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field(null, "foo")); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsEmptyFieldName() { - Fields.field(""); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field("")); } @Test @@ -76,8 +71,8 @@ public void createsFieldsFromFieldInstances() { AggregationField reference = new AggregationField("foo"); Fields fields = Fields.from(reference); - assertThat(fields, is(Matchers.<Field> iterableWithSize(1))); - assertThat(fields, hasItem(reference)); + assertThat(fields).hasSize(1); + assertThat(fields).contains(reference); } @Test @@ -90,7 +85,7 @@ public void fieldsFactoryMethod() { Fields fields = fields("a", "b").and("c").and("d", "e"); - assertThat(fields, is(Matchers.<Field>
iterableWithSize(4))); + assertThat(fields).hasSize(4); verify(fields.getField("a"), "a", null); verify(fields.getField("b"), "b", null); @@ -100,44 +95,39 @@ public void fieldsFactoryMethod() { @Test public void rejectsAmbiguousFieldNames() { - - exception.expect(IllegalArgumentException.class); - - fields("b", "a.b"); + assertThatIllegalArgumentException().isThrownBy(() -> fields("b", "a.b")); } - /** - * @see DATAMONGO-774 - */ - @Test + @Test // DATAMONGO-774 public void stripsLeadingDollarsFromName() { - assertThat(Fields.field("$name").getName(), is("name")); - assertThat(Fields.field("$$$$name").getName(), is("name")); + assertThat(Fields.field("$name").getName()).isEqualTo("name"); + assertThat(Fields.field("$$$$name").getName()).isEqualTo("name"); } - /** - * @see DATAMONGO-774 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-774 public void rejectsNameConsistingOfDollarOnly() { - Fields.field("$"); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field("$")); } - /** - * @see DATAMONGO-774 - */ - @Test + @Test // DATAMONGO-774 public void stripsLeadingDollarsFromTarget() { - assertThat(Fields.field("$target").getTarget(), is("target")); - assertThat(Fields.field("$$$$target").getTarget(), is("target")); + assertThat(Fields.field("$target").getTarget()).isEqualTo("target"); + assertThat(Fields.field("$$$$target").getTarget()).isEqualTo("target"); + } + + @Test // GH-4123 + public void keepsRawMappingToDbRefId() { + + assertThat(Fields.field("$id").getName()).isEqualTo("id"); + assertThat(Fields.field("person.$id").getTarget()).isEqualTo("person.$id"); } private static void verify(Field field, String name, String target) { - assertThat(field, is(notNullValue())); - assertThat(field.getName(), is(name)); - assertThat(field.getTarget(), is(target != null ? target : name)); + assertThat(field).isNotNull(); + assertThat(field.getName()).isEqualTo(name); + assertThat(field.getTarget()).isEqualTo(target != null ? target : name); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java new file mode 100644 index 0000000000..a5df9647a1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java @@ -0,0 +1,149 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.filter; + +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class FilterExpressionUnitTests { + + private AggregationOperationContext aggregationContext; + private MongoMappingContext mappingContext; + + @BeforeEach + void setUp() { + + mappingContext = new MongoMappingContext(); + aggregationContext = new TypeBasedAggregationOperationContext(Sales.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + } + + @Test // DATAMONGO-1491 + void shouldConstructFilterExpressionCorrectly() { + + TypedAggregation<Sales> agg = Aggregation.newAggregation(Sales.class, + Aggregation.project() + .and(filter("items").as("item").by(ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100))) + .as("items")); + + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", aggregationContext)); + Document expected = Document.parse("{" + // + "input: \"$items\"," + // + "as: \"item\"," + // + "cond: { $gte: [ \"$$item.price\", 100 ] }" + // + "}"); + + assertThat($filter).isEqualTo(new Document(expected)); + } + + @Test // DATAMONGO-1491 + void shouldConstructFilterExpressionCorrectlyWhenUsingFilterOnProjectionBuilder() { + + TypedAggregation<Sales> agg = Aggregation.newAggregation(Sales.class, Aggregation.project().and("items") + .filter("item", ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100)).as("items")); + + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", aggregationContext)); + Document expected = Document.parse("{" + // + "input: \"$items\"," + // + "as: \"item\"," + // + "cond: { $gte: [ \"$$item.price\", 100 ] }" + // + "}"); + + assertThat($filter).isEqualTo(expected); + } + + @Test // DATAMONGO-1491 + void shouldConstructFilterExpressionCorrectlyWhenInputMapToArray() { + + TypedAggregation<Sales> agg = Aggregation.newAggregation(Sales.class, + Aggregation.project().and(filter(Arrays.<Object>
asList(1, "a", 2, null, 3.1D, 4, "5")).as("num") + .by(ComparisonOperators.valueOf("num").greaterThanEqualToValue(3))).as("items")); + + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", aggregationContext)); + Document expected = Document.parse("{" + // + "input: [ 1, \"a\", 2, null, 3.1, 4, \"5\" ]," + // + "as: \"num\"," + // + "cond: { $gte: [ \"$$num\", 3 ] }" + // + "}"); + + assertThat($filter).isEqualTo(expected); + } + + @Test // DATAMONGO-2320 + void shouldConstructFilterExpressionCorrectlyWhenConditionContainsFieldReference() { + + Aggregation agg = Aggregation.newAggregation(Aggregation.project().and((ctx) -> new Document()).as("field-1") + .and(filter("items").as("item").by(ComparisonOperators.valueOf("item.price").greaterThan("field-1"))) + .as("items")); + + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", Aggregation.DEFAULT_CONTEXT)); + Document expected = Document.parse("{" + // + "input: \"$items\"," + // + "as: \"item\"," + // + "cond: { $gt: [ \"$$item.price\", \"$field-1\" ] }" + // + "}"); + + assertThat($filter).isEqualTo(new Document(expected)); + } + + @Test // GH-4394 + void filterShouldAcceptExpression() { + + Document $filter = ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray()).filter().as("item") + .by(ComparisonOperators.valueOf("item.price").greaterThan("field-1")).toDocument(Aggregation.DEFAULT_CONTEXT); + + Document expected = Document.parse(""" + { $filter : { + input: { $objectToArray: "$data.metadata" }, + as: "item", + cond: { $gt: [ "$$item.price", "$field-1" ] } + }} + """); + + assertThat($filter).isEqualTo(expected); + } + + private Document extractFilterOperatorFromDocument(Document source) { + + List pipeline = DocumentTestUtils.getAsDBList(source, "pipeline"); + Document $project = DocumentTestUtils.getAsDocument((Document) pipeline.get(0), "$project"); + Document items = DocumentTestUtils.getAsDocument($project, "items"); + return DocumentTestUtils.getAsDocument(items, "$filter"); + } + + static class Sales { + + List items; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java index 95e2f13a73..9496a51c03 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,253 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; -import org.springframework.data.mongodb.core.DBObjectTestUtils; -import org.springframework.data.mongodb.core.query.NearQuery; +import java.util.Arrays; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Distance; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; /** * Unit tests for {@link GeoNearOperation}. - * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl */ public class GeoNearOperationUnitTests { - /** - * @see DATAMONGO-1127 - */ - @Test + @Test // DATAMONGO-1127 public void rendersNearQueryAsAggregationOperation() { NearQuery query = NearQuery.near(10.0, 10.0); GeoNearOperation operation = new GeoNearOperation(query, "distance"); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document nearClause = DocumentTestUtils.getAsDocument(document, "$geoNear"); + + Document expected = new Document(query.toDocument()).append("distanceField", "distance"); + assertThat(nearClause).isEqualTo(expected); + } + + @Test // DATAMONGO-2050 + public void rendersNearQueryWithKeyCorrectly() { + + NearQuery query = NearQuery.near(10.0, 10.0); + GeoNearOperation operation = new GeoNearOperation(query, "distance").useIndex("geo-index-1"); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(DocumentTestUtils.getAsDocument(document, "$geoNear")).containsEntry("key", "geo-index-1"); + } + + @Test // DATAMONGO-2264 + public void rendersMaxDistanceCorrectly() { + + NearQuery query = NearQuery.near(10.0, 20.0).maxDistance(new Distance(30.0)); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).maxDistance(30.0).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersMinDistanceCorrectly() { + + NearQuery query = NearQuery.near(10.0, 20.0).minDistance(new Distance(30.0)); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).minDistance(30.0).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersSphericalCorrectly() { + + 
NearQuery query = NearQuery.near(10.0, 20.0).spherical(true); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).spherical(true).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersDistanceMultiplier() { + + NearQuery query = NearQuery.near(10.0, 20.0).inKilometers(); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).spherical(true).distanceMultiplier(6378.137).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersIndexKey() { + + NearQuery query = NearQuery.near(10.0, 20.0); + + assertThat( + new GeoNearOperation(query, "distance").useIndex("index-1").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).key("index-1").doc()); + } + + @Test // DATAMONGO-2264 + public void rendersQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).query(Query.query(Criteria.where("city").is("Austin"))); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).query(new Document("city", "Austin")).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersMappedQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).query(Query.query(Criteria.where("city").is("Austin"))); + + assertThat( + new GeoNearOperation(query, "distance").toPipelineStages(typedAggregationOperationContext(GeoDocument.class))) + .containsExactly($geoNear().near(10.0, 20.0).query(new Document("ci-ty", "Austin")).doc()); + } - DBObject nearClause = DBObjectTestUtils.getAsDBObject(dbObject, "$geoNear"); + @Test // DATAMONGO-2264 + public void appliesSkipFromNearQuery() { - DBObject expected = (DBObject) new BasicDBObject(query.toDBObject().toMap()).append("distanceField", "distance"); - assertThat(nearClause, is(expected)); + NearQuery query = NearQuery.near(10.0, 20.0).skip(10L); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).doc(), new Document("$skip", 10L)); + } + + @Test // DATAMONGO-2264 + public void appliesLimitFromNearQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).limit(10L); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).doc(), new Document("$limit", 10L)); + } + + @Test // DATAMONGO-2264 + public void appliesSkipAndLimitInOrder() { + + NearQuery query = NearQuery.near(10.0, 20.0).limit(10L).skip(3L); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).doc(), new Document("$skip", 3L), new Document("$limit", 10L)); + } + + private TypeBasedAggregationOperationContext typedAggregationOperationContext(Class type) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + return new TypeBasedAggregationOperationContext(type, mappingContext, new QueryMapper(converter)); + } + + GeoNearDocumentBuilder $geoNear() { + return new GeoNearDocumentBuilder(); + } + + static class GeoDocument { + + @Id String id; + @Field("ci-ty") String city; } + + static class GeoNearDocumentBuilder { + + Document target = new Document("distanceField", 
"distance").append("distanceMultiplier", 1.0D).append("spherical", + false); + + GeoNearDocumentBuilder maxDistance(@Nullable Number value) { + + if (value != null) { + target.put("maxDistance", value); + } else { + target.remove("maxDistance"); + } + return this; + } + + GeoNearDocumentBuilder minDistance(@Nullable Number value) { + + if (value != null) { + target.put("minDistance", value); + } else { + target.remove("minDistance"); + } + return this; + } + + GeoNearDocumentBuilder near(Number... coordinates) { + + target.put("near", Arrays.asList(coordinates)); + return this; + } + + GeoNearDocumentBuilder spherical(@Nullable Boolean value) { + + if (value != null) { + target.put("spherical", value); + } else { + target.remove("spherical"); + } + return this; + } + + GeoNearDocumentBuilder distanceField(@Nullable String value) { + + if (value != null) { + target.put("distanceField", value); + } else { + target.remove("distanceField"); + } + return this; + } + + GeoNearDocumentBuilder distanceMultiplier(Number value) { + + if (value != null) { + target.put("distanceMultiplier", value); + } else { + target.remove("distanceMultiplier"); + } + return this; + } + + GeoNearDocumentBuilder key(String value) { + + if (value != null) { + target.put("key", value); + } else { + target.remove("key"); + } + return this; + } + + GeoNearDocumentBuilder query(Document value) { + + if (value != null) { + target.put("query", value); + } else { + target.remove("query"); + } + return this; + } + + Document doc() { + return new Document("$geoNear", new Document(target)); + } + + } + + // TODO: we need to test this to the full extend } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java new file mode 100644 index 0000000000..b752fab793 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java @@ -0,0 +1,161 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.query.Criteria; + +/** + * Unit tests for {@link GraphLookupOperation}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +public class GraphLookupOperationUnitTests { + + @Test // DATAMONGO-1551 + public void rejectsNullFromCollection() { + assertThatIllegalArgumentException().isThrownBy(() -> GraphLookupOperation.builder().from(null)); + } + + @Test // DATAMONGO-1551 + public void shouldRenderCorrectly() { + + GraphLookupOperation graphLookupOperation = GraphLookupOperation.builder() // + .from("employees") // + .startWith("reportsTo") // + .connectFrom("reportsTo") // + .connectTo("name") // + .depthField("depth") // + .maxDepth(42) // + .as("reportingHierarchy"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(document).containsEntry("$graphLookup.depthField", "depth").containsEntry("$graphLookup.maxDepth", 42L); + } + + @Test // DATAMONGO-1551 + public void shouldRenderCriteriaCorrectly() { + + GraphLookupOperation graphLookupOperation = GraphLookupOperation.builder() // + .from("employees") // + .startWith("reportsTo") // + .connectFrom("reportsTo") // + .connectTo("name") // + .restrict(Criteria.where("key").is("value")) // + .as("reportingHierarchy"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(document).containsEntry("$graphLookup.restrictSearchWithMatch", new Document("key", "value")); + } + + @Test // DATAMONGO-1551 + public void shouldRenderArrayOfStartsWithCorrectly() { + + GraphLookupOperation graphLookupOperation = GraphLookupOperation.builder() // + .from("employees") // + .startWith("reportsTo", "boss") // + .connectFrom("reportsTo") // + .connectTo("name") // + .as("reportingHierarchy"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document) + .isEqualTo(Document.parse("{ $graphLookup : { from: \"employees\", startWith: [\"$reportsTo\", \"$boss\"], " + + "connectFromField: \"reportsTo\", connectToField: \"name\", as: \"reportingHierarchy\" } }")); + } + + @Test // DATAMONGO-1551 + public void shouldRenderMixedArrayOfStartsWithCorrectly() { + + GraphLookupOperation graphLookupOperation = GraphLookupOperation.builder() // + .from("employees") // + .startWith("reportsTo", LiteralOperators.Literal.asLiteral("$boss")) // + .connectFrom("reportsTo") // + .connectTo("name") // + .as("reportingHierarchy"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.startWith", + Arrays.asList("$reportsTo", new Document("$literal", "$boss"))); + } + + @Test // DATAMONGO-1551 + public void shouldRejectUnknownTypeInMixedArrayOfStartsWithCorrectly() { + assertThatIllegalArgumentException().isThrownBy(() -> GraphLookupOperation.builder() // + .from("employees") // + .startWith("reportsTo", new Person()) // + .connectFrom("reportsTo") // + .connectTo("name") // + .as("reportingHierarchy")); + } + + @Test // DATAMONGO-1551 + public void shouldRenderStartWithAggregationExpressions() { + + GraphLookupOperation graphLookupOperation = GraphLookupOperation.builder() // + .from("employees") // + .startWith(LiteralOperators.Literal.asLiteral("hello")) // + .connectFrom("reportsTo") // + .connectTo("name") // + .as("reportingHierarchy"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.startWith", new Document("$literal", "hello")); + } + + @Test // DATAMONGO-2096 + public void connectFromShouldUseTargetFieldInsteadOfAlias() { + 
+ AggregationOperation graphLookupOperation = Aggregation.graphLookup("user").startWith("contacts.userId") + .connectFrom("contacts.userId").connectTo("_id").depthField("numConnections").as("connections"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.startWith", "$contacts.userId"); + } + + @Test // DATAMONGO-2096 + public void connectToShouldUseTargetFieldInsteadOfAlias() { + + AggregationOperation graphLookupOperation = Aggregation.graphLookup("user").startWith("contacts.userId") + .connectFrom("userId").connectTo("connectto.field").depthField("numConnections").as("connections"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.connectToField", "connectto.field"); + } + + @Test // DATAMONGO-2096 + public void depthFieldShouldUseTargetFieldInsteadOfAlias() { + + AggregationOperation graphLookupOperation = Aggregation.graphLookup("user").startWith("contacts.userId") + .connectFrom("contacts.userId").connectTo("_id").depthField("foo.bar").as("connections"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.depthField", "foo.bar"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java index e2bcb939dd..dc6219c7f1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,198 +15,274 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.aggregation.AggregationFunctionExpressions.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; import java.util.Arrays; -import org.junit.Test; -import org.springframework.data.mongodb.core.DBObjectTestUtils; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile; +import org.springframework.data.mongodb.core.aggregation.SelectionOperators.Bottom; +import org.springframework.data.mongodb.core.query.Criteria; /** * Unit tests for {@link GroupOperation}. 
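+ * <p>
+ * As a quick reference for the assertions below: a call such as {@code Aggregation.group("a").count().as("cnt")}
+ * renders to {@code { $group : { _id : "$a", cnt : { $sum : 1 } } }}.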
- * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Gustavo de Geus + * @author Julia Lee */ -public class GroupOperationUnitTests { +class GroupOperationUnitTests { - @Test(expected = IllegalArgumentException.class) - public void rejectsNullFields() { - new GroupOperation((Fields) null); + @Test + void rejectsNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new GroupOperation((Fields) null)); } - /** - * @see DATAMONGO-759 - */ - @Test - public void groupOperationWithNoGroupIdFieldsShouldGenerateNullAsGroupId() { + @Test // DATAMONGO-759 + void groupOperationWithNoGroupIdFieldsShouldGenerateNullAsGroupId() { GroupOperation operation = new GroupOperation(Fields.from()); ExposedFields fields = operation.getFields(); - DBObject groupClause = extractDbObjectFromGroupOperation(operation); + Document groupClause = extractDocumentFromGroupOperation(operation); - assertThat(fields.exposesSingleFieldOnly(), is(true)); - assertThat(fields.exposesNoFields(), is(false)); - assertThat(groupClause.get(UNDERSCORE_ID), is(nullValue())); + assertThat(fields.exposesSingleFieldOnly()).isTrue(); + assertThat(fields.exposesNoFields()).isFalse(); + assertThat(groupClause.get(UNDERSCORE_ID)).isNull(); } - /** - * @see DATAMONGO-759 - */ - @Test - public void groupOperationWithNoGroupIdFieldsButAdditionalFieldsShouldGenerateNullAsGroupId() { + @Test // DATAMONGO-759 + void groupOperationWithNoGroupIdFieldsButAdditionalFieldsShouldGenerateNullAsGroupId() { GroupOperation operation = new GroupOperation(Fields.from()).count().as("cnt").last("foo").as("foo"); ExposedFields fields = operation.getFields(); - DBObject groupClause = extractDbObjectFromGroupOperation(operation); + Document groupClause = extractDocumentFromGroupOperation(operation); - assertThat(fields.exposesSingleFieldOnly(), is(false)); - assertThat(fields.exposesNoFields(), is(false)); - assertThat(groupClause.get(UNDERSCORE_ID), is(nullValue())); - assertThat((BasicDBObject) groupClause.get("cnt"), is(new BasicDBObject("$sum", 1))); - assertThat((BasicDBObject) groupClause.get("foo"), is(new BasicDBObject("$last", "$foo"))); + assertThat(fields.exposesSingleFieldOnly()).isFalse(); + assertThat(fields.exposesNoFields()).isFalse(); + assertThat(groupClause.get(UNDERSCORE_ID)).isNull(); + assertThat((Document) groupClause.get("cnt")).isEqualTo(new Document("$sum", 1)); + assertThat((Document) groupClause.get("foo")).isEqualTo(new Document("$last", "$foo")); } @Test - public void createsGroupOperationWithSingleField() { + void createsGroupOperationWithSingleField() { GroupOperation operation = new GroupOperation(fields("a")); - DBObject groupClause = extractDbObjectFromGroupOperation(operation); + Document groupClause = extractDocumentFromGroupOperation(operation); - assertThat(groupClause.get(UNDERSCORE_ID), is((Object) "$a")); + assertThat(groupClause).containsEntry(UNDERSCORE_ID, "$a"); } @Test - public void createsGroupOperationWithMultipleFields() { + void createsGroupOperationWithMultipleFields() { GroupOperation operation = new GroupOperation(fields("a").and("b", "c")); - DBObject groupClause = extractDbObjectFromGroupOperation(operation); - DBObject idClause = DBObjectTestUtils.getAsDBObject(groupClause, UNDERSCORE_ID); + Document groupClause = extractDocumentFromGroupOperation(operation); + Document idClause = DocumentTestUtils.getAsDocument(groupClause, UNDERSCORE_ID); - assertThat(idClause.get("a"), is((Object) "$a")); - assertThat(idClause.get("b"), is((Object) "$c")); + 
assertThat(idClause).containsEntry("a", "$a").containsEntry("b", "$c"); } @Test - public void groupFactoryMethodWithMultipleFieldsAndSumOperation() { + void groupFactoryMethodWithMultipleFieldsAndSumOperation() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .sum("e").as("e"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject eOp = DBObjectTestUtils.getAsDBObject(groupClause, "e"); - assertThat(eOp, is((DBObject) new BasicDBObject("$sum", "$e"))); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document eOp = DocumentTestUtils.getAsDocument(groupClause, "e"); + assertThat(eOp).isEqualTo(new Document("$sum", "$e")); } @Test - public void groupFactoryMethodWithMultipleFieldsAndSumOperationWithAlias() { + void groupFactoryMethodWithMultipleFieldsAndSumOperationWithAlias() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .sum("e").as("ee"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject eOp = DBObjectTestUtils.getAsDBObject(groupClause, "ee"); - assertThat(eOp, is((DBObject) new BasicDBObject("$sum", "$e"))); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document eOp = DocumentTestUtils.getAsDocument(groupClause, "ee"); + assertThat(eOp).isEqualTo(new Document("$sum", "$e")); } @Test - public void groupFactoryMethodWithMultipleFieldsAndCountOperationWithout() { + void groupFactoryMethodWithMultipleFieldsAndCountOperationWithout() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .count().as("count"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject eOp = DBObjectTestUtils.getAsDBObject(groupClause, "count"); - assertThat(eOp, is((DBObject) new BasicDBObject("$sum", 1))); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document eOp = DocumentTestUtils.getAsDocument(groupClause, "count"); + assertThat(eOp).isEqualTo(new Document("$sum", 1)); } @Test - public void groupFactoryMethodWithMultipleFieldsAndMultipleAggregateOperationsWithAlias() { + void groupFactoryMethodWithMultipleFieldsAndMultipleAggregateOperationsWithAlias() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .sum("e").as("sum") // .min("e").as("min"); // - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject sum = DBObjectTestUtils.getAsDBObject(groupClause, "sum"); - assertThat(sum, is((DBObject) new BasicDBObject("$sum", "$e"))); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document sum = DocumentTestUtils.getAsDocument(groupClause, "sum"); + assertThat(sum).isEqualTo(new Document("$sum", "$e")); - DBObject min = DBObjectTestUtils.getAsDBObject(groupClause, "min"); - assertThat(min, is((DBObject) new BasicDBObject("$min", "$e"))); + Document min = DocumentTestUtils.getAsDocument(groupClause, "min"); + assertThat(min).isEqualTo(new Document("$min", "$e")); } @Test - public void groupOperationPushWithValue() { + void groupOperationPushWithValue() { GroupOperation groupOperation = Aggregation.group("a", "b").push(1).as("x"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject push = DBObjectTestUtils.getAsDBObject(groupClause, "x"); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, 
is((DBObject) new BasicDBObject("$push", 1))); + assertThat(push).isEqualTo(new Document("$push", 1)); } @Test - public void groupOperationPushWithReference() { + void groupOperationPushWithReference() { GroupOperation groupOperation = Aggregation.group("a", "b").push("ref").as("x"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject push = DBObjectTestUtils.getAsDBObject(groupClause, "x"); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((DBObject) new BasicDBObject("$push", "$ref"))); + assertThat(push).isEqualTo(new Document("$push", "$ref")); } @Test - public void groupOperationAddToSetWithReference() { + void groupOperationAddToSetWithReference() { GroupOperation groupOperation = Aggregation.group("a", "b").addToSet("ref").as("x"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject push = DBObjectTestUtils.getAsDBObject(groupClause, "x"); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((DBObject) new BasicDBObject("$addToSet", "$ref"))); + assertThat(push).isEqualTo(new Document("$addToSet", "$ref")); } @Test - public void groupOperationAddToSetWithValue() { + void groupOperationAddToSetWithValue() { GroupOperation groupOperation = Aggregation.group("a", "b").addToSet(42).as("x"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject push = DBObjectTestUtils.getAsDBObject(groupClause, "x"); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((DBObject) new BasicDBObject("$addToSet", 42))); + assertThat(push).isEqualTo(new Document("$addToSet", 42)); } - /** - * @see DATAMONGO-979 - */ - @Test - public void shouldRenderSizeExpressionInGroup() { + @Test // DATAMONGO-979 + void shouldRenderSizeExpressionInGroup() { GroupOperation groupOperation = Aggregation // .group("username") // - .first(SIZE.of(field("tags"))) // + .first(ArrayOperators.arrayOf("tags").length()) // .as("tags_count"); - DBObject groupClause = extractDbObjectFromGroupOperation(groupOperation); - DBObject tagsCount = DBObjectTestUtils.getAsDBObject(groupClause, "tags_count"); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document tagsCount = DocumentTestUtils.getAsDocument(groupClause, "tags_count"); + + assertThat(tagsCount).containsEntry("$first", new Document("$size", "$tags")); + } + + @Test // DATAMONGO-1327 + void groupOperationStdDevSampWithValue() { + + GroupOperation groupOperation = Aggregation.group("a", "b").stdDevSamp("field").as("fieldStdDevSamp"); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document push = DocumentTestUtils.getAsDocument(groupClause, "fieldStdDevSamp"); + + assertThat(push).isEqualTo(new Document("$stdDevSamp", "$field")); + } + + @Test // DATAMONGO-1327 + void groupOperationStdDevPopWithValue() { + + GroupOperation groupOperation = Aggregation.group("a", "b").stdDevPop("field").as("fieldStdDevPop"); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document push = DocumentTestUtils.getAsDocument(groupClause, "fieldStdDevPop"); + + assertThat(push).isEqualTo(new Document("$stdDevPop", "$field")); + } + + @Test // DATAMONGO-1784 + void 
shouldRenderSumWithExpressionInGroup() { + + GroupOperation groupOperation = Aggregation // + .group("username") // + .sum(ConditionalOperators // + .when(Criteria.where("foo").is("bar")) // + .then(1) // + .otherwise(-1)) // + .as("foobar"); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document foobar = DocumentTestUtils.getAsDocument(groupClause, "foobar"); + + assertThat(foobar).containsEntry("$sum", new Document("$cond", + new Document("if", new Document("$eq", Arrays.asList("$foo", "bar"))).append("then", 1).append("else", -1))); + } + + @Test // DATAMONGO-1784 + void sumWithNullExpressionShouldThrowException() { + assertThatIllegalArgumentException() + .isThrownBy(() -> Aggregation.group("username").sum((AggregationExpression) null)); + } + + @Test // DATAMONGO-2651 + void accumulatorShouldBeAllowedOnGroupOperation() { + + GroupOperation groupOperation = Aggregation.group("id") + .accumulate( + ScriptOperators.accumulatorBuilder().init("inti").accumulate("acc").merge("merge").finalize("finalize")) + .as("accumulated-value"); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document accumulatedValue = DocumentTestUtils.getAsDocument(groupClause, "accumulated-value"); + + assertThat(accumulatedValue).containsKey("$accumulator"); + } + + @Test // GH-4139 + void groupOperationAllowsToAddFieldsComputedViaExpression() { + + GroupOperation groupOperation = Aggregation.group("id").and("playerId", + Bottom.bottom().output("playerId", "score").sortBy(Sort.by(Direction.DESC, "score"))); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + + assertThat(groupClause).containsEntry("playerId", + Document.parse("{ $bottom : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}")); + } + + @Test // GH-4473 + void groupOperationAllowsAddingFieldWithPercentileAggregationExpression() { + + GroupOperation groupOperation = Aggregation.group("id").and("scorePercentile", + Percentile.percentileOf("score").percentages(0.2)); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); - assertThat(tagsCount.get("$first"), is((Object) new BasicDBObject("$size", Arrays.asList("$tags")))); + assertThat(groupClause).containsEntry("scorePercentile", + Document.parse("{ $percentile : { input: \"$score\", method: \"approximate\", p: [0.2]}}")); } - private DBObject extractDbObjectFromGroupOperation(GroupOperation groupOperation) { - DBObject dbObject = groupOperation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject groupClause = DBObjectTestUtils.getAsDBObject(dbObject, "$group"); + private Document extractDocumentFromGroupOperation(GroupOperation groupOperation) { + Document document = groupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document groupClause = DocumentTestUtils.getAsDocument(document, "$group"); return groupClause; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java index 81533b38fc..fb70bab918 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java index d745605fd7..e668dc3ed5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java index e025973bb7..32ac758a50 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,11 @@ */ public class LineItem { - final String id; + String id; - final String caption; + String caption; - final double price; + double price; int quantity = 1; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java index 9ff31ecaab..58bae3f43d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,17 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import org.junit.Test; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import java.util.List; -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.query.Criteria; /** * Unit tests for {@link LookupOperation}. @@ -33,134 +36,184 @@ */ public class LookupOperationUnitTests { - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void rejectsNullForFrom() { - new LookupOperation(null, Fields.field("localField"), Fields.field("foreignField"), Fields.field("as")); + assertThatIllegalArgumentException().isThrownBy( + () -> new LookupOperation(null, Fields.field("localField"), Fields.field("foreignField"), Fields.field("as"))); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void rejectsNullLocalFieldField() { - new LookupOperation(Fields.field("from"), null, Fields.field("foreignField"), Fields.field("as")); + assertThatIllegalArgumentException().isThrownBy( + () -> new LookupOperation(Fields.field("from"), null, Fields.field("foreignField"), Fields.field("as"))); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void rejectsNullForeignField() { - new LookupOperation(Fields.field("from"), Fields.field("localField"), null, Fields.field("as")); + assertThatIllegalArgumentException().isThrownBy( + () -> new LookupOperation(Fields.field("from"), Fields.field("localField"), null, Fields.field("as"))); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void rejectsNullForAs() { - new LookupOperation(Fields.field("from"), Fields.field("localField"), Fields.field("foreignField"), null); + assertThatIllegalArgumentException().isThrownBy(() -> new LookupOperation(Fields.field("from"), + Fields.field("localField"), Fields.field("foreignField"), null)); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void lookupOperationWithValues() { LookupOperation lookupOperation = Aggregation.lookup("a", "b", "c", "d"); - DBObject lookupClause = extractDbObjectFromLookupOperation(lookupOperation); + Document lookupClause = extractDocumentFromLookupOperation(lookupOperation); - assertThat(lookupClause, - isBsonObject().containing("from", "a") // - .containing("localField", "b") // - .containing("foreignField", "c") // - .containing("as", "d")); + org.assertj.core.api.Assertions.assertThat(lookupClause).containsEntry("from", "a") // + 
.containsEntry("localField", "b") // + .containsEntry("foreignField", "c") // + .containsEntry("as", "d"); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void lookupOperationExposesAsField() { LookupOperation lookupOperation = Aggregation.lookup("a", "b", "c", "d"); - assertThat(lookupOperation.getFields().exposesNoFields(), is(false)); - assertThat(lookupOperation.getFields().exposesSingleFieldOnly(), is(true)); - assertThat(lookupOperation.getFields().getField("d"), notNullValue()); + assertThat(lookupOperation.getFields().exposesNoFields()).isFalse(); + assertThat(lookupOperation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(lookupOperation.getFields().getField("d")).isNotNull(); } - private DBObject extractDbObjectFromLookupOperation(LookupOperation lookupOperation) { + private Document extractDocumentFromLookupOperation(LookupOperation lookupOperation) { - DBObject dbObject = lookupOperation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject lookupClause = DBObjectTestUtils.getAsDBObject(dbObject, "$lookup"); + Document document = lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document lookupClause = DocumentTestUtils.getAsDocument(document, "$lookup"); return lookupClause; } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void builderRejectsNullFromField() { - LookupOperation.newLookup().from(null); + assertThatIllegalArgumentException().isThrownBy(() -> LookupOperation.newLookup().from(null)); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void builderRejectsNullLocalField() { - LookupOperation.newLookup().from("a").localField(null); + assertThatIllegalArgumentException().isThrownBy(() -> LookupOperation.newLookup().from("a").localField(null)); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void builderRejectsNullForeignField() { - LookupOperation.newLookup().from("a").localField("b").foreignField(null); + assertThatIllegalArgumentException() + .isThrownBy(() -> LookupOperation.newLookup().from("a").localField("b").foreignField(null)); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void builderRejectsNullAsField() { - LookupOperation.newLookup().from("a").localField("b").foreignField("c").as(null); + assertThatIllegalArgumentException() + .isThrownBy(() -> LookupOperation.newLookup().from("a").localField("b").foreignField("c").as(null)); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void lookupBuilderBuildsCorrectClause() { LookupOperation lookupOperation = LookupOperation.newLookup().from("a").localField("b").foreignField("c").as("d"); - DBObject lookupClause = extractDbObjectFromLookupOperation(lookupOperation); + Document lookupClause = extractDocumentFromLookupOperation(lookupOperation); - assertThat(lookupClause, - isBsonObject().containing("from", "a") // - .containing("localField", "b") // - .containing("foreignField", "c") // - .containing("as", "d")); + org.assertj.core.api.Assertions.assertThat(lookupClause).containsEntry("from", "a") // + .containsEntry("localField", "b") // + .containsEntry("foreignField", "c") // + .containsEntry("as", "d"); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void lookupBuilderExposesFields() { LookupOperation lookupOperation = 
LookupOperation.newLookup().from("a").localField("b").foreignField("c").as("d"); - assertThat(lookupOperation.getFields().exposesNoFields(), is(false)); - assertThat(lookupOperation.getFields().exposesSingleFieldOnly(), is(true)); - assertThat(lookupOperation.getFields().getField("d"), notNullValue()); + assertThat(lookupOperation.getFields().exposesNoFields()).isFalse(); + assertThat(lookupOperation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(lookupOperation.getFields().getField("d")).isNotNull(); + } + + @Test // GH-3322 + void buildsLookupWithLetAndPipeline() { + + LookupOperation lookupOperation = LookupOperation.newLookup().from("warehouses") + .let(newVariable("order_item").forField("item"), newVariable("order_qty").forField("ordered")) + .pipeline(match(ctx -> new Document("$expr", + new Document("$and", List.of(Document.parse("{ $eq: [ \"$stock_item\", \"$$order_item\" ] }"), + Document.parse("{ $gte: [ \"$instock\", \"$$order_qty\" ] }")))))) + .as("stockdata"); + + assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(""" + { $lookup: { + from: "warehouses", + let: { order_item: "$item", order_qty: "$ordered" }, + pipeline: [ + { $match: + { $expr: + { $and: + [ + { $eq: [ "$stock_item", "$$order_item" ] }, + { $gte: [ "$instock", "$$order_qty" ] } + ] + } + } + } + ], + as: "stockdata" + }} + """); + } + + @Test // GH-3322 + void buildsLookupWithJustPipeline() { + + LookupOperation lookupOperation = LookupOperation.newLookup().from("holidays") // + .pipeline( // + match(Criteria.where("year").is(2018)), // + project().andExclude("_id").and(ctx -> new Document("name", "$name").append("date", "$date")).as("date"), // + Aggregation.replaceRoot("date") // + ).as("holidays"); + + assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(""" + { $lookup: + { + from: "holidays", + pipeline: [ + { $match: { year: 2018 } }, + { $project: { _id: 0, date: { name: "$name", date: "$date" } } }, + { $replaceRoot: { newRoot: "$date" } } + ], + as: "holidays" + } + }} + """); + } + + @Test // GH-3322 + void buildsLookupWithLocalAndForeignFieldAsWellAsLetAndPipeline() { + + LookupOperation lookupOperation = Aggregation.lookup().from("restaurants") // + .localField("restaurant_name") + .foreignField("name") + .let(newVariable("orders_drink").forField("drink")) // + .pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages"))))) + .as("matches"); + + assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(""" + { $lookup: { + from: "restaurants", + localField: "restaurant_name", + foreignField: "name", + let: { orders_drink: "$drink" }, + pipeline: [{ + $match: { + $expr: { $in: [ "$$orders_drink", "$beverages" ] } + } + }], + as: "matches" + }} + """); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java new file mode 100644 index 0000000000..ec3decb7a8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java @@ -0,0 +1,23 @@ +package org.springframework.data.mongodb.core.aggregation; + + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link MatchOperation}. 
+ *
+ * @author Divya Srivastava
+ */
+class MatchOperationUnitTests {
+
+	@Test // GH-3790
+	void matchShouldRenderCorrectly() {
+
+		MatchOperation operation = Aggregation.match(ArithmeticOperators.valueOf("quiz").stdDevPop());
+		assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT))
+				.isEqualTo("{ $match: { \"$stdDevPop\" : \"$quiz\" } } ");
+	}
+
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MergeOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MergeOperationUnitTests.java
new file mode 100644
index 0000000000..311496ba8d
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MergeOperationUnitTests.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.aggregation;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+import static org.springframework.data.mongodb.core.aggregation.MergeOperation.*;
+import static org.springframework.data.mongodb.core.aggregation.MergeOperation.WhenDocumentsMatch.*;
+
+import java.util.Arrays;
+
+import org.bson.Document;
+import org.junit.jupiter.api.Test;
+import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum;
+import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
+import org.springframework.data.mongodb.core.convert.QueryMapper;
+import org.springframework.data.mongodb.core.mapping.Field;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+import org.springframework.lang.Nullable;
+
+/**
+ * Unit tests for {@link MergeOperation}.
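+ * <p>
+ * For orientation: the simplest form asserted below, {@code mergeInto("target-collection")}, renders to
+ * {@code { $merge : "target-collection" }}; the builder variants add {@code into}, {@code on},
+ * {@code whenMatched} and {@code whenNotMatched} options.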
+ * + * @author Christoph Strobl + */ +class MergeOperationUnitTests { + + private static final String OUT_COLLECTION = "target-collection"; + private static final String OUT_DB = "target-db"; + + private static final Document OUT = new Document("db", OUT_DB).append("coll", OUT_COLLECTION); + + @Test // DATAMONGO-2363 + void justCollection() { + + assertThat(mergeInto(OUT_COLLECTION).toDocument(DEFAULT_CONTEXT)).isEqualTo(new Document("$merge", OUT_COLLECTION)); + } + + @Test // DATAMONGO-2363 + void collectionInDatabase() { + + assertThat(merge().intoCollection(OUT_COLLECTION).inDatabase("target-db").build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", new Document("into", OUT))); + } + + @Test // DATAMONGO-2363 + void singleOn() { + + assertThat(merge().intoCollection(OUT_COLLECTION).on("id-field").build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", new Document("into", OUT_COLLECTION).append("on", "id-field"))); + } + + @Test // DATAMONGO-2363 + void multipleOn() { + + assertThat(merge().intoCollection(OUT_COLLECTION).on("field-1", "field-2").build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", + new Document("into", OUT_COLLECTION).append("on", Arrays.asList("field-1", "field-2")))); + } + + @Test // DATAMONGO-2363 + void collectionAndSimpleArgs() { + + assertThat(merge().intoCollection(OUT_COLLECTION).on("_id").whenMatched(replaceDocument()) + .whenNotMatched(WhenDocumentsDontMatch.insertNewDocument()).build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", new Document("into", OUT_COLLECTION).append("on", "_id") + .append("whenMatched", "replace").append("whenNotMatched", "insert"))); + } + + @Test // DATAMONGO-2363 + void whenMatchedWithAggregation() { + + String expected = "{ \"$merge\" : {\"into\": \"" + OUT_COLLECTION + "\", \"whenMatched\": [" + + "{ \"$addFields\" : {" // + + "\"thumbsup\": { \"$sum\":[ \"$thumbsup\", \"$$new.thumbsup\" ] }," + + "\"thumbsdown\": { \"$sum\": [ \"$thumbsdown\", \"$$new.thumbsdown\" ] } } } ]" // + + "} }"; + + Aggregation update = Aggregation + .newAggregation(AddFieldsOperation.addField("thumbsup").withValueOf(Sum.sumOf("thumbsup").and("$$new.thumbsup")) + .addField("thumbsdown").withValueOf(Sum.sumOf("thumbsdown").and("$$new.thumbsdown")).build()); + + assertThat( + merge().intoCollection(OUT_COLLECTION).whenDocumentsMatchApply(update).build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(Document.parse(expected)); + } + + @Test // DATAMONGO-2363 + void mapsFieldNames() { + + assertThat(merge().intoCollection("newrestaurants").on("date", "postCode").build() + .toDocument(contextFor(Restaurant.class))).isEqualTo( + Document.parse("{ \"$merge\": { \"into\": \"newrestaurants\", \"on\": [ \"date\", \"post_code\" ] } }")); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference(); + } + + static class Restaurant { + + @Field("post_code") String postCode; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java index 29744bd304..52b51ad251 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ObjectOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ObjectOperatorsUnitTests.java new file mode 100644 index 0000000000..05d5a2d758 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ObjectOperatorsUnitTests.java @@ -0,0 +1,157 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.ObjectOperators.MergeObjects; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * Unit tests for {@link ObjectOperators}. 
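+ * <p>
+ * Example from the assertions below: {@code ObjectOperators.valueOf("kettricken").mergeWithValuesOf("verity")}
+ * renders to {@code { $mergeObjects : [ "$kettricken", "$verity" ] }}.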
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Royal Assassin - Robin Hobb + */ +public class ObjectOperatorsUnitTests { + + static final String EXPRESSION_STRING = "{ \"$king-in-waiting\" : \"verity\" }"; + static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2053 + public void mergeSingleFieldReference() { + + assertThat(ObjectOperators.valueOf("kettricken").merge().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: \"$kettricken\" } ")); + } + + @Test // DATAMONGO-2053 + public void mergeSingleExpression() { + + assertThat(ObjectOperators.valueOf(EXPRESSION).merge().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2053 + public void mergeEmpty() { + + assertThat(MergeObjects.merge().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: [] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeMuliFieldReference() { + + assertThat( + ObjectOperators.valueOf("kettricken").mergeWithValuesOf("verity").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: [ \"$kettricken\", \"$verity\" ] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeMixed() { + + assertThat(ObjectOperators.valueOf("kettricken").mergeWithValuesOf(EXPRESSION).mergeWithValuesOf("verity") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + Document.parse("{ $mergeObjects: [ \"$kettricken\", " + EXPRESSION_STRING + ", \"$verity\" ] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeWithSystemVariable() { + + assertThat( + ObjectOperators.valueOf(EXPRESSION).mergeWith(SystemVariable.ROOT).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: [ " + EXPRESSION_STRING + ", \"$$ROOT\" ] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeMany() { + + assertThat(ObjectOperators.valueOf("kettricken").mergeWithValuesOf(EXPRESSION) + .mergeWith(new Document("fitz", "chivalry")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse( + "{ $mergeObjects: [ \"$kettricken\", " + EXPRESSION_STRING + ", { \"fitz\" : \"chivalry\" } ] } ")); + } + + @Test // DATAMONGO-2052 + public void toArrayWithFieldReference() { + + assertThat(ObjectOperators.valueOf("verity").toArray().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $objectToArray : \"$verity\" }")); + } + + @Test // DATAMONGO-2052 + public void toArrayWithExpression() { + + assertThat(ObjectOperators.valueOf(EXPRESSION).toArray().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $objectToArray : " + EXPRESSION_STRING + " }")); + } + + @Test // GH-4139 + public void getField() { + + assertThat(ObjectOperators.valueOf("batman").getField("robin").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $getField : { field : \"robin\", input : \"$batman\" }}")); + } + + @Test // GH-4464 + public void getFieldOfCurrent() { + + assertThat(ObjectOperators.valueOf(Aggregation.CURRENT).getField("robin").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $getField : { field : \"robin\", input : \"$$CURRENT\" }}")); + } + + @Test // GH-4464 + public void getFieldOfMappedKey() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext()); + 
converter.afterPropertiesSet(); + + assertThat(ObjectOperators.getValueOf("population").toDocument(new RelaxedTypeBasedAggregationOperationContext(ZipInfo.class, converter.getMappingContext(), new QueryMapper(converter)))) + .isEqualTo(Document.parse("{ $getField : { field : \"pop\", input : \"$$CURRENT\" } }")); + } + + @Test // GH-4139 + public void setField() { + + assertThat(ObjectOperators.valueOf("batman").setField("friend").toValue("robin").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $setField : { field : \"friend\", value : \"robin\", input : \"$batman\" }}")); + } + + @Test // GH-4464 + public void setFieldOfMappedKey() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext()); + converter.afterPropertiesSet(); + + assertThat(ObjectOperators.setValueTo("population", "robin").toDocument(new RelaxedTypeBasedAggregationOperationContext(ZipInfo.class, converter.getMappingContext(), new QueryMapper(converter)))) + .isEqualTo(Document.parse("{ $setField : { field : \"pop\", value : \"robin\", input : \"$$CURRENT\" }}")); + } + + @Test // GH-4139 + public void removeField() { + + assertThat(ObjectOperators.valueOf("batman").removeField("joker").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $setField : { field : \"joker\", value : \"$$REMOVE\", input : \"$batman\" }}")); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java index 5534c3bb18..1174507e1c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java new file mode 100644 index 0000000000..f8812448b3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
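As a quick illustration of the ObjectOperators API exercised by the ObjectOperatorsUnitTests added above: the operator factories compose directly into a $project stage. This is a sketch only, not part of the patch; the field names ("defaults", "overrides", "attributes") and aliases are hypothetical, and it uses only calls that appear in the tests.

    import org.bson.Document;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import org.springframework.data.mongodb.core.aggregation.ObjectOperators;

    class ObjectOperatorsSketch {

        Document render() {

            // Expected rendering, following the tests above:
            // { $project : { merged : { $mergeObjects : [ "$defaults", "$overrides" ] },
            //                pairs  : { $objectToArray : "$attributes" } } }
            return Aggregation.project()
                    .and(ObjectOperators.valueOf("defaults").mergeWithValuesOf("overrides")).as("merged")
                    .and(ObjectOperators.valueOf("attributes").toArray()).as("pairs")
                    .toDocument(Aggregation.DEFAULT_CONTEXT);
        }
    }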
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link OutOperation}. + * + * @author Nikolay Bogdanov + * @author Christoph Strobl + * @author Mark Paluch + */ +public class OutOperationUnitTest { + + @Test // DATAMONGO-1418 + public void shouldCheckNPEInCreation() { + assertThatIllegalArgumentException().isThrownBy(() -> new OutOperation(null)); + } + + @Test // DATAMONGO-2259 + public void shouldUsePreMongoDB42FormatWhenOnlyCollectionIsPresent() { + assertThat(out("out-col").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$out", "out-col")); + } + + @Test // DATAMONGO-2259 + public void shouldUseMongoDB42ExtendedFormatWhenAdditionalParametersPresent() { + + assertThat(out("out-col").insertDocuments().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$out", new Document("to", "out-col").append("mode", "insertDocuments"))); + } + + @Test // DATAMONGO-2259 + public void shouldRenderExtendedFormatWithJsonStringKey() { + + assertThat(out("out-col").insertDocuments() // + .in("database-2") // + .uniqueKey("{ 'field-1' : 1, 'field-2' : 1}") // + .toDocument(Aggregation.DEFAULT_CONTEXT)) // + .containsEntry("$out.to", "out-col") // + .containsEntry("$out.mode", "insertDocuments") // + .containsEntry("$out.db", "database-2") // + .containsEntry("$out.uniqueKey", new Document("field-1", 1).append("field-2", 1)); + } + + @Test // DATAMONGO-2259 + public void shouldRenderExtendedFormatWithSingleFieldKey() { + + assertThat(out("out-col").insertDocuments().in("database-2") // + .uniqueKey("field-1").toDocument(Aggregation.DEFAULT_CONTEXT)) // + .containsEntry("$out.to", "out-col") // + .containsEntry("$out.mode", "insertDocuments") // + .containsEntry("$out.db", "database-2") // + .containsEntry("$out.uniqueKey", new Document("field-1", 1)); + } + + @Test // DATAMONGO-2259 + public void shouldRenderExtendedFormatWithMultiFieldKey() { + + assertThat(out("out-col").insertDocuments().in("database-2") // + .uniqueKeyOf(Arrays.asList("field-1", "field-2")) // + .toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$out.to", "out-col") // + .containsEntry("$out.mode", "insertDocuments") // + .containsEntry("$out.db", "database-2") // + .containsEntry("$out.uniqueKey", new Document("field-1", 1).append("field-2", 1)); + } + + @Test // DATAMONGO-2259 + public void shouldErrorOnExtendedFormatWithoutMode() { + + assertThatThrownBy(() -> out("out-col").in("database-2").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isInstanceOf(IllegalStateException.class); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java index 7b457789c5..59d374bd73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
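The OutOperationUnitTest above pins down two renderings of $out: the plain pre-MongoDB-4.2 form when only a collection is given, and the extended format (to/mode/db/uniqueKey) once additional parameters are present, which fails with an IllegalStateException unless a mode is set. A minimal sketch of both builder paths, using only methods shown in the tests; the collection and database names are hypothetical and this is not part of the patch.

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.out;

    import org.bson.Document;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;

    class OutOperationSketch {

        Document simpleForm() {
            // Renders the legacy form: { "$out" : "archive" }
            return out("archive").toDocument(Aggregation.DEFAULT_CONTEXT);
        }

        Document extendedForm() {
            // Renders the extended form, per the tests above roughly:
            // { "$out" : { "to" : "archive", "mode" : "insertDocuments",
            //              "db" : "reporting", "uniqueKey" : { "ref" : 1 } } }
            return out("archive").insertDocuments().in("reporting").uniqueKey("ref")
                    .toDocument(Aggregation.DEFAULT_CONTEXT);
        }
    }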
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java old mode 100644 new mode 100755 index 2f826ff938..b904807d65 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,230 +15,263 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.aggregation.AggregationFunctionExpressions.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import static org.springframework.data.mongodb.util.DBObjectUtils.*; +import static org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; import java.util.Arrays; import java.util.List; -import org.junit.Test; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.Subtract; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.PropertyExpression; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.Variable; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Slice; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.data.mongodb.core.aggregation.StringOperators.Concat; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import 
org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** * Unit tests for {@link ProjectionOperation}. - * + * * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl + * @author Divya Srivastava + * @author Mark Paluch */ public class ProjectionOperationUnitTests { - static final String MOD = "$mod"; - static final String ADD = "$add"; - static final String SUBTRACT = "$subtract"; - static final String MULTIPLY = "$multiply"; - static final String DIVIDE = "$divide"; - static final String PROJECT = "$project"; + private static final String MOD = "$mod"; + private static final String ADD = "$add"; + private static final String SUBTRACT = "$subtract"; + private static final String MULTIPLY = "$multiply"; + private static final String DIVIDE = "$divide"; + private static final String PROJECT = "$project"; - @Test(expected = IllegalArgumentException.class) - public void rejectsNullFields() { - new ProjectionOperation(null); + @Test // DATAMONGO-586 + void rejectsNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation((Fields) null)); } - @Test - public void declaresBackReferenceCorrectly() { + @Test // DATAMONGO-586 + void declaresBackReferenceCorrectly() { ProjectionOperation operation = new ProjectionOperation(); operation = operation.and("prop").previousOperation(); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - assertThat(projectClause.get("prop"), is((Object) Fields.UNDERSCORE_ID_REF)); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + assertThat(projectClause.get("prop")).isEqualTo(Fields.UNDERSCORE_ID_REF); } - @Test - public void alwaysUsesExplicitReference() { + @Test // DATAMONGO-586 + void alwaysUsesExplicitReference() { ProjectionOperation operation = new ProjectionOperation(Fields.fields("foo").and("bar", "foobar")); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - assertThat(projectClause.get("foo"), is((Object) 1)); - assertThat(projectClause.get("bar"), is((Object) "$foobar")); + assertThat(projectClause.get("foo")).isEqualTo(1); + assertThat(projectClause.get("bar")).isEqualTo("$foobar"); } - @Test - public void aliasesSimpleFieldProjection() { + @Test // DATAMONGO-586 + void aliasesSimpleFieldProjection() { ProjectionOperation operation = new ProjectionOperation(); - DBObject dbObject = operation.and("foo").as("bar").toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); + Document document = operation.and("foo").as("bar").toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - assertThat(projectClause.get("bar"), is((Object) "$foo")); + assertThat(projectClause.get("bar")).isEqualTo("$foo"); } - @Test - public void aliasesArithmeticProjection() { 
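The rest of this file follows one mechanical migration pattern: com.mongodb.DBObject/BasicDBObject gives way to org.bson.Document, toDBObject(..) to toDocument(..), Hamcrest matchers to AssertJ's fluent assertions, and each test's ticket reference moves from a Javadoc @see tag to a trailing comment on @Test while the methods drop public visibility. A condensed, standalone sketch of the new style (illustration only, not part of the patch):

    import static org.assertj.core.api.Assertions.assertThat;

    import org.bson.Document;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

    class AssertionStyleSketch {

        void newStyle() {

            ProjectionOperation operation = Aggregation.project().and("foo").as("bar");

            // toDocument(..) replaces toDBObject(..) and yields an org.bson.Document
            Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT);

            // assertThat(..).isEqualTo(..) replaces Hamcrest's assertThat(x, is((Object) y))
            assertThat(document.get("$project", Document.class).get("bar")).isEqualTo("$foo");
        }
    }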
+ @Test // DATAMONGO-586 + void aliasesArithmeticProjection() { ProjectionOperation operation = new ProjectionOperation(); - DBObject dbObject = operation.and("foo").plus(41).as("bar").toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - DBObject barClause = DBObjectTestUtils.getAsDBObject(projectClause, "bar"); + Document document = operation.and("foo").plus(41).as("bar").toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + Document barClause = DocumentTestUtils.getAsDocument(projectClause, "bar"); List addClause = (List) barClause.get("$add"); - assertThat(addClause, hasSize(2)); - assertThat(addClause.get(0), is((Object) "$foo")); - assertThat(addClause.get(1), is((Object) 41)); + assertThat(addClause).hasSize(2); + assertThat(addClause.get(0)).isEqualTo("$foo"); + assertThat(addClause.get(1)).isEqualTo(41); } - public void arithmenticProjectionOperationWithoutAlias() { + @Test // DATAMONGO-586 + void arithmeticProjectionOperationWithoutAlias() { String fieldName = "a"; ProjectionOperationBuilder operation = new ProjectionOperation().and(fieldName).plus(1); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - DBObject oper = exctractOperation(fieldName, projectClause); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + Document oper = extractOperation(fieldName, projectClause); - assertThat(oper.containsField(ADD), is(true)); - assertThat(oper.get(ADD), is((Object) Arrays. asList("$a", 1))); + assertThat(oper.containsKey(ADD)).isTrue(); + assertThat(oper.get(ADD)).isEqualTo(Arrays. asList("$a", 1)); } - @Test - public void arithmenticProjectionOperationPlus() { + @Test // DATAMONGO-586 + void arithmeticProjectionOperationPlus() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).plus(1).as(fieldAlias); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - DBObject oper = exctractOperation(fieldAlias, projectClause); - assertThat(oper.containsField(ADD), is(true)); - assertThat(oper.get(ADD), is((Object) Arrays. asList("$a", 1))); + Document oper = extractOperation(fieldAlias, projectClause); + assertThat(oper.containsKey(ADD)).isTrue(); + assertThat(oper.get(ADD)).isEqualTo(Arrays. 
asList("$a", 1)); } - @Test - public void arithmenticProjectionOperationMinus() { + @Test // DATAMONGO-586 + void arithmeticProjectionOperationMinus() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).minus(1).as(fieldAlias); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - DBObject oper = exctractOperation(fieldAlias, projectClause); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + Document oper = extractOperation(fieldAlias, projectClause); - assertThat(oper.containsField(SUBTRACT), is(true)); - assertThat(oper.get(SUBTRACT), is((Object) Arrays. asList("$a", 1))); + assertThat(oper.containsKey(SUBTRACT)).isTrue(); + assertThat(oper.get(SUBTRACT)).isEqualTo(Arrays. asList("$a", 1)); } - @Test - public void arithmenticProjectionOperationMultiply() { + @Test // DATAMONGO-586 + void arithmeticProjectionOperationMultiply() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).multiply(1).as(fieldAlias); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - DBObject oper = exctractOperation(fieldAlias, projectClause); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + Document oper = extractOperation(fieldAlias, projectClause); - assertThat(oper.containsField(MULTIPLY), is(true)); - assertThat(oper.get(MULTIPLY), is((Object) Arrays. asList("$a", 1))); + assertThat(oper.containsKey(MULTIPLY)).isTrue(); + assertThat(oper.get(MULTIPLY)).isEqualTo(Arrays. asList("$a", 1)); } - @Test - public void arithmenticProjectionOperationDivide() { + @Test // DATAMONGO-586 + void arithmeticProjectionOperationDivide() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).divide(1).as(fieldAlias); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - DBObject oper = exctractOperation(fieldAlias, projectClause); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + Document oper = extractOperation(fieldAlias, projectClause); - assertThat(oper.containsField(DIVIDE), is(true)); - assertThat(oper.get(DIVIDE), is((Object) Arrays. asList("$a", 1))); + assertThat(oper.containsKey(DIVIDE)).isTrue(); + assertThat(oper.get(DIVIDE)).isEqualTo(Arrays. 
asList("$a", 1)); } - @Test(expected = IllegalArgumentException.class) - public void arithmenticProjectionOperationDivideByZeroException() { - - new ProjectionOperation().and("a").divide(0); + @Test // DATAMONGO-586 + void arithmeticProjectionOperationDivideByZeroException() { + assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").divide(0)); } - @Test - public void arithmenticProjectionOperationMod() { + @Test // DATAMONGO-586 + void arithmeticProjectionOperationMod() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).mod(3).as(fieldAlias); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - DBObject oper = exctractOperation(fieldAlias, projectClause); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + Document oper = extractOperation(fieldAlias, projectClause); - assertThat(oper.containsField(MOD), is(true)); - assertThat(oper.get(MOD), is((Object) Arrays. asList("$a", 3))); + assertThat(oper.containsKey(MOD)).isTrue(); + assertThat(oper.get(MOD)).isEqualTo(Arrays. asList("$a", 3)); } - /** - * @see DATAMONGO-758 - */ - @Test(expected = IllegalArgumentException.class) - public void excludeShouldThrowExceptionForFieldsOtherThanUnderscoreId() { + @Test // DATAMONGO-758, DATAMONGO-1893 + void excludeShouldAllowExclusionOfFieldsOtherThanUnderscoreId/* since MongoDB 3.4 */() { - new ProjectionOperation().andExclude("foo"); + ProjectionOperation projectionOp = new ProjectionOperation().andExclude("foo"); + Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectionOp.inheritsFields()).isTrue(); + assertThat((Integer) projectClause.get("foo")).isEqualTo(0); } - /** - * @see DATAMONGO-758 - */ - @Test - public void excludeShouldAllowExclusionOfUnderscoreId() { + @Test // DATAMONGO-1893 + void includeShouldNotInheritFields() { + + ProjectionOperation projectionOp = new ProjectionOperation().andInclude("foo"); + + assertThat(projectionOp.inheritsFields()).isFalse(); + } + + @Test // DATAMONGO-758 + void excludeShouldAllowExclusionOfUnderscoreId() { ProjectionOperation projectionOp = new ProjectionOperation().andExclude(Fields.UNDERSCORE_ID); - DBObject dbObject = projectionOp.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - assertThat((Integer) projectClause.get(Fields.UNDERSCORE_ID), is(0)); + Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + assertThat((Integer) projectClause.get(Fields.UNDERSCORE_ID)).isEqualTo(0); } - /** - * @see DATAMONGO-757 - */ - @Test - public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { + @Test // DATAMONGO-1906 + void rendersConditionalProjectionCorrectly() { - ProjectionOperation operation = Aggregation.project("foo").and("foobar").as("bar").andInclude("inc1", "inc2") - .andExclude("_id"); + TypedAggregation aggregation = Aggregation.newAggregation(Book.class, + Aggregation.project("title") + .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle").equalToValue("")) + .then("$$REMOVE").otherwiseValueOf("author.middle")) + 
.as("author.middle")); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); + Document document = aggregation.toDocument("books", Aggregation.DEFAULT_CONTEXT); - assertThat(projectClause.get("foo"), is((Object) 1)); // implicit - assertThat(projectClause.get("bar"), is((Object) "$foobar")); // explicit - assertThat(projectClause.get("inc1"), is((Object) 1)); // include shortcut - assertThat(projectClause.get("inc2"), is((Object) 1)); - assertThat(projectClause.get("_id"), is((Object) 0)); + assertThat(document).isEqualTo(Document.parse( + "{\"aggregate\" : \"books\", \"pipeline\" : [{\"$project\" : {\"title\" : 1, \"author.middle\" : {\"$cond\" : {\"if\" : {\"$eq\" : [\"$author.middle\", \"\"]}, \"then\" : \"$$REMOVE\",\"else\" : \"$author.middle\"} }}}]}")); } - @Test(expected = IllegalArgumentException.class) - public void arithmenticProjectionOperationModByZeroException() { + @Test // DATAMONGO-757 + void usesImplictAndExplicitFieldAliasAndIncludeExclude() { + + ProjectionOperation operation = Aggregation.project("foo").and("foobar").as("bar").andInclude("inc1", "inc2") + .andExclude("_id"); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - new ProjectionOperation().and("a").mod(0); + assertThat(projectClause.get("foo")).isEqualTo(1); // implicit + assertThat(projectClause.get("bar")).isEqualTo("$foobar"); // explicit + assertThat(projectClause.get("inc1")).isEqualTo(1); // include shortcut + assertThat(projectClause.get("inc2")).isEqualTo(1); + assertThat(projectClause.get("_id")).isEqualTo(0); } - /** - * @see DATAMONGO-769 - */ @Test - public void allowArithmeticOperationsWithFieldReferences() { + void arithmeticProjectionOperationModByZeroException() { + assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").mod(0)); + } + + @Test // DATAMONGO-769 + void allowArithmeticOperationsWithFieldReferences() { ProjectionOperation operation = Aggregation.project() // .and("foo").plus("bar").as("fooPlusBar") // @@ -247,42 +280,35 @@ public void allowArithmeticOperationsWithFieldReferences() { .and("foo").divide("bar").as("fooDivideBar") // .and("foo").mod("bar").as("fooModBar"); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - DBObject projectClause = DBObjectTestUtils.getAsDBObject(dbObject, PROJECT); - - assertThat((BasicDBObject) projectClause.get("fooPlusBar"), // - is(new BasicDBObject("$add", dbList("$foo", "$bar")))); - assertThat((BasicDBObject) projectClause.get("fooMinusBar"), // - is(new BasicDBObject("$subtract", dbList("$foo", "$bar")))); - assertThat((BasicDBObject) projectClause.get("fooMultiplyBar"), // - is(new BasicDBObject("$multiply", dbList("$foo", "$bar")))); - assertThat((BasicDBObject) projectClause.get("fooDivideBar"), // - is(new BasicDBObject("$divide", dbList("$foo", "$bar")))); - assertThat((BasicDBObject) projectClause.get("fooModBar"), // - is(new BasicDBObject("$mod", dbList("$foo", "$bar")))); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause.get("fooPlusBar")). // + isEqualTo(new Document("$add", Arrays.asList("$foo", "$bar"))); + assertThat(projectClause.get("fooMinusBar")). 
// + isEqualTo(new Document("$subtract", Arrays.asList("$foo", "$bar"))); + assertThat(projectClause.get("fooMultiplyBar")). // + isEqualTo(new Document("$multiply", Arrays.asList("$foo", "$bar"))); + assertThat(projectClause.get("fooDivideBar")). // + isEqualTo(new Document("$divide", Arrays.asList("$foo", "$bar"))); + assertThat(projectClause.get("fooModBar")). // + isEqualTo(new Document("$mod", Arrays.asList("$foo", "$bar"))); } - /** - * @see DATAMONGO-774 - */ - @Test - public void projectionExpressions() { + @Test // DATAMONGO-774 + void projectionExpressions() { ProjectionOperation operation = Aggregation.project() // .andExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // .and("foo").as("bar"); // - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - assertThat( - dbObject.toString(), - is("{ \"$project\" : { \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"bar\" : \"$foo\"}}")); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(document).isEqualTo(Document.parse( + "{ \"$project\" : { \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"bar\" : \"$foo\"}}")); } - /** - * @see DATAMONGO-975 - */ - @Test - public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { + @Test // DATAMONGO-975 + void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { ProjectionOperation operation = Aggregation.project() // .and("date").extractHour().as("hour") // @@ -297,28 +323,25 @@ public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorr .and("date").extractDayOfWeek().as("dayOfWeek") // ; - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject, is(notNullValue())); - - DBObject projected = exctractOperation("$project", dbObject); - - assertThat(projected.get("hour"), is((Object) new BasicDBObject("$hour", Arrays.asList("$date")))); - assertThat(projected.get("min"), is((Object) new BasicDBObject("$minute", Arrays.asList("$date")))); - assertThat(projected.get("second"), is((Object) new BasicDBObject("$second", Arrays.asList("$date")))); - assertThat(projected.get("millis"), is((Object) new BasicDBObject("$millisecond", Arrays.asList("$date")))); - assertThat(projected.get("year"), is((Object) new BasicDBObject("$year", Arrays.asList("$date")))); - assertThat(projected.get("month"), is((Object) new BasicDBObject("$month", Arrays.asList("$date")))); - assertThat(projected.get("week"), is((Object) new BasicDBObject("$week", Arrays.asList("$date")))); - assertThat(projected.get("dayOfYear"), is((Object) new BasicDBObject("$dayOfYear", Arrays.asList("$date")))); - assertThat(projected.get("dayOfMonth"), is((Object) new BasicDBObject("$dayOfMonth", Arrays.asList("$date")))); - assertThat(projected.get("dayOfWeek"), is((Object) new BasicDBObject("$dayOfWeek", Arrays.asList("$date")))); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(document).isNotNull(); + + Document projected = extractOperation("$project", document); + + assertThat(projected.get("hour")).isEqualTo(new Document("$hour", Arrays.asList("$date"))); + assertThat(projected.get("min")).isEqualTo(new Document("$minute", Arrays.asList("$date"))); + assertThat(projected.get("second")).isEqualTo(new Document("$second", Arrays.asList("$date"))); + 
assertThat(projected.get("millis")).isEqualTo(new Document("$millisecond", Arrays.asList("$date"))); + assertThat(projected.get("year")).isEqualTo(new Document("$year", Arrays.asList("$date"))); + assertThat(projected.get("month")).isEqualTo(new Document("$month", Arrays.asList("$date"))); + assertThat(projected.get("week")).isEqualTo(new Document("$week", Arrays.asList("$date"))); + assertThat(projected.get("dayOfYear")).isEqualTo(new Document("$dayOfYear", Arrays.asList("$date"))); + assertThat(projected.get("dayOfMonth")).isEqualTo(new Document("$dayOfMonth", Arrays.asList("$date"))); + assertThat(projected.get("dayOfWeek")).isEqualTo(new Document("$dayOfWeek", Arrays.asList("$date"))); } - /** - * @see DATAMONGO-975 - */ - @Test - public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { + @Test // DATAMONGO-975 + void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project() // .andExpression("date + 86400000") // @@ -326,21 +349,16 @@ public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorre .as("dayOfYearPlus1Day") // ; - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject, is(notNullValue())); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(document).isNotNull(); - DBObject projected = exctractOperation("$project", dbObject); - assertThat( - projected.get("dayOfYearPlus1Day"), - is((Object) new BasicDBObject("$dayOfYear", Arrays.asList(new BasicDBObject("$add", Arrays. asList( - "$date", 86400000)))))); + Document projected = extractOperation("$project", document); + assertThat(projected.get("dayOfYearPlus1Day")).isEqualTo( + new Document("$dayOfYear", Arrays.asList(new Document("$add", Arrays. 
asList("$date", 86400000))))); } - /** - * @see DATAMONGO-979 - */ - @Test - public void shouldRenderSizeExpressionInProjection() { + @Test // DATAMONGO-979 + void shouldRenderSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // @@ -348,30 +366,2056 @@ public void shouldRenderSizeExpressionInProjection() { .size()// .as("tags_count"); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - DBObject projected = exctractOperation("$project", dbObject); - assertThat(projected.get("tags_count"), is((Object) new BasicDBObject("$size", Arrays.asList("$tags")))); + Document projected = extractOperation("$project", document); + assertThat(projected.get("tags_count")).isEqualTo(new Document("$size", Arrays.asList("$tags"))); } - /** - * @see DATAMONGO-979 - */ - @Test - public void shouldRenderGenericSizeExpressionInProjection() { + @Test // DATAMONGO-979 + void shouldRenderGenericSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // - .and(SIZE.of(field("tags"))) // + .and(ArrayOperators.arrayOf("tags").length()) // .as("tags_count"); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + Document projected = extractOperation("$project", document); + assertThat(projected.get("tags_count")).isEqualTo(new Document("$size", "$tags")); + } + + @Test // DATAMONGO-1457 + void shouldRenderSliceCorrectly() throws Exception { + + ProjectionOperation operation = Aggregation.project().and("field").slice(10).as("renamed"); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projected = extractOperation("$project", document); + + assertThat(projected.get("renamed")).isEqualTo(new Document("$slice", Arrays. asList("$field", 10))); + } + + @Test // DATAMONGO-1457 + void shouldRenderSliceWithPositionCorrectly() throws Exception { + + ProjectionOperation operation = Aggregation.project().and("field").slice(10, 5).as("renamed"); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projected = extractOperation("$project", document); + + assertThat(projected.get("renamed")).isEqualTo(new Document("$slice", Arrays. 
asList("$field", 5, 10))); + } + + @Test // DATAMONGO-784 + void shouldRenderCmpCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").cmp(10).as("cmp10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.cmp10.$cmp.[0]", "$field") + .containsEntry("$project.cmp10.$cmp.[1]", 10); + } + + @Test // DATAMONGO-784 + void shouldRenderEqCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").eq(10).as("eq10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.eq10.$eq.[0]", "$field") + .containsEntry("$project.eq10.$eq.[1]", 10); + } + + @Test // DATAMONGO-784 + void shouldRenderGtCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").gt(10).as("gt10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.gt10.$gt.[0]", "$field") + .containsEntry("$project.gt10.$gt.[1]", 10); + } + + @Test // DATAMONGO-784 + void shouldRenderGteCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").gte(10).as("gte10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.gte10.$gte.[0]", "$field") + .containsEntry("$project.gte10.$gte.[1]", 10); + } + + @Test // DATAMONGO-784 + void shouldRenderLtCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").lt(10).as("lt10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.lt10.$lt.[0]", "$field") + .containsEntry("$project.lt10.$lt.[1]", 10); + } + + @Test // DATAMONGO-784 + void shouldRenderLteCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").lte(10).as("lte10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.lte10.$lte.[0]", "$field") + .containsEntry("$project.lte10.$lte.[1]", 10); + } + + @Test // DATAMONGO-784 + void shouldRenderNeCorrectly() { + + ProjectionOperation operation = Aggregation.project().and("field").ne(10).as("ne10"); + + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$project.ne10.$ne.[0]", "$field") + .containsEntry("$project.ne10.$ne.[1]", 10); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetEquals() { + + Document agg = project("A", "B").and("A").equalsArrays("B").as("sameElements") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, sameElements: { $setEquals: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetEqualsAggregationExpresssion() { + + Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isEqualTo("B")).as("sameElements") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, sameElements: { $setEquals: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetIntersection() { + + Document agg = project("A", "B").and("A").intersectsArrays("B").as("commonToBoth") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { A: 1, B: 1, commonToBoth: { $setIntersection: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetIntersectionAggregationExpresssion() { + + Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").intersects("B")).as("commonToBoth") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + 
assertThat(agg).isEqualTo( + Document.parse("{ $project: { A: 1, B: 1, commonToBoth: { $setIntersection: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetUnion() { + + Document agg = project("A", "B").and("A").unionArrays("B").as("allValues").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, allValues: { $setUnion: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetUnionAggregationExpresssion() { + + Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").union("B")).as("allValues") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, allValues: { $setUnion: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetDifference() { + + Document agg = project("A", "B").and("B").differenceToArray("A").as("inBOnly") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, inBOnly: { $setDifference: [ \"$B\", \"$A\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetDifferenceAggregationExpresssion() { + + Document agg = project("A", "B").and(SetOperators.arrayAsSet("B").differenceTo("A")).as("inBOnly") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, inBOnly: { $setDifference: [ \"$B\", \"$A\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetIsSubset() { + + Document agg = project("A", "B").and("A").subsetOfArray("B").as("aIsSubsetOfB") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, aIsSubsetOfB: { $setIsSubset: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSetIsSubsetAggregationExpresssion() { + + Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isSubsetOf("B")).as("aIsSubsetOfB") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { A: 1, B: 1, aIsSubsetOfB: { $setIsSubset: [ \"$A\", \"$B\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAnyElementTrue() { + + Document agg = project("responses").and("responses").anyElementInArrayTrue().as("isAnyTrue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { responses: 1, isAnyTrue: { $anyElementTrue: [ \"$responses\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAnyElementTrueAggregationExpresssion() { + + Document agg = project("responses").and(SetOperators.arrayAsSet("responses").anyElementTrue()).as("isAnyTrue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { responses: 1, isAnyTrue: { $anyElementTrue: [ \"$responses\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAllElementsTrue() { + + Document agg = project("responses").and("responses").allElementsInArrayTrue().as("isAllTrue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { responses: 1, isAllTrue: { $allElementsTrue: [ \"$responses\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAllElementsTrueAggregationExpresssion() { + + Document agg = project("responses").and(SetOperators.arrayAsSet("responses").allElementsTrue()).as("isAllTrue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + 
.isEqualTo(Document.parse("{ $project: { responses: 1, isAllTrue: { $allElementsTrue: [ \"$responses\" ] }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAbs() { + + Document agg = project().and("anyNumber").absoluteValue().as("absoluteValue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { absoluteValue : { $abs: \"$anyNumber\" }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAbsAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).abs()).as("delta") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { delta: { $abs: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAddAggregationExpresssion() { + + Document agg = project().and(ArithmeticOperators.valueOf("price").add("fee")).as("total") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse(" { $project: { total: { $add: [ \"$price\", \"$fee\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderCeil() { + + Document agg = project().and("anyNumber").ceil().as("ceilValue").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { ceilValue : { $ceil: \"$anyNumber\" }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderCeilAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).ceil()).as("delta") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { delta: { $ceil: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDivide() { + + Document agg = project().and("value").divide(ArithmeticOperators.valueOf("start").subtract("end")).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { result: { $divide: [ \"$value\", { $subtract: [ \"$start\", \"$end\" ] }] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDivideAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf("anyNumber").divideBy(ArithmeticOperators.valueOf("start").subtract("end"))) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { result: { $divide: [ \"$anyNumber\", { $subtract: [ \"$start\", \"$end\" ] }] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderExp() { + + Document agg = project().and("value").exp().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $exp: \"$value\" } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderExpAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).exp()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $exp: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderFloor() { + + Document agg = project().and("value").floor().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $floor: \"$value\" } }}")); + } + + @Test // DATAMONGO-1536 + void 
shouldRenderFloorAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).floor()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $floor: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLn() { + + Document agg = project().and("value").ln().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $ln: \"$value\"} }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLnAggregationExpresssion() { + + Document agg = project().and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).ln()) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $ln: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLog() { + + Document agg = project().and("value").log(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $log: [ \"$value\", 2] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLogAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).log(2)).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $log: [ { $subtract: [ \"$start\", \"$end\" ] }, 2] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLog10() { + + Document agg = project().and("value").log10().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $log10: \"$value\" } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLog10AggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).log10()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $log10: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMod() { + + Document agg = project().and("value").mod(ArithmeticOperators.valueOf("start").subtract("end")).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { result: { $mod: [\"$value\", { $subtract: [ \"$start\", \"$end\" ] }] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderModAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).mod(2)).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $mod: [{ $subtract: [ \"$start\", \"$end\" ] }, 2] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMultiply() { + + Document agg = project().and("value").multiply(ArithmeticOperators.valueOf("start").subtract("end")).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { result: { $multiply: [\"$value\", { $subtract: [ \"$start\", \"$end\" ] }] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMultiplyAggregationExpresssion() { + + Document agg = 
project().and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")) + .multiplyBy(2).multiplyBy("refToAnotherNumber")).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { result: { $multiply: [{ $subtract: [ \"$start\", \"$end\" ] }, 2, \"$refToAnotherNumber\"] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderPow() { + + Document agg = project().and("value").pow(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $pow: [\"$value\", 2] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderPowAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).pow(2)).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $pow: [{ $subtract: [ \"$start\", \"$end\" ] }, 2] } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSqrt() { + + Document agg = project().and("value").sqrt().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $sqrt: \"$value\" } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSqrtAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).sqrt()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $sqrt: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSubtract() { + + Document agg = project().and("numericField").minus(ArrayOperators.arrayOf("someArray").length()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { result: { $subtract: [ \"$numericField\", { $size : \"$someArray\"}] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSubtractAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf("numericField").subtract(ArrayOperators.arrayOf("someArray").length())) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { result: { $subtract: [ \"$numericField\", { $size : \"$someArray\"}] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderTrunc() { + + Document agg = project().and("value").trunc().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result : { $trunc: \"$value\" }}}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderTruncAggregationExpresssion() { + + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).trunc()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $trunc: { $subtract: [ \"$start\", \"$end\" ] } } }}")); + } + + @Test // DATAMONGO-1536 + void shouldRenderConcat() { + + Document agg = project().and("item").concat(" - ", field("description")).as("itemDescription") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { itemDescription: { $concat: [ \"$item\", \" - \", \"$description\" ] } } }")); - DBObject projected = exctractOperation("$project", dbObject); - 
assertThat(projected.get("tags_count"), is((Object) new BasicDBObject("$size", Arrays.asList("$tags")))); } - private static DBObject exctractOperation(String field, DBObject fromProjectClause) { - return (DBObject) fromProjectClause.get(field); + @Test // DATAMONGO-1536 + void shouldRenderConcatAggregationExpression() { + + Document agg = project().and(StringOperators.valueOf("item").concat(" - ").concatValueOf("description")) + .as("itemDescription").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { itemDescription: { $concat: [ \"$item\", \" - \", \"$description\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSubstr() { + + Document agg = project().and("quarter").substring(0, 2).as("yearSubstring").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { yearSubstring: { $substr: [ \"$quarter\", 0, 2 ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSubstrAggregationExpression() { + + Document agg = project().and(StringOperators.valueOf("quarter").substring(0, 2)).as("yearSubstring") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { yearSubstring: { $substr: [ \"$quarter\", 0, 2 ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderToLower() { + + Document agg = project().and("item").toLower().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { item: { $toLower: \"$item\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderToLowerAggregationExpression() { + + Document agg = project().and(StringOperators.valueOf("item").toLower()).as("item") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { item: { $toLower: \"$item\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderToUpper() { + + Document agg = project().and("item").toUpper().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { item: { $toUpper: \"$item\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderToUpperAggregationExpression() { + + Document agg = project().and(StringOperators.valueOf("item").toUpper()).as("item") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { item: { $toUpper: \"$item\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderStrCaseCmp() { + + Document agg = project().and("quarter").strCaseCmp("13q4").as("comparisonResult") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { comparisonResult: { $strcasecmp: [ \"$quarter\", \"13q4\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderStrCaseCmpAggregationExpression() { + + Document agg = project().and(StringOperators.valueOf("quarter").strCaseCmp("13q4")).as("comparisonResult") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { comparisonResult: { $strcasecmp: [ \"$quarter\", \"13q4\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderArrayElementAt() { + + Document agg = project().and("favorites").arrayElementAt(0).as("first").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { first: { $arrayElemAt: [ \"$favorites\", 0 ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderArrayElementAtAggregationExpression() { + + 
Document agg = project().and(ArrayOperators.arrayOf("favorites").elementAt(0)).as("first") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { first: { $arrayElemAt: [ \"$favorites\", 0 ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderConcatArrays() { + + Document agg = project().and("instock").concatArrays("ordered").as("items").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { items: { $concatArrays: [ \"$instock\", \"$ordered\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderConcatArraysAggregationExpression() { + + Document agg = project().and(ArrayOperators.arrayOf("instock").concat("ordered")).as("items") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { items: { $concatArrays: [ \"$instock\", \"$ordered\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderIsArray() { + + Document agg = project().and("instock").isArray().as("isAnArray").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { isAnArray: { $isArray: \"$instock\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderIsArrayAggregationExpression() { + + Document agg = project().and(ArrayOperators.arrayOf("instock").isArray()).as("isAnArray") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { isAnArray: { $isArray: \"$instock\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSizeAggregationExpression() { + + Document agg = project().and(ArrayOperators.arrayOf("instock").length()).as("arraySize") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { arraySize: { $size: \"$instock\" } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSliceAggregationExpression() { + + Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().itemCount(3)).as("threeFavorites") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { threeFavorites: { $slice: [ \"$favorites\", 3 ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSliceWithPositionAggregationExpression() { + + Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().offset(2).itemCount(3)) + .as("threeFavorites").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { threeFavorites: { $slice: [ \"$favorites\", 2, 3 ] } } }")); + } + + @Test // DATAMONGO-4857 + void shouldRenderSliceWithExpressions() { + + Document agg = project().and(ArrayOperators.arrayOf("favorites").slice() + .offset(Subtract.valueOf(ArrayOperators.Size.lengthOfArray("myArray")).subtract(1)) + .itemCount(ArithmeticOperators.rand())).as("threeFavorites").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { threeFavorites: { $slice: [ \"$favorites\", { \"$subtract\": [ {\"$size\": \"$myArray\"}, 1]}, { $rand : {} } ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLiteral() { + + Document agg = project().and("$1").asLiteral().as("literalOnly").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { literalOnly: { $literal: \"$1\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLiteralAggregationExpression() { + + Document agg = 
project().and(LiteralOperators.valueOf("$1").asLiteral()).as("literalOnly") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { literalOnly: { $literal: \"$1\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDayOfYearAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").dayOfYear()).as("dayOfYear") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { dayOfYear: { $dayOfYear: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDayOfYearAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfYear()).as("dayOfYear") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { dayOfYear: { $dayOfYear: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderTimeZoneFromField() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.ofField("tz")).dayOfYear()) + .as("dayOfYear").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { dayOfYear: { $dayOfYear: { \"date\" : \"$date\", \"timezone\" : \"$tz\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderTimeZoneFromExpression() { + + Document agg = project() + .and(DateOperators.dateOf("date") + .withTimezone(Timezone.ofExpression(LiteralOperators.valueOf("America/Chicago").asLiteral())).dayOfYear()) + .as("dayOfYear").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { dayOfYear: { $dayOfYear: { \"date\" : \"$date\", \"timezone\" : { $literal: \"America/Chicago\"} } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDayOfMonthAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").dayOfMonth()).as("day") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { day: { $dayOfMonth: \"$date\" }} }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfMonth()).as("day") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { day: { $dayOfMonth: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDayOfWeekAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").dayOfWeek()).as("dayOfWeek") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { dayOfWeek: { $dayOfWeek: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfWeek()).as("dayOfWeek") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { dayOfWeek: { $dayOfWeek: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderYearAggregationExpression() { + + Document agg = 
project().and(DateOperators.dateOf("date").year()).as("year") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { year: { $year: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderYearAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).year()) + .as("year").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { year: { $year: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMonthAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").month()).as("month") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { month: { $month: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderMonthAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).month()) + .as("month").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { month: { $month: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderWeekAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").week()).as("week") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { week: { $week: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderWeekAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).week()) + .as("week").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { week: { $week: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderHourAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").hour()).as("hour") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { hour: { $hour: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderHourAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).hour()) + .as("hour").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { hour: { $hour: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMinuteAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").minute()).as("minute") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { minute: { $minute: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderMinuteAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).minute()).as("minute") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { minute: { $minute: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // 
DATAMONGO-1536 + void shouldRenderSecondAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").second()).as("second") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { second: { $second: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderSecondAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).second()).as("second") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { second: { $second: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMillisecondAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").millisecond()).as("msec") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { msec: { $millisecond: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderMillisecondAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).millisecond()).as("msec") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { msec: { $millisecond: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDateToString() { + + Document agg = project().and("date").dateAsFormattedString("%H:%M:%S:%L").as("time") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\" } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithoutFormatOption() { + + Document agg = project().and("date").dateAsFormattedString().as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { time: { $dateToString: { date: \"$date\" } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderDateToStringAggregationExpression() { + + Document agg = project().and(DateOperators.dateOf("date").toString("%H:%M:%S:%L")).as("time") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\" } } } }")); + } + + @Test // DATAMONGO-1834, DATAMONGO-2047 + void shouldRenderDateToStringAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toString("%H:%M:%S:%L")) + .as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\", \"timezone\" : \"America/Chicago\" } } } } } }")); + + Document removedTimezone = project().and(DateOperators.dateOf("date") + .withTimezone(Timezone.valueOf("America/Chicago")).toString("%H:%M:%S:%L").withTimezone(Timezone.none())) + .as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(removedTimezone).isEqualTo( + Document.parse("{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\" } } } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithOnNull() { + + Document agg = project() + 
.and(DateOperators.dateOf("date").toStringWithDefaultFormat().onNullReturnValueOf("fallback-field")).as("time") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { time: { $dateToString: { date: \"$date\", \"onNull\" : \"$fallback-field\" } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithOnNullExpression() { + + Document agg = project() + .and(DateOperators.dateOf("date").toStringWithDefaultFormat() + .onNullReturnValueOf(LiteralOperators.valueOf("my-literal").asLiteral())) + .as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { time: { $dateToString: { date: \"$date\", \"onNull\" : { \"$literal\": \"my-literal\"} } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithOnNullAndTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").toStringWithDefaultFormat() + .onNullReturnValueOf("fallback-field").withTimezone(Timezone.ofField("foo"))).as("time") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { time: { $dateToString: { date: \"$date\", \"onNull\" : \"$fallback-field\", \"timezone\": \"$foo\" } } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSumAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { quizTotal: { $sum: \"$quizzes\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderSumWithMultipleArgsAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("final").sum().and("midterm")).as("examTotal") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { examTotal: { $sum: [ \"$final\", \"$midterm\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAvgAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("quizzes").avg()).as("quizAvg") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { quizAvg: { $avg: \"$quizzes\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderAvgWithMultipleArgsAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("final").avg().and("midterm")).as("examAvg") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { examAvg: { $avg: [ \"$final\", \"$midterm\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMaxAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("quizzes").max()).as("quizMax") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { quizMax: { $max: \"$quizzes\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMaxWithMultipleArgsAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("final").max().and("midterm")).as("examMax") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { examMax: { $max: [ \"$final\", \"$midterm\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMinAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("quizzes").min()).as("quizMin") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + 
assertThat(agg).isEqualTo(Document.parse("{ $project: { quizMin: { $min: \"$quizzes\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderMinWithMultipleArgsAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("final").min().and("midterm")).as("examMin") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { examMin: { $min: [ \"$final\", \"$midterm\" ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderStdDevPopAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevPop()).as("stdDev") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { stdDev: { $stdDevPop: \"$scores\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderStdDevSampAggregationExpression() { + + Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevSamp()).as("stdDev") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { stdDev: { $stdDevSamp: \"$scores\"} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderCmpAggregationExpression() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").compareToValue(250)).as("cmp250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { cmp250: { $cmp: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderEqAggregationExpression() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(250)).as("eq250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { eq250: { $eq: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-2513 + void shouldRenderEqAggregationExpressionWithListComparison() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(Arrays.asList(250))).as("eq250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { eq250: { $eq: [\"$qty\", [250]]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderGtAggregationExpression() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanValue(250)).as("gt250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { gt250: { $gt: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderGteAggregationExpression() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanEqualToValue(250)).as("gte250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { gte250: { $gte: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLtAggregationExpression() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanValue(250)).as("lt250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { lt250: { $lt: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLteAggregationExpression() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanEqualToValue(250)).as("lte250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { lte250: { $lte: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderNeAggregationExpression() { + + Document agg = 
project().and(ComparisonOperators.valueOf("qty").notEqualToValue(250)).as("ne250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { ne250: { $ne: [\"$qty\", 250]} } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLogicAndAggregationExpression() { + + Document agg = project() + .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(100)) + .and(ComparisonOperators.valueOf("qty").lessThanValue(250))) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { result: { $and: [ { $gt: [ \"$qty\", 100 ] }, { $lt: [ \"$qty\", 250 ] } ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderLogicOrAggregationExpression() { + + Document agg = project() + .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(250)) + .or(ComparisonOperators.valueOf("qty").lessThanValue(200))) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { result: { $or: [ { $gt: [ \"$qty\", 250 ] }, { $lt: [ \"$qty\", 200 ] } ] } } }")); + } + + @Test // DATAMONGO-1536 + void shouldRenderNotAggregationExpression() { + + Document agg = project().and(BooleanOperators.not(ComparisonOperators.valueOf("qty").greaterThanValue(250))) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { result: { $not: [ { $gt: [ \"$qty\", 250 ] } ] } } }")); + } + + @Test // DATAMONGO-1540 + void shouldRenderMapAggregationExpression() { + + Document agg = Aggregation.project() + .and(VariableOperators.mapItemsOf("quizzes").as("grade").andApply(ArithmeticOperators.valueOf("grade").add(2))) + .as("adjustedGrades").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project:{ adjustedGrades:{ $map: { input: \"$quizzes\", as: \"grade\",in: { $add: [ \"$$grade\", 2 ] }}}}}")); + } + + @Test // DATAMONGO-1540 + void shouldRenderMapAggregationExpressionOnExpression() { + + Document agg = Aggregation.project() + .and(VariableOperators.mapItemsOf(ArrayOperators.arrayOf("foo").length()).as("grade") + .andApply(ArithmeticOperators.valueOf("grade").add(2))) + .as("adjustedGrades").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project:{ adjustedGrades:{ $map: { input: { $size : \"$foo\"}, as: \"grade\",in: { $add: [ \"$$grade\", 2 ] }}}}}")); + } + + @Test // DATAMONGO-861, DATAMONGO-1542 + void shouldRenderIfNullConditionAggregationExpression() { + + Document agg = project().and( + ConditionalOperators.ifNull(ArrayOperators.arrayOf("array").elementAt(1)).then("a more sophisticated value")) + .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { result: { $ifNull: [ { $arrayElemAt: [\"$array\", 1] }, \"a more sophisticated value\" ] } } }")); + } + + @Test // DATAMONGO-1542 + void shouldRenderIfNullValueAggregationExpression() { + + Document agg = project() + .and(ConditionalOperators.ifNull("field").then(ArrayOperators.arrayOf("array").elementAt(1))).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project: { result: { $ifNull: [ \"$field\", { $arrayElemAt: [\"$array\", 1] } ] } } }")); + } + + @Test // DATAMONGO-861, DATAMONGO-1542 + void fieldReplacementIfNullShouldRenderCorrectly() { + + Document agg = 
project().and(ConditionalOperators.ifNull("optional").thenValueOf("$never-null")).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { result: { $ifNull: [ \"$optional\", \"$never-null\" ] } } }")); + } + + @Test // DATAMONGO-1538 + void shouldRenderLetExpressionCorrectly() { + + Document agg = Aggregation.project() + .and(VariableOperators + .define(newVariable("total").forExpression(ArithmeticOperators.valueOf("price").add("tax")), + newVariable("discounted") + .forExpression(ConditionalOperators.Cond.when("applyDiscount").then(0.9D).otherwise(1.0D))) + .andApply(ArithmeticOperators.valueOf("total").multiplyBy("discounted"))) // + .as("finalTotal").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project:{ \"finalTotal\" : { \"$let\": {" + // + "\"vars\": {" + // + "\"total\": { \"$add\": [ \"$price\", \"$tax\" ] }," + // + "\"discounted\": { \"$cond\": { \"if\": \"$applyDiscount\", \"then\": 0.9, \"else\": 1.0 } }" + // + "}," + // + "\"in\": { \"$multiply\": [ \"$$total\", \"$$discounted\" ] }" + // + "}}}}")); + } + + @Test // DATAMONGO-1538 + void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { + + ExpressionVariable var1 = newVariable("total").forExpression(ArithmeticOperators.valueOf("price").add("tax")); + + ExpressionVariable var2 = newVariable("discounted") + .forExpression(ConditionalOperators.Cond.when("applyDiscount").then(0.9D).otherwise(1.0D)); + + Document agg = Aggregation.project().and("foo") + .let(Arrays.asList(var1, var2), ArithmeticOperators.valueOf("total").multiplyBy("discounted")).as("finalTotal") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project:{ \"finalTotal\" : { \"$let\": {" + // + "\"vars\": {" + // + "\"total\": { \"$add\": [ \"$price\", \"$tax\" ] }," + // + "\"discounted\": { \"$cond\": { \"if\": \"$applyDiscount\", \"then\": 0.9, \"else\": 1.0 } }" + // + "}," + // + "\"in\": { \"$multiply\": [ \"$$total\", \"$$discounted\" ] }" + // + "}}}}")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIndexOfBytesCorrectly() { + + Document agg = project().and(StringOperators.valueOf("item").indexOf("foo")).as("byteLocation") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project: { byteLocation: { $indexOfBytes: [ \"$item\", \"foo\" ] } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIndexOfBytesWithRangeCorrectly() { + + Document agg = project() + .and(StringOperators.valueOf("item").indexOf("foo") + .within(Range.from(Bound.inclusive(5L)).to(Bound.exclusive(9L)))) + .as("byteLocation").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).containsEntry("$project.byteLocation.$indexOfBytes.[2]", 5L) + .containsEntry("$project.byteLocation.$indexOfBytes.[3]", 9L); + } + + @Test // DATAMONGO-1548 + void shouldRenderIndexOfCPCorrectly() { + + Document agg = project().and(StringOperators.valueOf("item").indexOfCP("foo")).as("cpLocation") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { cpLocation: { $indexOfCP: [ \"$item\", \"foo\" ] } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIndexOfCPWithRangeCorrectly() { + + Document agg = project() + .and(StringOperators.valueOf("item").indexOfCP("foo") + .within(Range.from(Bound.inclusive(5L)).to(Bound.exclusive(9L)))) + .as("cpLocation").toDocument(Aggregation.DEFAULT_CONTEXT); + + 
assertThat(agg).containsEntry("$project.cpLocation.$indexOfCP.[2]", 5L) + .containsEntry("$project.cpLocation.$indexOfCP.[3]", 9L); + } + + @Test // DATAMONGO-1548 + void shouldRenderSplitCorrectly() { + + Document agg = project().and(StringOperators.valueOf("city").split(", ")).as("city_state") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { city_state : { $split: [\"$city\", \", \"] }} }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderStrLenBytesCorrectly() { + + Document agg = project().and(StringOperators.valueOf("name").length()).as("length") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { \"length\": { $strLenBytes: \"$name\" } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderStrLenCPCorrectly() { + + Document agg = project().and(StringOperators.valueOf("name").lengthCP()).as("length") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { \"length\": { $strLenCP: \"$name\" } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderSubstrCPCorrectly() { + + Document agg = project().and(StringOperators.valueOf("quarter").substringCP(0, 2)).as("yearSubstring") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project : { yearSubstring: { $substrCP: [ \"$quarter\", 0, 2 ] } } }")); + } + + @Test // GH-3725 + void shouldRenderRegexFindCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFind("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { regex: { $regexFind: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFindAll("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { regex: { $regexFindAll: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // GH-3725 + void shouldRenderRegexMatchCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexMatch("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { regex: { $regexMatch: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIndexOfArrayCorrectly() { + + Document agg = project().and(ArrayOperators.arrayOf("items").indexOf(2)).as("index") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { index: { $indexOfArray: [ \"$items\", 2 ] } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderRangeCorrectly() { + + Document agg = project().and(ArrayOperators.RangeOperator.rangeStartingAt(0L).to("distance").withStepSize(25L)) + .as("rest_stops").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).containsEntry("$project.rest_stops.$range.[0]", 0L) + .containsEntry("$project.rest_stops.$range.[1]", "$distance") + .containsEntry("$project.rest_stops.$range.[2]", 25L); + } + + @Test // DATAMONGO-1548 + void shouldRenderReverseArrayCorrectly() { + + Document agg = project().and(ArrayOperators.arrayOf("favorites").reverse()).as("reverseFavorites") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { reverseFavorites: 
{ $reverseArray: \"$favorites\" } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderReduceWithSimpleObjectCorrectly() { + + Document agg = project() + .and(ArrayOperators.arrayOf("probabilityArr") + .reduce(ArithmeticOperators.valueOf("$$value").multiplyBy("$$this")).startingWith(1)) + .as("results").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { \"results\": { $reduce: { input: \"$probabilityArr\", initialValue: 1, in: { $multiply: [ \"$$value\", \"$$this\" ] } } } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderReduceWithComplexObjectCorrectly() { + + PropertyExpression sum = PropertyExpression.property("sum").definedAs( + ArithmeticOperators.valueOf(Variable.VALUE.referringTo("sum").getName()).add(Variable.THIS.getName())); + PropertyExpression product = PropertyExpression.property("product").definedAs(ArithmeticOperators + .valueOf(Variable.VALUE.referringTo("product").getName()).multiplyBy(Variable.THIS.getName())); + + Document agg = project() + .and(ArrayOperators.arrayOf("probabilityArr").reduce(sum, product) + .startingWith(new Document().append("sum", 5).append("product", 2))) + .as("results").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { \"results\": { $reduce: { input: \"$probabilityArr\", initialValue: { \"sum\" : 5 , \"product\" : 2} , in: { \"sum\": { $add : [\"$$value.sum\", \"$$this\"] }, \"product\": { $multiply: [ \"$$value.product\", \"$$this\" ] } } } } } }")); + } + + @Test // DATAMONGO-1843 + void shouldRenderReduceWithInputAndInExpressionsCorrectly() { + + Document expected = Document.parse( + "{ \"$project\" : { \"results\" : { \"$reduce\" : { \"input\" : { \"$slice\" : [\"$array\", 5] }, \"initialValue\" : \"\", \"in\" : { \"$concat\" : [\"$$value\", \"/\", \"$$this\"] } } } } }"); + + Reduce reduceEntryPoint = Reduce.arrayOf(Slice.sliceArrayOf("array").itemCount(5)) // + .withInitialValue("") // + .reduce(Concat.valueOf("$$value").concat("/").concatValueOf("$$this")); + + Reduce arrayEntryPoint = ArrayOperators.arrayOf(Slice.sliceArrayOf("array").itemCount(5)) // + .reduce(Concat.valueOf("$$value").concat("/").concatValueOf("$$this")) // + .startingWith(""); + + assertThat(project().and(reduceEntryPoint).as("results").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(expected); + + assertThat(project().and(arrayEntryPoint).as("results").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(expected); + } + + @Test // DATAMONGO-1548 + void shouldRenderZipCorrectly() { + + AggregationExpression elemAt0 = ArrayOperators.arrayOf("matrix").elementAt(0); + AggregationExpression elemAt1 = ArrayOperators.arrayOf("matrix").elementAt(1); + AggregationExpression elemAt2 = ArrayOperators.arrayOf("matrix").elementAt(2); + + Document agg = project().and( + ArrayOperators.arrayOf(elemAt0).zipWith(elemAt1, elemAt2).useLongestLength().defaultTo(new Object[] { 1, 2 })) + .as("transposed").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { transposed: { $zip: { inputs: [ { $arrayElemAt: [ \"$matrix\", 0 ] }, { $arrayElemAt: [ \"$matrix\", 1 ] }, { $arrayElemAt: [ \"$matrix\", 2 ] } ], useLongestLength : true, defaults: [1,2] } } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderInCorrectly() { + + Document agg = project().and(ArrayOperators.arrayOf("in_stock").containsValue("bananas")).as("has_bananas") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + 
.isEqualTo(Document.parse("{ $project : { has_bananas : { $in : [\"bananas\", \"$in_stock\" ] } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIsoDayOfWeekCorrectly() { + + Document agg = project().and(DateOperators.dateOf("birthday").isoDayOfWeek()).as("dayOfWeek") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { dayOfWeek: { $isoDayOfWeek: \"$birthday\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { + + Document agg = project() + .and(DateOperators.dateOf("birthday").withTimezone(Timezone.valueOf("America/Chicago")).isoDayOfWeek()) + .as("dayOfWeek").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { dayOfWeek: { $isoDayOfWeek: { \"date\" : \"$birthday\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIsoWeekCorrectly() { + + Document agg = project().and(DateOperators.dateOf("date").isoWeek()).as("weekNumber") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { weekNumber: { $isoWeek: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoWeekWithTimezoneCorrectly() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeek()).as("weekNumber") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { weekNumber: { $isoWeek: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderIsoWeekYearCorrectly() { + + Document agg = project().and(DateOperators.dateOf("date").isoWeekYear()).as("yearNumber") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { yearNumber: { $isoWeekYear: \"$date\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoWeekYearWithTimezoneCorrectly() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear()) + .as("yearNumber").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { yearNumber: { $isoWeekYear: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + + @Test // DATAMONGO-1548 + void shouldRenderSwitchCorrectly() { + + String expected = "$switch:\n" + // + "{\n" + // + " branches: [\n" + // + " {\n" + // + " case: { $gte : [ { $avg : \"$scores\" }, 90 ] },\n" + // + " then: \"Doing great\"\n" + // + " },\n" + // + " {\n" + // + " case: { $and : [ { $gte : [ { $avg : \"$scores\" }, 80 ] },\n" + // + " { $lt : [ { $avg : \"$scores\" }, 90 ] } ] },\n" + // + " then: \"Doing pretty well.\"\n" + // + " },\n" + // + " {\n" + // + " case: { $lt : [ { $avg : \"$scores\" }, 80 ] },\n" + // + " then: \"Needs improvement.\"\n" + // + " }\n" + // + " ],\n" + // + " default: \"No scores found.\"\n" + // + " }\n" + // + "}"; + + CaseOperator cond1 = CaseOperator + .when(ComparisonOperators.Gte.valueOf(AccumulatorOperators.Avg.avgOf("scores")).greaterThanEqualToValue(90)) + .then("Doing great"); + CaseOperator cond2 = CaseOperator + .when(BooleanOperators.And.and( + ComparisonOperators.Gte.valueOf(AccumulatorOperators.Avg.avgOf("scores")).greaterThanEqualToValue(80), + ComparisonOperators.Lt.valueOf(AccumulatorOperators.Avg.avgOf("scores")).lessThanValue(90))) + .then("Doing pretty well."); + 
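+ // Each CaseOperator pairs a boolean condition expression with the value emitted when that branch matches.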
CaseOperator cond3 = CaseOperator + .when(ComparisonOperators.Lt.valueOf(AccumulatorOperators.Avg.avgOf("scores")).lessThanValue(80)) + .then("Needs improvement."); + + Document agg = project().and(ConditionalOperators.switchCases(cond1, cond2, cond3).defaultTo("No scores found.")) + .as("summary").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { summary: {" + expected + "} } }")); + } + + @Test // DATAMONGO-1548 + void shouldTypeCorrectly() { + + Document agg = project().and(DataTypeOperators.Type.typeOf("a")).as("a").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { a: { $type: \"$a\" } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromPartsWithJustTheYear() { + + Document agg = project().and(DateOperators.dateFromParts().year(2018)).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { newDate: { $dateFromParts: { year : 2018 } } } }")); + } + + @Test // DATAMONGO-1834, DATAMONGO-2671 + void shouldRenderDateFromParts() { + + Document agg = project() + .and(DateOperators.dateFromParts().year(2018).month(3).day(23).hour(14).minute(25).second(10).millisecond(2)) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromParts: { year : 2018, month : 3, day : 23, hour : 14, minute : 25, second : 10, millisecond : 2 } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromPartsWithTimezone() { + + Document agg = project() + .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).year(2018)).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project : { newDate: { $dateFromParts: { year : 2018, timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoDateFromPartsWithJustTheYear() { + + Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018)).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { newDate: { $dateFromParts: { isoWeekYear : 2018 } } } }")); + } + + @Test // DATAMONGO-1834, DATAMONGO-2671 + void shouldRenderIsoDateFromParts() { + + Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018).isoWeek(12).isoDayOfWeek(5).hour(14) + .minute(30).second(42).millisecond(2)).as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromParts: { isoWeekYear : 2018, isoWeek : 12, isoDayOfWeek : 5, hour : 14, minute : 30, second : 42, millisecond : 2 } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoDateFromPartsWithTimezone() { + + Document agg = project() + .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear(2018)) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromParts: { isoWeekYear : 2018, timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateToParts() { + + Document agg = project().and(DateOperators.dateOf("date").toParts()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { newDate: { $dateToParts: { date : \"$date\" } } } }")); + } + + @Test 
// DATAMONGO-1834 + void shouldRenderDateToIsoParts() { + + Document agg = project().and(DateOperators.dateOf("date").toParts().iso8601()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { newDate: { $dateToParts: { date : \"$date\", iso8601 : true } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateToPartsWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toParts()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project : { newDate: { $dateToParts: { date : \"$date\", timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromString() { + + Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787")).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project : { newDate: { $dateFromString: { dateString : \"2017-02-08T12:10:40.787\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromStringWithFieldReference() { + + Document agg = project().and(DateOperators.dateOf("date").fromString()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project : { newDate: { $dateFromString: { dateString : \"$date\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromStringWithTimezone() { + + Document agg = project() + .and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withTimezone(Timezone.valueOf("America/Chicago"))) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromString: { dateString : \"2017-02-08T12:10:40.787\", timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateFromStringWithFormat() { + + Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withFormat("dd/mm/yyyy")) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromString: { dateString : \"2017-02-08T12:10:40.787\", format : \"dd/mm/yyyy\" } } } }")); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldIncludeTopLevelFieldsOfType() { + + ProjectionOperation operation = Aggregation.project(Book.class); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause) // + .hasSize(2) // + .containsEntry("title", 1) // + .containsEntry("author", 1); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldMapFieldNames() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + + Document document = Aggregation.project(BookRenamed.class) + .toDocument(new TypeBasedAggregationOperationContext(Book.class, mappingContext, new QueryMapper(converter))); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause) // + .hasSize(2) // + .containsEntry("ti_tl_e", 1) // + .containsEntry("author", 1); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldIncludeInterfaceProjectionValues() { + + ProjectionOperation operation = 
Aggregation.project(ProjectionInterface.class); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause) // + .hasSize(1) // + .containsEntry("title", 1); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldBeEmptyIfNoPropertiesFound() { + + ProjectionOperation operation = Aggregation.project(EmptyType.class); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause).isEmpty(); + } + + @Test // DATAMONGO-2312 + void simpleFieldReferenceAsArray() { + + org.bson.Document doc = Aggregation.newAggregation(project("x", "y", "someField").asArray("myArray")) + .toDocument("coll", Aggregation.DEFAULT_CONTEXT); + + assertThat(doc).isEqualTo(Document.parse( + "{\"aggregate\":\"coll\", \"pipeline\":[ { $project: { myArray: [ \"$x\", \"$y\", \"$someField\" ] } } ] }")); + } + + @Test // DATAMONGO-2312 + void mappedFieldReferenceAsArray() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + + org.bson.Document doc = Aggregation + .newAggregation(BookWithFieldAnnotation.class, project("title", "author").asArray("myArray")) + .toDocument("coll", new TypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)))); + + assertThat(doc).isEqualTo(Document + .parse("{\"aggregate\":\"coll\", \"pipeline\":[ { $project: { myArray: [ \"$ti_t_le\", \"$author\" ] } } ] }")); + } + + @Test // DATAMONGO-2312 + void arrayWithNullValue() { + + Document doc = project() // + .andArrayOf(Fields.field("field-1"), null, "value").as("myArray") // + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(doc).isEqualTo(Document.parse("{ $project: { \"myArray\" : [ \"$field-1\", null, \"value\" ] } }")); + } + + @Test // DATAMONGO-2312 + void nestedArrayField() { + + Document doc = project("_id", "value") // + .andArrayOf(Fields.field("field-1"), "plain - string", ArithmeticOperators.valueOf("field-1").sum().and(10)) + .as("myArray") // + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(doc).isEqualTo(Document.parse( + "{ $project: { \"_id\" : 1, \"value\" : 1, \"myArray\" : [ \"$field-1\", \"plain - string\", { \"$sum\" : [\"$field-1\", 10] } ] } } ] }")); + } + + @Test // DATAMONGO-2312 + void nestedMappedFieldReferenceInArrayField() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + + Document doc = project("author") // + .andArrayOf(Fields.field("title"), "plain - string", ArithmeticOperators.valueOf("title").sum().and(10)) + .as("myArray") // + .toDocument(new TypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)))); + + assertThat(doc).isEqualTo(Document.parse( + "{ $project: { \"author\" : 1, \"myArray\" : [ \"$ti_t_le\", \"plain - string\", { \"$sum\" : [\"$ti_t_le\", 10] } ] } } ] }")); + } + + @Test // GH-4473 + void shouldRenderPercentileAggregationExpression() { + + Document agg = project() + .and(ArithmeticOperators.valueOf("score").percentile(0.3, 0.9)).as("scorePercentiles") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { scorePercentiles: { $percentile: { input: \"$score\", method: \"approximate\", p: [0.3, 0.9] } }} } }")); + } 
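+ + // NOTE: $percentile and $median (below) require MongoDB 7.0 or newer; both always render + // method: "approximate", the only estimation method the server currently supports.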
+ + @Test // GH-4473 + void shouldRenderPercentileWithMultipleArgsAggregationExpression() { + + Document agg = project() + .and(ArithmeticOperators.valueOf("scoreOne").percentile(0.4).and("scoreTwo")).as("scorePercentiles") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { scorePercentiles: { $percentile: { input: [\"$scoreOne\", \"$scoreTwo\"], method: \"approximate\", p: [0.4] } }} } }")); + } + + @Test // GH-4472 + void shouldRenderMedianAggregationExpressions() { + + Document singleArgAgg = project() + .and(ArithmeticOperators.valueOf("score").median()).as("medianValue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(singleArgAgg).isEqualTo(Document.parse("{ $project: { medianValue: { $median: { input: \"$score\", method: \"approximate\" } }} } }")); + + Document multipleArgsAgg = project() + .and(ArithmeticOperators.valueOf("score").median().and("scoreTwo")).as("medianValue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(multipleArgsAgg).isEqualTo(Document.parse("{ $project: { medianValue: { $median: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\" } }} } }")); + } + + private static Document extractOperation(String field, Document fromProjectClause) { + return (Document) fromProjectClause.get(field); + } + + static class Book { + + String title; + Author author; + + public Book() {} + + public String getTitle() { + return this.title; + } + + public Author getAuthor() { + return this.author; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(Author author) { + this.author = author; + } + + public String toString() { + return "ProjectionOperationUnitTests.Book(title=" + this.getTitle() + ", author=" + this.getAuthor() + ")"; + } + } + + static class BookWithFieldAnnotation { + + @Field("ti_t_le") String title; + Author author; + + public String getTitle() { + return this.title; + } + + public Author getAuthor() { + return this.author; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(Author author) { + this.author = author; + } + + public String toString() { + return "ProjectionOperationUnitTests.BookWithFieldAnnotation(title=" + this.getTitle() + ", author=" + + this.getAuthor() + ")"; + } + } + + static class BookRenamed { + + @Field("ti_tl_e") String title; + Author author; + + public String getTitle() { + return this.title; + } + + public Author getAuthor() { + return this.author; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(Author author) { + this.author = author; + } + + public String toString() { + return "ProjectionOperationUnitTests.BookRenamed(title=" + this.getTitle() + ", author=" + this.getAuthor() + ")"; + } + } + + static class Author { + + String first; + String last; + String middle; + + public String getFirst() { + return this.first; + } + + public String getLast() { + return this.last; + } + + public String getMiddle() { + return this.middle; + } + + public void setFirst(String first) { + this.first = first; + } + + public void setLast(String last) { + this.last = last; + } + + public void setMiddle(String middle) { + this.middle = middle; + } + + public String toString() { + return "ProjectionOperationUnitTests.Author(first=" + this.getFirst() + ", last=" + this.getLast() + ", middle=" + + this.getMiddle() + ")"; + } + } + + interface ProjectionInterface { + String getTitle(); + } + + private static class EmptyType { + + } + } diff 
--git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java new file mode 100644 index 0000000000..55d6bf3b60 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java @@ -0,0 +1,184 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.AssertionsForInterfaceTypes.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.test.StepVerifier; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.geo.Box; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.TestEntities; +import org.springframework.data.mongodb.core.Venue; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +/** + * Integration test for aggregation via {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate}. 
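+ * Requires a running MongoDB instance; all involved collections are dropped before and after each test.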
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class ReactiveAggregationTests { + + private static final String INPUT_COLLECTION = "aggregation_test_collection"; + private static final String OUTPUT_COLLECTION = "aggregation_test_out"; + + @Autowired ReactiveMongoTemplate reactiveMongoTemplate; + + @Before + public void setUp() { + cleanDb(); + } + + @After + public void cleanUp() { + cleanDb(); + } + + private void cleanDb() { + + reactiveMongoTemplate.dropCollection(INPUT_COLLECTION) // + .then(reactiveMongoTemplate.dropCollection(OUTPUT_COLLECTION)) // + .then(reactiveMongoTemplate.dropCollection(Product.class)) // + .then(reactiveMongoTemplate.dropCollection(City.class)) // + .then(reactiveMongoTemplate.dropCollection(Venue.class)).as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1646 + public void expressionsInProjectionExampleShowcase() { + + Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); + reactiveMongoTemplate.insert(product).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + double shippingCosts = 1.2; + + TypedAggregation agg = newAggregation(Product.class, // + project("name", "netPrice") // + .andExpression("netPrice * 10", shippingCosts).as("salesPrice") // + ); + + reactiveMongoTemplate.aggregate(agg, Document.class).as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual).containsEntry("_id", product.id); + assertThat(actual).containsEntry("name", product.name); + assertThat(actual).containsEntry("salesPrice", product.netPrice * 10); + }).verifyComplete(); + } + + @Test // DATAMONGO-1646 + public void shouldProjectMultipleDocuments() { + + City dresden = new City("Dresden", 100); + City linz = new City("Linz", 101); + City braunschweig = new City("Braunschweig", 102); + City weinheim = new City("Weinheim", 103); + + reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim)).as(StepVerifier::create) + .expectNextCount(4).verifyComplete(); + + Aggregation agg = newAggregation( // + match(where("population").lt(103))); + + reactiveMongoTemplate.aggregate(agg, "city", City.class).collectList().as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(3).contains(dresden, linz, braunschweig); + }).verifyComplete(); + } + + @Test // DATAMONGO-1646 + public void shouldAggregateToOutCollection() { + + City dresden = new City("Dresden", 100); + City linz = new City("Linz", 101); + City braunschweig = new City("Braunschweig", 102); + City weinheim = new City("Weinheim", 103); + + reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim)).as(StepVerifier::create) + .expectNextCount(4).verifyComplete(); + + Aggregation agg = newAggregation( // + out(OUTPUT_COLLECTION)); + + reactiveMongoTemplate.aggregate(agg, "city", City.class).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + reactiveMongoTemplate.find(new Query(), City.class, OUTPUT_COLLECTION).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + } + + @Test // DATAMONGO-1986 + public void runMatchOperationCriteriaThroughQueryMapperForTypedAggregation() { + + reactiveMongoTemplate.insertAll(TestEntities.geolocation().newYork()).as(StepVerifier::create).expectNextCount(12) + .verifyComplete(); + + Aggregation aggregation = newAggregation(Venue.class, + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), 
new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + reactiveMongoTemplate.aggregate(aggregation, "newyork", Document.class).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + } + + @Test // DATAMONGO-1986 + public void runMatchOperationCriteriaThroughQueryMapperForUntypedAggregation() { + + reactiveMongoTemplate.insertAll(TestEntities.geolocation().newYork()).as(StepVerifier::create).expectNextCount(12) + .verifyComplete(); + + Aggregation aggregation = newAggregation( + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + reactiveMongoTemplate.aggregate(aggregation, "newyork", Document.class).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + } + + @Test // DATAMONGO-2356 + public void skipOutputDoesNotReadBackAggregationResults() { + + Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); + reactiveMongoTemplate.insert(product).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + double shippingCosts = 1.2; + + TypedAggregation agg = newAggregation(Product.class, // + project("name", "netPrice") // + .andExpression("netPrice * 10", shippingCosts).as("salesPrice") // + ).withOptions(AggregationOptions.builder().skipOutput().build()); + + reactiveMongoTemplate.aggregate(agg, Document.class).as(StepVerifier::create).verifyComplete(); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java new file mode 100644 index 0000000000..68afdb4bfa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java @@ -0,0 +1,142 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.query.Collation; + +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class ReactiveAggregationUnitTests { + + private static final String INPUT_COLLECTION = "collection-1"; + + private ReactiveMongoTemplate template; + private ReactiveMongoDatabaseFactory factory; + @Mock MongoClient mongoClient; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + @Mock AggregatePublisher publisher; + + @BeforeEach + void setUp() { + + factory = new SimpleReactiveMongoDatabaseFactory(mongoClient, "db"); + template = new ReactiveMongoTemplate(factory); + + when(mongoClient.getDatabase("db")).thenReturn(db); + when(db.getCollection(eq(INPUT_COLLECTION), any(Class.class))).thenReturn(collection); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(publisher); + when(publisher.allowDiskUse(any())).thenReturn(publisher); + when(publisher.collation(any())).thenReturn(publisher); + } + + @Test // DATAMONGO-1646 + void shouldHandleMissingInputCollection() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.aggregate(newAggregation(), (String) null, TagCount.class)); + } + + @Test // DATAMONGO-1646 + void shouldHandleMissingAggregationPipeline() { + assertThatIllegalArgumentException().isThrownBy(() -> template.aggregate(null, INPUT_COLLECTION, TagCount.class)); + } + + @Test // DATAMONGO-1646 + void shouldHandleMissingEntityClass() { + assertThatIllegalArgumentException().isThrownBy(() -> template.aggregate(newAggregation(), INPUT_COLLECTION, null)); + } + + @Test // DATAMONGO-1646 + void errorsOnExplainUsage() { + assertThatIllegalArgumentException().isThrownBy(() -> template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().explain(true).build()), + INPUT_COLLECTION, TagCount.class).subscribe()); + } + + @Test // DATAMONGO-1646, DATAMONGO-1311 + void appliesBatchSizeWhenPresent() { + + when(publisher.batchSize(anyInt())).thenReturn(publisher); + + AggregationOptions options = AggregationOptions.builder().cursorBatchSize(1234).build(); + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(options), + INPUT_COLLECTION, TagCount.class).subscribe(); + + verify(publisher).batchSize(1234); + } + + @Test // DATAMONGO-1646 + void 
appliesCollationCorrectlyWhenPresent() { + + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().collation(Collation.of("en_US")).build()), + INPUT_COLLECTION, TagCount.class).subscribe(); + + verify(publisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("en_US").build())); + } + + @Test // DATAMONGO-1646 + void doesNotSetCollationWhenNotPresent() { + + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().build()), + INPUT_COLLECTION, TagCount.class).subscribe(); + + verify(publisher, never()).collation(any()); + } + + @Test // DATAMONGO-1646 + void appliesDiskUsageCorrectly() { + + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().allowDiskUse(true).build()), + INPUT_COLLECTION, TagCount.class).subscribe(); + + verify(publisher).allowDiskUse(eq(true)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/RedactOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/RedactOperationUnitTests.java new file mode 100644 index 0000000000..24566089e7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/RedactOperationUnitTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link RedactOperation}. 
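+ * Verifies that {@code $redact} stages built via the constructor and via {@code RedactOperation.builder().when(…).thenPrune().otherwiseDescend()} render correctly against both the default and a type-mapped aggregation context.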
+ * + * @author Christoph Strobl + */ +class RedactOperationUnitTests { + + Document expected = new Document("$redact", + new Document("$cond", new Document("if", new Document("$eq", Arrays.asList("$level", 5))) + .append("then", "$$PRUNE").append("else", "$$DESCEND"))); + Document expectedMapped = new Document("$redact", + new Document("$cond", new Document("if", new Document("$eq", Arrays.asList("$le_v_el", 5))) + .append("then", "$$PRUNE").append("else", "$$DESCEND"))); + + @Test // DATAMONGO-931 + void errorsOnNullExpression() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new RedactOperation(null)); + } + + @Test // DATAMONGO-931 + void mapsAggregationExpressionCorrectly() { + + assertThat(new RedactOperation(ConditionalOperators.when(Criteria.where("level").is(5)) // + .then(RedactOperation.PRUNE) // + .otherwise(RedactOperation.DESCEND)).toDocument(contextFor(null))).isEqualTo(expected); + } + + @Test // DATAMONGO-931 + void mapsAggregationExpressionViaBuilderCorrectly() { + + assertThat(RedactOperation.builder().when(Criteria.where("level").is(5)) // + .thenPrune() // + .otherwiseDescend().build().toDocument(contextFor(null))).isEqualTo(expected); + } + + @Test // DATAMONGO-931 + void mapsTypedAggregationExpressionCorrectly() { + + assertThat(new RedactOperation(ConditionalOperators.when(Criteria.where("level").is(5)) // + .then(RedactOperation.PRUNE) // + .otherwise(RedactOperation.DESCEND)).toDocument(contextFor(DomainType.class))).isEqualTo(expectedMapped); + } + + static class DomainType { + + @Field("le_v_el") String level; + + public String getLevel() { + return this.level; + } + + public void setLevel(String level) { + this.level = level; + } + + public String toString() { + return "RedactOperationUnitTests.DomainType(level=" + this.getLevel() + ")"; + } + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java new file mode 100644 index 0000000000..23480dd390 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java @@ -0,0 +1,101 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperation; + +/** + * Unit tests for {@link ReplaceRootOperation}. + * + * @author Mark Paluch + */ +class ReplaceRootOperationUnitTests { + + @Test // DATAMONGO-1550 + void rejectsNullField() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((Field) null)); + } + + @Test // DATAMONGO-1550 + void rejectsNullExpression() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((AggregationExpression) null)); + } + + @Test // DATAMONGO-1550 + void shouldRenderCorrectly() { + + ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder() + .withDocument(new Document("hello", "world")); + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceRoot : { newRoot: { hello: \"world\" } } }")); + } + + @Test // DATAMONGO-1550 + void shouldRenderExpressionCorrectly() { + + ReplaceRootOperation operation = new ReplaceRootOperation(VariableOperators // + .mapItemsOf("array") // + .as("element") // + .andApply(ArithmeticOperators.valueOf("$$element").multiplyBy(10))); + + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceRoot : { newRoot : { " + + "$map : { input : \"$array\" , as : \"element\" , in : { $multiply : [ \"$$element\" , 10]} } " + "} } }")); + } + + @Test // DATAMONGO-1550 + void shouldComposeDocument() { + + ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder().withDocument() // + .andValue("value").as("key") // + .and(ArithmeticOperators.valueOf("$$element").multiplyBy(10)).as("multiply"); + + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document + .parse("{ $replaceRoot : { newRoot: { key: \"value\", multiply: { $multiply : [ \"$$element\" , 10]} } } }")); + } + + @Test // DATAMONGO-1550 + void shouldComposeSubDocument() { + + Document partialReplacement = new Document("key", "override").append("key2", "value2"); + + ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder().withDocument() // + .andValue("value").as("key") // + .andValuesOf(partialReplacement); + + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject) + .isEqualTo(Document.parse("{ $replaceRoot : { newRoot: { key: \"override\", key2: \"value2\"} } }")); + } + + @Test // DATAMONGO-1550 + void shouldNotExposeFields() { + + ReplaceRootOperation operation = new ReplaceRootOperation(Fields.field("field")); + + assertThat(operation.getFields().exposesNoFields()).isTrue(); + assertThat(operation.getFields().exposesSingleFieldOnly()).isFalse(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java new file mode 100644 index 0000000000..9d8a1502e2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2019-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ReplaceWithOperation}. + * + * @author Christoph Strobl + */ +class ReplaceWithOperationUnitTests { + + @Test // DATAMONGO-2331 + void rejectsNullField() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceWithOperation(null)); + } + + @Test // DATAMONGO-2331 + void shouldRenderValueCorrectly() { + + ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValue(new Document("hello", "world")); + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceWith : { hello: \"world\" } }")); + } + + @Test // DATAMONGO-2331 + void shouldRenderExpressionCorrectly() { + + ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValueOf(VariableOperators // + .mapItemsOf("array") // + .as("element") // + .andApply(ArithmeticOperators.valueOf("$$element").multiplyBy(10))); + + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceWith : { " + + "$map : { input : \"$array\" , as : \"element\" , in : { $multiply : [ \"$$element\" , 10]} } " + "} }")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java new file mode 100644 index 0000000000..718a084f37 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link SampleOperation}.
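+ * Verifies that {@code SampleOperation} rejects non-positive sample sizes and renders a valid {@code $sample} stage.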
+ * + * @author Gustavo de Geus + */ +public class SampleOperationUnitTests { + + private static final String SIZE = "size"; + private static final String OP = "$sample"; + + @Test // DATAMONGO-1325 + public void rejectsNegativeSample() { + assertThatIllegalArgumentException().isThrownBy(() -> new SampleOperation(-1L)); + } + + @Test // DATAMONGO-1325 + public void rejectsZeroSample() { + assertThatIllegalArgumentException().isThrownBy(() -> new SampleOperation(0L)); + } + + @Test // DATAMONGO-1325 + public void rendersSampleOperation() { + + long sampleSize = 5L; + + SampleOperation sampleOperation = Aggregation.sample(sampleSize); + + Document sampleOperationDocument = sampleOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(sampleOperationDocument.get(OP)).isNotNull(); + assertThat(sampleOperationDocument.get(OP)).isInstanceOf(Document.class); + + Document sampleSizeDocument = sampleOperationDocument.get(OP, Document.class); + assertThat(sampleSizeDocument.get(SIZE)).isEqualTo(sampleSize); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ScriptOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ScriptOperatorsUnitTests.java new file mode 100644 index 0000000000..60a8a6b013 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ScriptOperatorsUnitTests.java @@ -0,0 +1,96 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ScriptOperators.*; + +import java.util.Collections; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ScriptOperators}. 
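+ * Verifies that {@code ScriptOperators.function(…)} and {@code accumulatorBuilder()} render {@code $function} and {@code $accumulator} expressions as expected.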
+ * + * @author Christoph Strobl + */ +class ScriptOperatorsUnitTests { + + private static final String FUNCTION_BODY = "function(name) { return hex_md5(name) == \"15b0a220baa16331e8d80e15367677ad\" }"; + private static final Document EMPTY_ARGS_FUNCTION_DOCUMENT = new Document("body", FUNCTION_BODY) + .append("args", Collections.emptyList()).append("lang", "js"); + + private static final String INIT_FUNCTION = "function() { return { count: 0, sum: 0 } }"; + private static final String ACC_FUNCTION = "function(state, numCopies) { return { count: state.count + 1, sum: state.sum + numCopies } }"; + private static final String MERGE_FUNCTION = "function(state1, state2) { return { count: state1.count + state2.count, sum: state1.sum + state2.sum } }"; + private static final String FINALIZE_FUNCTION = "function(state) { return (state.sum / state.count) }"; + + private static final Document $ACCUMULATOR = Document.parse("{" + // + " $accumulator:" + // + " {" + // + " init: '" + INIT_FUNCTION + "'," + // + " accumulate: '" + ACC_FUNCTION + "'," + // + " accumulateArgs: [\"$copies\"]," + // + " merge: '" + MERGE_FUNCTION + "'," + // + " finalize: '" + FINALIZE_FUNCTION + "'," + // + " lang: \"js\"" + // + " }" + // + " }"); + + @Test // DATAMONGO-2623 + void functionWithoutArgsShouldBeRenderedCorrectly() { + + assertThat(function(FUNCTION_BODY).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo($function(EMPTY_ARGS_FUNCTION_DOCUMENT)); + } + + @Test // DATAMONGO-2623 + void functionWithArgsShouldBeRenderedCorrectly() { + + assertThat(function(FUNCTION_BODY).args("$name").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + $function(new Document(EMPTY_ARGS_FUNCTION_DOCUMENT).append("args", Collections.singletonList("$name")))); + } + + @Test // DATAMONGO-2623 + void accumulatorWithStringInput() { + + Accumulator accumulator = accumulatorBuilder() // + .init(INIT_FUNCTION) // + .accumulate(ACC_FUNCTION).accumulateArgs("$copies") // + .merge(MERGE_FUNCTION) // + .finalize(FINALIZE_FUNCTION); + + assertThat(accumulator.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo($ACCUMULATOR); + } + + @Test // DATAMONGO-2623 + void accumulatorWithFunctionInput() { + + Accumulator accumulator = accumulatorBuilder() // + .init(function(INIT_FUNCTION)) // + .accumulate(function(ACC_FUNCTION).args("$copies")) // + .merge(MERGE_FUNCTION) // + .finalize(FINALIZE_FUNCTION); + + assertThat(accumulator.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo($ACCUMULATOR); + } + + static Document $function(Document source) { + return new Document("$function", source); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SelectionOperatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SelectionOperatorUnitTests.java new file mode 100644 index 0000000000..1737d35215 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SelectionOperatorUnitTests.java @@ -0,0 +1,180 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * @author Christoph Strobl + */ +class SelectionOperatorUnitTests { + + @Test // GH-4139 + void bottomRenderedCorrectly() { + + Document document = SelectionOperators.Bottom.bottom().output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse(""" + { + $bottom: + { + output: [ "$playerId", "$score" ], + sortBy: { "score": -1 } + } + } + """)); + } + + @Test // GH-4139 + void bottomMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.Bottom.bottom().output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $bottom: + { + output: [ "$player_id", "$s_cor_e" ], + sortBy: { "s_cor_e": -1 } + } + } + """)); + } + + @Test // GH-4139 + void bottomNRenderedCorrectly() { + + Document document = SelectionOperators.Bottom.bottom(3).output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse(""" + { + $bottomN: + { + n : 3, + output: [ "$playerId", "$score" ], + sortBy: { "score": -1 } + } + } + """)); + } + + @Test // GH-4139 + void topMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.Top.top().output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $top: + { + output: [ "$player_id", "$s_cor_e" ], + sortBy: { "s_cor_e": -1 } + } + } + """)); + } + + @Test // GH-4139 + void topNRenderedCorrectly() { + + Document document = SelectionOperators.Top.top(3).output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, 
"score")).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse(""" + { + $topN: + { + n : 3, + output: [ "$playerId", "$score" ], + sortBy: { "score": -1 } + } + } + """)); + } + + @Test // GH-4139 + void firstNMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.First.first(3).of("score").toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $firstN: + { + n: 3, + input: "$s_cor_e" + } + } + """)); + } + + @Test // GH-4139 + void lastNMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.Last.last(3).of("score").toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $lastN: + { + n: 3, + input: "$s_cor_e" + } + } + """)); + } + + static class Player { + + @Field("player_id") String playerId; + + @Field("s_cor_e") Integer score; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java new file mode 100644 index 0000000000..093d4af7a0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java @@ -0,0 +1,153 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link SetOperation}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +class SetOperationUnitTests { + + @Test // DATAMONGO-2331 + void raisesErrorOnNullField() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new SetOperation(null, "value")); + } + + @Test // DATAMONGO-2331 + void rendersFieldReferenceCorrectly() { + + assertThat(new SetOperation("name", "value").toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : {\"name\":\"value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersMappedFieldReferenceCorrectly() { + + assertThat(new SetOperation("student", "value").toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$set\" : {\"student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersNestedMappedFieldReferenceCorrectly() { + + assertThat( + new SetOperation("scoresWithMappedField.student", "value").toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document.parse("{\"$set\" : {\"scoresWithMappedField.student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersTargetValueFieldReferenceCorrectly() { + + assertThat(new SetOperation("name", Fields.field("value")).toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : {\"name\":\"$value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersMappedTargetValueFieldReferenceCorrectly() { + + assertThat( + new SetOperation("student", Fields.field("homework")).toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$set\" : {\"student_name\":\"$home_work\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersNestedMappedTargetValueFieldReferenceCorrectly() { + + assertThat(new SetOperation("scoresWithMappedField.student", Fields.field("scoresWithMappedField.homework")) + .toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document + .parse("{\"$set\" : {\"scoresWithMappedField.student_name\":\"$scoresWithMappedField.home_work\"}}")); + } + + @Test // DATAMONGO-2363 + void appliesSpelExpressionCorrectly() { + + SetOperation operation = SetOperation.builder().set("totalHomework").withValueOfExpression("sum(homework) * [0]", + 2); + + assertThat(operation.toPipelineStages(contextFor(AddFieldsOperationUnitTests.ScoresWrapper.class))).contains( + Document.parse("{\"$set\" : {\"totalHomework\": { $multiply : [{ \"$sum\" : [\"$homework\"] }, 2] }}}")); + } + + @Test // DATAMONGO-2331 + void rendersTargetValueExpressionCorrectly() { + + assertThat(SetOperation.builder().set("totalHomework").toValueOf(ArithmeticOperators.valueOf("homework").sum()) + .toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : {\"totalHomework\": { \"$sum\" : \"$homework\" }}}")); + } + + @Test // DATAMONGO-2331 + void exposesFieldsCorrectly() { + + ExposedFields fields = SetOperation.builder().set("totalHomework").toValue("A+") // + .and() // + .set("totalQuiz").toValue("B-") // + .getFields(); + + assertThat(fields.getField("totalHomework")).isNotNull(); + assertThat(fields.getField("totalQuiz")).isNotNull(); + assertThat(fields.getField("does-not-exist")).isNull(); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + 
return new RelaxedTypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class Scores { + + String student; + List homework; + } + + static class ScoresWithMappedField { + + @Field("student_name") String student; + @Field("home_work") List homework; + } + + static class ScoresWrapper { + + Scores scores; + ScoresWithMappedField scoresWithMappedField; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java new file mode 100644 index 0000000000..bc0edcea0b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java @@ -0,0 +1,201 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Date; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link SetWindowFieldsOperation}. 
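+ * Runs {@code $setWindowFields} pipelines against a live server (requires MongoDB 5.0+) using cake sale sample documents.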
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") +class SetWindowFieldsOperationTests { + + @Template // + private static MongoTestTemplate mongoTemplate; + + @AfterEach + void afterEach() { + mongoTemplate.flush(CakeSale.class); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(162, 282, 427, 134, + 238, 378); + } + + @Test // GH-4745 + void exposesFieldsToNextStageCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation, + /* and now project on the field to see it can be referenced */ + Aggregation.project("cumulativeQuantityForState"))) + .all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(162, 282, 427, 134, + 238, 378); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationWithPartitionExpressionCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByExpression(Year.yearOf("date")) // resolves to $year: "$orderDate" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(134, 296, 104, 224, + 145, 285); + } + + void initCakeSales() { + + mongoTemplate.execute(CakeSale.class, collection -> { + + List source = Arrays.asList(Document.parse( + "{ _id: 0, type: \"chocolate\", orderDate: { $date : \"2020-05-18T14:10:30Z\" }, state: \"CA\", price: 13, quantity: 120 }"), + Document.parse( + "{ _id: 1, type: \"chocolate\", orderDate: { $date : \"2021-03-20T11:30:05Z\"}, state: \"WA\", price: 14, quantity: 140 }"), + Document.parse( + "{ _id: 2, type: \"vanilla\", orderDate: { $date : \"2021-01-11T06:31:15Z\"}, state: \"CA\", price: 12, quantity: 145 }"), + Document.parse( + "{ _id: 3, type: 
\"vanilla\", orderDate: { $date : \"2020-02-08T13:13:23Z\"}, state: \"WA\", price: 13, quantity: 104 }"), + Document.parse( + "{ _id: 4, type: \"strawberry\", orderDate: { $date : \"2019-05-18T16:09:01Z\"}, state: \"CA\", price: 41, quantity: 162 }"), + Document.parse( + "{ _id: 5, type: \"strawberry\", orderDate: { $date : \"2019-01-08T06:12:03Z\"}, state: \"WA\", price: 43, quantity: 134 }")); + + collection.insertMany(source); + return "OK"; + }); + } + + static class CakeSale { + + @Id Integer id; + + String state; + + @Field("orderDate") // + Date date; + + @Field("quantity") // + Integer qty; + + String type; + + public Integer getId() { + return this.id; + } + + public String getState() { + return this.state; + } + + public Date getDate() { + return this.date; + } + + public Integer getQty() { + return this.qty; + } + + public String getType() { + return this.type; + } + + public void setId(Integer id) { + this.id = id; + } + + public void setState(String state) { + this.state = state; + } + + public void setDate(Date date) { + this.date = date; + } + + public void setQty(Integer qty) { + this.qty = qty; + } + + public void setType(String type) { + this.type = type; + } + + public String toString() { + return "SetWindowFieldsOperationTests.CakeSale(id=" + this.getId() + ", state=" + this.getState() + ", date=" + + this.getDate() + ", qty=" + this.getQty() + ", type=" + this.getType() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java new file mode 100644 index 0000000000..b5f5f596e6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java @@ -0,0 +1,113 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link SetWindowFieldsOperation}. 
+ * + * @author Christoph Strobl + */ +class SetWindowFieldsOperationUnitTests { + + @Test // GH-3711 + void rendersTargetFieldNamesCorrectly() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { partitionBy: \"$state\", sortBy: { orderDate: 1 }, output: { cumulativeQuantityForState: { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } } } } }")); + } + + @Test // GH-3711 + void exposesTargetFieldNames() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + assertThat(setWindowFieldsOperation.getFields()).map(ExposedField::getName).containsExactly("f1", "f2"); + } + + @Test // GH-3711 + void rendersMultipleOutputFields() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { output: { f1 : { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } }, f2 : { $avg: \"$quantity\", window: { documents: [ -1, 0 ] } } } } }")); + } + + private static AggregationOperationContext contextFor(@Nullable Class<?> type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class CakeSale { + + String state; + + @Field("orderDate") Date date; + + @Field("quantity") Integer qty; + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java index 60bd05d060..73f78348fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors.
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,31 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; - -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link SkipOperation}. - * + * * @author Oliver Gierke */ public class SkipOperationUnitTests { static final String OP = "$skip"; - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNegativeSkip() { - new SkipOperation(-1L); + assertThatIllegalArgumentException().isThrownBy(() -> new SkipOperation(-1L)); } @Test public void rendersSkipOperation() { SkipOperation operation = new SkipOperation(10L); - DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject.get(OP), is((Object) 10L)); + assertThat(document.get(OP)).isEqualTo((Object) 10L); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java new file mode 100644 index 0000000000..923778dea4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link SortByCountOperation}. 
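+ * Verifies that {@code sortByCount(…)} renders {@code $sortByCount} for both field references and aggregation expressions.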
+ * + * @author Mark Paluch + */ +public class SortByCountOperationUnitTests { + + @Test // DATAMONGO-1553 + public void shouldRenderFieldCorrectly() { + + SortByCountOperation operation = sortByCount("country"); + Document result = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(result).containsEntry("$sortByCount", "$country"); + } + + @Test // DATAMONGO-1553 + public void shouldRenderExpressionCorrectly() { + + SortByCountOperation operation = sortByCount(StringOperators.valueOf("foo").substring(5)); + Document result = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(result).containsEntry("$sortByCount.$substr", Arrays.asList("$foo", 5, -1)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java index aa23d222ae..19f80f2d08 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,42 +15,42 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; -import com.mongodb.DBObject; - /** * Unit tests for {@link SortOperation}. 
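+ * Checks that ascending and descending sorts render {@code $sort} values of {@code 1} and {@code -1}.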
- * + * * @author Oliver Gierke + * @author Mark Paluch */ public class SortOperationUnitTests { @Test - public void createsDBObjectForAscendingSortCorrectly() { + public void createsDocumentForAscendingSortCorrectly() { - SortOperation operation = new SortOperation(new Sort(Direction.ASC, "foobar")); - DBObject result = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + SortOperation operation = new SortOperation(Sort.by(Direction.ASC, "foobar")); + Document result = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - DBObject sortValue = getAsDBObject(result, "$sort"); - assertThat(sortValue, is(notNullValue())); - assertThat(sortValue.get("foobar"), is((Object) 1)); + Document sortValue = getAsDocument(result, "$sort"); + assertThat(sortValue).isNotNull(); + assertThat(sortValue.get("foobar")).isEqualTo((Object) 1); } @Test - public void createsDBObjectForDescendingSortCorrectly() { + public void createsDocumentForDescendingSortCorrectly() { - SortOperation operation = new SortOperation(new Sort(Direction.DESC, "foobar")); - DBObject result = operation.toDBObject(Aggregation.DEFAULT_CONTEXT); + SortOperation operation = new SortOperation(Sort.by(Direction.DESC, "foobar")); + Document result = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - DBObject sortValue = getAsDBObject(result, "$sort"); - assertThat(sortValue, is(notNullValue())); - assertThat(sortValue.get("foobar"), is((Object) (0 - 1))); + Document sortValue = getAsDocument(result, "$sort"); + assertThat(sortValue).isNotNull(); + assertThat(sortValue.get("foobar")).isEqualTo((Object) (0 - 1)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java index a5260b712d..86f4ff03cf 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,38 +15,33 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mapping.model.MappingException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link SpelExpressionTransformer}. - * - * @see DATAMONGO-774 + * * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:infrastructure.xml") public class SpelExpressionTransformerIntegrationTests { - @Autowired MongoDbFactory mongoDbFactory; - - @Rule public ExpectedException exception = ExpectedException.none(); + @Autowired MongoDatabaseFactory mongoDbFactory; SpelExpressionTransformer transformer; DbRefResolver dbRefResolver; @@ -57,25 +52,25 @@ public void setUp() { this.dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); } - @Test + @Test // DATAMONGO-774 public void shouldConvertCompoundExpressionToPropertyPath() { MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); TypeBasedAggregationOperationContext ctxt = new TypeBasedAggregationOperationContext(Data.class, new MongoMappingContext(), new QueryMapper(converter)); - assertThat(transformer.transform("item.primitiveIntValue", ctxt, new Object[0]).toString(), - is("$item.primitiveIntValue")); + assertThat(transformer.transform("item.primitiveIntValue", ctxt, new Object[0]).toString()) + .isEqualTo("$item.primitiveIntValue"); } - @Test + @Test // DATAMONGO-774 public void shouldThrowExceptionIfNestedPropertyCannotBeFound() { - exception.expect(MappingException.class); - exception.expectMessage("value2"); - MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); TypeBasedAggregationOperationContext ctxt = new TypeBasedAggregationOperationContext(Data.class, new MongoMappingContext(), new QueryMapper(converter)); - assertThat(transformer.transform("item.value2", ctxt, new Object[0]).toString(), is("$item.value2")); + + assertThatExceptionOfType(InvalidPersistentPropertyPath.class).isThrownBy(() -> { + transformer.transform("item.value2", ctxt, new Object[0]).toString(); + }); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index ad72d97fa4..abe2fd5605 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,34 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; import org.springframework.data.mongodb.core.Person; /** * Unit tests for {@link SpelExpressionTransformer}. - * - * @see DATAMONGO-774 + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl + * @author Divya Srivastava + * @author Julia Lee */ public class SpelExpressionTransformerUnitTests { - SpelExpressionTransformer transformer = new SpelExpressionTransformer(); + private SpelExpressionTransformer transformer = new SpelExpressionTransformer(); - Data data; + private Data data; - @Before - public void setup() { + @BeforeEach + void beforeEach() { this.data = new Data(); this.data.primitiveLongValue = 42; @@ -47,167 +52,1239 @@ public void setup() { this.data.item.primitiveIntValue = 21; } - @Test - public void shouldRenderConstantExpression() { + @Test // DATAMONGO-774 + void shouldRenderConstantExpression() { - assertThat(transform("1"), is("1")); - assertThat(transform("-1"), is("-1")); - assertThat(transform("1.0"), is("1.0")); - assertThat(transform("-1.0"), is("-1.0")); - assertThat(transform("null"), is(nullValue())); + assertThat(transformValue("1")).isEqualTo("1"); + assertThat(transformValue("-1")).isEqualTo("-1"); + assertThat(transformValue("1.0")).isEqualTo("1.0"); + assertThat(transformValue("-1.0")).isEqualTo("-1.0"); + assertThat(transformValue("null")).isNull(); } - @Test - public void shouldSupportKnownOperands() { + @Test // DATAMONGO-774 + void shouldSupportKnownOperands() { - assertThat(transform("a + b"), is("{ \"$add\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a - b"), is("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a * b"), is("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a / b"), is("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a % b"), is("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a + b")).isEqualTo("{ \"$add\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a - b")).isEqualTo("{ \"$subtract\" : [ 
\"$a\" , \"$b\"]}"); + assertThat(transform("a * b")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a / b")).isEqualTo("{ \"$divide\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a % b")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } - @Test(expected = IllegalArgumentException.class) - public void shouldThrowExceptionOnUnknownOperand() { - transform("a ^ 1"); + @Test // DATAMONGO-774 + void shouldThrowExceptionOnUnknownOperand() { + assertThatIllegalArgumentException().isThrownBy(() -> transform("a++")); } - @Test - public void shouldRenderSumExpression() { - assertThat(transform("a + 1"), is("{ \"$add\" : [ \"$a\" , 1]}")); + @Test // DATAMONGO-774 + void shouldRenderSumExpression() { + assertThat(transform("a + 1")).isEqualTo("{ \"$add\" : [ \"$a\" , 1]}"); } - @Test - public void shouldRenderFormula() { + @Test // DATAMONGO-774 + void shouldRenderFormula() { - assertThat( - transform("(netPrice + surCharge) * taxrate + 42"), - is("{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); + assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } - @Test - public void shouldRenderFormulaInCurlyBrackets() { + @Test // DATAMONGO-774 + void shouldRenderFormulaInCurlyBrackets() { - assertThat( - transform("{(netPrice + surCharge) * taxrate + 42}"), - is("{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); + assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } - @Test - public void shouldRenderFieldReference() { + @Test // DATAMONGO-774 + void shouldRenderFieldReference() { - assertThat(transform("foo"), is("$foo")); - assertThat(transform("$foo"), is("$foo")); + assertThat(transformValue("foo")).isEqualTo("$foo"); + assertThat(transformValue("$foo")).isEqualTo("$foo"); } - @Test - public void shouldRenderNestedFieldReference() { + @Test // DATAMONGO-774 + void shouldRenderNestedFieldReference() { - assertThat(transform("foo.bar"), is("$foo.bar")); - assertThat(transform("$foo.bar"), is("$foo.bar")); + assertThat(transformValue("foo.bar")).isEqualTo("$foo.bar"); + assertThat(transformValue("$foo.bar")).isEqualTo("$foo.bar"); } - @Test - @Ignore - public void shouldRenderNestedIndexedFieldReference() { + @Test // DATAMONGO-774 + @Disabled + void shouldRenderNestedIndexedFieldReference() { // TODO add support for rendering nested indexed field references - assertThat(transform("foo[3].bar"), is("$foo[3].bar")); + assertThat(transformValue("foo[3].bar")).isEqualTo("$foo[3].bar"); } - @Test - public void shouldRenderConsecutiveOperation() { - assertThat(transform("1 + 1 + 1"), is("{ \"$add\" : [ 1 , 1 , 1]}")); + @Test // DATAMONGO-774 + void shouldRenderConsecutiveOperation() { + assertThat(transform("1 + 1 + 1")).isEqualTo("{ \"$add\" : [ 1 , 1 , 1]}"); } - @Test - public void shouldRenderComplexExpression0() { + @Test // DATAMONGO-774 + void shouldRenderComplexExpression0() { - assertThat(transform("-(1 + q)"), is("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); + assertThat(transform("-(1 + q)")) + .isEqualTo("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}"); } - @Test - public void shouldRenderComplexExpression1() { + @Test // DATAMONGO-774 + void 
shouldRenderComplexExpression1() { - assertThat(transform("1 + (q + 1) / (q - 1)"), - is("{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}")); + assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo( + "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}"); } - @Test - public void shouldRenderComplexExpression2() { + @Test // DATAMONGO-774 + void shouldRenderComplexExpression2() { - assertThat( - transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)"), - is("{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}")); + assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo( + "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}"); } - @Test - public void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { + @Test // DATAMONGO-774 + void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { - assertThat(transform("-4 + 1"), is("{ \"$add\" : [ -4 , 1]}")); - assertThat(transform("1 + -4"), is("{ \"$add\" : [ 1 , -4]}")); + assertThat(transform("-4 + 1")).isEqualTo("{ \"$add\" : [ -4 , 1]}"); + assertThat(transform("1 + -4")).isEqualTo("{ \"$add\" : [ 1 , -4]}"); } - @Test - public void shouldRenderConsecutiveOperationsInComplexExpression() { + @Test // DATAMONGO-774 + void shouldRenderConsecutiveOperationsInComplexExpression() { - assertThat(transform("1 + 1 + (1 + 1 + 1) / q"), - is("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + assertThat(transform("1 + 1 + (1 + 1 + 1) / q")) + .isEqualTo("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}"); } - @Test - public void shouldRenderParameterExpressionResults() { - assertThat(transform("[0] + [1] + [2]", 1, 2, 3), is("{ \"$add\" : [ 1 , 2 , 3]}")); + @Test // DATAMONGO-774 + void shouldRenderParameterExpressionResults() { + assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo("{ \"$add\" : [ 1 , 2 , 3]}"); } - @Test - public void shouldRenderNestedParameterExpressionResults() { + @Test // DATAMONGO-774 + void shouldRenderNestedParameterExpressionResults() { - assertThat(transform("[0].primitiveLongValue + [0].primitiveDoubleValue + [0].doubleValue.longValue()", data), - is("{ \"$add\" : [ 42 , 1.2345 , 23]}")); + assertThat( + ((Document) transform("[0].primitiveLongValue + [0].primitiveDoubleValue + [0].doubleValue.longValue()", data)) + .toJson()) + .isEqualTo(Document + .parse("{ \"$add\" : [ { $numberLong : \"42\"} , 1.2345 , { $numberLong : \"23\" } ]}").toJson()); } - @Test - public void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { + @Test // DATAMONGO-774 + void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { + + Document target = ((Document) transform( + "((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", data)); assertThat( - transform("((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", data), - is("{ \"$multiply\" : [ { \"$add\" : [ 1 , 42 , 1.2345]} , 23]}")); + ((Document) transform("((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", + data))) + .isEqualTo(new Document("$multiply", + Arrays.<Object> asList(new Document("$add", Arrays.<Object> 
asList(1, 42L, 1.2345D)), 23L))); } - /** - * @see DATAMONGO-840 - */ - @Test - public void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { + @Test // DATAMONGO-840 + void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); - assertThat(transform("[0].age + a.c", person), is("{ \"$add\" : [ 10 , \"$a.c\"]}")); + assertThat(transform("[0].age + a.c", person)).isEqualTo("{ \"$add\" : [ 10 , \"$a.c\"] }"); + } + + @Test // DATAMONGO-840 + void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { + + assertThat(transform("a.b + a.c")).isEqualTo("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeAnd() { + assertThat(transform("and(a, b)")).isEqualTo("{ \"$and\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeOr() { + assertThat(transform("or(a, b)")).isEqualTo("{ \"$or\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeNot() { + assertThat(transform("not(a)")).isEqualTo("{ \"$not\" : [ \"$a\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSetEquals() { + assertThat(transform("setEquals(a, b)")).isEqualTo("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSetEqualsForArrays() { + assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})")) + .isEqualTo("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { + assertThat(transform("setEquals(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceSetIntersection() { + assertThat(transform("setIntersection(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceSetUnion() { + assertThat(transform("setUnion(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceSeDifference() { + assertThat(transform("setDifference(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceSetIsSubset() { + assertThat(transform("setIsSubset(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceAnyElementTrue() { + assertThat(transform("anyElementTrue(a)")).isEqualTo("{ \"$anyElementTrue\" : [ \"$a\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceAllElementsTrue() { + assertThat(transform("allElementsTrue(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceCmp() { + assertThat(transform("cmp(a, 250)")).isEqualTo("{ \"$cmp\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceEq() { + assertThat(transform("eq(a, 250)")).isEqualTo("{ \"$eq\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceGt() { + assertThat(transform("gt(a, 250)")).isEqualTo("{ \"$gt\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceGte() { + 
assertThat(transform("gte(a, 250)")).isEqualTo("{ \"$gte\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLt() { + assertThat(transform("lt(a, 250)")).isEqualTo("{ \"$lt\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLte() { + assertThat(transform("lte(a, 250)")).isEqualTo("{ \"$lte\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNe() { + assertThat(transform("ne(a, 250)")).isEqualTo("{ \"$ne\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceAbs() { + assertThat(transform("abs(1)")).isEqualTo("{ \"$abs\" : 1}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceAdd() { + assertThat(transform("add(a, 250)")).isEqualTo("{ \"$add\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceCeil() { + assertThat(transform("ceil(7.8)")).isEqualTo("{ \"$ceil\" : 7.8}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceDivide() { + assertThat(transform("divide(a, 250)")).isEqualTo("{ \"$divide\" : [ \"$a\" , 250]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceExp() { + assertThat(transform("exp(2)")).isEqualTo("{ \"$exp\" : 2}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceFloor() { + assertThat(transform("floor(2)")).isEqualTo("{ \"$floor\" : 2}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLn() { + assertThat(transform("ln(2)")).isEqualTo("{ \"$ln\" : 2}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLog() { + assertThat(transform("log(100, 10)")).isEqualTo("{ \"$log\" : [ 100 , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLog10() { + assertThat(transform("log10(100)")).isEqualTo("{ \"$log10\" : 100}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeMod() { + assertThat(transform("mod(a, b)")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeMultiply() { + assertThat(transform("multiply(a, b)")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodePow() { + assertThat(transform("pow(a, 2)")).isEqualTo("{ \"$pow\" : [ \"$a\" , 2]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceSqrt() { + assertThat(transform("sqrt(2)")).isEqualTo("{ \"$sqrt\" : 2}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSubtract() { + assertThat(transform("subtract(a, b)")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceTrunc() { + assertThat(transform("trunc(2.1)")).isEqualTo("{ \"$trunc\" : 2.1}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeConcat() { + assertThat(transform("concat(a, b, 'c')")).isEqualTo("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSubstrc() { + assertThat(transform("substr(a, 0, 1)")).isEqualTo("{ \"$substr\" : [ \"$a\" , 0 , 1]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceToLower() { + assertThat(transform("toLower(a)")).isEqualTo("{ \"$toLower\" : \"$a\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceToUpper() { + assertThat(transform("toUpper(a)")).isEqualTo("{ \"$toUpper\" : \"$a\"}"); + } + + @Test // DATAMONGO-1530 + void 
shouldRenderMethodReferenceNodeStrCaseCmp() { + assertThat(transform("strcasecmp(a, b)")).isEqualTo("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceMeta() { + assertThat(transform("meta('textScore')")).isEqualTo("{ \"$meta\" : \"textScore\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeArrayElemAt() { + assertThat(transform("arrayElemAt(a, 10)")).isEqualTo("{ \"$arrayElemAt\" : [ \"$a\" , 10]}"); + } + + @Test // GH-3694 + void shouldRenderMethodReferenceNodeFirst() { + assertThat(transform("first(a)")).isEqualTo("{ \"$first\" : \"$a\" }"); + } + + @Test // GH-3694 + void shouldRenderMethodReferenceNodeLast() { + assertThat(transform("last(a)")).isEqualTo("{ \"$last\" : \"$a\" }"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeConcatArrays() { + assertThat(transform("concatArrays(a, b, c)")) + .isEqualTo("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeFilter() { + assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo( + "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceIsArray() { + assertThat(transform("isArray(a)")).isEqualTo("{ \"$isArray\" : \"$a\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceIsSize() { + assertThat(transform("size(a)")).isEqualTo("{ \"$size\" : \"$a\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSlice() { + assertThat(transform("slice(a, 10)")).isEqualTo("{ \"$slice\" : [ \"$a\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeMap() { + assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo( + "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeLet() { + assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo( + "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLiteral() { + assertThat(transform("literal($1)")).isEqualTo("{ \"$literal\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceDayOfYear() { + assertThat(transform("dayOfYear($1)")).isEqualTo("{ \"$dayOfYear\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceDayOfMonth() { + assertThat(transform("dayOfMonth($1)")).isEqualTo("{ \"$dayOfMonth\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceDayOfWeek() { + assertThat(transform("dayOfWeek($1)")).isEqualTo("{ \"$dayOfWeek\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceYear() { + assertThat(transform("year($1)")).isEqualTo("{ \"$year\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceMonth() { + assertThat(transform("month($1)")).isEqualTo("{ \"$month\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceWeek() { + assertThat(transform("week($1)")).isEqualTo("{ \"$week\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceHour() { + assertThat(transform("hour($1)")).isEqualTo("{ \"$hour\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void 
shouldRenderMethodReferenceMinute() { + assertThat(transform("minute($1)")).isEqualTo("{ \"$minute\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceSecond() { + assertThat(transform("second($1)")).isEqualTo("{ \"$second\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceMillisecond() { + assertThat(transform("millisecond($1)")).isEqualTo("{ \"$millisecond\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceDateToString() { + assertThat(transform("dateToString('%Y-%m-%d', $date)")) + .isEqualTo("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceCond() { + assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( + "{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeIfNull() { + assertThat(transform("ifNull(a, 10)")).isEqualTo("{ \"$ifNull\" : [ \"$a\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeSum() { + assertThat(transform("sum(a, b)")).isEqualTo("{ \"$sum\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeAvg() { + assertThat(transform("avg(a, b)")).isEqualTo("{ \"$avg\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceFirst() { + assertThat(transform("first($1)")).isEqualTo("{ \"$first\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceLast() { + assertThat(transform("last($1)")).isEqualTo("{ \"$last\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeMax() { + assertThat(transform("max(a, b)")).isEqualTo("{ \"$max\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeMin() { + assertThat(transform("min(a, b)")).isEqualTo("{ \"$min\" : [ \"$a\" , \"$b\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodePush() { + assertThat(transform("push({'item':'$item', 'quantity':'$qty'})")) + .isEqualTo("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceAddToSet() { + assertThat(transform("addToSet($1)")).isEqualTo("{ \"$addToSet\" : \"$1\"}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeStdDevPop() { + assertThat(transform("stdDevPop(scores.score)")) + .isEqualTo("{ \"$stdDevPop\" : [ \"$scores.score\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderMethodReferenceNodeStdDevSamp() { + assertThat(transform("stdDevSamp(age)")).isEqualTo("{ \"$stdDevSamp\" : [ \"$age\"]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeEq() { + assertThat(transform("foo == 10")).isEqualTo("{ \"$eq\" : [ \"$foo\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeNe() { + assertThat(transform("foo != 10")).isEqualTo("{ \"$ne\" : [ \"$foo\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeGt() { + assertThat(transform("foo > 10")).isEqualTo("{ \"$gt\" : [ \"$foo\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeGte() { + assertThat(transform("foo >= 10")).isEqualTo("{ \"$gte\" : [ \"$foo\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeLt() { + assertThat(transform("foo < 10")).isEqualTo("{ \"$lt\" : [ \"$foo\" , 10]}"); + } + + @Test // 
DATAMONGO-1530 + void shouldRenderOperationNodeLte() { + assertThat(transform("foo <= 10")).isEqualTo("{ \"$lte\" : [ \"$foo\" , 10]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodePow() { + assertThat(transform("foo^2")).isEqualTo("{ \"$pow\" : [ \"$foo\" , 2]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeOr() { + assertThat(transform("true || false")).isEqualTo("{ \"$or\" : [ true , false]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderComplexOperationNodeOr() { + assertThat(transform("1+2 || concat(a, b) || true")).isEqualTo( + "{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderOperationNodeAnd() { + assertThat(transform("true && false")).isEqualTo("{ \"$and\" : [ true , false]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderComplexOperationNodeAnd() { + assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( + "{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderNotCorrectly() { + assertThat(transform("!true")).isEqualTo("{ \"$not\" : [ true]}"); + } + + @Test // DATAMONGO-1530 + void shouldRenderComplexNotCorrectly() { + assertThat(transform("!(foo > 10)")).isEqualTo("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceIndexOfBytes() { + assertThat(transform("indexOfBytes(item, 'foo')")) + .isEqualTo("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceIndexOfCP() { + assertThat(transform("indexOfCP(item, 'foo')")) + .isEqualTo("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceSplit() { + assertThat(transform("split(item, ',')")).isEqualTo("{ \"$split\" : [ \"$item\" , \",\"]}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceStrLenBytes() { + assertThat(transform("strLenBytes(item)")).isEqualTo("{ \"$strLenBytes\" : \"$item\"}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceStrLenCP() { + assertThat(transform("strLenCP(item)")).isEqualTo("{ \"$strLenCP\" : \"$item\"}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodSubstrCP() { + assertThat(transform("substrCP(item, 0, 5)")).isEqualTo("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceReverseArray() { + assertThat(transform("reverseArray(array)")).isEqualTo("{ \"$reverseArray\" : \"$array\"}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceReduce() { + assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo( + "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceZip() { + assertThat(transform("zip(new String[]{'$array1', '$array2'})")) + .isEqualTo("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodReferenceZipWithOptionalArgs() { + assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo( + "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodIn() { + assertThat(transform("in('item', 
array)")).isEqualTo("{ \"$in\" : [ \"item\" , \"$array\"]}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodRefereneIsoDayOfWeek() { + assertThat(transform("isoDayOfWeek(date)")).isEqualTo("{ \"$isoDayOfWeek\" : \"$date\"}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodRefereneIsoWeek() { + assertThat(transform("isoWeek(date)")).isEqualTo("{ \"$isoWeek\" : \"$date\"}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodRefereneIsoWeekYear() { + assertThat(transform("isoWeekYear(date)")).isEqualTo("{ \"$isoWeekYear\" : \"$date\"}"); + } + + @Test // DATAMONGO-1548 + void shouldRenderMethodRefereneType() { + assertThat(transform("type(a)")).isEqualTo("{ \"$type\" : \"$a\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderArrayToObjectWithFieldReference() { + assertThat(transform("arrayToObject(field)")).isEqualTo("{ \"$arrayToObject\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderArrayToObjectWithArray() { + + assertThat(transform("arrayToObject(new String[]{'key', 'value'})")) + .isEqualTo("{ \"$arrayToObject\" : [\"key\", \"value\"]}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderObjectToArrayWithFieldReference() { + assertThat(transform("objectToArray(field)")).isEqualTo("{ \"$objectToArray\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderMergeObjects() { + + assertThat(transform("mergeObjects(field1, $$ROOT)")) + .isEqualTo("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderTrimWithoutChars() { + assertThat(transform("trim(field)")).isEqualTo("{ \"$trim\" : {\"input\" : \"$field\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderTrimWithChars() { + + assertThat(transform("trim(field, 'ie')")) + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderTrimWithCharsFromFieldReference() { + + assertThat(transform("trim(field1, field2)")) + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderLtrimWithoutChars() { + assertThat(transform("ltrim(field)")).isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderLtrimWithChars() { + + assertThat(transform("ltrim(field, 'ie')")) + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderLtrimWithCharsFromFieldReference() { + + assertThat(transform("ltrim(field1, field2)")) + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderRtrimWithoutChars() { + assertThat(transform("rtrim(field)")).isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderRtrimWithChars() { + + assertThat(transform("rtrim(field, 'ie')")) + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderRtrimWithCharsFromFieldReference() { + + assertThat(transform("rtrim(field1, field2)")) + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithoutOptions() { + + assertThat(transform("regexFind(field1,'e')")) + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + + 
assertThat(transform("regexFind(field1,'e','i')")) + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsFromFieldReference() { + + assertThat(transform("regexFind(field1,'e',field2)")) + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithoutOptions() { + + assertThat(transform("regexFindAll(field1,'e')")) + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { + + assertThat(transform("regexFindAll(field1,'e','i')")) + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsFromFieldReference() { + + assertThat(transform("regexFindAll(field1,'e',field2)")) + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithoutOptions() { + + assertThat(transform("regexMatch(field1,'e')")) + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { + + assertThat(transform("regexMatch(field1,'e','i')")) + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsFromFieldReference() { + + assertThat(transform("regexMatch(field1,'e',field2)")) + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceOne() { + + assertThat(transform("replaceOne(field, 'bar', 'baz')")) + .isEqualTo("{ \"$replaceOne\" : {\"input\" : \"$field\" , \"find\" : \"bar\" , \"replacement\" : \"baz\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceAll() { + + assertThat(transform("replaceAll(field, 'bar', 'baz')")) + .isEqualTo("{ \"$replaceAll\" : {\"input\" : \"$field\" , \"find\" : \"bar\" , \"replacement\" : \"baz\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderConvertWithoutOptionalParameters() { + + assertThat(transform("convert(field, 'string')")) + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderConvertWithOnError() { + + assertThat(transform("convert(field, 'int', 'Not an integer.')")) + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderConvertWithOnErrorOnNull() { + + assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo( + "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToBool() { + assertThat(transform("toBool(field)")).isEqualTo("{ \"$toBool\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToDate() { + assertThat(transform("toDate(field)")).isEqualTo("{ \"$toDate\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToDecimal() { + assertThat(transform("toDecimal(field)")).isEqualTo("{ \"$toDecimal\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void 
shouldRenderToDouble() { + assertThat(transform("toDouble(field)")).isEqualTo("{ \"$toDouble\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToInt() { + assertThat(transform("toInt(field)")).isEqualTo("{ \"$toInt\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToLong() { + assertThat(transform("toLong(field)")).isEqualTo("{ \"$toLong\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToObjectId() { + assertThat(transform("toObjectId(field)")).isEqualTo("{ \"$toObjectId\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToString() { + assertThat(transform("toString(field)")).isEqualTo("{ \"$toString\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithoutOptionalParameters() { + + assertThat(transform("dateFromString(field)")) + .isEqualTo("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormat() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY')")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormatAndTimezone() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}"); + } + + @Test // DATAMONGO-2077, DATAMONGO-2671 + void shouldRenderDateFromParts() { + + assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); } - /** - * @see DATAMONGO-840 - */ - @Test - public void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { + @Test // DATAMONGO-2077, DATAMONGO-2671 + void shouldRenderIsoDateFromParts() { - assertThat(transform("a.b + a.c"), is("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); + assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); } - @Test - public void shouldRenderStringFunctions() { + @Test // DATAMONGO-2077 + void shouldRenderDateToParts() { + + assertThat(transform("dateToParts(field, 'UTC', false)")).isEqualTo( + "{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderIndexOfArray() { + + assertThat(transform("indexOfArray(field, 2)")) + .isEqualTo("{ \"$indexOfArray\" : [\"$field\", 2 ]}"); + } + + 
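// A brief aside on mechanics, as an illustrative sketch rather than an operator test of its own: + // every call in this class goes through the transform(String, Object...) helper declared at the + // bottom, which delegates to transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params) + // and returns the rendered aggregation expression as an org.bson.Document, so that, for example, + // transform("a + 1") yields a Document equal to Document.parse("{ \"$add\" : [ \"$a\" , 1]}"), the + // exact shape asserted by shouldRenderSumExpression above. + + 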
@Test // DATAMONGO-2077 + void shouldRenderRange() { + + assertThat(transform("range(0, 10, 2)")).isEqualTo("{ \"$range\" : [0, 10, 2 ]}"); + } + + @Test // DATAMONGO-2370 + void shouldRenderRound() { + assertThat(transform("round(field)")).isEqualTo("{ \"$round\" : [\"$field\"]}"); + } + + @Test // DATAMONGO-2370 + void shouldRenderRoundWithPlace() { + assertThat(transform("round(field, 2)")).isEqualTo("{ \"$round\" : [\"$field\", 2]}"); + } + + @Test // GH-3714 + void shouldRenderDegreesToRadians() { + assertThat(transform("degreesToRadians(angle_a)")).isEqualTo("{ \"$degreesToRadians\" : \"$angle_a\"}"); + } + + @Test // GH-3712 + void shouldRenderCovariancePop() { + assertThat(transform("covariancePop(field1, field2)")) + .isEqualTo("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}"); + } + + @Test // GH-3712 + void shouldRenderCovarianceSamp() { + assertThat(transform("covarianceSamp(field1, field2)")) + .isEqualTo("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}"); + } + + @Test // GH-3715 + void shouldRenderRank() { + assertThat(transform("rank()")).isEqualTo("{ $rank : {} }"); + } + + @Test // GH-3715 + void shouldRenderDenseRank() { + assertThat(transform("denseRank()")).isEqualTo("{ $denseRank : {} }"); + } + + @Test // GH-3717 + void shouldRenderDocumentNumber() { + assertThat(transform("documentNumber()")).isEqualTo("{ $documentNumber : {} }"); + } + + @Test // GH-3727 + void rendersShift() { + + assertThat(transform("shift(quantity, 1)")) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(transform("shift(quantity, 1, 'Not available')")) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); + } + + @Test // GH-3716 + void shouldRenderDerivative() { + assertThat(transform("derivative(miles, 'hour')")) + .isEqualTo("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }"); + } + + @Test // GH-3721 + void shouldRenderIntegral() { + assertThat(transform("integral(field)")).isEqualTo("{ \"$integral\" : { \"input\" : \"$field\" }}"); + } + + @Test // GH-3721 + void shouldRenderIntegralWithUnit() { + assertThat(transform("integral(field, 'hour')")) + .isEqualTo("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}"); + } + + @Test // GH-3728 + void shouldRenderSin() { + assertThat(transform("sin(angle)")).isEqualTo("{ \"$sin\" : \"$angle\"}"); + } + + @Test // GH-3728 + void shouldRenderSinh() { + assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); + } + + @Test // GH-3708 + void shouldRenderASin() { + assertThat(transform("asin(number)")).isEqualTo("{ \"$asin\" : \"$number\"}"); + } + + @Test // GH-3708 + void shouldRenderASinh() { + assertThat(transform("asinh(number)")).isEqualTo("{ \"$asinh\" : \"$number\"}"); + } + + @Test // GH-3710 + void shouldRenderCos() { + assertThat(transform("cos(angle)")).isEqualTo("{ \"$cos\" : \"$angle\"}"); + } + + @Test // GH-3710 + void shouldRenderCosh() { + assertThat(transform("cosh(angle)")).isEqualTo("{ \"$cosh\" : \"$angle\"}"); + } + + @Test // GH-3707 + void shouldRenderACos() { + assertThat(transform("acos(angle)")).isEqualTo("{ \"$acos\" : \"$angle\"}"); + } + + @Test // GH-3707 + void shouldRenderACosh() { + assertThat(transform("acosh(angle)")).isEqualTo("{ \"$acosh\" : \"$angle\"}"); + } + + @Test // GH-3730 + void shouldRenderTan() { + assertThat(transform("tan(angle)")).isEqualTo("{ \"$tan\" : \"$angle\"}"); + } + + @Test // GH-3730 + void shouldRenderTanh() { + 
assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); + } + + @Test // GH-3709 + void shouldRenderATan() { + assertThat(transform("atan(number)")).isEqualTo("{ \"$atan\" : \"$number\"}"); + } + + @Test // GH-3709 + void shouldRenderATan2() { + assertThat(transform("atan2(number1,number2)")).isEqualTo("{ \"$atan2\" : [ \"$number1\" , \"$number2\" ] }"); + } + + @Test // GH-3709 + void shouldRenderATanh() { + assertThat(transform("atanh(number)")).isEqualTo("{ \"$atanh\" : \"$number\"}"); + } + + @Test // GH-3713 + void shouldRenderDateAdd() { + assertThat(transform("dateAdd(purchaseDate, 'day', 3)")) + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-4139 + void shouldRenderDateSubtract() { + assertThat(transform("dateSubtract(purchaseDate, 'day', 3)")) + .isEqualTo("{ $dateSubtract: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-3713 + void shouldRenderDateDiff() { + assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")) + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); + } + + @Test // GH-3724 + void shouldRenderRand() { + assertThat(transform("rand()")).isEqualTo("{ $rand : {} }"); + } + + @Test // GH-4139 + void shouldRenderBottom() { + assertThat(transform("bottom(new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $bottom : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderBottomN() { + assertThat(transform("bottomN(3, new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $bottomN : { n : 3, output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderTop() { + assertThat(transform("top(new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $top : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderTopN() { + assertThat(transform("topN(3, new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $topN : { n : 3, output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderFirstN() { + assertThat(transform("firstN(3, \"$score\")")).isEqualTo("{ $firstN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderLastN() { + assertThat(transform("lastN(3, \"$score\")")).isEqualTo("{ $lastN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderMaxN() { + assertThat(transform("maxN(3, \"$score\")")).isEqualTo("{ $maxN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderMinN() { + assertThat(transform("minN(3, \"$score\")")).isEqualTo("{ $minN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderDateTrunc() { + assertThat(transform("dateTrunc(purchaseDate, \"week\", 2, \"monday\")")).isEqualTo("{ $dateTrunc : { date : \"$purchaseDate\", unit : \"week\", binSize : 2, startOfWeek : \"monday\" }}"); + } + + @Test // GH-4139 + void shouldRenderGetField() { + assertThat(transform("getField(\"score\", source)")).isEqualTo("{ $getField : { field : \"score\", input : \"$source\" }}"); + } + + @Test // GH-4139 + void shouldRenderSetField() { + assertThat(transform("setField(\"score\", 100, source)")).isEqualTo("{ $setField : { field : \"score\", value : 100, input : 
\"$source\" }}"); + } + + @Test // GH-4139 + void shouldRenderSortArray() { + assertThat(transform( + "sortArray(team, new org.bson.Document(\"name\" , 1))")).isEqualTo("{ $sortArray : { input : \"$team\", sortBy : {\"name\" : 1 } }}"); + } + + @Test // GH-4139 + void shouldTsIncrement() { + assertThat(transform("tsIncrement(saleTimestamp)")).isEqualTo("{ $tsIncrement: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void shouldTsSecond() { + assertThat(transform("tsSecond(saleTimestamp)")).isEqualTo("{ $tsSecond: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void shouldRenderLocf() { + assertThat(transform("locf(price)")).isEqualTo("{ $locf: \"$price\" }"); + } + + @Test // GH-4473 + void shouldRenderPercentile() { + assertThat(transform("percentile(new String[]{\"$scoreOne\", \"$scoreTwo\" }, new double[]{0.4}, \"approximate\")")) + .isEqualTo("{ $percentile : { input : [\"$scoreOne\", \"$scoreTwo\"], p : [0.4], method : \"approximate\" }}"); + + assertThat(transform("percentile(score, new double[]{0.4, 0.85}, \"approximate\")")) + .isEqualTo("{ $percentile : { input : \"$score\", p : [0.4, 0.85], method : \"approximate\" }}"); + + assertThat(transform("percentile(\"$score\", new double[]{0.4, 0.85}, \"approximate\")")) + .isEqualTo("{ $percentile : { input : \"$score\", p : [0.4, 0.85], method : \"approximate\" }}"); + } + + @Test // GH-4472 + void shouldRenderMedian() { + + assertThat(transform("median(new String[]{\"$scoreOne\", \"$scoreTwo\" }, \"approximate\")")) + .isEqualTo("{ $median : { input : [\"$scoreOne\", \"$scoreTwo\"], method : \"approximate\" }}"); + + assertThat(transform("median(score, \"approximate\")")) + .isEqualTo("{ $median : { input : \"$score\", method : \"approximate\" }}"); + } - assertThat(transform("concat(a, b)"), is("{ \"$concat\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("substr(a, 1, 2)"), is("{ \"$substr\" : [ \"$a\" , 1 , 2]}")); - assertThat(transform("strcasecmp(a, b)"), is("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("toLower(a)"), is("{ \"$toLower\" : [ \"$a\"]}")); - assertThat(transform("toUpper(a)"), is("{ \"$toUpper\" : [ \"$a\"]}")); - assertThat(transform("toUpper(toLower(a))"), is("{ \"$toUpper\" : [ { \"$toLower\" : [ \"$a\"]}]}")); + private Document transform(String expression, Object... params) { + return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); } - private String transform(String expression, Object... params) { + private Object transformValue(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); - return result == null ? null : result.toString(); + return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java index 335e9ef752..a5a47ec85a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java new file mode 100644 index 0000000000..61d2951ebb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java @@ -0,0 +1,312 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.regex.Pattern; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit test for {@link StringOperators}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + * @currentRead Royal Assassin - Robin Hobb + */ +class StringOperatorsUnitTests { + + private static final String EXPRESSION_STRING = "{ \"$fitz\" : \"chivalry\" }"; + private static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + private static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2049 + void shouldRenderTrim() { + + assertThat(StringOperators.valueOf("shrewd").trim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).trim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimWithChars() { + + assertThat(StringOperators.valueOf("shrewd").trim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimWithCharsExpression() { + + assertThat(StringOperators.valueOf("shrewd").trim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimLeft() { + + assertThat(StringOperators.valueOf("shrewd").trim().left().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimLeftWithChars() { + + assertThat(StringOperators.valueOf("shrewd").trim("sh").left().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // 
DATAMONGO-2049 + void shouldRenderTrimRight() { + + assertThat(StringOperators.valueOf("shrewd").trim().right().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimRightWithChars() { + + assertThat(StringOperators.valueOf("shrewd").trim("sh").right().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrim() { + + assertThat(StringOperators.valueOf("shrewd").ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrimForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrimWithChars() { + + assertThat(StringOperators.valueOf("shrewd").ltrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrimWithCharsExpression() { + + assertThat(StringOperators.valueOf("shrewd").ltrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrim() { + + assertThat(StringOperators.valueOf("shrewd").rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrimForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrimWithChars() { + + assertThat(StringOperators.valueOf("shrewd").rtrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrimWithCharsExpression() { + + assertThat(StringOperators.valueOf("shrewd").rtrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAll() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithPattern() { + + assertThat(StringOperators.valueOf("shrewd") + .regexFindAll( + Pattern.compile("foo", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL | Pattern.COMMENTS)) + 
.toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"foo\" , \"options\" : \"imsx\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatch() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(Pattern.compile("foo", Pattern.CASE_INSENSITIVE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"i\"} } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFind() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); + } + + @Test // GH-3725 + void shouldRenderRegexFindForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(Pattern.compile("foo", Pattern.MULTILINE)) + 
.toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"m\"} } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } + + @Test // GH-3695 + void shouldRenderReplaceOne() { + + assertThat(StringOperators.valueOf("bar").replaceOne("foobar","baz").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceOne : {\"find\" : \"foobar\", \"input\" : \"$bar\", \"replacement\" : \"baz\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceOneForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).replaceOne("a","s").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceOne : {\"find\" : \"a\", \"input\" : " + EXPRESSION_STRING + ", \"replacement\" : \"s\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceAll() { + + assertThat(StringOperators.valueOf("bar").replaceAll("foobar","baz").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceAll : {\"find\" : \"foobar\", \"input\" : \"$bar\", \"replacement\" : \"baz\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceAllForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).replaceAll("a","s").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceAll : {\"find\" : \"a\", \"input\" : " + EXPRESSION_STRING + ", \"replacement\" : \"s\"}}"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java index eece18bca2..bcc0b6e17f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2016 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,50 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.aggregation.Fields.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import java.util.List; +import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.core.convert.converter.Converter; import org.springframework.core.convert.support.GenericConversionService; import org.springframework.data.annotation.Id; -import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.Variable; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; -import org.springframework.data.mongodb.core.convert.CustomConversions; +import org.springframework.data.mongodb.core.aggregation.SetOperators.SetUnion; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.QueryMapper; -import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.Criteria; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Unit tests for {@link TypeBasedAggregationOperationContext}. 
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Mark Paluch + * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class TypeBasedAggregationOperationContextUnitTests { MongoMappingContext context; @@ -63,7 +67,7 @@ public class TypeBasedAggregationOperationContextUnitTests { @Mock DbRefResolver dbRefResolver; - @Before + @BeforeEach public void setUp() { this.context = new MongoMappingContext(); @@ -73,43 +77,35 @@ public void setUp() { @Test public void findsSimpleReference() { - assertThat(getContext(Foo.class).getReference("bar"), is(notNullValue())); + assertThat(getContext(Foo.class).getReference("bar")).isNotNull(); } - @Test(expected = MappingException.class) + @Test public void rejectsInvalidFieldReference() { - getContext(Foo.class).getReference("foo"); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> getContext(Foo.class).getReference("foo")); } - /** - * @see DATAMONGO-741 - */ - @Test + @Test // DATAMONGO-741 public void returnsReferencesToNestedFieldsCorrectly() { AggregationOperationContext context = getContext(Foo.class); - Field field = Fields.field("bar.name"); + Field field = field("bar.name"); - assertThat(context.getReference("bar.name"), is(notNullValue())); - assertThat(context.getReference(field), is(notNullValue())); - assertThat(context.getReference(field), is(context.getReference("bar.name"))); + assertThat(context.getReference("bar.name")).isNotNull(); + assertThat(context.getReference(field)).isNotNull(); + assertThat(context.getReference(field)).isEqualTo(context.getReference("bar.name")); } - /** - * @see DATAMONGO-806 - */ - @Test + @Test // DATAMONGO-806 public void aliasesIdFieldCorrectly() { AggregationOperationContext context = getContext(Foo.class); - assertThat(context.getReference("id"), is(new FieldReference(new ExposedField(Fields.field("id", "_id"), true)))); + assertThat(context.getReference("id")) + .isEqualTo(new DirectFieldReference(new ExposedField(field("id", "_id"), true))); } - /** - * @see DATAMONGO-912 - */ - @Test + @Test // DATAMONGO-912 public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInFirstStage() { CustomConversions customConversions = customAgeConversions(); @@ -121,16 +117,13 @@ public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInFirstStag MatchOperation matchStage = match(Criteria.where("age").is(new Age(10))); ProjectionOperation projectStage = project("age", "name"); - DBObject agg = newAggregation(matchStage, projectStage).toDbObject("test", context); + org.bson.Document agg = newAggregation(matchStage, projectStage).toDocument("test", context); - DBObject age = getValue((DBObject) getValue(getPipelineElementFromAggregationAt(agg, 0), "$match"), "age"); - assertThat(age, is((DBObject) new BasicDBObject("v", 10))); + org.bson.Document age = getValue(getValue(getPipelineElementFromAggregationAt(agg, 0), "$match"), "age"); + assertThat(age).isEqualTo(new Document("v", 10)); } - /** - * @see DATAMONGO-912 - */ - @Test + @Test // DATAMONGO-912 public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInLaterStage() { CustomConversions customConversions = customAgeConversions(); @@ -142,93 +135,122 @@ public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInLaterStag MatchOperation matchStage = match(Criteria.where("age").is(new Age(10))); ProjectionOperation projectStage = project("age", "name"); - DBObject agg = newAggregation(projectStage, matchStage).toDbObject("test", 
context); + org.bson.Document agg = newAggregation(projectStage, matchStage).toDocument("test", context); - DBObject age = getValue((DBObject) getValue(getPipelineElementFromAggregationAt(agg, 1), "$match"), "age"); - assertThat(age, is((DBObject) new BasicDBObject("v", 10))); + org.bson.Document age = getValue(getValue(getPipelineElementFromAggregationAt(agg, 1), "$match"), "age"); + assertThat(age).isEqualTo(new Document("v", 10)); } - /** - * @see DATAMONGO-960 - */ - @Test + @Test // DATAMONGO-960 public void rendersAggregationOptionsInTypedAggregationContextCorrectly() { AggregationOperationContext context = getContext(FooPerson.class); TypedAggregation agg = newAggregation(FooPerson.class, project("name", "age")) // .withOptions( - newAggregationOptions().allowDiskUse(true).explain(true).cursor(new BasicDBObject("foo", 1)).build()); + newAggregationOptions().allowDiskUse(true).explain(true).cursor(new org.bson.Document("foo", 1)).build()); - DBObject dbo = agg.toDbObject("person", context); + org.bson.Document document = agg.toDocument("person", context); - DBObject projection = getPipelineElementFromAggregationAt(dbo, 0); - assertThat(projection.containsField("$project"), is(true)); + org.bson.Document projection = getPipelineElementFromAggregationAt(document, 0); + assertThat(projection.containsKey("$project")).isTrue(); - assertThat(projection.get("$project"), is((Object) new BasicDBObject("name", 1).append("age", 1))); + assertThat(projection.get("$project")).isEqualTo(new Document("name", 1).append("age", 1)); - assertThat(dbo.get("allowDiskUse"), is((Object) true)); - assertThat(dbo.get("explain"), is((Object) true)); - assertThat(dbo.get("cursor"), is((Object) new BasicDBObject("foo", 1))); + assertThat(document.get("allowDiskUse")).isEqualTo(true); + assertThat(document.get("explain")).isEqualTo(true); + assertThat(document.get("cursor")).isEqualTo(new Document("foo", 1)); } - /** - * @see DATAMONGO-1133 - */ - @Test + @Test // DATAMONGO-1585 + public void rendersSortOfProjectedFieldCorrectly() { + + TypeBasedAggregationOperationContext context = getContext(MeterData.class); + TypedAggregation agg = newAggregation(MeterData.class, project().and("counterName").as("counter"), // + sort(Direction.ASC, "counter")); + + Document dbo = agg.toDocument("meterData", context); + Document sort = getPipelineElementFromAggregationAt(dbo, 1); + + Document definition = (Document) sort.get("$sort"); + assertThat(definition.get("counter")).isEqualTo(1); + } + + @Test // DATAMONGO-1586 + public void rendersFieldAliasingProjectionCorrectly() { + + AggregationOperationContext context = getContext(FooPerson.class); + TypedAggregation agg = newAggregation(FooPerson.class, project() // + .and("name").as("person_name") // + .and("age.value").as("age")); + + Document dbo = agg.toDocument("person", context); + + Document projection = getPipelineElementFromAggregationAt(dbo, 0); + assertThat(getAsDocument(projection, "$project")).containsEntry("person_name", "$name") // + .containsEntry("age", "$age.value"); + } + + @Test // DATAMONGO-1893 + public void considersIncludedFieldsFromSingleExclusionsCorrectly() { + + AggregationOperationContext context = getContext(FooPerson.class); + TypedAggregation agg = newAggregation(FooPerson.class, project() // + .andExclude("name"), sort(Sort.by("age.value", "lastName"))); + + Document dbo = agg.toDocument("person", context); + + Document sort = getPipelineElementFromAggregationAt(dbo, 1); + assertThat(getAsDocument(sort, "$sort")).isEqualTo(new 
Document("age.value", 1).append("last_name", 1)); + } + + @Test // DATAMONGO-1133 public void shouldHonorAliasedFieldsInGroupExpressions() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); TypedAggregation agg = newAggregation(MeterData.class, group("counterName").sum("counterVolume").as("totalCounterVolume")); - DBObject dbo = agg.toDbObject("meterData", context); - DBObject group = getPipelineElementFromAggregationAt(dbo, 0); + org.bson.Document document = agg.toDocument("meterData", context); + org.bson.Document group = getPipelineElementFromAggregationAt(document, 0); - DBObject definition = (DBObject) group.get("$group"); + org.bson.Document definition = (org.bson.Document) group.get("$group"); - assertThat(definition.get("_id"), is(equalTo((Object) "$counter_name"))); + assertThat(definition.get("_id")).isEqualTo("$counter_name"); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326, DATAMONGO-1585 public void lookupShouldInheritFieldsFromInheritingAggregationOperation() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); TypedAggregation agg = newAggregation(MeterData.class, - lookup("OtherCollection", "resourceId", "otherId", "lookup"), sort(Direction.ASC, "resourceId")); + lookup("OtherCollection", "resourceId", "otherId", "lookup"), // + sort(Direction.ASC, "resourceId", "counterName")); - DBObject dbo = agg.toDbObject("meterData", context); - DBObject sort = getPipelineElementFromAggregationAt(dbo, 1); + org.bson.Document document = agg.toDocument("meterData", context); + org.bson.Document sort = getPipelineElementFromAggregationAt(document, 1); - DBObject definition = (DBObject) sort.get("$sort"); + org.bson.Document definition = (org.bson.Document) sort.get("$sort"); - assertThat(definition.get("resourceId"), is(equalTo((Object) 1))); + assertThat(definition.get("resourceId")).isEqualTo(1); + assertThat(definition.get("counter_name")).isEqualTo(1); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void groupLookupShouldInheritFieldsFromPreviousAggregationOperation() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); TypedAggregation agg = newAggregation(MeterData.class, group().min("resourceId").as("foreignKey"), lookup("OtherCollection", "foreignKey", "otherId", "lookup"), sort(Direction.ASC, "foreignKey")); - DBObject dbo = agg.toDbObject("meterData", context); - DBObject sort = getPipelineElementFromAggregationAt(dbo, 2); + org.bson.Document document = agg.toDocument("meterData", context); + org.bson.Document sort = getPipelineElementFromAggregationAt(document, 2); - DBObject definition = (DBObject) sort.get("$sort"); + org.bson.Document definition = (org.bson.Document) sort.get("$sort"); - assertThat(definition.get("foreignKey"), is(equalTo((Object) 1))); + assertThat(definition.get("foreignKey")).isEqualTo(1); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void lookupGroupAggregationShouldUseCorrectGroupField() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); @@ -236,19 +258,16 @@ public void lookupGroupAggregationShouldUseCorrectGroupField() { lookup("OtherCollection", "resourceId", "otherId", "lookup"), group().min("lookup.otherkey").as("something_totally_different")); - DBObject dbo = agg.toDbObject("meterData", context); - DBObject group = getPipelineElementFromAggregationAt(dbo, 1); + org.bson.Document document = agg.toDocument("meterData", context); + org.bson.Document group = 
getPipelineElementFromAggregationAt(document, 1); - DBObject definition = (DBObject) group.get("$group"); - DBObject field = (DBObject) definition.get("something_totally_different"); + org.bson.Document definition = (org.bson.Document) group.get("$group"); + org.bson.Document field = (org.bson.Document) definition.get("something_totally_different"); - assertThat(field.get("$min"), is(equalTo((Object) "$lookup.otherkey"))); + assertThat(field.get("$min")).isEqualTo("$lookup.otherkey"); } - /** - * @see DATAMONGO-1326 - */ - @Test + @Test // DATAMONGO-1326 public void lookupGroupAggregationShouldOverwriteExposedFields() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); @@ -257,18 +276,15 @@ public void lookupGroupAggregationShouldOverwriteExposedFields() { group().min("lookup.otherkey").as("something_totally_different"), sort(Direction.ASC, "something_totally_different")); - DBObject dbo = agg.toDbObject("meterData", context); - DBObject sort = getPipelineElementFromAggregationAt(dbo, 2); + org.bson.Document document = agg.toDocument("meterData", context); + org.bson.Document sort = getPipelineElementFromAggregationAt(document, 2); - DBObject definition = (DBObject) sort.get("$sort"); + org.bson.Document definition = (org.bson.Document) sort.get("$sort"); - assertThat(definition.get("something_totally_different"), is(equalTo((Object) 1))); + assertThat(definition.get("something_totally_different")).isEqualTo(1); } - /** - * @see DATAMONGO-1326 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1326 public void lookupGroupAggregationShouldFailInvalidFieldReference() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); @@ -276,20 +292,206 @@ public void lookupGroupAggregationShouldFailInvalidFieldReference() { lookup("OtherCollection", "resourceId", "otherId", "lookup"), group().min("lookup.otherkey").as("something_totally_different"), sort(Direction.ASC, "resourceId")); - agg.toDbObject("meterData", context); + assertThatIllegalArgumentException().isThrownBy(() -> agg.toDocument("meterData", context)); + } + + @Test // DATAMONGO-861 + public void rendersAggregationConditionalInTypedAggregationContextCorrectly() { + + AggregationOperationContext context = getContext(FooPerson.class); + TypedAggregation agg = newAggregation(FooPerson.class, project("name") // + .and("age") // + .applyCondition( + ConditionalOperators.when(Criteria.where("age.value").lt(10)).then(new Age(0)).otherwiseValueOf("age")) // + ); + + Document document = agg.toDocument("person", context); + + Document projection = getPipelineElementFromAggregationAt(document, 0); + assertThat(projection.containsKey("$project")).isTrue(); + + Document project = getValue(projection, "$project"); + Document age = getValue(project, "age"); + + assertThat((Document) getValue(age, "$cond")).containsEntry("then.value", 0); + assertThat((Document) getValue(age, "$cond")).containsEntry("then._class", Age.class.getName()); + assertThat((Document) getValue(age, "$cond")).containsEntry("else", "$age"); } - @Document(collection = "person") + /** + * @see AggregationUnitTests + */ + @Test // DATAMONGO-861, DATAMONGO-1542 + public void rendersAggregationIfNullInTypedAggregationContextCorrectly() { + + AggregationOperationContext context = getContext(FooPerson.class); + TypedAggregation agg = newAggregation(FooPerson.class, project("name") // + .and("age") // + .applyCondition(ConditionalOperators.ifNull("age").then(new Age(0))) // + ); + + Document document =
agg.toDocument("person", context); + + Document projection = getPipelineElementFromAggregationAt(document, 0); + assertThat(projection.containsKey("$project")).isTrue(); + + Document project = getValue(projection, "$project"); + Document age = getValue(project, "age"); + + assertThat(age).isEqualTo(Document.parse( + "{ $ifNull: [ \"$age\", { \"_class\":\"org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContextUnitTests$Age\", \"value\": 0} ] }")); + + assertThat(age).containsEntry("$ifNull.[0]", "$age"); + assertThat(age).containsEntry("$ifNull.[1].value", 0); + assertThat(age).containsEntry("$ifNull.[1]._class", Age.class.getName()); + } + + @Test // DATAMONGO-1756 + public void projectOperationShouldRenderNestedFieldNamesCorrectlyForTypedAggregation() { + + AggregationOperationContext context = getContext(Wrapper.class); + + Document agg = newAggregation(Wrapper.class, project().and("nested1.value1").plus("nested2.value2").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", new Document("$add", Arrays.asList("$nested1.value1", "$field2.nestedValue2")))); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnUnwrappableTypeFieldCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, project().and("unwrappedValue.stringValue").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, project().and("unwrappedValue.atFieldAnnotatedValue").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$with-at-field-annotation")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnPrefixedUnwrappedFieldCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, project().and("prefixedUnwrappedValue.stringValue").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$prefix-stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnPrefixedUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, + project().and("prefixedUnwrappedValue.atFieldAnnotatedValue").as("val")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$prefix-with-at-field-annotation")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedUnwrappedFieldCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.unwrappedValue.stringValue").as("val")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", 
"$withUnwrapped.stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.unwrappedValue.atFieldAnnotatedValue").as("val")).toDocument("collection", + context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.with-at-field-annotation")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedPrefixedUnwrappedFieldCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.prefixedUnwrappedValue.stringValue").as("val")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.prefix-stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedPrefixedUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.prefixedUnwrappedValue.atFieldAnnotatedValue").as("val")).toDocument("collection", + context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.prefix-with-at-field-annotation")); + } + + @Test // GH-4070 + void rendersLocalVariables() { + + AggregationOperationContext context = getContext(WithLists.class); + + Document agg = newAggregation(WithLists.class, + project() + .and(Reduce.arrayOf("listOfListOfString").withInitialValue(field("listOfString")) + .reduce(SetUnion.arrayAsSet(Variable.VALUE.getTarget()).union(Variable.THIS.getTarget()))) + .as("listOfString")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")).isEqualTo(Document.parse(""" + { + "listOfString" : { + "$reduce" : { + "in" : { "$setUnion" : ["$$value", "$$this"] }, + "initialValue" : "$listOfString", + "input" : "$listOfListOfString" + } + } + } + """)); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "person") public static class FooPerson { final ObjectId id; final String name; + @org.springframework.data.mongodb.core.mapping.Field("last_name") final String lastName; final Age age; - @PersistenceConstructor - FooPerson(ObjectId id, String name, Age age) { + public FooPerson(ObjectId id, String name, String lastName, Age age) { this.id = id; this.name = name; + this.lastName = lastName; this.age = age; } } @@ -304,34 +506,34 @@ public static class Age { } public CustomConversions customAgeConversions() { - return new CustomConversions(Arrays.> asList(ageWriteConverter(), ageReadConverter())); + return new MongoCustomConversions(Arrays.asList(ageWriteConverter(), ageReadConverter())); } - Converter ageWriteConverter() { - return new Converter() { + Converter ageWriteConverter() { + return new Converter() { @Override - public DBObject convert(Age age) { - return new BasicDBObject("v", age.value); + public org.bson.Document convert(Age age) { + return new org.bson.Document("v", age.value); } }; } - Converter ageReadConverter() { - return new Converter() { + Converter ageReadConverter() { + return 
new Converter() { @Override - public Age convert(DBObject dbObject) { - return new Age(((Integer) dbObject.get("v"))); + public Age convert(org.bson.Document document) { + return new Age(((Integer) document.get("v"))); } }; } @SuppressWarnings("unchecked") - static DBObject getPipelineElementFromAggregationAt(DBObject agg, int index) { - return ((List) agg.get("pipeline")).get(index); + static org.bson.Document getPipelineElementFromAggregationAt(org.bson.Document agg, int index) { + return ((List) agg.get("pipeline")).get(index); } @SuppressWarnings("unchecked") - static T getValue(DBObject o, String key) { + static T getValue(org.bson.Document o, String key) { return (T) o.get(key); } @@ -349,4 +551,42 @@ static class Bar { String name; } + + static class Wrapper { + + Nested nested1; + @org.springframework.data.mongodb.core.mapping.Field("field2") Nested nested2; + } + + static class Nested { + String value1; + @org.springframework.data.mongodb.core.mapping.Field("nestedValue2") String value2; + } + + static class WrapperAroundWithUnwrapped { + + String id; + WithUnwrapped withUnwrapped; + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + @Unwrapped.Nullable("prefix-") UnwrappableType prefixedUnwrappedValue; + } + + static class UnwrappableType { + + String stringValue; + + @org.springframework.data.mongodb.core.mapping.Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + } + + static class WithLists { + public List listOfString; + public List> listOfListOfString; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperationUnitTests.java new file mode 100644 index 0000000000..e47fea289e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperationUnitTests.java @@ -0,0 +1,130 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link UnionWithOperation}. 
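+ * <p> + * For orientation, the stage rendered by these tests has the shape + * {@code { $unionWith : { coll : "coll-1", pipeline : [ { $project : { region : 1 } } ] } }}; + * the optional pipeline is mapped against the type given via {@code mapFieldsTo}. Collection and + * field names here are test fixtures, not API requirements.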
+ * + * @author Christoph Strobl + */ +class UnionWithOperationUnitTests { + + @Test // DATAMONGO-2622 + void throwsErrorWhenNoCollectionPresent() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> UnionWithOperation.unionWith(null)); + } + + @Test // DATAMONGO-2622 + void rendersJustCollectionCorrectly() { + + assertThat(UnionWithOperation.unionWith("coll-1").toPipelineStages(contextFor(Warehouse.class))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1"))); + } + + @Test // DATAMONGO-2622 + void rendersPipelineCorrectly() { + + assertThat(UnionWithOperation.unionWith("coll-1").mapFieldsTo(Warehouse.class) + .pipeline(Aggregation.project().and("location").as("region")).toPipelineStages(contextFor(Warehouse.class))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("region", 1)))))); + } + + @Test // DATAMONGO-2622 + void rendersPipelineCorrectlyForDifferentDomainType() { + + assertThat(UnionWithOperation.unionWith("coll-1").pipeline(Aggregation.project().and("name").as("name")) + .mapFieldsTo(Supplier.class).toPipelineStages(contextFor(Warehouse.class))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("name", "$supplier")))))); + } + + @Test // DATAMONGO-2622 + void rendersPipelineCorrectlyForUntypedContext() { + + assertThat(UnionWithOperation.unionWith("coll-1").pipeline(Aggregation.project("region")) + .toPipelineStages(contextFor(null))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("region", 1)))))); + } + + @Test // DATAMONGO-2622 + void doesNotMapAgainstFieldsFromAPreviousStage() { + + TypedAggregation agg = TypedAggregation.newAggregation(Supplier.class, + Aggregation.project().and("name").as("supplier"), + UnionWithOperation.unionWith("coll-1").pipeline(Aggregation.project().and("name").as("name"))); + + List pipeline = agg.toPipeline(contextFor(Supplier.class)); + assertThat(pipeline).containsExactly(new Document("$project", new Document("supplier", 1)), // + new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("name", 1)))))); + } + + @Test // DATAMONGO-2622 + void mapAgainstUnionWithDomainTypeEvenWhenInsideTypedAggregation() { + + TypedAggregation agg = TypedAggregation.newAggregation(Supplier.class, + Aggregation.project().and("name").as("supplier"), UnionWithOperation.unionWith("coll-1") + .mapFieldsTo(Warehouse.class).pipeline(Aggregation.project().and("location").as("location"))); + + List pipeline = agg.toPipeline(contextFor(Supplier.class)); + assertThat(pipeline).containsExactly(new Document("$project", new Document("supplier", 1)), // + new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("location", "$region")))))); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + 
+ static class Warehouse { + + String name; + @Field("region") String location; + String state; + } + + static class Supplier { + + @Field("supplier") String name; + String state; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnsetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnsetOperationUnitTests.java new file mode 100644 index 0000000000..2f081cc9fc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnsetOperationUnitTests.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link UnsetOperation}. 
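+ * <p> + * A quick sketch of the renderings asserted below: a single property yields {@code { $unset : "title" }}, + * several properties yield {@code { $unset : ["title", "author.first", "copies.warehouse"] }}, with + * property names translated to their {@code @Field} aliases.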
+ * + * @author Christoph Strobl + */ +public class UnsetOperationUnitTests { + + @Test // DATAMONGO-2331 + public void raisesErrorOnNullField() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new UnsetOperation(null)); + } + + @Test // DATAMONGO-2331 + public void rendersSingleFieldReferenceCorrectly() { + + assertThat(new UnsetOperation(Collections.singletonList("title")).toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : \"title\" }")); + } + + @Test // DATAMONGO-2331 + public void rendersSingleMappedFieldReferenceCorrectly() { + + assertThat(new UnsetOperation(Collections.singletonList("stock")).toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : \"copies\" }")); + } + + @Test // DATAMONGO-2331 + public void rendersSingleNestedMappedFieldReferenceCorrectly() { + + assertThat( + new UnsetOperation(Collections.singletonList("author.firstname")).toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : \"author.first\"}")); + } + + @Test // DATAMONGO-2331 + public void rendersMultipleFieldReferencesCorrectly() { + + assertThat(new UnsetOperation(Arrays.asList("title", "author.firstname", "stock.location")) + .toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : [\"title\", \"author.first\", \"copies.warehouse\"] }")); + } + + @Test // DATAMONGO-2331 + public void exposesFieldsCorrectly() { + assertThat(UnsetOperation.unset("title").and("isbn").getFields()).isEqualTo(ExposedFields.from()); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class Book { + + @Id Integer id; + String title; + String isbn; + Author author; + @Field("copies") Collection stock; + } + + static class Author { + + @Field("first") String firstname; + @Field("last") String lastname; + } + + static class Warehouse { + + @Field("warehouse") String location; + Integer qty; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java new file mode 100644 index 0000000000..8d46363c2d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java @@ -0,0 +1,111 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.DocumentTestUtils; + +/** + * Unit tests for {@link UnwindOperation}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +public class UnwindOperationUnitTests { + + @Test // DATAMONGO-1391 + public void unwindWithPathOnlyShouldUsePreMongo32Syntax() { + + UnwindOperation unwindOperation = Aggregation.unwind("a"); + + Document pipeline = unwindOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(pipeline).containsEntry("$unwind", "$a"); + } + + @Test // DATAMONGO-1391 + public void unwindWithArrayIndexShouldUseMongo32Syntax() { + + UnwindOperation unwindOperation = Aggregation.unwind("a", "index"); + + Document unwindClause = extractDocumentFromUnwindOperation(unwindOperation); + + assertThat(unwindClause).containsEntry("path", "$a").// + containsEntry("preserveNullAndEmptyArrays", false).// + containsEntry("includeArrayIndex", "index"); + } + + @Test // DATAMONGO-1391 + public void unwindWithArrayIndexShouldExposeArrayIndex() { + + UnwindOperation unwindOperation = Aggregation.unwind("a", "index"); + + assertThat(unwindOperation.getFields().getField("index")).isNotNull(); + } + + @Test // DATAMONGO-1391 + public void plainUnwindShouldNotExposeIndex() { + + UnwindOperation unwindOperation = Aggregation.unwind("a"); + + assertThat(unwindOperation.getFields().exposesNoFields()).isTrue(); + } + + @Test // DATAMONGO-1391 + public void unwindWithPreserveNullShouldUseMongo32Syntax() { + + UnwindOperation unwindOperation = Aggregation.unwind("a", true); + + Document unwindClause = extractDocumentFromUnwindOperation(unwindOperation); + + assertThat(unwindClause).containsEntry("path", "$a").// + containsEntry("preserveNullAndEmptyArrays", true).// + doesNotContainKey("includeArrayIndex"); + } + + @Test // DATAMONGO-1391 + public void lookupBuilderBuildsCorrectClause() { + + UnwindOperation unwindOperation = UnwindOperation.newUnwind().path("$foo").noArrayIndex().skipNullAndEmptyArrays(); + Document pipeline = unwindOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(pipeline).containsEntry("$unwind", "$foo"); + } + + @Test // DATAMONGO-1391 + public void lookupBuilderBuildsCorrectClauseForMongo32() { + + UnwindOperation unwindOperation = UnwindOperation.newUnwind().path("$foo").arrayIndex("myindex") + .preserveNullAndEmptyArrays(); + + Document unwindClause = extractDocumentFromUnwindOperation(unwindOperation); + + assertThat(unwindClause).containsEntry("path", "$foo").// + containsEntry("preserveNullAndEmptyArrays", true).// + containsEntry("includeArrayIndex", "myindex"); + } + + private Document extractDocumentFromUnwindOperation(UnwindOperation unwindOperation) { + + Document document = unwindOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document unwindClause = DocumentTestUtils.getAsDocument(document, "$unwind"); + return unwindClause; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java index 8398ed29d8..3eb7f4f884 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java @@ -1,11 +1,11 
@@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,12 @@ import java.util.Arrays; import java.util.Date; import java.util.HashSet; +import java.util.Objects; import java.util.Set; /** * @author Thomas Darimont + * @author Christoph Strobl */ public class UserWithLikes { @@ -29,9 +31,57 @@ public class UserWithLikes { Date joined; Set likes = new HashSet(); + public UserWithLikes() {} + public UserWithLikes(String id, Date joined, String... likes) { + this.id = id; this.joined = joined; this.likes = new HashSet(Arrays.asList(likes)); } + + public String getId() { + return this.id; + } + + public Date getJoined() { + return this.joined; + } + + public Set getLikes() { + return this.likes; + } + + public void setId(String id) { + this.id = id; + } + + public void setJoined(Date joined) { + this.joined = joined; + } + + public void setLikes(Set likes) { + this.likes = likes; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UserWithLikes that = (UserWithLikes) o; + return Objects.equals(id, that.id) && Objects.equals(joined, that.joined) && Objects.equals(likes, that.likes); + } + + @Override + public int hashCode() { + return Objects.hash(id, joined, likes); + } + + public String toString() { + return "UserWithLikes(id=" + this.getId() + ", joined=" + this.getJoined() + ", likes=" + this.getLikes() + ")"; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperationUnitTests.java new file mode 100644 index 0000000000..4ce045fe6f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperationUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.aggregation.VectorSearchOperation.SearchType; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; + +/** + * Unit tests for {@link VectorSearchOperation}. + * + * @author Christoph Strobl + */ +class VectorSearchOperationUnitTests { + + static final Document $VECTOR_SEARCH = Document.parse( + "{'index' : 'vector_index', 'limit' : 10, 'path' : 'plot_embedding', 'queryVector' : [-0.0016261312, -0.028070757, -0.011342932]}"); + static final VectorSearchOperation SEARCH_OPERATION = VectorSearchOperation.search("vector_index") + .path("plot_embedding").vector(-0.0016261312, -0.028070757, -0.011342932).limit(10); + + @Test // GH-4706 + void requiredArgs() { + + List stages = SEARCH_OPERATION.toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH)); + } + + @Test // GH-4706 + void optionalArgs() { + + VectorSearchOperation $search = SEARCH_OPERATION.numCandidates(150).searchType(SearchType.ENN) + .filter(new Criteria().andOperator(Criteria.where("year").gt(1955), Criteria.where("year").lt(1975))); + + List stages = $search.toPipelineStages(Aggregation.DEFAULT_CONTEXT); + + Document filter = new Document("$and", + List.of(new Document("year", new Document("$gt", 1955)), new Document("year", new Document("$lt", 1975)))); + assertThat(stages).containsExactly(new Document("$vectorSearch", + new Document($VECTOR_SEARCH).append("exact", true).append("filter", filter).append("numCandidates", 150))); + } + + @Test // GH-4706 + void withScore() { + + List stages = SEARCH_OPERATION.withSearchScore().toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH), + new Document("$addFields", new Document("score", new Document("$meta", "vectorSearchScore")))); + } + + @Test // GH-4706 + void withScoreFilter() { + + List stages = SEARCH_OPERATION.withFilterBySore(score -> score.gt(50)) + .toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH), + new Document("$addFields", new Document("score", new Document("$meta", "vectorSearchScore"))), + new Document("$match", new Document("score", new Document("$gt", 50)))); + } + + @Test // GH-4706 + void withScoreFilterOnCustomFieldName() { + + List stages = SEARCH_OPERATION.withFilterBySore(score -> score.gt(50)).withSearchScore("s-c-o-r-e") + .toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH), + new Document("$addFields", new Document("s-c-o-r-e", new Document("$meta", "vectorSearchScore"))), + new Document("$match", new Document("s-c-o-r-e", new Document("$gt", 50)))); + } + + @Test // GH-4706 + void mapsCriteriaToDomainType() { + + VectorSearchOperation $search = SEARCH_OPERATION + .filter(new Criteria().andOperator(Criteria.where("y").gt(1955), Criteria.where("y").lt(1975))); + + List stages = $search.toPipelineStages(TestAggregationContext.contextFor(Movie.class)); + + Document filter = new Document("$and", + List.of(new 
Document("year", new Document("$gt", 1955)), new Document("year", new Document("$lt", 1975)))); + assertThat(stages) + .containsExactly(new Document("$vectorSearch", new Document($VECTOR_SEARCH).append("filter", filter))); + } + + static class Movie { + + @Id String id; + String title; + + @Field("year") String y; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchTests.java new file mode 100644 index 0000000000..18991c1768 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchTests.java @@ -0,0 +1,242 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import org.assertj.core.api.InstanceOfAssertFactories; +import org.bson.BinaryVector; +import org.bson.Document; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.aggregation.VectorSearchOperation.SearchType; +import org.springframework.data.mongodb.core.index.VectorIndex; +import org.springframework.data.mongodb.core.index.VectorIndex.SimilarityFunction; +import org.springframework.data.mongodb.core.mapping.MongoVector; +import org.springframework.data.mongodb.test.util.AtlasContainer; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; + +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Integration tests using Vector Search and Vector Indexes through local MongoDB Atlas. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@Testcontainers(disabledWithoutDocker = true) +public class VectorSearchTests { + + private static final String SCORE_FIELD = "vector-search-tests"; + private static final @Container AtlasContainer atlasLocal = AtlasContainer.bestMatch(); + private static final String COLLECTION_NAME = "collection-1"; + + static MongoClient client; + static MongoTestTemplate template; + + @BeforeAll + static void beforeAll() throws InterruptedException { + + client = MongoClients.create(atlasLocal.getConnectionString()); + template = new MongoTestTemplate(client, SCORE_FIELD); + + Thread.sleep(250); // just wait a little or the index will be broken + + initDocuments(); + initIndexes(); + } + + @AfterAll + static void afterAll() { + template.dropCollection(WithVectorFields.class); + } + + @ParameterizedTest // GH-4706 + @MethodSource("vectorAggregations") + void searchUsingArraysAddingScore(VectorSearchOperation searchOperation) { + + VectorSearchOperation $search = searchOperation.withSearchScore(SCORE_FIELD); + + AggregationResults results = template.aggregate(Aggregation.newAggregation($search), + WithVectorFields.class, Document.class); + + assertThat(results).hasSize(10); + assertScoreIsDecreasing(results); + assertThat(results.iterator().next()).containsKey(SCORE_FIELD) + .extracting(it -> it.get(SCORE_FIELD), InstanceOfAssertFactories.DOUBLE).isEqualByComparingTo(1D); + } + + @ParameterizedTest // GH-4706 + @MethodSource("binaryVectorAggregations") + void searchUsingBinaryVectorAddingScore(VectorSearchOperation searchOperation) { + + VectorSearchOperation $search = searchOperation.withSearchScore(SCORE_FIELD); + + AggregationResults results = template.aggregate(Aggregation.newAggregation($search), + WithVectorFields.class, Document.class); + + assertThat(results).hasSize(10); + assertScoreIsDecreasing(results); + assertThat(results.iterator().next()).containsKey(SCORE_FIELD) + .extracting(it -> it.get(SCORE_FIELD), InstanceOfAssertFactories.DOUBLE).isEqualByComparingTo(1D); + } + + private static Stream binaryVectorAggregations() { + + return Stream.of(// + Arguments.arguments(VectorSearchOperation.search("raw-index").path("rawInt8vector") // + .vector(new byte[] { 0, 1, 2, 3, 4 }) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("wrapper-index").path("int8vector") // + .vector(BinaryVector.int8Vector(new byte[] { 0, 1, 2, 3, 4 })) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("wrapper-index").path("float32vector") // + .vector(BinaryVector.floatVector(new float[] { 0.0001f, 1.12345f, 2.23456f, 3.34567f, 4.45678f })) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN))); + } + + private static Stream vectorAggregations() { + + return Stream.of(// + Arguments.arguments(VectorSearchOperation.search("raw-index").path("rawFloat32vector") // + .vector(0.0001f, 1.12345f, 2.23456f, 3.34567f, 4.45678f) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("raw-index").path("rawFloat64vector") // + .vector(1.0001d, 2.12345d, 3.23456d, 4.34567d, 5.45678d) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("wrapper-index").path("float64vector") // + .vector(Vector.of(1.0001d, 2.12345d, 3.23456d, 4.34567d, 5.45678d)) // + .limit(10)// + 
.numCandidates(20) // + .searchType(SearchType.ANN))); + } + + static void initDocuments() { + IntStream.range(0, 10).mapToObj(WithVectorFields::instance).forEach(template::save); + } + + static void initIndexes() { + + VectorIndex rawIndex = new VectorIndex("raw-index") + .addVector("rawInt8vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("rawFloat32vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("rawFloat64vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addFilter("justSomeArgument"); + + VectorIndex wrapperIndex = new VectorIndex("wrapper-index") + .addVector("int8vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("float32vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("float64vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addFilter("justSomeArgument"); + + template.searchIndexOps(WithVectorFields.class).createIndex(rawIndex); + template.searchIndexOps(WithVectorFields.class).createIndex(wrapperIndex); + + template.awaitIndexCreation(WithVectorFields.class, rawIndex.getName()); + template.awaitIndexCreation(WithVectorFields.class, wrapperIndex.getName()); + } + + private static void assertScoreIsDecreasing(Iterable documents) { + + double previousScore = Integer.MAX_VALUE; + for (Document document : documents) { + + Double vectorSearchScore = document.getDouble(SCORE_FIELD); + assertThat(vectorSearchScore).isGreaterThan(0D); + assertThat(vectorSearchScore).isLessThan(previousScore); + previousScore = vectorSearchScore; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(COLLECTION_NAME) + static class WithVectorFields { + + String id; + + Vector int8vector; + Vector float32vector; + Vector float64vector; + + BinaryVector rawInt8vector; + float[] rawFloat32vector; + double[] rawFloat64vector; + + int justSomeArgument; + + static WithVectorFields instance(int offset) { + + WithVectorFields instance = new WithVectorFields(); + instance.id = "id-%s".formatted(offset); + instance.rawFloat32vector = new float[5]; + instance.rawFloat64vector = new double[5]; + + byte[] int8 = new byte[5]; + for (int i = 0; i < 5; i++) { + + int v = i + offset; + int8[i] = (byte) v; + } + instance.rawInt8vector = BinaryVector.int8Vector(int8); + + if (offset == 0) { + instance.rawFloat32vector[0] = 0.0001f; + instance.rawFloat64vector[0] = 0.0001d; + } else { + instance.rawFloat32vector[0] = Float.parseFloat("%s.000%s".formatted(offset, offset)); + instance.rawFloat64vector[0] = Double.parseDouble("%s.000%s".formatted(offset, offset)); + } + instance.rawFloat32vector[1] = Float.parseFloat("%s.12345".formatted(offset + 1)); + instance.rawFloat64vector[1] = Double.parseDouble("%s.12345".formatted(offset + 1)); + instance.rawFloat32vector[2] = Float.parseFloat("%s.23456".formatted(offset + 2)); + instance.rawFloat64vector[2] = Double.parseDouble("%s.23456".formatted(offset + 2)); + instance.rawFloat32vector[3] = Float.parseFloat("%s.34567".formatted(offset + 3)); + instance.rawFloat64vector[3] = Double.parseDouble("%s.34567".formatted(offset + 3)); + instance.rawFloat32vector[4] = Float.parseFloat("%s.45678".formatted(offset + 4)); + instance.rawFloat64vector[4] = Double.parseDouble("%s.45678".formatted(offset + 4)); + + instance.justSomeArgument = offset; + + instance.int8vector = MongoVector.of(instance.rawInt8vector); + instance.float32vector = 
MongoVector.of(BinaryVector.floatVector(instance.rawFloat32vector)); + instance.float64vector = Vector.of(instance.rawFloat64vector); + + return instance; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java index 005085d25a..7cf01122f2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java @@ -7,8 +7,8 @@ /** * Data model from mongodb reference data set * - * @see http://docs.mongodb.org/manual/tutorial/aggregation-examples/ - * @see http://media.mongodb.org/zips.json + * @see <a href="https://docs.mongodb.org/manual/tutorial/aggregation-examples/">Aggregation Examples</a> + * @see <a href="https://media.mongodb.org/zips.json">zips.json</a> + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + return new HashSet<>(Arrays.asList(ImmutableAuditableEntityWithVersion.class, KAuditableVersionedEntity.class)); + } + } + + @Autowired MongoTemplate template; + + @Test // DATAMONGO-2346 + public void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedEntityOnSave() throws InterruptedException { + + template.remove(new Query(), ImmutableAuditableEntityWithVersion.class); + + ImmutableAuditableEntityWithVersion entity = new ImmutableAuditableEntityWithVersion("id-1", "value", null, null); + ImmutableAuditableEntityWithVersion inserted = template.save(entity); + + TimeUnit.MILLISECONDS.sleep(500); + + ImmutableAuditableEntityWithVersion modified = inserted.withValue("changed-value"); + ImmutableAuditableEntityWithVersion updated = template.save(modified); + + ImmutableAuditableEntityWithVersion fetched = template.findOne(Query.query(Criteria.where("id").is(entity.id)), + ImmutableAuditableEntityWithVersion.class); + + assertThat(updated.modificationDate).isAfter(inserted.modificationDate); + assertThat(fetched.modificationDate).isAfter(inserted.modificationDate); + assertThat(fetched.modificationDate).isEqualTo(updated.modificationDate.truncatedTo(ChronoUnit.MILLIS)); + } + + @Test // DATAMONGO-2346 + public void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedKotlinEntityOnSave() + throws InterruptedException { + + template.remove(new Query(), KAuditableVersionedEntity.class); + + KAuditableVersionedEntity entity = new KAuditableVersionedEntity("kId-1", "value", null, null); + KAuditableVersionedEntity inserted = template.save(entity); + + TimeUnit.MILLISECONDS.sleep(500); + + KAuditableVersionedEntity updated = template.save(inserted.withValue("changed-value")); + + KAuditableVersionedEntity fetched = template.findOne(Query.query(Criteria.where("id").is(entity.getId())), + KAuditableVersionedEntity.class); + + assertThat(updated.getModificationDate()).isAfter(inserted.getModificationDate()); + assertThat(fetched.getModificationDate()).isAfter(inserted.getModificationDate()); + assertThat(fetched.getModificationDate()).isEqualTo(updated.getModificationDate().truncatedTo(ChronoUnit.MILLIS)); + } + + static class ImmutableAuditableEntityWithVersion { + + final @Id String id; + final String value; + final @Version Integer version; + final @LastModifiedDate Instant modificationDate; + + ImmutableAuditableEntityWithVersion(String id, String value, Integer version, Instant modificationDate) { + + this.id = id; + this.value = value; + this.version = version; + this.modificationDate = modificationDate; + } + + ImmutableAuditableEntityWithVersion withValue(String value) { + return new 
ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withModificationDate(Instant modificationDate) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withVersion(Integer version) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/ReactiveMongoTemplateAuditingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/ReactiveMongoTemplateAuditingTests.java new file mode 100644 index 0000000000..28429b53dc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/ReactiveMongoTemplateAuditingTests.java @@ -0,0 +1,175 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.auditing; + +import static org.assertj.core.api.Assertions.*; + +import reactor.test.StepVerifier; +import reactor.util.function.Tuples; + +import java.time.Duration; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Collections; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.annotation.Version; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing; +import org.springframework.data.mongodb.core.KAuditableVersionedEntity; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link EnableReactiveMongoAuditing} through {@link ReactiveMongoTemplate}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +class ReactiveMongoTemplateAuditingTests { + + static final String DB_NAME = "mongo-template-audit-tests"; + + static @Client MongoClient mongoClient; + + @Configuration + @EnableReactiveMongoAuditing + static class Conf extends AbstractReactiveMongoConfiguration { + + @Bean + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DB_NAME; + } + + @Override + protected Set<Class<?>> getInitialEntitySet() { + return Collections.emptySet(); + } + } + + @Autowired ReactiveMongoTemplate template; + @Autowired MongoClient client; + + @BeforeEach + void setUp() { + + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(ImmutableAuditableEntityWithVersion.class), + client); + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(KAuditableVersionedEntity.class), client); + } + + @Test // DATAMONGO-2346 + void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedEntityOnSave() { + + ImmutableAuditableEntityWithVersion entity = new ImmutableAuditableEntityWithVersion(null, "value", null, null); + + template.save(entity).delayElement(Duration.ofMillis(500)) // + .flatMap(inserted -> template.save(inserted.withValue("changed-value")) // + .map(updated -> Tuples.of(inserted, updated))) // + .flatMap(tuple2 -> template + .findOne(Query.query(Criteria.where("id").is(tuple2.getT1().id)), ImmutableAuditableEntityWithVersion.class) + .map(fetched -> Tuples.of(tuple2.getT1(), tuple2.getT2(), fetched))) // + .as(StepVerifier::create) // + .consumeNextWith(tuple3 -> { + + assertThat(tuple3.getT2().modificationDate).isAfter(tuple3.getT1().modificationDate); + assertThat(tuple3.getT3().modificationDate).isAfter(tuple3.getT1().modificationDate); + assertThat(tuple3.getT3().modificationDate) + .isEqualTo(tuple3.getT2().modificationDate.truncatedTo(ChronoUnit.MILLIS)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2346 + void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedKotlinEntityOnSave() { + + KAuditableVersionedEntity entity = new KAuditableVersionedEntity(null, "value", null, null); + + template.save(entity).delayElement(Duration.ofMillis(500)) // + .flatMap(inserted -> template.save(inserted.withValue("changed-value")) // + .map(updated -> Tuples.of(inserted, updated))) // + .flatMap(tuple2 -> template + .findOne(Query.query(Criteria.where("id").is(tuple2.getT1().getId())), KAuditableVersionedEntity.class) + .map(fetched -> Tuples.of(tuple2.getT1(), tuple2.getT2(), fetched))) // + .as(StepVerifier::create) // + .consumeNextWith(tuple3 -> { + + assertThat(tuple3.getT2().getModificationDate()).isAfter(tuple3.getT1().getModificationDate()); + assertThat(tuple3.getT3().getModificationDate()).isAfter(tuple3.getT1().getModificationDate()); + assertThat(tuple3.getT3().getModificationDate()) + .isEqualTo(tuple3.getT2().getModificationDate().truncatedTo(ChronoUnit.MILLIS)); + }) // + .verifyComplete(); + } + + @Document("versioned-auditable") + static class ImmutableAuditableEntityWithVersion { + + final @Id String id; + final String value; + final @Version Integer version; + final @LastModifiedDate Instant modificationDate; + + ImmutableAuditableEntityWithVersion(String id, String value, Integer version, Instant modificationDate) { + + this.id = id; + this.value = value; + this.version = version; + this.modificationDate = modificationDate; + } + + 
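// The wither methods below are the hook that lets auditing update this immutable type:
// instead of mutating a field, the mapping layer swaps in a new instance, conceptually
// along these lines (illustrative sketch, not the actual framework internals):
//
//   entity = entity.withVersion(nextVersion).withModificationDate(Instant.now());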
ImmutableAuditableEntityWithVersion withId(String id) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withValue(String value) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withModificationDate(Instant modificationDate) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withVersion(Integer version) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java index 717bd5a692..dfd6b0ab56 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,30 +17,31 @@ import static org.mockito.Mockito.*; -import org.junit.Test; +import org.bson.conversions.Bson; +import org.junit.jupiter.api.Test; + import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToObjectIdConverter; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.util.TypeInformation; -import com.mongodb.DBObject; import com.mongodb.DBRef; /** * Unit tests for {@link AbstractMongoConverter}. 
- * + * @author Oliver Gierke */ public class AbstractMongoConverterUnitTests { - /** - * @see DATAMONGO-1324 - */ - @Test + @Test // DATAMONGO-1324 public void registersObjectIdConvertersExplicitly() { DefaultConversionService conversionService = spy(new DefaultConversionService()); @@ -62,18 +63,33 @@ public MongoTypeMapper getTypeMapper() { throw new UnsupportedOperationException(); } + @Override + public ProjectionFactory getProjectionFactory() { + return null; + } + + @Override + public CustomConversions getCustomConversions() { + return null; + } + + @Override + public <R> R project(EntityProjection<R, ?> descriptor, Bson bson) { + return null; + } + @Override public MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getMappingContext() { throw new UnsupportedOperationException(); } @Override - public <R> R read(Class<R> type, DBObject source) { + public <R> R read(Class<R> type, Bson source) { throw new UnsupportedOperationException(); } @Override - public void write(Object source, DBObject sink) { + public void write(Object source, Bson sink) { throw new UnsupportedOperationException(); } @@ -83,7 +99,7 @@ public Object convertToMongoType(Object obj, TypeInformation<?> typeInformation) } @Override - public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { + public DBRef toDBRef(Object object, MongoPersistentProperty referringProperty) { throw new UnsupportedOperationException(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ConverterRegistrationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ConverterRegistrationUnitTests.java deleted file mode 100644 index 069540006b..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ConverterRegistrationUnitTests.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import org.junit.Test; -import org.springframework.data.mongodb.core.mapping.Person; - -/** - * Unit tests for {@link ConverterRegistration}. 
- * - * @author Oliver Gierke - */ -public class ConverterRegistrationUnitTests { - - @Test - public void considersNotExplicitlyReadingDependingOnTypes() { - - ConverterRegistration context = new ConverterRegistration(Person.class, String.class, false, false); - assertThat(context.isWriting(), is(true)); - assertThat(context.isReading(), is(false)); - - context = new ConverterRegistration(String.class, Person.class, false, false); - assertThat(context.isWriting(), is(false)); - assertThat(context.isReading(), is(true)); - - context = new ConverterRegistration(String.class, Class.class, false, false); - assertThat(context.isWriting(), is(true)); - assertThat(context.isReading(), is(true)); - } - - @Test - public void forcesReadWriteOnlyIfAnnotated() { - - ConverterRegistration context = new ConverterRegistration(String.class, Class.class, false, true); - assertThat(context.isWriting(), is(true)); - assertThat(context.isReading(), is(false)); - - context = new ConverterRegistration(String.class, Class.class, true, false); - assertThat(context.isWriting(), is(false)); - assertThat(context.isReading(), is(true)); - } - - @Test - public void considersConverterForReadAndWriteIfBothAnnotated() { - - ConverterRegistration context = new ConverterRegistration(String.class, Class.class, true, true); - assertThat(context.isWriting(), is(true)); - assertThat(context.isReading(), is(true)); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConversionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConversionsUnitTests.java deleted file mode 100644 index ecba618599..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConversionsUnitTests.java +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Copyright 2011-2016 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.convert; - -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; - -import java.net.URL; -import java.text.DateFormat; -import java.text.Format; -import java.text.SimpleDateFormat; -import java.util.Arrays; -import java.util.Collections; -import java.util.Currency; -import java.util.Date; -import java.util.Locale; -import java.util.UUID; - -import org.bson.types.Binary; -import org.bson.types.ObjectId; -import org.joda.time.DateTime; -import org.junit.Test; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.convert.converter.ConverterFactory; -import org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.core.convert.support.GenericConversionService; -import org.springframework.data.convert.WritingConverter; -import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter; -import org.threeten.bp.LocalDateTime; - -import com.mongodb.DBRef; - -/** - * Unit tests for {@link CustomConversions}. - * - * @author Oliver Gierke - * @author Christoph Strobl - */ -public class CustomConversionsUnitTests { - - @Test - public void findsBasicReadAndWriteConversions() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(FormatToStringConverter.INSTANCE, - StringToFormatConverter.INSTANCE)); - - assertThat(conversions.getCustomWriteTarget(Format.class, null), is(typeCompatibleWith(String.class))); - assertThat(conversions.getCustomWriteTarget(String.class, null), is(nullValue())); - - assertThat(conversions.hasCustomReadTarget(String.class, Format.class), is(true)); - assertThat(conversions.hasCustomReadTarget(String.class, Locale.class), is(false)); - } - - @Test - public void considersSubtypesCorrectly() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(NumberToStringConverter.INSTANCE, - StringToNumberConverter.INSTANCE)); - - assertThat(conversions.getCustomWriteTarget(Long.class, null), is(typeCompatibleWith(String.class))); - assertThat(conversions.hasCustomReadTarget(String.class, Long.class), is(true)); - } - - @Test - public void considersTypesWeRegisteredConvertersForAsSimple() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(FormatToStringConverter.INSTANCE)); - assertThat(conversions.isSimpleType(UUID.class), is(true)); - } - - /** - * @see DATAMONGO-240 - */ - @Test - public void considersObjectIdToBeSimpleType() { - - CustomConversions conversions = new CustomConversions(); - assertThat(conversions.isSimpleType(ObjectId.class), is(true)); - assertThat(conversions.hasCustomWriteTarget(ObjectId.class), is(false)); - - } - - /** - * @see DATAMONGO-240 - */ - @Test - public void considersCustomConverterForSimpleType() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(new Converter<ObjectId, String>() { - public String convert(ObjectId source) { - return source == null ? 
null : source.toString(); - } - })); - - assertThat(conversions.isSimpleType(ObjectId.class), is(true)); - assertThat(conversions.hasCustomWriteTarget(ObjectId.class), is(true)); - assertThat(conversions.hasCustomReadTarget(ObjectId.class, String.class), is(true)); - assertThat(conversions.hasCustomReadTarget(ObjectId.class, Object.class), is(false)); - } - - @Test - public void considersDBRefsToBeSimpleTypes() { - - CustomConversions conversions = new CustomConversions(); - assertThat(conversions.isSimpleType(DBRef.class), is(true)); - } - - @Test - public void populatesConversionServiceCorrectly() { - - GenericConversionService conversionService = new DefaultConversionService(); - - CustomConversions conversions = new CustomConversions(Arrays.asList(StringToFormatConverter.INSTANCE)); - conversions.registerConvertersIn(conversionService); - - assertThat(conversionService.canConvert(String.class, Format.class), is(true)); - } - - /** - * @see DATAMONGO-259 - */ - @Test - public void doesNotConsiderTypeSimpleIfOnlyReadConverterIsRegistered() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(StringToFormatConverter.INSTANCE)); - assertThat(conversions.isSimpleType(Format.class), is(false)); - } - - /** - * @see DATAMONGO-298 - */ - @Test - public void discoversConvertersForSubtypesOfMongoTypes() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(StringToIntegerConverter.INSTANCE)); - assertThat(conversions.hasCustomReadTarget(String.class, Integer.class), is(true)); - assertThat(conversions.hasCustomWriteTarget(String.class, Integer.class), is(true)); - } - - /** - * @see DATAMONGO-342 - */ - @Test - public void doesNotHaveConverterForStringToBigIntegerByDefault() { - - CustomConversions conversions = new CustomConversions(); - assertThat(conversions.hasCustomWriteTarget(String.class), is(false)); - assertThat(conversions.getCustomWriteTarget(String.class), is(nullValue())); - - conversions = new CustomConversions(Arrays.asList(StringToBigIntegerConverter.INSTANCE)); - assertThat(conversions.hasCustomWriteTarget(String.class), is(false)); - assertThat(conversions.getCustomWriteTarget(String.class), is(nullValue())); - } - - /** - * @see DATAMONGO-390 - */ - @Test - public void considersBinaryASimpleType() { - - CustomConversions conversions = new CustomConversions(); - assertThat(conversions.isSimpleType(Binary.class), is(true)); - } - - /** - * @see DATAMONGO-462 - */ - @Test - public void hasWriteConverterForURL() { - - CustomConversions conversions = new CustomConversions(); - assertThat(conversions.hasCustomWriteTarget(URL.class), is(true)); - } - - /** - * @see DATAMONGO-462 - */ - @Test - public void readTargetForURL() { - CustomConversions conversions = new CustomConversions(); - assertThat(conversions.hasCustomReadTarget(String.class, URL.class), is(true)); - } - - /** - * @see DATAMONGO-795 - */ - @Test - @SuppressWarnings("rawtypes") - public void favorsCustomConverterForIndeterminedTargetType() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(DateTimeToStringConverter.INSTANCE)); - assertThat(conversions.getCustomWriteTarget(DateTime.class, null), is(equalTo((Class) String.class))); - } - - /** - * @see DATAMONGO-881 - */ - @Test - public void customConverterOverridesDefault() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(CustomDateTimeConverter.INSTANCE)); - GenericConversionService conversionService = new DefaultConversionService(); - 
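// registerConvertersIn(...) below wires the custom converter into the ConversionService
// so it takes precedence over the default DateTime-to-Date conversion; a minimal sketch
// of the expected effect (mirroring the assertion that follows):
//
//   conversionService.convert(new DateTime(), Date.class); // -> new Date(0)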
conversions.registerConvertersIn(conversionService); - - assertThat(conversionService.convert(new DateTime(), Date.class), is(new Date(0))); - } - - /** - * @see DATAMONGO-1001 - */ - @Test - public void shouldSelectPropertCustomWriteTargetForCglibProxiedType() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(FormatToStringConverter.INSTANCE)); - assertThat(conversions.getCustomWriteTarget(createProxyTypeFor(Format.class)), is(typeCompatibleWith(String.class))); - } - - /** - * @see DATAMONGO-1001 - */ - @Test - public void shouldSelectPropertCustomReadTargetForCglibProxiedType() { - - CustomConversions conversions = new CustomConversions(Arrays.asList(CustomObjectToStringConverter.INSTANCE)); - assertThat(conversions.hasCustomReadTarget(createProxyTypeFor(Object.class), String.class), is(true)); - } - - /** - * @see DATAMONGO-1131 - */ - @Test - public void registersConvertersForJsr310() { - - CustomConversions customConversions = new CustomConversions(); - - assertThat(customConversions.hasCustomWriteTarget(java.time.LocalDateTime.class), is(true)); - } - - /** - * @see DATAMONGO-1131 - */ - @Test - public void registersConvertersForThreeTenBackPort() { - - CustomConversions customConversions = new CustomConversions(); - - assertThat(customConversions.hasCustomWriteTarget(LocalDateTime.class), is(true)); - } - - /** - * @see DATAMONGO-1302 - */ - @Test - public void registersConverterFactoryCorrectly() { - - CustomConversions customConversions = new CustomConversions(Collections.singletonList(new FormatConverterFactory())); - - assertThat(customConversions.getCustomWriteTarget(String.class, SimpleDateFormat.class), notNullValue()); - } - - /** - * @see DATAMONGO-1372 - */ - @Test - public void registersConvertersForCurrency() { - - CustomConversions customConversions = new CustomConversions(); - - assertThat(customConversions.hasCustomWriteTarget(Currency.class), is(true)); - assertThat(customConversions.hasCustomReadTarget(String.class, Currency.class), is(true)); - } - - private static Class<?> createProxyTypeFor(Class<?> type) { - - ProxyFactory factory = new ProxyFactory(); - factory.setProxyTargetClass(true); - factory.setTargetClass(type); - - return factory.getProxy().getClass(); - } - - enum FormatToStringConverter implements Converter<Format, String> { - INSTANCE; - - public String convert(Format source) { - return source.toString(); - } - } - - enum StringToFormatConverter implements Converter<String, Format> { - INSTANCE; - public Format convert(String source) { - return DateFormat.getInstance(); - } - } - - enum NumberToStringConverter implements Converter<Number, String> { - INSTANCE; - public String convert(Number source) { - return source.toString(); - } - } - - enum StringToNumberConverter implements Converter<String, Number> { - INSTANCE; - public Number convert(String source) { - return 0L; - } - } - - enum StringToIntegerConverter implements Converter<String, Integer> { - INSTANCE; - public Integer convert(String source) { - return 0; - } - } - - enum DateTimeToStringConverter implements Converter<DateTime, String> { - INSTANCE; - - @Override - public String convert(DateTime source) { - return ""; - } - } - - enum CustomDateTimeConverter implements Converter<DateTime, Date> { - - INSTANCE; - - @Override - public Date convert(DateTime source) { - return new Date(0); - } - } - - enum CustomObjectToStringConverter implements Converter<Object, String> { - - INSTANCE; - - @Override - public String convert(Object source) { - return source != null ? 
source.toString() : null; - } - - } - - @WritingConverter - static class FormatConverterFactory implements ConverterFactory<String, Format> { - - @Override - public <T extends Format> Converter<String, T> getConverter(Class<T> targetType) { - return new StringToFormat<T>(targetType); - } - - private static final class StringToFormat<T extends Format> implements Converter<String, T> { - - private final Class<T> targetType; - - public StringToFormat(Class<T> targetType) { - this.targetType = targetType; - } - - @Override - public T convert(String source) { - - if (source.length() == 0) { - return null; - } - - try { - return targetType.newInstance(); - } catch (Exception e) { - throw new IllegalArgumentException(e.getMessage(), e); - } - } - } - - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java index e26394f111..5da3e896e6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,106 +15,101 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.mockito.Matchers.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.Arrays; import java.util.HashSet; -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; /** * Test case to verify correct usage of custom {@link Converter} implementations to be used. 
- * + * @author Oliver Gierke - * @see DATADOC-101 */ -@RunWith(MockitoJUnitRunner.class) -public class CustomConvertersUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class CustomConvertersUnitTests { - MappingMongoConverter converter; + private MappingMongoConverter converter; - @Mock BarToDBObjectConverter barToDBObjectConverter; - @Mock DBObjectToBarConverter dbObjectToBarConverter; - @Mock MongoDbFactory mongoDbFactory; + @Mock BarToDocumentConverter barToDocumentConverter; + @Mock DocumentToBarConverter documentToBarConverter; - MongoMappingContext context; - MongoPersistentEntity<?> fooEntity; - MongoPersistentEntity<?> barEntity; + private MongoMappingContext context; - @Before - @SuppressWarnings("unchecked") - public void setUp() throws Exception { + @BeforeEach + void setUp() { - when(barToDBObjectConverter.convert(any(Bar.class))).thenReturn(new BasicDBObject()); - when(dbObjectToBarConverter.convert(any(DBObject.class))).thenReturn(new Bar()); + when(barToDocumentConverter.convert(any(Bar.class))).thenReturn(new Document()); + when(documentToBarConverter.convert(any(Document.class))).thenReturn(new Bar()); - CustomConversions conversions = new CustomConversions(Arrays.asList(barToDBObjectConverter, dbObjectToBarConverter)); + CustomConversions conversions = new MongoCustomConversions( + Arrays.asList(barToDocumentConverter, documentToBarConverter)); context = new MongoMappingContext(); - context.setInitialEntitySet(new HashSet<Class<?>>(Arrays.asList(Foo.class, Bar.class))); + context.setInitialEntitySet(new HashSet<>(Arrays.asList(Foo.class, Bar.class))); context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); context.initialize(); - converter = new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), context); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); converter.setCustomConversions(conversions); converter.afterPropertiesSet(); } - @Test - public void nestedToDBObjectConverterGetsInvoked() { + @Test // DATADOC-101 + void nestedToDocumentConverterGetsInvoked() { Foo foo = new Foo(); foo.bar = new Bar(); - converter.write(foo, new BasicDBObject()); - verify(barToDBObjectConverter).convert(any(Bar.class)); + converter.write(foo, new Document()); + verify(barToDocumentConverter).convert(any(Bar.class)); } - @Test - public void nestedFromDBObjectConverterGetsInvoked() { + @Test // DATADOC-101 + void nestedFromDocumentConverterGetsInvoked() { - BasicDBObject dbObject = new BasicDBObject(); - dbObject.put("bar", new BasicDBObject()); + Document document = new Document(); + document.put("bar", new Document()); - converter.read(Foo.class, dbObject); - verify(dbObjectToBarConverter).convert(any(DBObject.class)); + converter.read(Foo.class, document); + verify(documentToBarConverter).convert(any(Document.class)); } - @Test - public void toDBObjectConverterGetsInvoked() { + @Test // DATADOC-101 + void toDocumentConverterGetsInvoked() { - converter.write(new Bar(), new BasicDBObject()); - verify(barToDBObjectConverter).convert(any(Bar.class)); + converter.write(new Bar(), new Document()); + verify(barToDocumentConverter).convert(any(Bar.class)); } - @Test - public void fromDBObjectConverterGetsInvoked() { + @Test // DATADOC-101 + void fromDocumentConverterGetsInvoked() { - converter.read(Bar.class, new BasicDBObject()); - verify(dbObjectToBarConverter).convert(any(DBObject.class)); + converter.read(Bar.class, new Document()); + verify(documentToBarConverter).convert(any(Document.class)); } - 
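// The tests above verify delegation only, so the mocked converters from setUp() suffice.
// A non-mocked round trip would look roughly like this (illustrative sketch):
//
//   Document sink = new Document();
//   converter.write(new Bar(), sink);          // routed through BarToDocumentConverter
//   Bar bar = converter.read(Bar.class, sink); // routed through DocumentToBarConverter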
@Test - public void foo() { - DBObject dbObject = new BasicDBObject(); - dbObject.put("foo", null); + @Test // DATADOC-101 + void foo() { + Document document = new Document(); + document.put("foo", null); - Assert.assertThat(dbObject.containsField("foo"), CoreMatchers.is(true)); + assertThat(document).containsKey("foo"); } public static class Foo { @@ -127,11 +122,7 @@ public static class Bar { public String foo; } - private interface BarToDBObjectConverter extends Converter<Bar, DBObject> { + private interface BarToDocumentConverter extends Converter<Bar, Document> {} - } - - private interface DBObjectToBarConverter extends Converter<DBObject, Bar> { - - } + private interface DocumentToBarConverter extends Converter<Document, Bar> {} } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DBObjectAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DBObjectAccessorUnitTests.java deleted file mode 100644 index b4ad05236b..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DBObjectAccessorUnitTests.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import org.junit.Test; -import org.springframework.data.mongodb.core.DBObjectTestUtils; -import org.springframework.data.mongodb.core.mapping.Field; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; - -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - -/** - * Unit tests for {@link DbObjectAccessor}. 
- * - * @see DATAMONGO-766 - * @author Oliver Gierke - */ -public class DBObjectAccessorUnitTests { - - MongoMappingContext context = new MongoMappingContext(); - MongoPersistentEntity<?> projectingTypeEntity = context.getPersistentEntity(ProjectingType.class); - MongoPersistentProperty fooProperty = projectingTypeEntity.getPersistentProperty("foo"); - - @Test - public void putsNestedFieldCorrectly() { - - DBObject dbObject = new BasicDBObject(); - - DBObjectAccessor accessor = new DBObjectAccessor(dbObject); - accessor.put(fooProperty, "FooBar"); - - DBObject aDbObject = DBObjectTestUtils.getAsDBObject(dbObject, "a"); - assertThat(aDbObject.get("b"), is((Object) "FooBar")); - } - - @Test - public void getsNestedFieldCorrectly() { - - DBObject source = new BasicDBObject("a", new BasicDBObject("b", "FooBar")); - - DBObjectAccessor accessor = new DBObjectAccessor(source); - assertThat(accessor.get(fooProperty), is((Object) "FooBar")); - } - - @Test - public void returnsNullForNonExistingFieldPath() { - - DBObjectAccessor accessor = new DBObjectAccessor(new BasicDBObject()); - assertThat(accessor.get(fooProperty), is(nullValue())); - } - - @Test(expected = IllegalArgumentException.class) - public void rejectsNonBasicDBObjects() { - new DBObjectAccessor(new BasicDBList()); - } - - @Test(expected = IllegalArgumentException.class) - public void rejectsNullDBObject() { - new DBObjectAccessor(null); - } - - /** - * @see DATAMONGO-1335 - */ - @Test - public void writesAllNestingsCorrectly() { - - MongoPersistentEntity<?> entity = context.getPersistentEntity(TypeWithTwoNestings.class); - - BasicDBObject target = new BasicDBObject(); - - DBObjectAccessor accessor = new DBObjectAccessor(target); - accessor.put(entity.getPersistentProperty("id"), "id"); - accessor.put(entity.getPersistentProperty("b"), "b"); - accessor.put(entity.getPersistentProperty("c"), "c"); - - DBObject nestedA = DBObjectTestUtils.getAsDBObject(target, "a"); - - assertThat(nestedA, is(notNullValue())); - assertThat(nestedA.get("b"), is((Object) "b")); - assertThat(nestedA.get("c"), is((Object) "c")); - } - - static class ProjectingType { - - String name; - @Field("a.b") String foo; - NestedType a; - } - - static class NestedType { - String b; - String c; - } - - static class TypeWithTwoNestings { - - String id; - @Field("a.b") String b; - @Field("a.c") String c; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java index 5c3ae804e7..92e5003628 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2012 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.ArrayList; @@ -23,17 +23,16 @@ import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Test; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** * Unit test to reproduce DATAMONGO-273. - * + * * @author Harlan Iverson * @author Oliver Gierke */ @@ -41,7 +40,7 @@ public class DataMongo273Tests { MappingMongoConverter converter; - @Before + @BeforeEach public void setupMongoConverter() { MongoMappingContext mappingContext = new MongoMappingContext(); @@ -51,10 +50,7 @@ public void setupMongoConverter() { converter.afterPropertiesSet(); } - /** - * @see DATAMONGO-273 - */ - @Test + @Test // DATAMONGO-273 public void convertMapOfThings() { Plane plane = new Plane("Boeing", 4); @@ -66,21 +62,19 @@ public void convertMapOfThings() { mapOfThings.put("train", train); mapOfThings.put("automobile", automobile); - DBObject result = new BasicDBObject(); + Document result = new Document(); converter.write(mapOfThings, result); @SuppressWarnings("unchecked") Map<String, Object> mapOfThings2 = converter.read(Map.class, result); - assertTrue(mapOfThings2.get("plane") instanceof Plane); - assertTrue(mapOfThings2.get("train") instanceof Train); - assertTrue(mapOfThings2.get("automobile") instanceof Automobile); + assertThat(mapOfThings2.get("plane") instanceof Plane).isTrue(); + assertThat(mapOfThings2.get("train") instanceof Train).isTrue(); + assertThat(mapOfThings2.get("automobile") instanceof Automobile).isTrue(); } - /** - * @see DATAMONGO-294 - */ - @Test + @Test // DATAMONGO-294 + @Disabled("TODO: Mongo3 - this is no longer supported as DBList is no Bson type :/") @SuppressWarnings({ "rawtypes", "unchecked" }) public void convertListOfThings() { Plane plane = new Plane("Boeing", 4); @@ -92,20 +86,17 @@ public void convertListOfThings() { listOfThings.add(train); listOfThings.add(automobile); - DBObject result = new BasicDBList(); + Document result = new Document(); converter.write(listOfThings, result); List listOfThings2 = converter.read(List.class, result); - assertTrue(listOfThings2.get(0) instanceof Plane); - assertTrue(listOfThings2.get(1) instanceof Train); - assertTrue(listOfThings2.get(2) instanceof Automobile); + assertThat(listOfThings2.get(0) instanceof Plane).isTrue(); + assertThat(listOfThings2.get(1) instanceof Train).isTrue(); + assertThat(listOfThings2.get(2) instanceof Automobile).isTrue(); } - /** - * @see DATAMONGO-294 - */ - @Test + @Test // DATAMONGO-294 @SuppressWarnings({ "rawtypes", "unchecked" }) public void convertListOfThings_NestedInMap() { @@ -123,16 +114,16 @@ public void convertListOfThings_NestedInMap() { Shipment shipment = new Shipment(box); - DBObject result = new BasicDBObject(); + Document result = new Document(); 
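// The round trip below relies on default type mapping: write(...) stores each value's
// class in a "_class" field, which is how read(...) can restore the concrete
// Plane/Train/Automobile instances from the nested map-of-lists structure.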
converter.write(shipment, result); Shipment shipment2 = converter.read(Shipment.class, result); List listOfThings2 = (List) shipment2.getBoxes().get("one"); - assertTrue(listOfThings2.get(0) instanceof Plane); - assertTrue(listOfThings2.get(1) instanceof Train); - assertTrue(listOfThings2.get(2) instanceof Automobile); + assertThat(listOfThings2.get(0) instanceof Plane).isTrue(); + assertThat(listOfThings2.get(1) instanceof Train).isTrue(); + assertThat(listOfThings2.get(2) instanceof Automobile).isTrue(); } static class Plane { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index bc549004d2..b53531f301 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Matchers.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils.*; @@ -25,129 +23,133 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; +import org.bson.Document; +import org.bson.conversions.Bson; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.annotation.AccessType; import org.springframework.data.annotation.AccessType.Type; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.Person; import 
org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.SerializationUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.DBRef; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; /** - * Unit tests for {@link DbRefMappingMongoConverter}. - * + * Unit tests for {@link MappingMongoConverter}. + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class DbRefMappingMongoConverterUnitTests { +@ExtendWith(MockitoExtension.class) +class DbRefMappingMongoConverterUnitTests { - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; - @Mock MongoDbFactory dbFactory; - DefaultDbRefResolver dbRefResolver; + @Mock MongoDatabaseFactory dbFactory; + private DefaultDbRefResolver dbRefResolver; - @Before - public void setUp() { + @BeforeEach + void setUp() { when(dbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); this.dbRefResolver = spy(new DefaultDbRefResolver(dbFactory)); this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); } - /** - * @see DATAMONGO-347 - */ - @Test - public void createsSimpleDBRefCorrectly() { + @Test // DATAMONGO-347 + void createsSimpleDBRefCorrectly() { Person person = new Person(); person.id = "foo"; DBRef dbRef = converter.toDBRef(person, null); - assertThat(dbRef.getId(), is((Object) "foo")); - assertThat(dbRef.getCollectionName(), is("person")); + assertThat(dbRef.getId()).isEqualTo("foo"); + assertThat(dbRef.getCollectionName()).isEqualTo("person"); } - /** - * @see DATAMONGO-657 - */ - @Test - public void convertDocumentWithMapDBRef() { + @Test // DATAMONGO-657 + void convertDocumentWithMapDBRef() { - DBObject mapValDBObject = new BasicDBObject(); - mapValDBObject.put("_id", BigInteger.ONE); + Document mapValDocument = new Document(); + mapValDocument.put("_id", BigInteger.ONE); DBRef dbRef = mock(DBRef.class); + when(dbRef.getId()).thenReturn(BigInteger.ONE); + when(dbRef.getCollectionName()).thenReturn("collection-1"); - if (MongoClientVersion.isMongo3Driver()) { - DB dbMock = mock(DB.class); - DBCollection collectionMock = mock(DBCollection.class); - when(dbFactory.getDb()).thenReturn(dbMock); - when(dbMock.getCollection(anyString())).thenReturn(collectionMock); - when(collectionMock.findOne(anyObject())).thenReturn(mapValDBObject); - } else { - when(dbRefResolver.fetch(dbRef)).thenReturn(mapValDBObject); - } + MongoDatabase dbMock = mock(MongoDatabase.class); + MongoCollection<Document> collectionMock = mock(MongoCollection.class); + when(dbFactory.getMongoDatabase()).thenReturn(dbMock); + when(dbMock.getCollection(anyString(), eq(Document.class))).thenReturn(collectionMock); + + FindIterable<Document> fi = mock(FindIterable.class); + 
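// The stubbing below emulates, step by step, the driver call chain the DBRef fetch
// issues, approximately (sketch, not asserted by this test):
//
//   database.getCollection("collection-1", Document.class)
//       .find(new Document("_id", BigInteger.ONE)).limit(1).first();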
when(fi.limit(anyInt())).thenReturn(fi); + when(fi.sort(any())).thenReturn(fi); + when(fi.first()).thenReturn(mapValDocument); + when(collectionMock.find(Mockito.any(Bson.class))).thenReturn(fi); MapDBRef mapDBRef = new MapDBRef(); MapDBRefVal val = new MapDBRefVal(); val.id = BigInteger.ONE; - Map<String, MapDBRefVal> mapVal = new HashMap<String, MapDBRefVal>(); + Map<String, MapDBRefVal> mapVal = new HashMap<>(); mapVal.put("test", val); mapDBRef.map = mapVal; - BasicDBObject dbObject = new BasicDBObject(); - converter.write(mapDBRef, dbObject); + Document document = new Document(); + converter.write(mapDBRef, document); - DBObject map = (DBObject) dbObject.get("map"); + Document map = (Document) document.get("map"); - assertThat(map.get("test"), instanceOf(DBRef.class)); + assertThat(map.get("test")).isInstanceOf(DBRef.class); - ((DBObject) dbObject.get("map")).put("test", dbRef); + ((Document) document.get("map")).put("test", dbRef); - MapDBRef read = converter.read(MapDBRef.class, dbObject); + MapDBRef read = converter.read(MapDBRef.class, document); - assertThat(read.map.get("test").id, is(BigInteger.ONE)); + assertThat(read.map.get("test").id).isEqualTo(BigInteger.ONE); } - /** - * @see DATAMONGO-347 - */ - @Test - public void createsDBRefWithClientSpecCorrectly() { + @Test // DATAMONGO-347 + void createsDBRefWithClientSpecCorrectly() { PropertyPath path = PropertyPath.from("person", PersonClient.class); MongoPersistentProperty property = mappingContext.getPersistentPropertyPath(path).getLeafProperty(); @@ -156,433 +158,529 @@ public void createsDBRefWithClientSpecCorrectly() { person.id = "foo"; DBRef dbRef = converter.toDBRef(person, property); - assertThat(dbRef.getId(), is((Object) "foo")); - assertThat(dbRef.getCollectionName(), is("person")); + assertThat(dbRef.getId()).isEqualTo("foo"); + assertThat(dbRef.getCollectionName()).isEqualTo("person"); } - /** - * @see DATAMONGO-348 - */ - @Test - public void lazyLoadingProxyForLazyDbRefOnInterface() { + @Test // DATAMONGO-348 + void lazyLoadingProxyForLazyDbRefOnInterface() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); - lazyDbRefs.dbRefToInterface = new LinkedList<LazyDbRefTarget>(Arrays.asList(new LazyDbRefTarget("1"))); - converterSpy.write(lazyDbRefs, dbo); + lazyDbRefs.dbRefToInterface = new LinkedList<>(Collections.singletonList(new LazyDbRefTarget("1"))); + converterSpy.write(lazyDbRefs, document); - ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, dbo); + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToInterface, false); - assertThat(result.dbRefToInterface.get(0).getId(), is(id)); + assertThat(result.dbRefToInterface.get(0).getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToInterface, true); - assertThat(result.dbRefToInterface.get(0).getValue(), is(value)); + assertThat(result.dbRefToInterface.get(0).getValue()).isEqualTo(value); } - /** - * @see DATAMONGO-348 - */ - @Test - public void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { + @Test // DATAMONGO-348 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void 
lazyLoadingProxyForLazyDbRefOnConcreteCollection() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); - lazyDbRefs.dbRefToConcreteCollection = new ArrayList<LazyDbRefTarget>(Arrays.asList(new LazyDbRefTarget(id, value))); - converterSpy.write(lazyDbRefs, dbo); + lazyDbRefs.dbRefToConcreteCollection = new ArrayList<>(Collections.singletonList(new LazyDbRefTarget(id, value))); + converterSpy.write(lazyDbRefs, document); - ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, dbo); + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteCollection, false); - assertThat(result.dbRefToConcreteCollection.get(0).getId(), is(id)); + assertThat(result.dbRefToConcreteCollection.get(0).getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteCollection, true); - assertThat(result.dbRefToConcreteCollection.get(0).getValue(), is(value)); + assertThat(result.dbRefToConcreteCollection.get(0).getValue()).isEqualTo(value); } - /** - * @see DATAMONGO-348 - */ - @Test - public void lazyLoadingProxyForLazyDbRefOnConcreteType() { + @Test // DATAMONGO-348 + void lazyLoadingProxyForLazyDbRefOnConcreteType() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); lazyDbRefs.dbRefToConcreteType = new LazyDbRefTarget(id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, dbo); + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteType, false); - assertThat(result.dbRefToConcreteType.getId(), is(id)); + assertThat(result.dbRefToConcreteType.getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteType, true); - assertThat(result.dbRefToConcreteType.getValue(), is(value)); + assertThat(result.dbRefToConcreteType.getValue()).isEqualTo(value); } - /** - * @see DATAMONGO-348 - */ - @Test - public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor() { + @Test // DATAMONGO-348 + void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); - lazyDbRefs.dbRefToConcreteTypeWithPersistenceConstructor = new LazyDbRefTargetWithPeristenceConstructor( - (Object) id, (Object) value); - converterSpy.write(lazyDbRefs, dbo); 
+ lazyDbRefs.dbRefToConcreteTypeWithPersistenceConstructor = new LazyDbRefTargetWithPeristenceConstructor(id, value); + converterSpy.write(lazyDbRefs, document); - ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, dbo); + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructor, false); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getId(), is(id)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructor, true); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getValue(), is(value)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getValue()).isEqualTo(value); } - /** - * @see DATAMONGO-348 - */ - @Test - public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructorButWithoutDefaultConstructor() { + @Test // DATAMONGO-348 + void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructorButWithoutDefaultConstructor() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); lazyDbRefs.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor = new LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor( - (Object) id, (Object) value); - converterSpy.write(lazyDbRefs, dbo); + id, value); + converterSpy.write(lazyDbRefs, document); - ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, dbo); + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, false); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getId(), is(id)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, true); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getValue(), is(value)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getValue()) + .isEqualTo(value); } - /** - * @see DATAMONGO-348 - */ - @Test - public void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { + @Test // DATAMONGO-348 + void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); SerializableClassWithLazyDbRefs lazyDbRefs = new SerializableClassWithLazyDbRefs(); lazyDbRefs.dbRefToSerializableTarget = new SerializableLazyDbRefTarget(id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - 
SerializableClassWithLazyDbRefs result = converterSpy.read(SerializableClassWithLazyDbRefs.class, dbo); + SerializableClassWithLazyDbRefs result = converterSpy.read(SerializableClassWithLazyDbRefs.class, document); SerializableClassWithLazyDbRefs deserializedResult = (SerializableClassWithLazyDbRefs) transport(result); - assertThat(deserializedResult.dbRefToSerializableTarget.getId(), is(id)); + assertThat(deserializedResult.dbRefToSerializableTarget.getId()).isEqualTo(id); assertProxyIsResolved(deserializedResult.dbRefToSerializableTarget, true); - assertThat(deserializedResult.dbRefToSerializableTarget.getValue(), is(value)); + assertThat(deserializedResult.dbRefToSerializableTarget.getValue()).isEqualTo(value); } - /** - * @see DATAMONGO-884 - */ - @Test - public void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { + @Test // DATAMONGO-884 + void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); WithObjectMethodOverrideLazyDbRefs lazyDbRefs = new WithObjectMethodOverrideLazyDbRefs(); lazyDbRefs.dbRefToToStringObjectMethodOverride = new ToStringObjectMethodOverrideLazyDbRefTarget(id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, dbo); + WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToToStringObjectMethodOverride, is(notNullValue())); + assertThat(result.dbRefToToStringObjectMethodOverride).isNotNull(); assertProxyIsResolved(result.dbRefToToStringObjectMethodOverride, false); - assertThat(result.dbRefToToStringObjectMethodOverride.toString(), is(id + ":" + value)); + assertThat(result.dbRefToToStringObjectMethodOverride.toString()).isEqualTo(id + ":" + value); assertProxyIsResolved(result.dbRefToToStringObjectMethodOverride, true); } - /** - * @see DATAMONGO-884 - */ - @Test - public void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + @Test // DATAMONGO-884 + void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); WithObjectMethodOverrideLazyDbRefs lazyDbRefs = new WithObjectMethodOverrideLazyDbRefs(); lazyDbRefs.dbRefToPlainObject = new LazyDbRefTarget(id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, dbo); + WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToPlainObject, is(notNullValue())); + assertThat(result.dbRefToPlainObject).isNotNull(); assertProxyIsResolved(result.dbRefToPlainObject, false); 
// calling Object#toString does not initialize the proxy. String proxyString = result.dbRefToPlainObject.toString(); - assertThat(proxyString, is("lazyDbRefTarget" + ":" + id + "$LazyLoadingProxy")); + assertThat(proxyString).isEqualTo("lazyDbRefTarget" + ":" + id + "$LazyLoadingProxy"); assertProxyIsResolved(result.dbRefToPlainObject, false); // calling another method not declared on object triggers proxy initialization. - assertThat(result.dbRefToPlainObject.getValue(), is(value)); + assertThat(result.dbRefToPlainObject.getValue()).isEqualTo(value); assertProxyIsResolved(result.dbRefToPlainObject, true); } - /** - * @see DATAMONGO-884 - */ - @Test - public void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + @Test // DATAMONGO-884 + void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); WithObjectMethodOverrideLazyDbRefs lazyDbRefs = new WithObjectMethodOverrideLazyDbRefs(); lazyDbRefs.dbRefToPlainObject = new LazyDbRefTarget(id, value); lazyDbRefs.dbRefToToStringObjectMethodOverride = new ToStringObjectMethodOverrideLazyDbRefTarget(id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, dbo); + WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToPlainObject, is(notNullValue())); + assertThat(result.dbRefToPlainObject).isNotNull(); assertProxyIsResolved(result.dbRefToPlainObject, false); - assertThat(result.dbRefToPlainObject, is(equalTo(result.dbRefToPlainObject))); - assertThat(result.dbRefToPlainObject, is(not(equalTo(null)))); - assertThat(result.dbRefToPlainObject, is(not(equalTo((Object) lazyDbRefs.dbRefToToStringObjectMethodOverride)))); + assertThat(result.dbRefToPlainObject).isEqualTo(result.dbRefToPlainObject); + assertThat(result.dbRefToPlainObject).isNotEqualTo(null); + assertThat(result.dbRefToPlainObject).isNotEqualTo((Object) lazyDbRefs.dbRefToToStringObjectMethodOverride); assertProxyIsResolved(result.dbRefToPlainObject, false); } - /** - * @see DATAMONGO-884 - */ - @Test - public void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + @Test // DATAMONGO-884 + void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); WithObjectMethodOverrideLazyDbRefs lazyDbRefs = new WithObjectMethodOverrideLazyDbRefs(); lazyDbRefs.dbRefToPlainObject = new LazyDbRefTarget(id, value); lazyDbRefs.dbRefToToStringObjectMethodOverride = new ToStringObjectMethodOverrideLazyDbRefTarget(id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, dbo); + WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToPlainObject, 
is(notNullValue())); + assertThat(result.dbRefToPlainObject).isNotNull(); assertProxyIsResolved(result.dbRefToPlainObject, false); - assertThat(result.dbRefToPlainObject.hashCode(), is(311365444)); + assertThat(result.dbRefToPlainObject.hashCode()).isEqualTo(311365444); assertProxyIsResolved(result.dbRefToPlainObject, false); } - /** - * @see DATAMONGO-884 - */ - @Test - public void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { + @Test // DATAMONGO-884 + void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { String id = "42"; String value = "bubu"; MappingMongoConverter converterSpy = spy(converter); - doReturn(new BasicDBObject("_id", id).append("value", value)).when(converterSpy).readRef((DBRef) any()); + doReturn(new Document("_id", id).append("value", value)).when(converterSpy).readRef(any()); - BasicDBObject dbo = new BasicDBObject(); + Document document = new Document(); WithObjectMethodOverrideLazyDbRefs lazyDbRefs = new WithObjectMethodOverrideLazyDbRefs(); lazyDbRefs.dbRefEqualsAndHashcodeObjectMethodOverride1 = new EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget( id, value); lazyDbRefs.dbRefEqualsAndHashcodeObjectMethodOverride2 = new EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget( id, value); - converterSpy.write(lazyDbRefs, dbo); + converterSpy.write(lazyDbRefs, document); - WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, dbo); + WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride1, false); - assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride1, is(notNullValue())); + assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride1).isNotNull(); result.dbRefEqualsAndHashcodeObjectMethodOverride1.equals(null); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride1, true); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride2, false); - assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride2, is(notNullValue())); + assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride2).isNotNull(); result.dbRefEqualsAndHashcodeObjectMethodOverride2.hashCode(); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride2, true); } - /** - * @see DATAMONGO-987 - */ - @Test - public void shouldNotGenerateLazyLoadingProxyForNullValues() { + @Test // DATAMONGO-987 + void shouldNotGenerateLazyLoadingProxyForNullValues() { - DBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); lazyDbRefs.id = "42"; - converter.write(lazyDbRefs, dbo); + converter.write(lazyDbRefs, document); - ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, dbo); + ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, document); - assertThat(result.id, is(lazyDbRefs.id)); - assertThat(result.dbRefToInterface, is(nullValue())); - assertThat(result.dbRefToConcreteCollection, is(nullValue())); - assertThat(result.dbRefToConcreteType, is(nullValue())); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor, is(nullValue())); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, is(nullValue())); + assertThat(result.id).isEqualTo(lazyDbRefs.id); + assertThat(result.dbRefToInterface).isNull(); + assertThat(result.dbRefToConcreteCollection).isNull(); + 
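Taken together, the DATAMONGO-884 and DATAMONGO-1076 tests above pin down a dispatch rule for lazy-loading DBRef proxies: methods declared on java.lang.Object (toString, equals, hashCode, even finalize) are answered by the proxy itself without touching the database, while a method declared on the target type resolves the reference exactly once. A minimal sketch of that rule; the interceptor class and its members are hypothetical and only illustrate the contract these tests verify, not Spring Data's actual implementation:

import java.lang.reflect.Method;
import java.util.function.Supplier;

// Hypothetical sketch; the real proxies are produced by the DbRefResolver machinery.
class LazyLoadingDispatchSketch {

	private Object target; // the referenced entity, fetched lazily
	private boolean resolved = false;
	private final Supplier<Object> resolver;

	LazyLoadingDispatchSketch(Supplier<Object> resolver) {
		this.resolver = resolver;
	}

	Object intercept(Object proxy, Method method, Object[] args) throws Exception {

		// Methods declared on Object never initialize the proxy ...
		if (method.getDeclaringClass() == Object.class) {
			switch (method.getName()) {
				case "toString": return "unresolved reference proxy"; // illustrative text only
				case "hashCode": return System.identityHashCode(proxy);
				case "equals": return proxy == args[0];
				default: return null; // e.g. finalize(): do nothing, never resolve
			}
		}

		// ... any target-type method triggers resolution exactly once.
		if (!resolved) {
			target = resolver.get();
			resolved = true;
		}
		return method.invoke(target, args);
	}
}

This also explains why ToStringObjectMethodOverrideLazyDbRefTarget does resolve on toString(): the override is declared on the target class rather than on Object, so it no longer falls under the first branch.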
assertThat(result.dbRefToConcreteType).isNull(); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor).isNull(); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor).isNull(); } - /** - * @see DATAMONGO-1005 - */ - @Test - public void shouldBeAbleToStoreDirectReferencesToSelf() { + @Test // DATAMONGO-1005 + void shouldBeAbleToStoreDirectReferencesToSelf() { - DBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithDbRefField o = new ClassWithDbRefField(); o.id = "123"; o.reference = o; - converter.write(o, dbo); + converter.write(o, document); - ClassWithDbRefField found = converter.read(ClassWithDbRefField.class, dbo); + ClassWithDbRefField found = converter.read(ClassWithDbRefField.class, document); - assertThat(found, is(notNullValue())); - assertThat(found.reference, is(found)); + assertThat(found).isNotNull(); + assertThat(found.reference).isEqualTo(found); } - /** - * @see DATAMONGO-1005 - */ - @Test - public void shouldBeAbleToStoreNestedReferencesToSelf() { + @Test // DATAMONGO-1005 + void shouldBeAbleToStoreNestedReferencesToSelf() { - DBObject dbo = new BasicDBObject(); + Document document = new Document(); ClassWithNestedDbRefField o = new ClassWithNestedDbRefField(); o.id = "123"; o.nested = new NestedReferenceHolder(); o.nested.reference = o; - converter.write(o, dbo); + converter.write(o, document); - ClassWithNestedDbRefField found = converter.read(ClassWithNestedDbRefField.class, dbo); + ClassWithNestedDbRefField found = converter.read(ClassWithNestedDbRefField.class, document); - assertThat(found, is(notNullValue())); - assertThat(found.nested, is(notNullValue())); - assertThat(found.nested.reference, is(found)); + assertThat(found).isNotNull(); + assertThat(found.nested).isNotNull(); + assertThat(found.nested.reference).isEqualTo(found); } - /** - * @see DATAMONGO-1012 - */ - @Test - public void shouldEagerlyResolveIdPropertyWithFieldAccess() { + @Test // DATAMONGO-1012 + void shouldEagerlyResolveIdPropertyWithFieldAccess() { - MongoPersistentEntity entity = mappingContext.getPersistentEntity(ClassWithLazyDbRefs.class); - MongoPersistentProperty property = entity.getPersistentProperty("dbRefToConcreteType"); - MongoPersistentEntity propertyEntity = mappingContext.getPersistentEntity(property); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(ClassWithLazyDbRefs.class); + MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToConcreteType"); + MongoPersistentEntity propertyEntity = mappingContext.getRequiredPersistentEntity(property); String idValue = new ObjectId().toString(); DBRef dbRef = converter.toDBRef(new LazyDbRefTarget(idValue), property); - DBObject object = new BasicDBObject("dbRefToConcreteType", dbRef); + Document object = new Document("dbRefToConcreteType", dbRef); ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, object); PersistentPropertyAccessor accessor = propertyEntity.getPropertyAccessor(result.dbRefToConcreteType); - MongoPersistentProperty idProperty = mappingContext.getPersistentEntity(LazyDbRefTarget.class).getIdProperty(); + MongoPersistentProperty idProperty = mappingContext.getRequiredPersistentEntity(LazyDbRefTarget.class) + .getIdProperty(); - assertThat(accessor.getProperty(idProperty), is(notNullValue())); + assertThat(accessor.getProperty(idProperty)).isNotNull(); assertProxyIsResolved(result.dbRefToConcreteType, false); } - /** - * @see DATAMONGO-1012 - */ - @Test - public void 
shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { + @Test // DATAMONGO-1012 + void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { - MongoPersistentEntity entity = mappingContext.getPersistentEntity(ClassWithLazyDbRefs.class); - MongoPersistentProperty property = entity.getPersistentProperty("dbRefToConcreteTypeWithPropertyAccess"); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(ClassWithLazyDbRefs.class); + MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToConcreteTypeWithPropertyAccess"); String idValue = new ObjectId().toString(); DBRef dbRef = converter.toDBRef(new LazyDbRefTargetPropertyAccess(idValue), property); - DBObject object = new BasicDBObject("dbRefToConcreteTypeWithPropertyAccess", dbRef); + Document object = new Document("dbRefToConcreteTypeWithPropertyAccess", dbRef); ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, object); LazyDbRefTargetPropertyAccess proxy = result.dbRefToConcreteTypeWithPropertyAccess; - assertThat(ReflectionTestUtils.getField(proxy, "id"), is(nullValue())); + assertThat(ReflectionTestUtils.getField(proxy, "id")).isNull(); assertProxyIsResolved(proxy, false); } - /** - * @see DATAMONGO-1076 - */ - @Test - public void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { + @Test // DATAMONGO-1076 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { - MongoPersistentEntity entity = mappingContext.getPersistentEntity(WithObjectMethodOverrideLazyDbRefs.class); - MongoPersistentProperty property = entity.getPersistentProperty("dbRefToConcreteTypeWithPropertyAccess"); + MongoPersistentEntity entity = mappingContext + .getRequiredPersistentEntity(WithObjectMethodOverrideLazyDbRefs.class); + MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToPlainObject"); String idValue = new ObjectId().toString(); DBRef dbRef = converter.toDBRef(new LazyDbRefTargetPropertyAccess(idValue), property); WithObjectMethodOverrideLazyDbRefs result = converter.read(WithObjectMethodOverrideLazyDbRefs.class, - new BasicDBObject("dbRefToPlainObject", dbRef)); + new Document("dbRefToPlainObject", dbRef)); ReflectionTestUtils.invokeMethod(result.dbRefToPlainObject, "finalize"); assertProxyIsResolved(result.dbRefToPlainObject, false); } + @Test // DATAMONGO-1194 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void shouldBulkFetchListOfReferences() { + + String id1 = "1"; + String id2 = "2"; + String value = "val"; + + MappingMongoConverter converterSpy = spy(converter); + doReturn( + Arrays.asList(new Document("_id", id1).append("value", value), new Document("_id", id2).append("value", value))) + .when(converterSpy).bulkReadRefs(anyList()); + + Document document = new Document(); + ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); + lazyDbRefs.dbRefToConcreteCollection = new ArrayList<>( + Arrays.asList(new LazyDbRefTarget(id1, value), new LazyDbRefTarget(id2, value))); + converterSpy.write(lazyDbRefs, document); + + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); + + assertProxyIsResolved(result.dbRefToConcreteCollection, false); + assertThat(result.dbRefToConcreteCollection.get(0).getId()).isEqualTo(id1); + 
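The DATAMONGO-1194 tests (here, and in DefaultDbRefResolverUnitTests further down) specify a bulk-fetch contract: DBRefs pointing to the same collection are loaded with a single { "_id" : { "$in" : [...] } } query, the caller's ordering is restored afterwards, duplicate ids yield duplicate documents, and empty input returns without querying; references spanning different collections fall back to one-by-one resolution instead. A sketch of that contract; the helper name is hypothetical, and it deliberately takes a single collection, sidestepping the mixed-collection case (which the real resolver rejects with InvalidDataAccessApiUsageException):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.bson.Document;

import com.mongodb.DBRef;
import com.mongodb.client.MongoCollection;

// Hypothetical helper mirroring the behaviour the tests pin down.
class BulkFetchSketch {

	static List<Document> bulkFetch(MongoCollection<Document> collection, List<DBRef> refs) {

		if (refs.isEmpty()) { // bulkFetchShouldReturnEarlyForEmptyLists
			return new ArrayList<>();
		}

		List<Object> ids = new ArrayList<>();
		for (DBRef ref : refs) {
			ids.add(ref.getId());
		}

		// one query per collection: { "_id" : { "$in" : [ ... ] } }
		Map<Object, Document> byId = new LinkedHashMap<>();
		for (Document doc : collection.find(new Document("_id", new Document("$in", ids)))) {
			byId.put(doc.get("_id"), doc);
		}

		// restore the caller's order; duplicate ids produce duplicate documents
		List<Document> result = new ArrayList<>(ids.size());
		for (Object id : ids) {
			result.add(byId.get(id));
		}
		return result;
	}
}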
assertProxyIsResolved(result.dbRefToConcreteCollection, true); + assertThat(result.dbRefToConcreteCollection.get(1).getId()).isEqualTo(id2); + + verify(converterSpy, never()).readRef(Mockito.any(DBRef.class)); + } + + @Test // DATAMONGO-1666 + void shouldBulkFetchSetOfReferencesForConstructorCreation() { + + String id1 = "1"; + String id2 = "2"; + String value = "val"; + + MappingMongoConverter converterSpy = spy(converter); + doReturn( + Arrays.asList(new Document("_id", id1).append("value", value), new Document("_id", id2).append("value", value))) + .when(converterSpy).bulkReadRefs(anyList()); + + Document document = new Document("dbRefToInterface", + Arrays.asList(new DBRef("lazyDbRefTarget", "1"), new DBRef("lazyDbRefTarget", "2"))); + + ClassWithDbRefSetConstructor result = converterSpy.read(ClassWithDbRefSetConstructor.class, document); + + assertThat(result.dbRefToInterface).isInstanceOf(Set.class); + + verify(converterSpy, never()).readRef(Mockito.any(DBRef.class)); + } + + @Test // DATAMONGO-1194 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { + + String id1 = "1"; + String id2 = "2"; + String value = "val"; + + MappingMongoConverter converterSpy = spy(converter); + doReturn(new Document("_id", id1).append("value", value)).doReturn(new Document("_id", id2).append("value", value)) + .when(converterSpy).readRef(Mockito.any(DBRef.class)); + + Document document = new Document(); + ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); + lazyDbRefs.dbRefToConcreteCollection = new ArrayList<>( + Arrays.asList(new LazyDbRefTarget(id1, value), new SerializableLazyDbRefTarget(id2, value))); + converterSpy.write(lazyDbRefs, document); + + ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); + + assertProxyIsResolved(result.dbRefToConcreteCollection, false); + assertThat(result.dbRefToConcreteCollection.get(0).getId()).isEqualTo(id1); + assertProxyIsResolved(result.dbRefToConcreteCollection, true); + assertThat(result.dbRefToConcreteCollection.get(1).getId()).isEqualTo(id2); + + verify(converterSpy, times(2)).readRef(Mockito.any(DBRef.class)); + verify(converterSpy, never()).bulkReadRefs(anyList()); + } + + @Test // DATAMONGO-1194 + void shouldBulkFetchMapOfReferences() { + + MapDBRefVal val1 = new MapDBRefVal(); + val1.id = BigInteger.ONE; + + MapDBRefVal val2 = new MapDBRefVal(); + val2.id = BigInteger.ZERO; + + MappingMongoConverter converterSpy = spy(converter); + doReturn(Arrays.asList(new Document("_id", val1.id), new Document("_id", val2.id))).when(converterSpy) + .bulkReadRefs(anyList()); + + Document document = new Document(); + MapDBRef mapDBRef = new MapDBRef(); + mapDBRef.map = new LinkedHashMap<>(); + mapDBRef.map.put("one", val1); + mapDBRef.map.put("two", val2); + + converterSpy.write(mapDBRef, document); + + MapDBRef result = converterSpy.read(MapDBRef.class, document); + + // assertProxyIsResolved(result.map, false); + assertThat(result.map.get("one").id).isEqualTo(val1.id); + // assertProxyIsResolved(result.map, true); + assertThat(result.map.get("two").id).isEqualTo(val2.id); + + verify(converterSpy, times(1)).bulkReadRefs(anyList()); + verify(converterSpy, never()).readRef(Mockito.any(DBRef.class)); + } + + @Test // DATAMONGO-1194 + void shouldBulkFetchLazyMapOfReferences() { + + MapDBRefVal val1 = new MapDBRefVal(); + val1.id = BigInteger.ONE; + + MapDBRefVal val2 
= new MapDBRefVal(); + val2.id = BigInteger.ZERO; + + MappingMongoConverter converterSpy = spy(converter); + doReturn(Arrays.asList(new Document("_id", val1.id), new Document("_id", val2.id))).when(converterSpy) + .bulkReadRefs(anyList()); + + Document document = new Document(); + MapDBRef mapDBRef = new MapDBRef(); + mapDBRef.lazyMap = new LinkedHashMap<>(); + mapDBRef.lazyMap.put("one", val1); + mapDBRef.lazyMap.put("two", val2); + + converterSpy.write(mapDBRef, document); + + MapDBRef result = converterSpy.read(MapDBRef.class, document); + + assertProxyIsResolved(result.lazyMap, false); + assertThat(result.lazyMap.get("one").id).isEqualTo(val1.id); + assertProxyIsResolved(result.lazyMap, true); + assertThat(result.lazyMap.get("two").id).isEqualTo(val2.id); + + verify(converterSpy, times(1)).bulkReadRefs(anyList()); + verify(converterSpy, never()).readRef(any()); + } + private Object transport(Object result) { return SerializationUtils.deserialize(SerializationUtils.serialize(result)); } class MapDBRef { @org.springframework.data.mongodb.core.mapping.DBRef Map map; + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) Map lazyMap; } class MapDBRefVal { @@ -597,18 +695,32 @@ static class ClassWithLazyDbRefs { @Id String id; @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) List dbRefToInterface; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) ArrayList dbRefToConcreteCollection; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) ArrayList dbRefToConcreteCollection; @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) LazyDbRefTarget dbRefToConcreteType; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) LazyDbRefTargetPropertyAccess dbRefToConcreteTypeWithPropertyAccess; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) LazyDbRefTargetWithPeristenceConstructor dbRefToConcreteTypeWithPersistenceConstructor; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) LazyDbRefTargetPropertyAccess dbRefToConcreteTypeWithPropertyAccess; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) LazyDbRefTargetWithPeristenceConstructor dbRefToConcreteTypeWithPersistenceConstructor; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor; + } + + static class ClassWithDbRefSetConstructor { + + final @org.springframework.data.mongodb.core.mapping.DBRef Set dbRefToInterface; + + public ClassWithDbRefSetConstructor(Set dbRefToInterface) { + this.dbRefToInterface = dbRefToInterface; + } } static class SerializableClassWithLazyDbRefs implements Serializable { private static final long serialVersionUID = 1L; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) SerializableLazyDbRefTarget dbRefToSerializableTarget; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) SerializableLazyDbRefTarget dbRefToSerializableTarget; } static class LazyDbRefTarget implements Serializable { @@ -618,15 +730,15 @@ static class LazyDbRefTarget implements Serializable { @Id String id; String value; - public LazyDbRefTarget() { + LazyDbRefTarget() { this(null); } - public LazyDbRefTarget(String id) 
{ + LazyDbRefTarget(String id) { this(id, null); } - public LazyDbRefTarget(String id, String value) { + LazyDbRefTarget(String id, String value) { this.id = id; this.value = value; } @@ -646,7 +758,7 @@ static class LazyDbRefTargetPropertyAccess implements Serializable { @Id @AccessType(Type.PROPERTY) String id; - public LazyDbRefTargetPropertyAccess(String id) { + LazyDbRefTargetPropertyAccess(String id) { this.id = id; } @@ -663,7 +775,7 @@ static class LazyDbRefTargetWithPeristenceConstructor extends LazyDbRefTarget { public LazyDbRefTargetWithPeristenceConstructor() {} @PersistenceConstructor - public LazyDbRefTargetWithPeristenceConstructor(String id, String value) { + LazyDbRefTargetWithPeristenceConstructor(String id, String value) { super(id, value); this.persistenceConstructorCalled = true; } @@ -679,7 +791,7 @@ static class LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor e boolean persistenceConstructorCalled; @PersistenceConstructor - public LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor(String id, String value) { + LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor(String id, String value) { super(id, value); this.persistenceConstructorCalled = true; } @@ -693,7 +805,7 @@ static class SerializableLazyDbRefTarget extends LazyDbRefTarget implements Seri public SerializableLazyDbRefTarget() {} - public SerializableLazyDbRefTarget(String id, String value) { + SerializableLazyDbRefTarget(String id, String value) { super(id, value); } @@ -706,14 +818,10 @@ static class ToStringObjectMethodOverrideLazyDbRefTarget extends LazyDbRefTarget public ToStringObjectMethodOverrideLazyDbRefTarget() {} - public ToStringObjectMethodOverrideLazyDbRefTarget(String id, String value) { + ToStringObjectMethodOverrideLazyDbRefTarget(String id, String value) { super(id, value); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return this.id + ":" + this.value; @@ -726,7 +834,7 @@ static class EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget extends LazyDb public EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget() {} - public EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget(String id, String value) { + EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget(String id, String value) { super(id, value); } @@ -740,7 +848,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) return true; if (obj == null) @@ -765,9 +873,12 @@ public boolean equals(Object obj) { static class WithObjectMethodOverrideLazyDbRefs { @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) LazyDbRefTarget dbRefToPlainObject; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) ToStringObjectMethodOverrideLazyDbRefTarget dbRefToToStringObjectMethodOverride; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget dbRefEqualsAndHashcodeObjectMethodOverride2; - @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget dbRefEqualsAndHashcodeObjectMethodOverride1; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) ToStringObjectMethodOverrideLazyDbRefTarget dbRefToToStringObjectMethodOverride; + @org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget dbRefEqualsAndHashcodeObjectMethodOverride2; + 
@org.springframework.data.mongodb.core.mapping.DBRef( + lazy = true) EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget dbRefEqualsAndHashcodeObjectMethodOverride1; } class ClassWithDbRefField { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java new file mode 100644 index 0000000000..75c7cc4366 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java @@ -0,0 +1,137 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.Arrays; +import java.util.Collections; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; + +import com.mongodb.DBRef; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Unit tests for {@link DefaultDbRefResolver}. 
+ * + * @author Christoph Strobl + * @author Oliver Gierke + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class DefaultDbRefResolverUnitTests { + + @Mock MongoDatabaseFactory factoryMock; + @Mock MongoDatabase dbMock; + @Mock MongoCollection collectionMock; + @Mock FindIterable cursorMock; + private DefaultDbRefResolver resolver; + + @BeforeEach + void setUp() { + + when(factoryMock.getMongoDatabase()).thenReturn(dbMock); + when(factoryMock.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(dbMock.getCollection(anyString(), any(Class.class))).thenReturn(collectionMock); + when(collectionMock.find(any(Document.class))).thenReturn(cursorMock); + + resolver = new DefaultDbRefResolver(factoryMock); + } + + @Test // DATAMONGO-1194 + @SuppressWarnings("unchecked") + void bulkFetchShouldLoadDbRefsCorrectly() { + + DBRef ref1 = new DBRef("collection-1", new ObjectId()); + DBRef ref2 = new DBRef("collection-1", new ObjectId()); + + resolver.bulkFetch(Arrays.asList(ref1, ref2)); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); + + verify(collectionMock, times(1)).find(captor.capture()); + + Document _id = DocumentTestUtils.getAsDocument(captor.getValue(), "_id"); + Iterable $in = DocumentTestUtils.getTypedValue(_id, "$in", Iterable.class); + + assertThat($in).hasSize(2); + } + + @Test // DATAMONGO-1194 + void bulkFetchShouldThrowExceptionWhenUsingDifferntCollectionsWithinSetOfReferences() { + + DBRef ref1 = new DBRef("collection-1", new ObjectId()); + DBRef ref2 = new DBRef("collection-2", new ObjectId()); + + assertThatThrownBy(() -> resolver.bulkFetch(Arrays.asList(ref1, ref2))) + .isInstanceOf(InvalidDataAccessApiUsageException.class); + } + + @Test // DATAMONGO-1194 + void bulkFetchShouldReturnEarlyForEmptyLists() { + + resolver.bulkFetch(Collections.emptyList()); + + verify(collectionMock, never()).find(Mockito.any(Document.class)); + } + + @Test // DATAMONGO-1194 + void bulkFetchShouldRestoreOriginalOrder() { + + Document o1 = new Document("_id", new ObjectId()); + Document o2 = new Document("_id", new ObjectId()); + + DBRef ref1 = new DBRef("collection-1", o1.get("_id")); + DBRef ref2 = new DBRef("collection-1", o2.get("_id")); + + when(cursorMock.into(any())).then(invocation -> Arrays.asList(o2, o1)); + + assertThat(resolver.bulkFetch(Arrays.asList(ref1, ref2))).containsExactly(o1, o2); + } + + @Test // DATAMONGO-1765 + void bulkFetchContainsDuplicates() { + + Document document = new Document("_id", new ObjectId()); + + DBRef ref1 = new DBRef("collection-1", document.get("_id")); + DBRef ref2 = new DBRef("collection-1", document.get("_id")); + + when(cursorMock.into(any())).then(invocation -> Arrays.asList(document)); + + assertThat(resolver.bulkFetch(Arrays.asList(ref1, ref2))).containsExactly(document, document); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java index 2cb4560cc2..75fca5b267 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,24 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.data.convert.ConfigurableTypeInformationMapper; import org.springframework.data.convert.SimpleTypeInformationMapper; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.util.TypeInformation; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Unit tests for {@link DefaultMongoTypeMapper}. - * + * * @author Oliver Gierke */ public class DefaultMongoTypeMapperUnitTests { @@ -45,7 +42,7 @@ public class DefaultMongoTypeMapperUnitTests { DefaultMongoTypeMapper typeMapper; - @Before + @BeforeEach public void setUp() { configurableTypeInformationMapper = new ConfigurableTypeInformationMapper( @@ -58,14 +55,14 @@ public void setUp() { @Test public void defaultInstanceWritesClasses() { - writesTypeToField(new BasicDBObject(), String.class, String.class.getName()); + writesTypeToField(new Document(), String.class, String.class.getName()); } @Test public void defaultInstanceReadsClasses() { - DBObject dbObject = new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class.getName()); - readsTypeFromField(dbObject, String.class); + Document document = new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class.getName()); + readsTypeFromField(document, String.class); } @Test @@ -74,8 +71,8 @@ public void writesMapKeyForType() { typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Arrays.asList(configurableTypeInformationMapper)); - writesTypeToField(new BasicDBObject(), String.class, "1"); - writesTypeToField(new BasicDBObject(), Object.class, null); + writesTypeToField(new Document(), String.class, "1"); + writesTypeToField(new Document(), Object.class, null); } @Test @@ -84,8 +81,8 @@ public void writesClassNamesForUnmappedValuesIfConfigured() { typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Arrays.asList(configurableTypeInformationMapper, simpleTypeInformationMapper)); - writesTypeToField(new BasicDBObject(), String.class, "1"); - writesTypeToField(new BasicDBObject(), Object.class, Object.class.getName()); + writesTypeToField(new Document(), String.class, "1"); + writesTypeToField(new Document(), Object.class, Object.class.getName()); } @Test @@ -94,8 +91,8 @@ public void readsTypeForMapKey() { typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Arrays.asList(configurableTypeInformationMapper)); - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "1"), String.class); - readsTypeFromField(new 
BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "unmapped"), null); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "1"), String.class); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "unmapped"), null); } @Test @@ -104,127 +101,122 @@ public void readsTypeLoadingClassesForUnmappedTypesIfConfigured() { typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Arrays.asList(configurableTypeInformationMapper, simpleTypeInformationMapper)); - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "1"), String.class); - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Object.class.getName()), - Object.class); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "1"), String.class); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Object.class.getName()), Object.class); } - /** - * @see DATAMONGO-709 - */ - @Test + @Test // DATAMONGO-709 public void writesTypeRestrictionsCorrectly() { - DBObject result = new BasicDBObject(); + Document result = new Document(); typeMapper = new DefaultMongoTypeMapper(); typeMapper.writeTypeRestrictions(result, Collections.> singleton(String.class)); - DBObject typeInfo = DBObjectTestUtils.getAsDBObject(result, DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); - List aliases = DBObjectTestUtils.getAsDBList(typeInfo, "$in"); - assertThat(aliases, hasSize(1)); - assertThat(aliases.get(0), is((Object) String.class.getName())); + Document typeInfo = DocumentTestUtils.getAsDocument(result, DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); + List aliases = DocumentTestUtils.getAsDBList(typeInfo, "$in"); + assertThat(aliases).hasSize(1); + assertThat(aliases.get(0)).isEqualTo((Object) String.class.getName()); } @Test public void addsFullyQualifiedClassNameUnderDefaultKeyByDefault() { - writesTypeToField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, new BasicDBObject(), String.class); + writesTypeToField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, new Document(), String.class); } @Test public void writesTypeToCustomFieldIfConfigured() { typeMapper = new DefaultMongoTypeMapper("_custom"); - writesTypeToField("_custom", new BasicDBObject(), String.class); + writesTypeToField("_custom", new Document(), String.class); } @Test public void doesNotWriteTypeInformationInCaseKeyIsSetToNull() { typeMapper = new DefaultMongoTypeMapper(null); - writesTypeToField(null, new BasicDBObject(), String.class); + writesTypeToField(null, new Document(), String.class); } @Test public void readsTypeFromDefaultKeyByDefault() { - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class.getName()), - String.class); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class.getName()), String.class); } @Test public void readsTypeFromCustomFieldConfigured() { typeMapper = new DefaultMongoTypeMapper("_custom"); - readsTypeFromField(new BasicDBObject("_custom", String.class.getName()), String.class); + readsTypeFromField(new Document("_custom", String.class.getName()), String.class); } @Test public void returnsListForBasicDBLists() { - readsTypeFromField(new BasicDBList(), null); + readsTypeFromField(new Document(), null); } @Test - public void returnsNullIfNoTypeInfoInDBObject() { - readsTypeFromField(new BasicDBObject(), null); - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, ""), null); + public void returnsNullIfNoTypeInfoInDocument() { + 
readsTypeFromField(new Document(), null); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, ""), null); } @Test public void returnsNullIfClassCannotBeLoaded() { - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "fooBar"), null); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, "fooBar"), null); } @Test public void returnsNullIfTypeKeySetToNull() { typeMapper = new DefaultMongoTypeMapper(null); - readsTypeFromField(new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class), null); + readsTypeFromField(new Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, String.class), null); } @Test public void returnsCorrectTypeKey() { - assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(true)); + assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isTrue(); typeMapper = new DefaultMongoTypeMapper("_custom"); - assertThat(typeMapper.isTypeKey("_custom"), is(true)); - assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(false)); + assertThat(typeMapper.isTypeKey("_custom")).isTrue(); + assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isFalse(); typeMapper = new DefaultMongoTypeMapper(null); - assertThat(typeMapper.isTypeKey("_custom"), is(false)); - assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(false)); + assertThat(typeMapper.isTypeKey("_custom")).isFalse(); + assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isFalse(); } - private void readsTypeFromField(DBObject dbObject, Class type) { + private void readsTypeFromField(Document document, Class type) { - TypeInformation typeInfo = typeMapper.readType(dbObject); + TypeInformation typeInfo = typeMapper.readType(document); if (type != null) { - assertThat(typeInfo, is(notNullValue())); - assertThat(typeInfo.getType(), is(typeCompatibleWith(type))); + assertThat(typeInfo).isNotNull(); + assertThat(typeInfo.getType()).isAssignableFrom(type); } else { - assertThat(typeInfo, is(nullValue())); + assertThat(typeInfo).isNull(); } } - private void writesTypeToField(String field, DBObject dbObject, Class type) { + private void writesTypeToField(String field, Document document, Class type) { - typeMapper.writeType(type, dbObject); + typeMapper.writeType(type, document); if (field == null) { - assertThat(dbObject.keySet().isEmpty(), is(true)); + assertThat(document.keySet().isEmpty()).isTrue(); } else { - assertThat(dbObject.containsField(field), is(true)); - assertThat(dbObject.get(field), is((Object) type.getName())); + assertThat(document.containsKey(field)).isTrue(); + assertThat(document.get(field)).isEqualTo((Object) type.getName()); } } - private void writesTypeToField(DBObject dbObject, Class type, Object value) { + private void writesTypeToField(Document document, Class type, Object value) { - typeMapper.writeType(type, dbObject); + typeMapper.writeType(type, document); if (value == null) { - assertThat(dbObject.keySet().isEmpty(), is(true)); + assertThat(document.keySet().isEmpty()).isTrue(); } else { - assertThat(dbObject.containsField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(true)); - assertThat(dbObject.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(value)); + assertThat(document.containsKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isTrue(); + assertThat(document.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(value); } } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java new file mode 100644 index 0000000000..4f46283b74 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java @@ -0,0 +1,129 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.BsonDocument; +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; + +import com.mongodb.BasicDBObject; + +/** + * Unit tests for {@link DocumentAccessor}. + * + * @author Oliver Gierke + */ +public class DocumentAccessorUnitTests { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity projectingTypeEntity = context.getRequiredPersistentEntity(ProjectingType.class); + MongoPersistentProperty fooProperty = projectingTypeEntity.getRequiredPersistentProperty("foo"); + + @Test // DATAMONGO-766 + public void putsNestedFieldCorrectly() { + + Document document = new Document(); + + DocumentAccessor accessor = new DocumentAccessor(document); + accessor.put(fooProperty, "FooBar"); + + Document aDocument = DocumentTestUtils.getAsDocument(document, "a"); + assertThat(aDocument.get("b")).isEqualTo((Object) "FooBar"); + } + + @Test // DATAMONGO-766 + public void getsNestedFieldCorrectly() { + + Document source = new Document("a", new Document("b", "FooBar")); + + DocumentAccessor accessor = new DocumentAccessor(source); + assertThat(accessor.get(fooProperty)).isEqualTo((Object) "FooBar"); + } + + @Test // DATAMONGO-766 + public void returnsNullForNonExistingFieldPath() { + + DocumentAccessor accessor = new DocumentAccessor(new Document()); + assertThat(accessor.get(fooProperty)).isNull(); + } + + @Test // DATAMONGO-766 + public void rejectsNonDocuments() { + assertThatIllegalArgumentException().isThrownBy(() -> new DocumentAccessor(new BsonDocument())); + } + + @Test // DATAMONGO-766 + public void rejectsNullDocument() { + assertThatIllegalArgumentException().isThrownBy(() -> new DocumentAccessor(null)); + } + + @Test // DATAMONGO-1335 + public void writesAllNestingsCorrectly() { + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(TypeWithTwoNestings.class); + + Document target = new Document(); + + DocumentAccessor accessor = new DocumentAccessor(target); + accessor.put(entity.getRequiredPersistentProperty("id"), "id"); + 
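The DocumentAccessor tests in this new file exercise how a dotted @Field path is expanded into nested documents on write and traversed on read. In usage terms, with fooProperty bound to the @Field("a.b") String foo of the ProjectingType fixture:

import org.bson.Document;

// Writing through the accessor nests the value along the configured path:
Document target = new Document();
new DocumentAccessor(target).put(fooProperty, "FooBar");
// target now contains: { "a" : { "b" : "FooBar" } }

// Reading traverses the same path, returning null when any segment is missing:
Object value = new DocumentAccessor(new Document("a", new Document("b", "FooBar"))).get(fooProperty);
// value == "FooBar"; new DocumentAccessor(new Document()).get(fooProperty) == null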
accessor.put(entity.getRequiredPersistentProperty("b"), "b"); + accessor.put(entity.getRequiredPersistentProperty("c"), "c"); + + Document nestedA = DocumentTestUtils.getAsDocument(target, "a"); + + assertThat(nestedA).isNotNull(); + assertThat(nestedA.get("b")).isEqualTo((Object) "b"); + assertThat(nestedA.get("c")).isEqualTo((Object) "c"); + } + + @Test // DATAMONGO-1471 + public void exposesAvailabilityOfFields() { + + DocumentAccessor accessor = new DocumentAccessor(new Document("a", new BasicDBObject("c", "d"))); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(ProjectingType.class); + + assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("foo"))).isFalse(); + assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("a"))).isTrue(); + assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("name"))).isFalse(); + } + + static class ProjectingType { + + String name; + @Field("a.b") String foo; + NestedType a; + } + + static class NestedType { + String b; + String c; + } + + static class TypeWithTwoNestings { + + String id; + @Field("a.b") String b; + @Field("a.c") String c; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java new file mode 100644 index 0000000000..ce6cfc6517 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java @@ -0,0 +1,210 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.core.convert.DocumentPointerFactory.LinkageDocument; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; + +/** + * @author Christoph Strobl + */ +public class DocumentPointerFactoryUnitTests { + + @Test // GH-3602 + void errorsOnMongoOperatorUsage() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : { '$eq' : 1 } }"); + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> getPointerValue(source, new Book())) // + .withMessageContaining("$eq"); + } + + @Test // GH-3602 + void computesStaticPointer() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : 1 }"); + + assertThat(getPointerValue(source, new Book())).isEqualTo(new Document("_id", 1)); + } + + @Test // GH-3602 + void computesPointerWithIdValuePlaceholder() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : ?#{id} }"); + + assertThat(getPointerValue(source, new Book("book-1", null, null))).isEqualTo(new Document("id", "book-1")); + } + + @Test // GH-3602 + void computesPointerForNonIdValuePlaceholder() { + + LinkageDocument source = LinkageDocument.from("{ 'title' : ?#{book_title} }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null))) + .isEqualTo(new Document("book_title", "Living With A Seal")); + } + + @Test // GH-3602 + void computesPlaceholderFromNestedPathValue() { + + LinkageDocument source = LinkageDocument.from("{ 'metadata.pages' : ?#{p} } }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272)))) + .isEqualTo(new Document("p", 272)); + } + + @Test // GH-3602 + void computesNestedPlaceholderPathValue() { + + LinkageDocument source = LinkageDocument.from("{ 'metadata' : { 'pages' : ?#{metadata.pages} } }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272)))) + .isEqualTo(new Document("metadata", new Document("pages", 272))); + } + + Object getPointerValue(LinkageDocument linkageDocument, Object value) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(value.getClass()); + return linkageDocument + .getDocumentPointer(mappingContext, persistentEntity, persistentEntity.getPropertyPathAccessor(value)) + .getPointer(); + } + + static class Book { + + String id; + String title; + List author; + Metadata metadata; + + public Book() {} + + public Book(String id, String title, List author) { + this.id = id; + this.title = title; + this.author = author; + } + + public Book(String id, String title, List author, Metadata metadata) { + this.id = id; + this.title = title; + this.author = author; + this.metadata = metadata; + } + + public String getId() { + return this.id; + } + + public String getTitle() { + return this.title; + } + + public List getAuthor() { + return this.author; + } + + public Metadata getMetadata() { + return this.metadata; + } + + public void setId(String id) { + this.id = id; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(List author) { + this.author = 
author; + } + + public void setMetadata(Metadata metadata) { + this.metadata = metadata; + } + + public String toString() { + return "DocumentPointerFactoryUnitTests.Book(id=" + this.getId() + ", title=" + this.getTitle() + ", author=" + + this.getAuthor() + ", metadata=" + this.getMetadata() + ")"; + } + } + + static class Metadata { + + int pages; + + public Metadata(int pages) { + this.pages = pages; + } + + public int getPages() { + return pages; + } + + public void setPages(int pages) { + this.pages = pages; + } + } + + static class Author { + + String id; + String firstname; + String lastname; + + public Author() {} + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public String toString() { + return "DocumentPointerFactoryUnitTests.Author(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java index 3ff7f3e514..7fb664b00c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,166 +15,163 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; -import org.springframework.data.mongodb.core.convert.GeoConverters.BoxToDbObjectConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.CircleToDbObjectConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToBoxConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToCircleConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToPointConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToPolygonConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToSphereConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.GeoCommandToDbObjectConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.PointToDbObjectConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.PolygonToDbObjectConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.SphereToDbObjectConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.*; import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.query.GeoCommand; -import com.mongodb.DBObject; - /** * Unit tests for {@link GeoConverters}. 
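+ * <p>Illustrative round trip (a sketch only, mirroring the converter pairs exercised by the tests
+ * below; the converter names are taken verbatim from those tests):
+ * <pre>{@code
+ * Box box = new Box(new Point(1, 2), new Point(3, 4));
+ * Document document = BoxToDocumentConverter.INSTANCE.convert(box);
+ * Box result = DocumentToBoxConverter.INSTANCE.convert(document); // equal to the original box
+ * }</pre>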
- * + * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl * @since 1.5 */ public class GeoConvertersUnitTests { - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsBoxToDbObjectAndBackCorrectly() { + @Test // DATAMONGO-858 + public void convertsBoxToDocumentAndBackCorrectly() { Box box = new Box(new Point(1, 2), new Point(3, 4)); - DBObject dbo = BoxToDbObjectConverter.INSTANCE.convert(box); - Box result = DbObjectToBoxConverter.INSTANCE.convert(dbo); + Document document = BoxToDocumentConverter.INSTANCE.convert(box); + Box result = DocumentToBoxConverter.INSTANCE.convert(document); - assertThat(result, is(box)); - assertThat(result.getClass().equals(Box.class), is(true)); + assertThat(result).isEqualTo(box); + assertThat(result.getClass().equals(Box.class)).isTrue(); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsCircleToDbObjectAndBackCorrectlyNeutralDistance() { + @Test // DATAMONGO-858 + public void convertsCircleToDocumentAndBackCorrectlyNeutralDistance() { Circle circle = new Circle(new Point(1, 2), 3); - DBObject dbo = CircleToDbObjectConverter.INSTANCE.convert(circle); - Circle result = DbObjectToCircleConverter.INSTANCE.convert(dbo); + Document document = CircleToDocumentConverter.INSTANCE.convert(circle); + Circle result = DocumentToCircleConverter.INSTANCE.convert(document); - assertThat(result, is(circle)); + assertThat(result).isEqualTo(circle); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsCircleToDbObjectAndBackCorrectlyMilesDistance() { + @Test // DATAMONGO-858 + public void convertsCircleToDocumentAndBackCorrectlyMilesDistance() { Distance radius = new Distance(3, Metrics.MILES); Circle circle = new Circle(new Point(1, 2), radius); - DBObject dbo = CircleToDbObjectConverter.INSTANCE.convert(circle); - Circle result = DbObjectToCircleConverter.INSTANCE.convert(dbo); + Document document = CircleToDocumentConverter.INSTANCE.convert(circle); + Circle result = DocumentToCircleConverter.INSTANCE.convert(document); - assertThat(result, is(circle)); - assertThat(result.getRadius(), is(radius)); + assertThat(result).isEqualTo(circle); + assertThat(result.getRadius()).isEqualTo(radius); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsPolygonToDbObjectAndBackCorrectly() { + @Test // DATAMONGO-858 + public void convertsPolygonToDocumentAndBackCorrectly() { Polygon polygon = new Polygon(new Point(1, 2), new Point(2, 3), new Point(3, 4), new Point(5, 6)); - DBObject dbo = PolygonToDbObjectConverter.INSTANCE.convert(polygon); - Polygon result = DbObjectToPolygonConverter.INSTANCE.convert(dbo); + Document document = PolygonToDocumentConverter.INSTANCE.convert(polygon); + Polygon result = DocumentToPolygonConverter.INSTANCE.convert(document); - assertThat(result, is(polygon)); - assertThat(result.getClass().equals(Polygon.class), is(true)); + assertThat(result).isEqualTo(polygon); + assertThat(result.getClass().equals(Polygon.class)).isTrue(); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsSphereToDbObjectAndBackCorrectlyWithNeutralDistance() { + @Test // DATAMONGO-858 + public void convertsSphereToDocumentAndBackCorrectlyWithNeutralDistance() { Sphere sphere = new Sphere(new Point(1, 2), 3); - DBObject dbo = SphereToDbObjectConverter.INSTANCE.convert(sphere); - Sphere result = DbObjectToSphereConverter.INSTANCE.convert(dbo); + Document document = SphereToDocumentConverter.INSTANCE.convert(sphere); + Sphere result = DocumentToSphereConverter.INSTANCE.convert(document); - 
assertThat(result, is(sphere)); - assertThat(result.getClass().equals(Sphere.class), is(true)); + assertThat(result).isEqualTo(sphere); + assertThat(result.getClass().equals(Sphere.class)).isTrue(); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsSphereToDbObjectAndBackCorrectlyWithKilometerDistance() { + @Test // DATAMONGO-858 + public void convertsSphereToDocumentAndBackCorrectlyWithKilometerDistance() { Distance radius = new Distance(3, Metrics.KILOMETERS); Sphere sphere = new Sphere(new Point(1, 2), radius); - DBObject dbo = SphereToDbObjectConverter.INSTANCE.convert(sphere); - Sphere result = DbObjectToSphereConverter.INSTANCE.convert(dbo); + Document document = SphereToDocumentConverter.INSTANCE.convert(sphere); + Sphere result = DocumentToSphereConverter.INSTANCE.convert(document); - assertThat(result, is(sphere)); - assertThat(result.getRadius(), is(radius)); - assertThat(result.getClass().equals(org.springframework.data.mongodb.core.geo.Sphere.class), is(true)); + assertThat(result).isEqualTo(sphere); + assertThat(result.getRadius()).isEqualTo(radius); + assertThat(result.getClass().equals(Sphere.class)).isTrue(); } - /** - * @see DATAMONGO-858 - */ - @Test + @Test // DATAMONGO-858 public void convertsPointToListAndBackCorrectly() { Point point = new Point(1, 2); - DBObject dbo = PointToDbObjectConverter.INSTANCE.convert(point); - Point result = DbObjectToPointConverter.INSTANCE.convert(dbo); + Document document = PointToDocumentConverter.INSTANCE.convert(point); + Point result = DocumentToPointConverter.INSTANCE.convert(document); - assertThat(result, is(point)); - assertThat(result.getClass().equals(Point.class), is(true)); + assertThat(result).isEqualTo(point); + assertThat(result.getClass().equals(Point.class)).isTrue(); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsGeoCommandToDbObjectCorrectly() { + @Test // DATAMONGO-858 + public void convertsGeoCommandToDocumentCorrectly() { Box box = new Box(new double[] { 1, 2 }, new double[] { 3, 4 }); GeoCommand cmd = new GeoCommand(box); - DBObject dbo = GeoCommandToDbObjectConverter.INSTANCE.convert(cmd); + Document document = GeoCommandToDocumentConverter.INSTANCE.convert(cmd); + + assertThat(document).isNotNull(); - assertThat(dbo, is(notNullValue())); + List boxObject = (List) document.get("$box"); - DBObject boxObject = (DBObject) dbo.get("$box"); + assertThat(boxObject) + .isEqualTo((Object) Arrays.asList(GeoConverters.toList(box.getFirst()), GeoConverters.toList(box.getSecond()))); + } + + @Test // DATAMONGO-1607 + public void convertsPointCorrectlyWhenUsingNonDoubleForCoordinates() { - assertThat(boxObject, - is((Object) Arrays.asList(GeoConverters.toList(box.getFirst()), GeoConverters.toList(box.getSecond())))); + assertThat(DocumentToPointConverter.INSTANCE.convert(new Document().append("x", 1L).append("y", 2L))) + .isEqualTo(new Point(1, 2)); } + + @Test // DATAMONGO-1607 + public void convertsCircleCorrectlyWhenUsingNonDoubleForCoordinates() { + + Document circle = new Document(); + circle.put("center", new Document().append("x", 1).append("y", 2)); + circle.put("radius", 3L); + + assertThat(DocumentToCircleConverter.INSTANCE.convert(circle)) + .isEqualTo(new Circle(new Point(1, 2), new Distance(3))); + } + + @Test // DATAMONGO-1607 + public void convertsSphereCorrectlyWhenUsingNonDoubleForCoordinates() { + + Document sphere = new Document(); + sphere.put("center", new Document().append("x", 1).append("y", 2)); + sphere.put("radius", 3L); + + 
assertThat(DocumentToSphereConverter.INSTANCE.convert(sphere)) + .isEqualTo(new Sphere(new Point(1, 2), new Distance(3))); + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java index 935d135e83..36d69d5c71 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,24 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.IsEqual.*; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.bson.Document; +import org.junit.jupiter.api.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToGeoJsonLineStringConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToGeoJsonMultiLineStringConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToGeoJsonMultiPointConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToGeoJsonMultiPolygonConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToGeoJsonPointConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DbObjectToGeoJsonPolygonConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.GeoJsonToDbObjectConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonLineStringConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonMultiLineStringConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonMultiPointConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonMultiPolygonConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonPointConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonPolygonConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.GeoJsonToDocumentConverter; import org.springframework.data.mongodb.core.geo.GeoJson; import org.springframework.data.mongodb.core.geo.GeoJsonGeometryCollection; import org.springframework.data.mongodb.core.geo.GeoJsonLineString; @@ -46,21 +44,18 @@ import org.springframework.data.mongodb.test.util.BasicDbListBuilder; import 
com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; /** * @author Christoph Strobl */ @RunWith(Suite.class) -@SuiteClasses({ GeoJsonConverterUnitTests.GeoJsonToDbObjectConverterUnitTests.class, - GeoJsonConverterUnitTests.DbObjectToGeoJsonPointConverterUnitTests.class, - GeoJsonConverterUnitTests.DbObjectToGeoJsonPolygonConverterUnitTests.class, - GeoJsonConverterUnitTests.DbObjectToGeoJsonLineStringConverterUnitTests.class, - GeoJsonConverterUnitTests.DbObjectToGeoJsonMultiPolygonConverterUnitTests.class, - GeoJsonConverterUnitTests.DbObjectToGeoJsonMultiLineStringConverterUnitTests.class, - GeoJsonConverterUnitTests.DbObjectToGeoJsonMultiPointConverterUnitTests.class }) +@SuiteClasses({ GeoJsonConverterUnitTests.GeoJsonToDocumentConverterUnitTests.class, + GeoJsonConverterUnitTests.DocumentToGeoJsonPointConverterUnitTests.class, + GeoJsonConverterUnitTests.DocumentToGeoJsonPolygonConverterUnitTests.class, + GeoJsonConverterUnitTests.DocumentToGeoJsonLineStringConverterUnitTests.class, + GeoJsonConverterUnitTests.DocumentToGeoJsonMultiPolygonConverterUnitTests.class, + GeoJsonConverterUnitTests.DocumentToGeoJsonMultiLineStringConverterUnitTests.class, + GeoJsonConverterUnitTests.DocumentToGeoJsonMultiPointConverterUnitTests.class }) public class GeoJsonConverterUnitTests { /* @@ -89,7 +84,7 @@ public class GeoJsonConverterUnitTests { static final GeoJsonGeometryCollection GEOMETRY_COLLECTION = new GeoJsonGeometryCollection( Arrays.<GeoJson<?>> asList(SINGLE_POINT, POLYGON)); /* - * -- GeoJson DBObjects + * -- GeoJson Documents */ // Point @@ -97,10 +92,9 @@ public class GeoJsonConverterUnitTests { .add(SINGLE_POINT.getX()) // .add(SINGLE_POINT.getY()) // .get(); // - static final DBObject SINGLE_POINT_DBO = new BasicDBObjectBuilder() // - .add("type", "Point") // - .add("coordinates", SINGE_POINT_CORDS)// - .get(); + static final Document SINGLE_POINT_DOC = new Document() // + .append("type", "Point") // + .append("coordinates", SINGE_POINT_CORDS);// // MultiPoint static final BasicDBList MULTI_POINT_CORDS = new BasicDbListBuilder() // @@ -108,10 +102,9 @@ .add(new BasicDbListBuilder().add(POINT_2.getX()).add(POINT_2.getY()).get()) // .add(new BasicDbListBuilder().add(POINT_3.getX()).add(POINT_3.getY()).get()) // .get(); - static final DBObject MULTI_POINT_DBO = new BasicDBObjectBuilder() // - .add("type", "MultiPoint")// - .add("coordinates", MULTI_POINT_CORDS)// - .get(); + static final Document MULTI_POINT_DOC = new Document() // + .append("type", "MultiPoint")// + .append("coordinates", MULTI_POINT_CORDS);// // Polygon static final BasicDBList POLYGON_OUTER_CORDS = new BasicDbListBuilder() // @@ -131,17 +124,15 @@ public class GeoJsonConverterUnitTests { .get(); static final BasicDBList POLYGON_CORDS = new BasicDbListBuilder().add(POLYGON_OUTER_CORDS).get(); - static final DBObject POLYGON_DBO = new BasicDBObjectBuilder() // - .add("type", "Polygon") // - .add("coordinates", POLYGON_CORDS) // - .get(); + static final Document POLYGON_DOC = new Document() // + ..append("type", "Polygon") // + .append("coordinates", POLYGON_CORDS); // static final BasicDBList POLYGON_WITH_2_RINGS_CORDS = new BasicDbListBuilder().add(POLYGON_OUTER_CORDS) .add(POLYGON_INNER_CORDS).get(); - static final DBObject POLYGON_WITH_2_RINGS_DBO = new BasicDBObjectBuilder() // - .add("type", "Polygon") // - .add("coordinates", POLYGON_WITH_2_RINGS_CORDS) // - .get(); + static final Document 
POLYGON_WITH_2_RINGS_DOC = new Document() // + .append("type", "Polygon") // + .append("coordinates", POLYGON_WITH_2_RINGS_CORDS); // LineString static final BasicDBList LINE_STRING_CORDS_0 = new BasicDbListBuilder() // @@ -153,72 +144,55 @@ public class GeoJsonConverterUnitTests { .add(new BasicDbListBuilder().add(POINT_3.getX()).add(POINT_3.getY()).get()) // .add(new BasicDbListBuilder().add(POINT_0.getX()).add(POINT_0.getY()).get()) // .get(); - static final DBObject LINE_STRING_DBO = new BasicDBObjectBuilder().add("type", "LineString") - .add("coordinates", LINE_STRING_CORDS_0).get(); + static final Document LINE_STRING_DOC = new Document().append("type", "LineString").append("coordinates", + LINE_STRING_CORDS_0); // MultiLineString static final BasicDBList MUILT_LINE_STRING_CORDS = new BasicDbListBuilder() // .add(LINE_STRING_CORDS_0) // .add(LINE_STRING_CORDS_1) // .get(); - static final DBObject MULTI_LINE_STRING_DBO = new BasicDBObjectBuilder().add("type", "MultiLineString") - .add("coordinates", MUILT_LINE_STRING_CORDS).get(); + static final Document MULTI_LINE_STRING_DOC = new Document().append("type", "MultiLineString").append("coordinates", + MUILT_LINE_STRING_CORDS); // MultiPolygoin static final BasicDBList MULTI_POLYGON_CORDS = new BasicDbListBuilder().add(POLYGON_CORDS).get(); - static final DBObject MULTI_POLYGON_DBO = new BasicDBObjectBuilder().add("type", "MultiPolygon") - .add("coordinates", MULTI_POLYGON_CORDS).get(); + static final Document MULTI_POLYGON_DOC = new Document().append("type", "MultiPolygon").append("coordinates", + MULTI_POLYGON_CORDS); // GeometryCollection static final BasicDBList GEOMETRY_COLLECTION_GEOMETRIES = new BasicDbListBuilder() // - .add(SINGLE_POINT_DBO)// - .add(POLYGON_DBO)// + .add(SINGLE_POINT_DOC)// + .add(POLYGON_DOC)// .get(); - static final DBObject GEOMETRY_COLLECTION_DBO = new BasicDBObjectBuilder().add("type", "GeometryCollection") - .add("geometries", GEOMETRY_COLLECTION_GEOMETRIES).get(); + static final Document GEOMETRY_COLLECTION_DOC = new Document().append("type", "GeometryCollection") + .append("geometries", GEOMETRY_COLLECTION_GEOMETRIES); /** * @author Christoph Strobl */ - public static class DbObjectToGeoJsonPolygonConverterUnitTests { + public static class DocumentToGeoJsonPolygonConverterUnitTests { - DbObjectToGeoJsonPolygonConverter converter = DbObjectToGeoJsonPolygonConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); + DocumentToGeoJsonPolygonConverter converter = DocumentToGeoJsonPolygonConverter.INSTANCE; - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(POLYGON_DBO), equalTo(POLYGON)); + assertThat(converter.convert(POLYGON_DOC)).isEqualTo(POLYGON); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPolygon() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to Polygon"); - - converter.convert(new BasicDBObject("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } - /** - * @see DATAMONGO-1399 - */ - @Test + @Test // DATAMONGO-1399 
public void shouldConvertDboWithMultipleRingsCorrectly() { - assertThat(converter.convert(POLYGON_WITH_2_RINGS_DBO), equalTo(POLYGON_WITH_2_RINGS)); + assertThat(converter.convert(POLYGON_WITH_2_RINGS_DOC)).isEqualTo(POLYGON_WITH_2_RINGS); } } @@ -226,264 +200,169 @@ public void shouldConvertDboWithMultipleRingsCorrectly() { /** * @author Christoph Strobl */ - public static class DbObjectToGeoJsonPointConverterUnitTests { + public static class DocumentToGeoJsonPointConverterUnitTests { - DbObjectToGeoJsonPointConverter converter = DbObjectToGeoJsonPointConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); + DocumentToGeoJsonPointConverter converter = DocumentToGeoJsonPointConverter.INSTANCE; - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(SINGLE_POINT_DBO), equalTo(SINGLE_POINT)); + assertThat(converter.convert(SINGLE_POINT_DOC)).isEqualTo(SINGLE_POINT); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to Point"); - - converter.convert(new BasicDBObject("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } /** * @author Christoph Strobl */ - public static class DbObjectToGeoJsonLineStringConverterUnitTests { + public static class DocumentToGeoJsonLineStringConverterUnitTests { - DbObjectToGeoJsonLineStringConverter converter = DbObjectToGeoJsonLineStringConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); + DocumentToGeoJsonLineStringConverter converter = DocumentToGeoJsonLineStringConverter.INSTANCE; - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(LINE_STRING_DBO), equalTo(LINE_STRING)); + assertThat(converter.convert(LINE_STRING_DOC)).isEqualTo(LINE_STRING); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to LineString"); - - converter.convert(new BasicDBObject("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } /** * @author Christoph Strobl */ - public static class DbObjectToGeoJsonMultiLineStringConverterUnitTests { + public static class DocumentToGeoJsonMultiLineStringConverterUnitTests { - DbObjectToGeoJsonMultiLineStringConverter converter = DbObjectToGeoJsonMultiLineStringConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); + DocumentToGeoJsonMultiLineStringConverter converter = DocumentToGeoJsonMultiLineStringConverter.INSTANCE; - /** - * @see 
DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(MULTI_LINE_STRING_DBO), equalTo(MULTI_LINE_STRING)); + assertThat(converter.convert(MULTI_LINE_STRING_DOC)).isEqualTo(MULTI_LINE_STRING); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to MultiLineString"); - - converter.convert(new BasicDBObject("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } /** * @author Christoph Strobl */ - public static class DbObjectToGeoJsonMultiPointConverterUnitTests { + public static class DocumentToGeoJsonMultiPointConverterUnitTests { - DbObjectToGeoJsonMultiPointConverter converter = DbObjectToGeoJsonMultiPointConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); + DocumentToGeoJsonMultiPointConverter converter = DocumentToGeoJsonMultiPointConverter.INSTANCE; - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(MULTI_POINT_DBO), equalTo(MULTI_POINT)); + assertThat(converter.convert(MULTI_POINT_DOC)).isEqualTo(MULTI_POINT); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to MultiPoint"); - - converter.convert(new BasicDBObject("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } /** * @author Christoph Strobl */ - public static class DbObjectToGeoJsonMultiPolygonConverterUnitTests { + public static class DocumentToGeoJsonMultiPolygonConverterUnitTests { - DbObjectToGeoJsonMultiPolygonConverter converter = DbObjectToGeoJsonMultiPolygonConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); + DocumentToGeoJsonMultiPolygonConverter converter = DocumentToGeoJsonMultiPolygonConverter.INSTANCE; - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(MULTI_POLYGON_DBO), equalTo(MULTI_POLYGON)); + assertThat(converter.convert(MULTI_POLYGON_DOC)).isEqualTo(MULTI_POLYGON); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to MultiPolygon"); - - 
converter.convert(new BasicDBObject("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } /** * @author Christoph Strobl */ - public static class GeoJsonToDbObjectConverterUnitTests { + public static class GeoJsonToDocumentConverterUnitTests { - GeoJsonToDbObjectConverter converter = GeoJsonToDbObjectConverter.INSTANCE; + GeoJsonToDocumentConverter converter = GeoJsonToDocumentConverter.INSTANCE; - /** - * @see DATAMONGO-1135 - */ + // DATAMONGO-1135 public void convertShouldReturnNullWhenGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135 public void shouldConvertGeoJsonPointCorrectly() { - assertThat(converter.convert(SINGLE_POINT), equalTo(SINGLE_POINT_DBO)); + assertThat(converter.convert(SINGLE_POINT)).isEqualTo(SINGLE_POINT_DOC); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135 public void shouldConvertGeoJsonPolygonCorrectly() { - assertThat(converter.convert(POLYGON), equalTo(POLYGON_DBO)); + assertThat(converter.convert(POLYGON)).isEqualTo(POLYGON_DOC); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertGeoJsonLineStringCorrectly() { - assertThat(converter.convert(LINE_STRING), equalTo(LINE_STRING_DBO)); + assertThat(converter.convert(LINE_STRING)).isEqualTo(LINE_STRING_DOC); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertGeoJsonMultiLineStringCorrectly() { - assertThat(converter.convert(MULTI_LINE_STRING), equalTo(MULTI_LINE_STRING_DBO)); + assertThat(converter.convert(MULTI_LINE_STRING)).isEqualTo(MULTI_LINE_STRING_DOC); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertGeoJsonMultiPointCorrectly() { - assertThat(converter.convert(MULTI_POINT), equalTo(MULTI_POINT_DBO)); + assertThat(converter.convert(MULTI_POINT)).isEqualTo(MULTI_POINT_DOC); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertGeoJsonMultiPolygonCorrectly() { - assertThat(converter.convert(MULTI_POLYGON), equalTo(MULTI_POLYGON_DBO)); + assertThat(converter.convert(MULTI_POLYGON)).isEqualTo(MULTI_POLYGON_DOC); } - /** - * @see DATAMONGO-1137 - */ - @Test + @Test // DATAMONGO-1137 public void shouldConvertGeometryCollectionCorrectly() { - assertThat(converter.convert(GEOMETRY_COLLECTION), equalTo(GEOMETRY_COLLECTION_DBO)); + assertThat(converter.convert(GEOMETRY_COLLECTION)).isEqualTo(GEOMETRY_COLLECTION_DOC); } - /** - * @see DATAMONGO-1399 - */ - @Test + @Test // DATAMONGO-1399 public void shouldConvertGeoJsonPolygonWithMultipleRingsCorrectly() { - assertThat(converter.convert(POLYGON_WITH_2_RINGS), equalTo(POLYGON_WITH_2_RINGS_DBO)); + assertThat(converter.convert(POLYGON_WITH_2_RINGS)).isEqualTo(POLYGON_WITH_2_RINGS_DOC); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java new file mode 100644 index 0000000000..43ea9f3a64 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java @@ -0,0 +1,65 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.LazyLoadingException; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; + +import com.mongodb.DBRef; + +/** + * Unit tests for {@link LazyLoadingInterceptor}. + * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class LazyLoadingInterceptorUnitTests { + + @Mock MongoPersistentProperty propertyMock; + @Mock DBRef dbrefMock; + @Mock DbRefResolverCallback callbackMock; + + @Test // DATAMONGO-1437 + void shouldPreserveCauseForNonTranslatableExceptions() throws Throwable { + + NullPointerException npe = new NullPointerException("Some Exception we did not think about."); + when(callbackMock.resolve(propertyMock)).thenThrow(npe); + + assertThatExceptionOfType(LazyLoadingException.class).isThrownBy(() -> { + new LazyLoadingInterceptor(propertyMock, callbackMock, dbrefMock, new NullExceptionTranslator()).intercept(null, + LazyLoadingProxy.class.getMethod("getTarget"), null, null); + }).withCause(npe); + } + + static class NullExceptionTranslator implements PersistenceExceptionTranslator { + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return null; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java index d6de71ff13..54f82f6921 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,19 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.*; + +import java.util.function.Consumer; import org.springframework.aop.framework.Advised; import org.springframework.cglib.proxy.Factory; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.test.util.ReflectionTestUtils; /** * Utility class to test proxy handling for lazy loading. - * + * * @author Oliver Gierke */ public class LazyLoadingTestUtils { @@ -33,19 +35,53 @@ public class LazyLoadingTestUtils { /** * Asserts that the given repository is resolved (expected is {@literal true}) and the value is non-{@literal null} or * unresolved (expected is {@literal false}) and the value is {@literal null}. - * + * * @param target * @param expected */ public static void assertProxyIsResolved(Object target, boolean expected) { LazyLoadingInterceptor interceptor = extractInterceptor(target); - assertThat(ReflectionTestUtils.getField(interceptor, "resolved"), is((Object) expected)); - assertThat(ReflectionTestUtils.getField(interceptor, "result"), is(expected ? notNullValue() : nullValue())); + assertThat(ReflectionTestUtils.getField(interceptor, "resolved")).isEqualTo((Object) expected); + + if (expected) { + assertThat(ReflectionTestUtils.getField(interceptor, "result")).isNotNull(); + } else { + assertThat(ReflectionTestUtils.getField(interceptor, "result")).isNull(); + + } + } + + public static void assertProxy(Object proxy, Consumer<LazyLoadingProxyValueRetriever> verification) { + + LazyLoadingInterceptor interceptor = (LazyLoadingInterceptor) (proxy instanceof Advised + ? ((Advised) proxy).getAdvisors()[0].getAdvice() + : ((Factory) proxy).getCallback(0)); + + verification.accept(new LazyLoadingProxyValueRetriever(interceptor)); } private static LazyLoadingInterceptor extractInterceptor(Object proxy) { return (LazyLoadingInterceptor) (proxy instanceof Advised ? ((Advised) proxy).getAdvisors()[0].getAdvice() : ((Factory) proxy).getCallback(0)); } + + public static class LazyLoadingProxyValueRetriever { + + LazyLoadingInterceptor interceptor; + + public LazyLoadingProxyValueRetriever(LazyLoadingInterceptor interceptor) { + this.interceptor = interceptor; + } + + public boolean isResolved() { + return (boolean) ReflectionTestUtils.getField(interceptor, "resolved"); + } + + @Unwrapped.Nullable + public Object currentValue() { + return ReflectionTestUtils.getField(interceptor, "result"); + } + + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java new file mode 100644 index 0000000000..1ce58eeb47 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java @@ -0,0 +1,483 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Integration tests for {@link MappingMongoConverter}. 
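+ * <p>Illustrative read path (a sketch only; {@code converter} and {@code WithSingleValueDbRef} are the
+ * fixtures wired up in the tests below, and an already materialized reference is read without consulting
+ * the {@code DbRefResolver}):
+ * <pre>{@code
+ * Document source = new Document("_id", "id-1")
+ *     .append("sample", new Document("_id", "sample-1").append("value", "one"));
+ * WithSingleValueDbRef read = converter.read(WithSingleValueDbRef.class, source);
+ * }</pre>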
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MappingMongoConverterTests { + + private static final String DATABASE = "mapping-converter-tests"; + + private static @Client MongoClient client; + + private MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, DATABASE); + + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private DbRefResolver dbRefResolver; + + @BeforeEach + void setUp() { + + MongoDatabase database = client.getDatabase(DATABASE); + + database.getCollection("samples").deleteMany(new Document()); + database.getCollection("java-time-types").deleteMany(new Document()); + + dbRefResolver = spy(new DefaultDbRefResolver(factory)); + + mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + mappingContext.setInitialEntitySet(Set.of(WithLazyDBRefAsConstructorArg.class, WithLazyDBRef.class, WithJavaTimeTypes.class)); + mappingContext.setAutoIndexCreation(false); + mappingContext.afterPropertiesSet(); + + converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.afterPropertiesSet(); + } + + @Test // DATAMONGO-2004 + void resolvesLazyDBRefOnAccess() { + + client.getDatabase(DATABASE).getCollection("samples") + .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), + new Document("_id", "sample-2").append("value", "two"))); + + Document source = new Document("_id", "id-1").append("lazyList", + Arrays.asList(new com.mongodb.DBRef("samples", "sample-1"), new com.mongodb.DBRef("samples", "sample-2"))); + + WithLazyDBRef target = converter.read(WithLazyDBRef.class, source); + + verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); + + assertThat(target.lazyList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyList()).contains(new Sample("sample-1", "one"), new Sample("sample-2", "two")); + + verify(dbRefResolver).bulkFetch(any()); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedReferences() { + + Document sampleSource = new Document("_id", "sample-1").append("value", "one"); + Document source = new Document("_id", "id-1").append("sample", sampleSource); + + WithSingleValueDbRef read = converter.read(WithSingleValueDbRef.class, source); + + assertThat(read.sample).isEqualTo(converter.read(Sample.class, sampleSource)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedListOfReferences() { + + Document sample1Source = new Document("_id", "sample-1").append("value", "one"); + Document sample2Source = new Document("_id", "sample-2").append("value", "two"); + Document source = new Document("_id", "id-1").append("lazyList", List.of(sample1Source, sample2Source)); + + WithLazyDBRef read = converter.read(WithLazyDBRef.class, source); + + assertThat(read.lazyList).containsExactly(converter.read(Sample.class, sample1Source), + converter.read(Sample.class, sample2Source)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedMapOfReferences() { + + Document sample1Source = new Document("_id", "sample-1").append("value", "one"); + Document sample2Source = new Document("_id", "sample-2").append("value", "two"); + Document source = new Document("_id", "id-1").append("sampleMap", + new Document("s1", sample1Source).append("s2", sample2Source)); + + WithMapValueDbRef 
read = converter.read(WithMapValueDbRef.class, source); + + assertThat(read.sampleMap) // + .containsEntry("s1", converter.read(Sample.class, sample1Source)) // + .containsEntry("s2", converter.read(Sample.class, sample2Source)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedMapOfLazyReferences() { + + Document sample1Source = new Document("_id", "sample-1").append("value", "one"); + Document sample2Source = new Document("_id", "sample-2").append("value", "two"); + Document source = new Document("_id", "id-1").append("sampleMapLazy", + new Document("s1", sample1Source).append("s2", sample2Source)); + + WithMapValueDbRef read = converter.read(WithMapValueDbRef.class, source); + + assertThat(read.sampleMapLazy) // + .containsEntry("s1", converter.read(Sample.class, sample1Source)) // + .containsEntry("s2", converter.read(Sample.class, sample2Source)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void resolvesLazyDBRefMapOnAccess() { + + client.getDatabase(DATABASE).getCollection("samples") + .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), + new Document("_id", "sample-2").append("value", "two"))); + + Document source = new Document("_id", "id-1").append("sampleMapLazy", + new Document("s1", new com.mongodb.DBRef("samples", "sample-1")).append("s2", + new com.mongodb.DBRef("samples", "sample-2"))); + + WithMapValueDbRef target = converter.read(WithMapValueDbRef.class, source); + + verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); + + assertThat(target.sampleMapLazy).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getSampleMapLazy()).containsEntry("s1", new Sample("sample-1", "one")).containsEntry("s2", + new Sample("sample-2", "two")); + + verify(dbRefResolver).bulkFetch(any()); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedLazyReferences() { + + Document sampleSource = new Document("_id", "sample-1").append("value", "one"); + Document source = new Document("_id", "id-1").append("sampleLazy", sampleSource); + + WithSingleValueDbRef read = converter.read(WithSingleValueDbRef.class, source); + + assertThat(read.sampleLazy).isEqualTo(converter.read(Sample.class, sampleSource)); + verifyNoInteractions(dbRefResolver); + } + + @Test // DATAMONGO-2004 + void resolvesLazyDBRefConstructorArgOnAccess() { + + client.getDatabase(DATABASE).getCollection("samples") + .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), + new Document("_id", "sample-2").append("value", "two"))); + + Document source = new Document("_id", "id-1").append("lazyList", + Arrays.asList(new com.mongodb.DBRef("samples", "sample-1"), new com.mongodb.DBRef("samples", "sample-2"))); + + WithLazyDBRefAsConstructorArg target = converter.read(WithLazyDBRefAsConstructorArg.class, source); + + verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); + + assertThat(target.lazyList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyList()).contains(new Sample("sample-1", "one"), new Sample("sample-2", "two")); + + verify(dbRefResolver).bulkFetch(any()); + } + + @Test // DATAMONGO-2400 + void readJavaTimeValuesWrittenViaCodec() { + + configureConverterWithNativeJavaTimeCodec(); + MongoCollection<Document> mongoCollection = client.getDatabase(DATABASE).getCollection("java-time-types"); + + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + WithJavaTimeTypes source = WithJavaTimeTypes.withJavaTimeTypes(now); + 
source.id = "id-1"; + + mongoCollection.insertOne(source.toDocument()); + + assertThat(converter.read(WithJavaTimeTypes.class, mongoCollection.find(new Document("_id", source.id)).first())) + .isEqualTo(source); + } + + void configureConverterWithNativeJavaTimeCodec() { + + converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions( + MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs)); + converter.afterPropertiesSet(); + } + + public static class WithLazyDBRef { + + @Id String id; + @DBRef(lazy = true) List lazyList; + + List getLazyList() { + return lazyList; + } + } + + public static class WithSingleValueDbRef { + + @Id // + String id; + + @DBRef // + Sample sample; + + @DBRef(lazy = true) // + Sample sampleLazy; + + public String getId() { + return this.id; + } + + public Sample getSample() { + return this.sample; + } + + public Sample getSampleLazy() { + return this.sampleLazy; + } + + public void setId(String id) { + this.id = id; + } + + public void setSample(Sample sample) { + this.sample = sample; + } + + public void setSampleLazy(Sample sampleLazy) { + this.sampleLazy = sampleLazy; + } + + public String toString() { + return "MappingMongoConverterTests.WithSingleValueDbRef(id=" + this.getId() + ", sample=" + this.getSample() + + ", sampleLazy=" + this.getSampleLazy() + ")"; + } + } + + public static class WithMapValueDbRef { + + @Id String id; + + @DBRef // + Map sampleMap; + + @DBRef(lazy = true) // + Map sampleMapLazy; + + public String getId() { + return this.id; + } + + public Map getSampleMap() { + return this.sampleMap; + } + + public Map getSampleMapLazy() { + return this.sampleMapLazy; + } + + public void setId(String id) { + this.id = id; + } + + public void setSampleMap(Map sampleMap) { + this.sampleMap = sampleMap; + } + + public void setSampleMapLazy(Map sampleMapLazy) { + this.sampleMapLazy = sampleMapLazy; + } + + public String toString() { + return "MappingMongoConverterTests.WithMapValueDbRef(id=" + this.getId() + ", sampleMap=" + this.getSampleMap() + + ", sampleMapLazy=" + this.getSampleMapLazy() + ")"; + } + } + + public static class WithLazyDBRefAsConstructorArg { + + @Id String id; + @DBRef(lazy = true) List lazyList; + + public WithLazyDBRefAsConstructorArg(String id, List lazyList) { + + this.id = id; + this.lazyList = lazyList; + } + + List getLazyList() { + return lazyList; + } + } + + static class Sample { + + @Id String id; + String value; + + public Sample(String id, String value) { + + this.id = id; + this.value = value; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sample sample = (Sample) o; + return Objects.equals(id, sample.id) && Objects.equals(value, sample.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MappingMongoConverterTests.Sample(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithJavaTimeTypes { + + @Id String id; + LocalDate localDate; + LocalTime localTime; + LocalDateTime localDateTime; + + public WithJavaTimeTypes() {} + + static WithJavaTimeTypes withJavaTimeTypes(Instant instant) { + + 
WithJavaTimeTypes instance = new WithJavaTimeTypes(); + + instance.localDate = LocalDate.from(instant.atZone(ZoneId.of("CET"))); + instance.localTime = LocalTime.from(instant.atZone(ZoneId.of("CET"))); + instance.localDateTime = LocalDateTime.from(instant.atZone(ZoneId.of("CET"))); + + return instance; + } + + Document toDocument() { + return new Document("_id", id).append("localDate", localDate).append("localTime", localTime) + .append("localDateTime", localDateTime); + } + + public String getId() { + return this.id; + } + + public LocalDate getLocalDate() { + return this.localDate; + } + + public LocalTime getLocalTime() { + return this.localTime; + } + + public LocalDateTime getLocalDateTime() { + return this.localDateTime; + } + + public void setId(String id) { + this.id = id; + } + + public void setLocalDate(LocalDate localDate) { + this.localDate = localDate; + } + + public void setLocalTime(LocalTime localTime) { + this.localTime = localTime; + } + + public void setLocalDateTime(LocalDateTime localDateTime) { + this.localDateTime = localDateTime; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithJavaTimeTypes that = (WithJavaTimeTypes) o; + return Objects.equals(id, that.id) && Objects.equals(localDate, that.localDate) + && Objects.equals(localTime, that.localTime) && Objects.equals(localDateTime, that.localDateTime); + } + + @Override + public int hashCode() { + return Objects.hash(id, localDate, localTime, localDateTime); + } + + public String toString() { + return "MappingMongoConverterTests.WithJavaTimeTypes(id=" + this.getId() + ", localDate=" + this.getLocalDate() + + ", localTime=" + this.getLocalTime() + ", localDateTime=" + this.getLocalDateTime() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 4430a07262..cf6d69c6c3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,55 +16,64 @@ package org.springframework.data.mongodb.core.convert; import static java.time.ZoneId.*; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URL; +import java.nio.ByteBuffer; +import java.time.LocalDate; import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.EnumMap; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; - +import java.time.temporal.ChronoUnit; +import java.util.*; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.assertj.core.api.Assertions; +import org.assertj.core.data.Percentage; +import org.bson.BsonDouble; +import org.bson.BsonUndefined; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; -import org.hamcrest.Matcher; -import org.hamcrest.Matchers; -import org.joda.time.LocalDate; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.aop.framework.ProxyFactory; import org.springframework.beans.ConversionNotSupportedException; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.context.ApplicationContext; +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.convert.ConverterNotFoundException; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.annotation.Transient; import org.springframework.data.annotation.TypeAlias; +import org.springframework.data.convert.ConverterBuilder; +import 
org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.PropertyValueConverterFactory; import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.ValueConverter; import org.springframework.data.convert.WritingConverter; +import org.springframework.data.domain.Vector; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; @@ -72,277 +81,235 @@ import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.model.MappingInstantiationException; -import org.springframework.data.mongodb.core.DBObjectTestUtils; -import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.NestedType; -import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.ProjectingType; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.convert.DocumentAccessorUnitTests.NestedType; +import org.springframework.data.mongodb.core.convert.DocumentAccessorUnitTests.ProjectingType; import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.ClassWithMapUsingEnumAsKey.FooBarEnum; import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoField; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.PersonPojoStringId; import org.springframework.data.mongodb.core.mapping.TextScore; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.EntityProjectionIntrospector; import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.NonNull; +import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; import com.mongodb.DBObject; import com.mongodb.DBRef; -import com.mongodb.util.JSON; /** * Unit tests for {@link MappingMongoConverter}. 
- * + * * @author Oliver Gierke * @author Patrik Wasik * @author Christoph Strobl + * @author Mark Paluch + * @author Roman Puchkovskiy + * @author Heesu Jung + * @author Julia Lee */ -@RunWith(MockitoJUnitRunner.class) -public class MappingMongoConverterUnitTests { +@ExtendWith(MockitoExtension.class) +class MappingMongoConverterUnitTests { - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; @Mock ApplicationContext context; @Mock DbRefResolver resolver; - public @Rule ExpectedException exception = ExpectedException.none(); + @BeforeEach + void beforeEach() { - @Before - public void setUp() { + MongoCustomConversions conversions = new MongoCustomConversions( + Arrays.asList(new ByteBufferToDoubleHolderConverter())); mappingContext = new MongoMappingContext(); mappingContext.setApplicationContext(context); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); mappingContext.afterPropertiesSet(); + mappingContext.getPersistentEntity(Address.class); + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); converter.afterPropertiesSet(); } @Test - public void convertsAddressCorrectly() { + void convertsAddressCorrectly() { Address address = new Address(); address.city = "New York"; address.street = "Broadway"; - DBObject dbObject = new BasicDBObject(); - - converter.write(address, dbObject); - - assertThat(dbObject.get("city").toString(), is("New York")); - assertThat(dbObject.get("street").toString(), is("Broadway")); - } - - @Test - public void convertsJodaTimeTypesCorrectly() { - - converter = new MappingMongoConverter(resolver, mappingContext); - converter.afterPropertiesSet(); - - Person person = new Person(); - person.birthDate = new LocalDate(); - - DBObject dbObject = new BasicDBObject(); - converter.write(person, dbObject); - - assertThat(dbObject.get("birthDate"), is(instanceOf(Date.class))); + org.bson.Document document = new org.bson.Document(); - Person result = converter.read(Person.class, dbObject); - assertThat(result.birthDate, is(notNullValue())); - } - - @Test - public void convertsCustomTypeOnConvertToMongoType() { - - converter = new MappingMongoConverter(resolver, mappingContext); - converter.afterPropertiesSet(); + converter.write(address, document); - LocalDate date = new LocalDate(); - converter.convertToMongoType(date); + assertThat(document.get("city").toString()).isEqualTo("New York"); + assertThat(document.get("s").toString()).isEqualTo("Broadway"); } - /** - * @see DATAMONGO-130 - */ - @Test - public void writesMapTypeCorrectly() { + @Test // DATAMONGO-130 + void writesMapTypeCorrectly() { Map map = Collections.singletonMap(Locale.US, "Foo"); - BasicDBObject dbObject = new BasicDBObject(); - converter.write(map, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(map, document); - assertThat(dbObject.get(Locale.US.toString()).toString(), is("Foo")); + assertThat(document.get(Locale.US.toString()).toString()).isEqualTo("Foo"); } - /** - * @see DATAMONGO-130 - */ - @Test - public void readsMapWithCustomKeyTypeCorrectly() { + @Test // DATAMONGO-130 + void readsMapWithCustomKeyTypeCorrectly() { - DBObject mapObject = new BasicDBObject(Locale.US.toString(), "Value"); - DBObject dbObject = new BasicDBObject("map", mapObject); + org.bson.Document mapObject = new org.bson.Document(Locale.US.toString(), "Value"); + org.bson.Document document = new 
org.bson.Document("map", mapObject); - ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, dbObject); - assertThat(result.map.get(Locale.US), is("Value")); + ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, document); + assertThat(result.map.get(Locale.US)).isEqualTo("Value"); } - /** - * @see DATAMONGO-128 - */ - @Test - public void usesDocumentsStoredTypeIfSubtypeOfRequest() { + @Test // DATAMONGO-128 + void usesDocumentsStoredTypeIfSubtypeOfRequest() { - DBObject dbObject = new BasicDBObject(); - dbObject.put("birthDate", new LocalDate()); - dbObject.put(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); + org.bson.Document document = new org.bson.Document(); + document.put("birthDate", new Date()); + document.put(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); - assertThat(converter.read(Contact.class, dbObject), is(instanceOf(Person.class))); + assertThat(converter.read(Contact.class, document)).isInstanceOf(Person.class); } - /** - * @see DATAMONGO-128 - */ - @Test - public void ignoresDocumentsStoredTypeIfCompletelyDifferentTypeRequested() { + @Test // DATAMONGO-128 + void ignoresDocumentsStoredTypeIfCompletelyDifferentTypeRequested() { - DBObject dbObject = new BasicDBObject(); - dbObject.put("birthDate", new LocalDate()); - dbObject.put(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); + org.bson.Document document = new org.bson.Document(); + document.put("birthDate", new Date()); + document.put(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); - assertThat(converter.read(BirthDateContainer.class, dbObject), is(instanceOf(BirthDateContainer.class))); + assertThat(converter.read(BirthDateContainer.class, document)).isInstanceOf(BirthDateContainer.class); } @Test - public void writesTypeDiscriminatorIntoRootObject() { + void writesTypeDiscriminatorIntoRootObject() { Person person = new Person(); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(person, result); - assertThat(result.containsField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(true)); - assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY).toString(), is(Person.class.getName())); + assertThat(result.containsKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isTrue(); + assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY).toString()).isEqualTo(Person.class.getName()); } - /** - * @see DATAMONGO-136 - */ - @Test - public void writesEnumsCorrectly() { + @Test // DATAMONGO-136 + void writesEnumsCorrectly() { ClassWithEnumProperty value = new ClassWithEnumProperty(); value.sampleEnum = SampleEnum.FIRST; - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("sampleEnum"), is(instanceOf(String.class))); - assertThat(result.get("sampleEnum").toString(), is("FIRST")); + assertThat(result.get("sampleEnum")).isInstanceOf(String.class); + assertThat(result.get("sampleEnum").toString()).isEqualTo("FIRST"); } - /** - * @see DATAMONGO-209 - */ - @Test - public void writesEnumCollectionCorrectly() { + @Test // DATAMONGO-209 + void writesEnumCollectionCorrectly() { ClassWithEnumProperty value = new ClassWithEnumProperty(); value.enums = Arrays.asList(SampleEnum.FIRST); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("enums"), 
is(instanceOf(BasicDBList.class))); + assertThat(result.get("enums")).isInstanceOf(List.class); - BasicDBList enums = (BasicDBList) result.get("enums"); - assertThat(enums.size(), is(1)); - assertThat((String) enums.get(0), is("FIRST")); + List enums = (List) result.get("enums"); + assertThat(enums.size()).isEqualTo(1); + assertThat(enums.get(0)).isEqualTo("FIRST"); } - /** - * @see DATAMONGO-136 - */ - @Test - public void readsEnumsCorrectly() { - DBObject dbObject = new BasicDBObject("sampleEnum", "FIRST"); - ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, dbObject); + @Test // DATAMONGO-136 + void readsEnumsCorrectly() { + org.bson.Document document = new org.bson.Document("sampleEnum", "FIRST"); + ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document); - assertThat(result.sampleEnum, is(SampleEnum.FIRST)); + assertThat(result.sampleEnum).isEqualTo(SampleEnum.FIRST); } - /** - * @see DATAMONGO-209 - */ - @Test - public void readsEnumCollectionsCorrectly() { + @Test // DATAMONGO-209 + void readsEnumCollectionsCorrectly() { BasicDBList enums = new BasicDBList(); enums.add("FIRST"); - DBObject dbObject = new BasicDBObject("enums", enums); + org.bson.Document document = new org.bson.Document("enums", enums); - ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, dbObject); + ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document); - assertThat(result.enums, is(instanceOf(List.class))); - assertThat(result.enums.size(), is(1)); - assertThat(result.enums, hasItem(SampleEnum.FIRST)); + assertThat(result.enums).isInstanceOf(List.class); + assertThat(result.enums.size()).isEqualTo(1); + assertThat(result.enums).contains(SampleEnum.FIRST); } - /** - * @see DATAMONGO-144 - */ - @Test - public void considersFieldNameWhenWriting() { + @Test // DATAMONGO-144 + void considersFieldNameWhenWriting() { Person person = new Person(); person.firstname = "Oliver"; - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(person, result); - assertThat(result.containsField("foo"), is(true)); - assertThat(result.containsField("firstname"), is(false)); + assertThat(result.containsKey("foo")).isTrue(); + assertThat(result.containsKey("firstname")).isFalse(); } - /** - * @see DATAMONGO-144 - */ - @Test - public void considersFieldNameWhenReading() { + @Test // DATAMONGO-144 + void considersFieldNameWhenReading() { - DBObject dbObject = new BasicDBObject("foo", "Oliver"); - Person result = converter.read(Person.class, dbObject); + org.bson.Document document = new org.bson.Document("foo", "Oliver"); + Person result = converter.read(Person.class, document); - assertThat(result.firstname, is("Oliver")); + assertThat(result.firstname).isEqualTo("Oliver"); } @Test - public void resolvesNestedComplexTypeForConstructorCorrectly() { + void resolvesNestedComplexTypeForConstructorCorrectly() { - DBObject address = new BasicDBObject("street", "110 Southwark Street"); + org.bson.Document address = new org.bson.Document("street", "110 Southwark Street"); address.put("city", "London"); BasicDBList addresses = new BasicDBList(); addresses.add(address); - DBObject person = new BasicDBObject("firstname", "Oliver"); + org.bson.Document person = new org.bson.Document("firstname", "Oliver"); person.put("addresses", addresses); Person result = converter.read(Person.class, person); - assertThat(result.addresses, is(notNullValue())); + assertThat(result.addresses).isNotNull(); } 
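The hunks in this test class all apply the same mechanical migration: JUnit 4's public @Test methods become package-private JUnit 5 methods, the @see ticket Javadoc collapses into a trailing comment on @Test, Hamcrest matchers become AssertJ assertions, and the legacy driver's DBObject/BasicDBObject types give way to org.bson.Document. A condensed before/after sketch of that pattern, distilled from the readsEnumsCorrectly hunk above for orientation only (it is not itself part of the patch):

    // Before: JUnit 4, Hamcrest matchers, legacy driver API
    @Test
    public void readsEnumsCorrectly() {
        DBObject dbObject = new BasicDBObject("sampleEnum", "FIRST");
        ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, dbObject);
        assertThat(result.sampleEnum, is(SampleEnum.FIRST));
    }

    // After: JUnit 5, AssertJ, org.bson.Document; the tracking ticket
    // moves from the @see Javadoc into a trailing comment on @Test
    @Test // DATAMONGO-136
    void readsEnumsCorrectly() {
        org.bson.Document document = new org.bson.Document("sampleEnum", "FIRST");
        ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document);
        assertThat(result.sampleEnum).isEqualTo(SampleEnum.FIRST);
    }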
- /** - * @see DATAMONGO-145 - */ - @Test - public void writesCollectionWithInterfaceCorrectly() { + @Test // DATAMONGO-145 + void writesCollectionWithInterfaceCorrectly() { Person person = new Person(); person.firstname = "Oliver"; @@ -350,58 +317,52 @@ public void writesCollectionWithInterfaceCorrectly() { CollectionWrapper wrapper = new CollectionWrapper(); wrapper.contacts = Arrays.asList((Contact) person); - BasicDBObject dbObject = new BasicDBObject(); - converter.write(wrapper, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(wrapper, document); - Object result = dbObject.get("contacts"); - assertThat(result, is(instanceOf(BasicDBList.class))); - BasicDBList contacts = (BasicDBList) result; - DBObject personDbObject = (DBObject) contacts.get(0); - assertThat(personDbObject.get("foo").toString(), is("Oliver")); - assertThat((String) personDbObject.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(Person.class.getName())); + Object result = document.get("contacts"); + assertThat(result).isInstanceOf(List.class); + List contacts = (List) result; + org.bson.Document personDocument = (org.bson.Document) contacts.get(0); + assertThat(personDocument.get("foo").toString()).isEqualTo("Oliver"); + assertThat((String) personDocument.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(Person.class.getName()); } - /** - * @see DATAMONGO-145 - */ - @Test - public void readsCollectionWithInterfaceCorrectly() { + @Test // DATAMONGO-145 + void readsCollectionWithInterfaceCorrectly() { - BasicDBObject person = new BasicDBObject(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); + org.bson.Document person = new org.bson.Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); person.put("foo", "Oliver"); BasicDBList contacts = new BasicDBList(); contacts.add(person); - CollectionWrapper result = converter.read(CollectionWrapper.class, new BasicDBObject("contacts", contacts)); - assertThat(result.contacts, is(notNullValue())); - assertThat(result.contacts.size(), is(1)); + CollectionWrapper result = converter.read(CollectionWrapper.class, new org.bson.Document("contacts", contacts)); + assertThat(result.contacts).isNotNull(); + assertThat(result.contacts.size()).isEqualTo(1); Contact contact = result.contacts.get(0); - assertThat(contact, is(instanceOf(Person.class))); - assertThat(((Person) contact).firstname, is("Oliver")); + assertThat(contact).isInstanceOf(Person.class); + assertThat(((Person) contact).firstname).isEqualTo("Oliver"); } @Test - public void convertsLocalesOutOfTheBox() { + void convertsLocalesOutOfTheBox() { LocaleWrapper wrapper = new LocaleWrapper(); wrapper.locale = Locale.US; - DBObject dbObject = new BasicDBObject(); - converter.write(wrapper, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(wrapper, document); - Object localeField = dbObject.get("locale"); - assertThat(localeField, is(instanceOf(String.class))); - assertThat((String) localeField, is("en_US")); + Object localeField = document.get("locale"); + assertThat(localeField).isInstanceOf(String.class); + assertThat(localeField).isEqualTo("en_US"); - LocaleWrapper read = converter.read(LocaleWrapper.class, dbObject); - assertThat(read.locale, is(Locale.US)); + LocaleWrapper read = converter.read(LocaleWrapper.class, document); + assertThat(read.locale).isEqualTo(Locale.US); } - /** - * @see DATAMONGO-161 - */ - @Test - public void readsNestedMapsCorrectly() { + @Test // DATAMONGO-161 + void 
readsNestedMapsCorrectly() { Map secondLevel = new HashMap(); secondLevel.put("key1", "value1"); @@ -415,322 +376,279 @@ public void readsNestedMapsCorrectly() { maps.nestedMaps = new LinkedHashMap>>(); maps.nestedMaps.put("afield", firstLevel); - DBObject dbObject = new BasicDBObject(); - converter.write(maps, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(maps, document); - ClassWithNestedMaps result = converter.read(ClassWithNestedMaps.class, dbObject); + ClassWithNestedMaps result = converter.read(ClassWithNestedMaps.class, document); Map>> nestedMap = result.nestedMaps; - assertThat(nestedMap, is(notNullValue())); - assertThat(nestedMap.get("afield"), is(firstLevel)); + assertThat(nestedMap).isNotNull(); + assertThat(nestedMap.get("afield")).isEqualTo(firstLevel); } - /** - * @see DATACMNS-42, DATAMONGO-171 - */ - @Test - public void writesClassWithBigDecimal() { + @Test // DATACMNS-42, DATAMONGO-171 + void writesClassWithBigDecimal() { BigDecimalContainer container = new BigDecimalContainer(); container.value = BigDecimal.valueOf(2.5d); container.map = Collections.singletonMap("foo", container.value); - DBObject dbObject = new BasicDBObject(); - converter.write(container, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(container, document); - assertThat(dbObject.get("value"), is(instanceOf(String.class))); - assertThat((String) dbObject.get("value"), is("2.5")); - assertThat(((DBObject) dbObject.get("map")).get("foo"), is(instanceOf(String.class))); + assertThat(document.get("value")).isInstanceOf(String.class); + assertThat((String) document.get("value")).isEqualTo("2.5"); + assertThat(((org.bson.Document) document.get("map")).get("foo")).isInstanceOf(String.class); } - /** - * @see DATACMNS-42, DATAMONGO-171 - */ - @Test - public void readsClassWithBigDecimal() { + @Test // DATACMNS-42, DATAMONGO-171 + void readsClassWithBigDecimal() { - DBObject dbObject = new BasicDBObject("value", "2.5"); - dbObject.put("map", new BasicDBObject("foo", "2.5")); + org.bson.Document document = new org.bson.Document("value", "2.5"); + document.put("map", new org.bson.Document("foo", "2.5")); BasicDBList list = new BasicDBList(); list.add("2.5"); - dbObject.put("collection", list); - BigDecimalContainer result = converter.read(BigDecimalContainer.class, dbObject); + document.put("collection", list); + BigDecimalContainer result = converter.read(BigDecimalContainer.class, document); - assertThat(result.value, is(BigDecimal.valueOf(2.5d))); - assertThat(result.map.get("foo"), is(BigDecimal.valueOf(2.5d))); - assertThat(result.collection.get(0), is(BigDecimal.valueOf(2.5d))); + assertThat(result.value).isEqualTo(BigDecimal.valueOf(2.5d)); + assertThat(result.map.get("foo")).isEqualTo(BigDecimal.valueOf(2.5d)); + assertThat(result.collection.get(0)).isEqualTo(BigDecimal.valueOf(2.5d)); } @Test - @SuppressWarnings("unchecked") - public void writesNestedCollectionsCorrectly() { + void writesNestedCollectionsCorrectly() { CollectionWrapper wrapper = new CollectionWrapper(); wrapper.strings = Arrays.asList(Arrays.asList("Foo")); - DBObject dbObject = new BasicDBObject(); - converter.write(wrapper, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(wrapper, document); - Object outerStrings = dbObject.get("strings"); - assertThat(outerStrings, is(instanceOf(BasicDBList.class))); + Object outerStrings = document.get("strings"); + assertThat(outerStrings).isInstanceOf(List.class); - BasicDBList 
typedOuterString = (BasicDBList) outerStrings; - assertThat(typedOuterString.size(), is(1)); + List typedOuterString = (List) outerStrings; + assertThat(typedOuterString.size()).isEqualTo(1); } - /** - * @see DATAMONGO-192 - */ - @Test - public void readsEmptySetsCorrectly() { + @Test // DATAMONGO-192 + void readsEmptySetsCorrectly() { Person person = new Person(); person.addresses = Collections.emptySet(); - DBObject dbObject = new BasicDBObject(); - converter.write(person, dbObject); - converter.read(Person.class, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(person, document); + converter.read(Person.class, document); } @Test - public void convertsObjectIdStringsToObjectIdCorrectly() { + void convertsObjectIdStringsToObjectIdCorrectly() { PersonPojoStringId p1 = new PersonPojoStringId("1234567890", "Text-1"); - DBObject dbo1 = new BasicDBObject(); + org.bson.Document doc1 = new org.bson.Document(); - converter.write(p1, dbo1); - assertThat(dbo1.get("_id"), is(instanceOf(String.class))); + converter.write(p1, doc1); + assertThat(doc1.get("_id")).isInstanceOf(String.class); PersonPojoStringId p2 = new PersonPojoStringId(new ObjectId().toString(), "Text-1"); - DBObject dbo2 = new BasicDBObject(); + org.bson.Document doc2 = new org.bson.Document(); - converter.write(p2, dbo2); - assertThat(dbo2.get("_id"), is(instanceOf(ObjectId.class))); + converter.write(p2, doc2); + assertThat(doc2.get("_id")).isInstanceOf(ObjectId.class); } - /** - * @see DATAMONGO-207 - */ - @Test - public void convertsCustomEmptyMapCorrectly() { + @Test // DATAMONGO-207 + void convertsCustomEmptyMapCorrectly() { - DBObject map = new BasicDBObject(); - DBObject wrapper = new BasicDBObject("map", map); + org.bson.Document map = new org.bson.Document(); + org.bson.Document wrapper = new org.bson.Document("map", map); ClassWithSortedMap result = converter.read(ClassWithSortedMap.class, wrapper); - assertThat(result, is(instanceOf(ClassWithSortedMap.class))); - assertThat(result.map, is(instanceOf(SortedMap.class))); + assertThat(result).isInstanceOf(ClassWithSortedMap.class); + assertThat(result.map).isInstanceOf(SortedMap.class); } - /** - * @see DATAMONGO-211 - */ - @Test - public void maybeConvertHandlesNullValuesCorrectly() { - assertThat(converter.convertToMongoType(null), is(nullValue())); + @Test // DATAMONGO-211 + void maybeConvertHandlesNullValuesCorrectly() { + assertThat(converter.convertToMongoType(null)).isNull(); } - @Test - public void writesGenericTypeCorrectly() { + @Test // DATAMONGO-1509 + void writesGenericTypeCorrectly() { GenericType
<Object> type = new GenericType<Object>
                    (); type.content = new Address(); type.content.city = "London"; - BasicDBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(type, result); - DBObject content = (DBObject) result.get("content"); - assertThat(content.get("_class"), is(notNullValue())); - assertThat(content.get("city"), is(notNullValue())); + org.bson.Document content = (org.bson.Document) result.get("content"); + assertTypeHint(content, Address.class); + assertThat(content.get("city")).isNotNull(); } @Test - public void readsGenericTypeCorrectly() { + void readsGenericTypeCorrectly() { - DBObject address = new BasicDBObject("_class", Address.class.getName()); + org.bson.Document address = new org.bson.Document("_class", Address.class.getName()); address.put("city", "London"); - GenericType result = converter.read(GenericType.class, new BasicDBObject("content", address)); - assertThat(result.content, is(instanceOf(Address.class))); - + GenericType result = converter.read(GenericType.class, new org.bson.Document("content", address)); + assertThat(result.content).isInstanceOf(Address.class); } - /** - * @see DATAMONGO-228 - */ - @Test - public void writesNullValuesForMaps() { + @Test // DATAMONGO-228 + void writesNullValuesForMaps() { ClassWithMapProperty foo = new ClassWithMapProperty(); foo.map = Collections.singletonMap(Locale.US, null); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(foo, result); Object map = result.get("map"); - assertThat(map, is(instanceOf(DBObject.class))); - assertThat(((DBObject) map).keySet(), hasItem("en_US")); + assertThat(map).isInstanceOf(org.bson.Document.class); + assertThat(((org.bson.Document) map).keySet()).contains("en_US"); } @Test - public void writesBigIntegerIdCorrectly() { + void writesBigIntegerIdCorrectly() { ClassWithBigIntegerId foo = new ClassWithBigIntegerId(); foo.id = BigInteger.valueOf(23L); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(foo, result); - assertThat(result.get("_id"), is(instanceOf(String.class))); + assertThat(result.get("_id")).isInstanceOf(String.class); } - public void convertsObjectsIfNecessary() { + @Test + void convertsObjectsIfNecessary() { ObjectId id = new ObjectId(); - assertThat(converter.convertToMongoType(id), is((Object) id)); + assertThat(converter.convertToMongoType(id)).isEqualTo(id); } - /** - * @see DATAMONGO-235 - */ - @Test - public void writesMapOfListsCorrectly() { + @Test // DATAMONGO-235 + void writesMapOfListsCorrectly() { ClassWithMapProperty input = new ClassWithMapProperty(); input.mapOfLists = Collections.singletonMap("Foo", Arrays.asList("Bar")); - BasicDBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(input, result); Object field = result.get("mapOfLists"); - assertThat(field, is(instanceOf(DBObject.class))); + assertThat(field).isInstanceOf(org.bson.Document.class); - DBObject map = (DBObject) field; + org.bson.Document map = (org.bson.Document) field; Object foo = map.get("Foo"); - assertThat(foo, is(instanceOf(BasicDBList.class))); + assertThat(foo).isInstanceOf(List.class); - BasicDBList value = (BasicDBList) foo; - assertThat(value.size(), is(1)); - assertThat((String) value.get(0), is("Bar")); + List value = (List) foo; + assertThat(value.size()).isEqualTo(1); + assertThat(value.get(0)).isEqualTo("Bar"); } - /** - * @see DATAMONGO-235 - */ - @Test - public void 
readsMapListValuesCorrectly() { + @Test // DATAMONGO-235 + void readsMapListValuesCorrectly() { BasicDBList list = new BasicDBList(); list.add("Bar"); - DBObject source = new BasicDBObject("mapOfLists", new BasicDBObject("Foo", list)); + org.bson.Document source = new org.bson.Document("mapOfLists", new org.bson.Document("Foo", list)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); - assertThat(result.mapOfLists, is(not(nullValue()))); + assertThat(result.mapOfLists).isNotNull(); } - /** - * @see DATAMONGO-235 - */ - @Test - public void writesMapsOfObjectsCorrectly() { + @Test // DATAMONGO-235 + void writesMapsOfObjectsCorrectly() { ClassWithMapProperty input = new ClassWithMapProperty(); input.mapOfObjects = new HashMap(); input.mapOfObjects.put("Foo", Arrays.asList("Bar")); - BasicDBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(input, result); Object field = result.get("mapOfObjects"); - assertThat(field, is(instanceOf(DBObject.class))); + assertThat(field).isInstanceOf(org.bson.Document.class); - DBObject map = (DBObject) field; + org.bson.Document map = (org.bson.Document) field; Object foo = map.get("Foo"); - assertThat(foo, is(instanceOf(BasicDBList.class))); + assertThat(foo).isInstanceOf(List.class); - BasicDBList value = (BasicDBList) foo; - assertThat(value.size(), is(1)); - assertThat((String) value.get(0), is("Bar")); + List value = (List) foo; + assertThat(value.size()).isEqualTo(1); + assertThat(value.get(0)).isEqualTo("Bar"); } - /** - * @see DATAMONGO-235 - */ - @Test - public void readsMapOfObjectsListValuesCorrectly() { + @Test // DATAMONGO-235 + void readsMapOfObjectsListValuesCorrectly() { BasicDBList list = new BasicDBList(); list.add("Bar"); - DBObject source = new BasicDBObject("mapOfObjects", new BasicDBObject("Foo", list)); + org.bson.Document source = new org.bson.Document("mapOfObjects", new org.bson.Document("Foo", list)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); - assertThat(result.mapOfObjects, is(not(nullValue()))); + assertThat(result.mapOfObjects).isNotNull(); } - /** - * @see DATAMONGO-245 - */ - @Test - public void readsMapListNestedValuesCorrectly() { + @Test // DATAMONGO-245 + void readsMapListNestedValuesCorrectly() { BasicDBList list = new BasicDBList(); - list.add(new BasicDBObject("Hello", "World")); - DBObject source = new BasicDBObject("mapOfObjects", new BasicDBObject("Foo", list)); + list.add(new org.bson.Document("Hello", "World")); + org.bson.Document source = new org.bson.Document("mapOfObjects", new org.bson.Document("Foo", list)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object firstObjectInFoo = ((List) result.mapOfObjects.get("Foo")).get(0); - assertThat(firstObjectInFoo, is(instanceOf(Map.class))); - assertThat((String) ((Map) firstObjectInFoo).get("Hello"), is(equalTo("World"))); + assertThat(firstObjectInFoo).isInstanceOf(Map.class); + assertThat(((Map) firstObjectInFoo).get("Hello")).isEqualTo("World"); } - /** - * @see DATAMONGO-245 - */ - @Test - public void readsMapDoublyNestedValuesCorrectly() { + @Test // DATAMONGO-245 + void readsMapDoublyNestedValuesCorrectly() { - BasicDBObject nested = new BasicDBObject(); - BasicDBObject doubly = new BasicDBObject(); + org.bson.Document nested = new org.bson.Document(); + org.bson.Document doubly = new org.bson.Document(); doubly.append("Hello", "World"); nested.append("nested", doubly); - DBObject source = new 
BasicDBObject("mapOfObjects", new BasicDBObject("Foo", nested)); + org.bson.Document source = new org.bson.Document("mapOfObjects", new org.bson.Document("Foo", nested)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object foo = result.mapOfObjects.get("Foo"); - assertThat(foo, is(instanceOf(Map.class))); + assertThat(foo).isInstanceOf(Map.class); Object doublyNestedObject = ((Map) foo).get("nested"); - assertThat(doublyNestedObject, is(instanceOf(Map.class))); - assertThat((String) ((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); + assertThat(doublyNestedObject).isInstanceOf(Map.class); + assertThat(((Map) doublyNestedObject).get("Hello")).isEqualTo("World"); } - /** - * @see DATAMONGO-245 - */ - @Test - public void readsMapListDoublyNestedValuesCorrectly() { + @Test // DATAMONGO-245 + void readsMapListDoublyNestedValuesCorrectly() { BasicDBList list = new BasicDBList(); - BasicDBObject nested = new BasicDBObject(); - BasicDBObject doubly = new BasicDBObject(); + org.bson.Document nested = new org.bson.Document(); + org.bson.Document doubly = new org.bson.Document(); doubly.append("Hello", "World"); nested.append("nested", doubly); list.add(nested); - DBObject source = new BasicDBObject("mapOfObjects", new BasicDBObject("Foo", list)); + org.bson.Document source = new org.bson.Document("mapOfObjects", new org.bson.Document("Foo", list)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object firstObjectInFoo = ((List) result.mapOfObjects.get("Foo")).get(0); - assertThat(firstObjectInFoo, is(instanceOf(Map.class))); + assertThat(firstObjectInFoo).isInstanceOf(Map.class); Object doublyNestedObject = ((Map) firstObjectInFoo).get("nested"); - assertThat(doublyNestedObject, is(instanceOf(Map.class))); - assertThat((String) ((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); + assertThat(doublyNestedObject).isInstanceOf(Map.class); + assertThat(((Map) doublyNestedObject).get("Hello")).isEqualTo("World"); } - /** - * @see DATAMONGO-259 - */ - @Test - public void writesListOfMapsCorrectly() { + @Test // DATAMONGO-259 + void writesListOfMapsCorrectly() { Map map = Collections.singletonMap("Foo", Locale.ENGLISH); @@ -738,65 +656,194 @@ public void writesListOfMapsCorrectly() { wrapper.listOfMaps = new ArrayList>(); wrapper.listOfMaps.add(map); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(wrapper, result); - BasicDBList list = (BasicDBList) result.get("listOfMaps"); - assertThat(list, is(notNullValue())); - assertThat(list.size(), is(1)); + List list = (List) result.get("listOfMaps"); + assertThat(list).isNotNull(); + assertThat(list.size()).isEqualTo(1); - DBObject dbObject = (DBObject) list.get(0); - assertThat(dbObject.containsField("Foo"), is(true)); - assertThat((String) dbObject.get("Foo"), is(Locale.ENGLISH.toString())); + org.bson.Document document = (org.bson.Document) list.get(0); + assertThat(document.containsKey("Foo")).isTrue(); + assertThat((String) document.get("Foo")).isEqualTo(Locale.ENGLISH.toString()); } - /** - * @see DATAMONGO-259 - */ - @Test - public void readsListOfMapsCorrectly() { + @Test // DATAMONGO-259 + void readsListOfMapsCorrectly() { - DBObject map = new BasicDBObject("Foo", "en"); + org.bson.Document map = new org.bson.Document("Foo", "en"); BasicDBList list = new BasicDBList(); list.add(map); - DBObject wrapperSource = new BasicDBObject("listOfMaps", list); + org.bson.Document wrapperSource = new 
org.bson.Document("listOfMaps", list); CollectionWrapper wrapper = converter.read(CollectionWrapper.class, wrapperSource); - assertThat(wrapper.listOfMaps, is(notNullValue())); - assertThat(wrapper.listOfMaps.size(), is(1)); - assertThat(wrapper.listOfMaps.get(0), is(notNullValue())); - assertThat(wrapper.listOfMaps.get(0).get("Foo"), is(Locale.ENGLISH)); + assertThat(wrapper.listOfMaps).isNotNull(); + assertThat(wrapper.listOfMaps.size()).isEqualTo(1); + assertThat(wrapper.listOfMaps.get(0)).isNotNull(); + assertThat(wrapper.listOfMaps.get(0).get("Foo")).isEqualTo(Locale.ENGLISH); + } + + @ParameterizedTest(name = "{4}") // GH-4571 + @MethodSource("listMapSetReadingSource") + void initializesListMapSetPropertiesIfRequiredOnRead(org.bson.Document source, Class type, + Function valueFunction, Object expectedValue, String displayName) { + + T target = converter.read(type, source); + assertThat(target).extracting(valueFunction).isEqualTo(expectedValue); + } + + private static Stream listMapSetReadingSource() { + + Stream initialList = fixtureFor("contacts", CollectionWrapper.class, CollectionWrapper::getContacts, + builder -> { + + builder.onValue(Collections.emptyList()).expect(Collections.emptyList()); + builder.onNull().expect(null); + builder.onEmpty().expect(null); + }); + + Stream initializedList = fixtureFor("autoInitList", CollectionWrapper.class, + CollectionWrapper::getAutoInitList, builder -> { + + builder.onValue(Collections.emptyList()).expect(Collections.emptyList()); + builder.onNull().expect(null); + builder.onEmpty().expect(Collections.singletonList("spring")); + }); + + Stream initialSet = fixtureFor("contactsSet", CollectionWrapper.class, CollectionWrapper::getContactsSet, + builder -> { + + builder.onValue(Collections.emptyList()).expect(Collections.emptySet()); + builder.onNull().expect(null); + builder.onEmpty().expect(null); + }); + + Stream initialMap = fixtureFor("map", ClassWithMapProperty.class, ClassWithMapProperty::getMap, + builder -> { + + builder.onValue(new org.bson.Document()).expect(Collections.emptyMap()); + builder.onNull().expect(null); + builder.onEmpty().expect(null); + }); + + Stream initializedMap = fixtureFor("autoInitMap", ClassWithMapProperty.class, + ClassWithMapProperty::getAutoInitMap, builder -> { + + builder.onValue(new org.bson.Document()).expect(Collections.emptyMap()); + builder.onNull().expect(null); + builder.onEmpty().expect(Collections.singletonMap("spring", "data")); + }); + + return Stream.of(initialList, initializedList, initialSet, initialMap, initializedMap).flatMap(Function.identity()); + } + + static Stream fixtureFor(String field, Class type, Function valueFunction, + Consumer builderConsumer) { + + FixtureBuilder builder = new FixtureBuilder(field, type, valueFunction); + + builderConsumer.accept(builder); + + return builder.fixtures.stream(); } /** - * @see DATAMONGO-259 + * Builder for fixtures. */ - @Test - public void writesPlainMapOfCollectionsCorrectly() { + static class FixtureBuilder { + + private final String field; + private final Class typeUnderTest; + private final Function valueMappingFunction; + final List fixtures = new ArrayList<>(); + + FixtureBuilder(String field, Class typeUnderTest, Function valueMappingFunction) { + this.field = field; + this.typeUnderTest = typeUnderTest; + this.valueMappingFunction = valueMappingFunction; + } + + /** + * If the document value is {@code null}. + */ + FixtureStep onNull() { + return new FixtureStep(false, null); + } + + /** + * If the document value is {@code value}. 
+ */ + FixtureStep onValue(@Nullable Object value) { + return new FixtureStep(false, value); + } + + /** + * If the document does not contain the field. + */ + FixtureStep onEmpty() { + return new FixtureStep(true, null); + } + + class FixtureStep { + + private final boolean empty; + private final @Nullable Object documentValue; + + public FixtureStep(boolean empty, @Nullable Object documentValue) { + this.empty = empty; + this.documentValue = documentValue; + } + + /** + * Then expect {@code expectedValue}. + * + * @param expectedValue + */ + void expect(@Nullable Object expectedValue) { + + Arguments fixture; + if (empty) { + fixture = Arguments.of(new org.bson.Document(), typeUnderTest, valueMappingFunction, expectedValue, + "Empty document expecting '%s' at type %s".formatted(expectedValue, typeUnderTest.getSimpleName())); + } else { + + String valueDescription = (documentValue == null ? "null" + : (documentValue + " (" + documentValue.getClass().getSimpleName()) + ")"); + + fixture = Arguments.of(new org.bson.Document(field, documentValue), typeUnderTest, valueMappingFunction, + expectedValue, "Field '%s' with value %s expecting '%s' at type %s".formatted(field, valueDescription, + expectedValue, typeUnderTest.getSimpleName())); + } + + fixtures.add(fixture); + } + } + + } + + @Test // DATAMONGO-259 + void writesPlainMapOfCollectionsCorrectly() { Map> map = Collections.singletonMap("Foo", Arrays.asList(Locale.US)); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(map, result); - assertThat(result.containsField("Foo"), is(true)); - assertThat(result.get("Foo"), is(notNullValue())); - assertThat(result.get("Foo"), is(instanceOf(BasicDBList.class))); + assertThat(result.containsKey("Foo")).isTrue(); + assertThat(result.get("Foo")).isNotNull(); + assertThat(result.get("Foo")).isInstanceOf(List.class); - BasicDBList list = (BasicDBList) result.get("Foo"); + List list = (List) result.get("Foo"); - assertThat(list.size(), is(1)); - assertThat(list.get(0), is((Object) Locale.US.toString())); + assertThat(list.size()).isEqualTo(1); + assertThat(list.get(0)).isEqualTo(Locale.US.toString()); } - /** - * @see DATAMONGO-285 - */ - @Test + @Test // DATAMONGO-285 @SuppressWarnings({ "unchecked", "rawtypes" }) - public void testSaveMapWithACollectionAsValue() { + void testSaveMapWithACollectionAsValue() { Map keyValues = new HashMap(); keyValues.put("string", "hello"); @@ -805,178 +852,152 @@ public void testSaveMapWithACollectionAsValue() { list.add("pong"); keyValues.put("list", list); - DBObject dbObject = new BasicDBObject(); - converter.write(keyValues, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(keyValues, document); - Map keyValuesFromMongo = converter.read(Map.class, dbObject); + Map keyValuesFromMongo = converter.read(Map.class, document); - assertEquals(keyValues.size(), keyValuesFromMongo.size()); - assertEquals(keyValues.get("string"), keyValuesFromMongo.get("string")); - assertTrue(List.class.isAssignableFrom(keyValuesFromMongo.get("list").getClass())); + assertThat(keyValuesFromMongo.size()).isEqualTo(keyValues.size()); + assertThat(keyValuesFromMongo.get("string")).isEqualTo(keyValues.get("string")); + assertThat(List.class.isAssignableFrom(keyValuesFromMongo.get("list").getClass())).isTrue(); List listFromMongo = (List) keyValuesFromMongo.get("list"); - assertEquals(list.size(), listFromMongo.size()); - assertEquals(list.get(0), listFromMongo.get(0)); - assertEquals(list.get(1), 
listFromMongo.get(1)); + assertThat(listFromMongo.size()).isEqualTo(list.size()); + assertThat(listFromMongo.get(0)).isEqualTo(list.get(0)); + assertThat(listFromMongo.get(1)).isEqualTo(list.get(1)); } - /** - * @see DATAMONGO-309 - */ - @Test + @Test // DATAMONGO-309 @SuppressWarnings({ "unchecked" }) - public void writesArraysAsMapValuesCorrectly() { + void writesArraysAsMapValuesCorrectly() { ClassWithMapProperty wrapper = new ClassWithMapProperty(); wrapper.mapOfObjects = new HashMap(); wrapper.mapOfObjects.put("foo", new String[] { "bar" }); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(wrapper, result); Object mapObject = result.get("mapOfObjects"); - assertThat(mapObject, is(instanceOf(BasicDBObject.class))); + assertThat(mapObject).isInstanceOf(org.bson.Document.class); - DBObject map = (DBObject) mapObject; + org.bson.Document map = (org.bson.Document) mapObject; Object valueObject = map.get("foo"); - assertThat(valueObject, is(instanceOf(BasicDBList.class))); + assertThat(valueObject).isInstanceOf(List.class); List list = (List) valueObject; - assertThat(list.size(), is(1)); - assertThat(list, hasItem((Object) "bar")); + assertThat(list.size()).isEqualTo(1); + assertThat(list).contains((Object) "bar"); } - /** - * @see DATAMONGO-324 - */ - @Test - public void writesDbObjectCorrectly() { + @Test // DATAMONGO-324 + void writesDocumentCorrectly() { - DBObject dbObject = new BasicDBObject(); - dbObject.put("foo", "bar"); + org.bson.Document document = new org.bson.Document(); + document.put("foo", "bar"); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); - converter.write(dbObject, result); + converter.write(document, result); - result.removeField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); - assertThat(dbObject, is(result)); + result.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); + assertThat(document).isEqualTo(result); } - /** - * @see DATAMONGO-324 - */ - @Test - public void readsDbObjectCorrectly() { + @Test // DATAMONGO-324 + void readsDocumentCorrectly() { - DBObject dbObject = new BasicDBObject(); - dbObject.put("foo", "bar"); + org.bson.Document document = new org.bson.Document(); + document.put("foo", "bar"); - DBObject result = converter.read(DBObject.class, dbObject); + org.bson.Document result = converter.read(org.bson.Document.class, document); - assertThat(result, is(dbObject)); + assertThat(result).isEqualTo(document); } - /** - * @see DATAMONGO-329 - */ - @Test - public void writesMapAsGenericFieldCorrectly() { + @Test // DATAMONGO-329 + void writesMapAsGenericFieldCorrectly() { Map> objectToSave = new HashMap>(); objectToSave.put("test", new A("testValue")); A>> a = new A>>(objectToSave); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(a, result); - assertThat((String) result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); - assertThat((String) result.get("valueType"), is(HashMap.class.getName())); + assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(A.class.getName()); + assertThat(result.get("valueType")).isEqualTo(HashMap.class.getName()); - DBObject object = (DBObject) result.get("value"); - assertThat(object, is(notNullValue())); + org.bson.Document object = (org.bson.Document) result.get("value"); + assertThat(object).isNotNull(); - DBObject inner = (DBObject) object.get("test"); - assertThat(inner, is(notNullValue())); - assertThat((String) 
inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); - assertThat((String) inner.get("valueType"), is(String.class.getName())); - assertThat((String) inner.get("value"), is("testValue")); + org.bson.Document inner = (org.bson.Document) object.get("test"); + assertThat(inner).isNotNull(); + assertThat(inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(A.class.getName()); + assertThat(inner.get("valueType")).isEqualTo(String.class.getName()); + assertThat(inner.get("value")).isEqualTo("testValue"); } @Test - public void writesIntIdCorrectly() { + void writesIntIdCorrectly() { ClassWithIntId value = new ClassWithIntId(); value.id = 5; - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("_id"), is((Object) 5)); + assertThat(result.get("_id")).isEqualTo(5); } - /** - * @see DATAMONGO-368 - */ - @Test + @Test // DATAMONGO-368 @SuppressWarnings("unchecked") - public void writesNullValuesForCollection() { + void writesNullValuesForCollection() { CollectionWrapper wrapper = new CollectionWrapper(); - wrapper.contacts = Arrays. asList(new Person(), null); + wrapper.contacts = Arrays.asList(new Person(), null); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(wrapper, result); Object contacts = result.get("contacts"); - assertThat(contacts, is(instanceOf(Collection.class))); - assertThat(((Collection) contacts).size(), is(2)); - assertThat((Collection) contacts, hasItem(nullValue())); + assertThat(contacts).isInstanceOf(Collection.class); + assertThat(((Collection) contacts).size()).isEqualTo(2); + assertThat((Collection) contacts).containsNull(); } - /** - * @see DATAMONGO-379 - */ - @Test - public void considersDefaultingExpressionsAtConstructorArguments() { + @Test // DATAMONGO-379 + void considersDefaultingExpressionsAtConstructorArguments() { - DBObject dbObject = new BasicDBObject("foo", "bar"); - dbObject.put("foobar", 2.5); + org.bson.Document document = new org.bson.Document("foo", "bar"); + document.put("foobar", 2.5); - DefaultedConstructorArgument result = converter.read(DefaultedConstructorArgument.class, dbObject); - assertThat(result.bar, is(-1)); + DefaultedConstructorArgument result = converter.read(DefaultedConstructorArgument.class, document); + assertThat(result.bar).isEqualTo(-1); } - /** - * @see DATAMONGO-379 - */ - @Test - public void usesDocumentFieldIfReferencedInAtValue() { + @Test // DATAMONGO-379 + void usesDocumentFieldIfReferencedInAtValue() { - DBObject dbObject = new BasicDBObject("foo", "bar"); - dbObject.put("something", 37); - dbObject.put("foobar", 2.5); + org.bson.Document document = new org.bson.Document("foo", "bar"); + document.put("something", 37); + document.put("foobar", 2.5); - DefaultedConstructorArgument result = converter.read(DefaultedConstructorArgument.class, dbObject); - assertThat(result.bar, is(37)); + DefaultedConstructorArgument result = converter.read(DefaultedConstructorArgument.class, document); + assertThat(result.bar).isEqualTo(37); } - /** - * @see DATAMONGO-379 - */ - @Test(expected = MappingInstantiationException.class) - public void rejectsNotFoundConstructorParameterForPrimitiveType() { + @Test // DATAMONGO-379 + void rejectsNotFoundConstructorParameterForPrimitiveType() { - DBObject dbObject = new BasicDBObject("foo", "bar"); + org.bson.Document document = new org.bson.Document("foo", "bar"); - 
converter.read(DefaultedConstructorArgument.class, dbObject); + assertThatThrownBy(() -> converter.read(DefaultedConstructorArgument.class, document)) + .isInstanceOf(MappingInstantiationException.class); } - /** - * @see DATAMONGO-358 - */ - @Test - public void writesListForObjectPropertyCorrectly() { + @Test // DATAMONGO-358 + void writesListForObjectPropertyCorrectly() { Attribute attribute = new Attribute(); attribute.key = "key"; @@ -985,203 +1006,165 @@ public void writesListForObjectPropertyCorrectly() { Item item = new Item(); item.attributes = Arrays.asList(attribute); - DBObject result = new BasicDBObject(); + org.bson.Document result = new org.bson.Document(); converter.write(item, result); Item read = converter.read(Item.class, result); - assertThat(read.attributes.size(), is(1)); - assertThat(read.attributes.get(0).key, is(attribute.key)); - assertThat(read.attributes.get(0).value, is(instanceOf(Collection.class))); + assertThat(read.attributes.size()).isEqualTo(1); + assertThat(read.attributes.get(0).key).isEqualTo(attribute.key); + assertThat(read.attributes.get(0).value).isInstanceOf(Collection.class); @SuppressWarnings("unchecked") Collection values = (Collection) read.attributes.get(0).value; - assertThat(values.size(), is(2)); - assertThat(values, hasItems("1", "2")); + assertThat(values.size()).isEqualTo(2); + assertThat(values).contains("1", "2"); } - /** - * @see DATAMONGO-380 - */ - @Test(expected = MappingException.class) - public void rejectsMapWithKeyContainingDotsByDefault() { - converter.write(Collections.singletonMap("foo.bar", "foobar"), new BasicDBObject()); + @Test // DATAMONGO-380 + void rejectsMapWithKeyContainingDotsByDefault() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> converter.write(Collections.singletonMap("foo.bar", "foobar"), new org.bson.Document())); } - /** - * @see DATAMONGO-380 - */ - @Test - public void escapesDotInMapKeysIfReplacementConfigured() { + @Test // DATAMONGO-380 + void escapesDotInMapKeysIfReplacementConfigured() { converter.setMapKeyDotReplacement("~"); - DBObject dbObject = new BasicDBObject(); - converter.write(Collections.singletonMap("foo.bar", "foobar"), dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(Collections.singletonMap("foo.bar", "foobar"), document); - assertThat((String) dbObject.get("foo~bar"), is("foobar")); - assertThat(dbObject.containsField("foo.bar"), is(false)); + assertThat((String) document.get("foo~bar")).isEqualTo("foobar"); + assertThat(document.containsKey("foo.bar")).isFalse(); } - /** - * @see DATAMONGO-380 - */ - @Test + @Test // DATAMONGO-380 @SuppressWarnings("unchecked") - public void unescapesDotInMapKeysIfReplacementConfigured() { + void unescapesDotInMapKeysIfReplacementConfigured() { converter.setMapKeyDotReplacement("~"); - DBObject dbObject = new BasicDBObject("foo~bar", "foobar"); - Map result = converter.read(Map.class, dbObject); + org.bson.Document document = new org.bson.Document("foo~bar", "foobar"); + Map result = converter.read(Map.class, document); - assertThat(result.get("foo.bar"), is("foobar")); - assertThat(result.containsKey("foobar"), is(false)); + assertThat(result.get("foo.bar")).isEqualTo("foobar"); + assertThat(result.containsKey("foobar")).isFalse(); } - /** - * @see DATAMONGO-382 - */ - @Test - public void convertsSetToBasicDBList() { + @Test // DATAMONGO-382 + @Disabled("mongo3 - no longer supported") + void convertsSetToBasicDBList() { Address address = new Address(); address.city = "London"; 
address.street = "Foo"; Object result = converter.convertToMongoType(Collections.singleton(address), ClassTypeInformation.OBJECT); - assertThat(result, is(instanceOf(BasicDBList.class))); + assertThat(result).isInstanceOf(List.class); - Set readResult = converter.read(Set.class, (BasicDBList) result); - assertThat(readResult.size(), is(1)); - assertThat(readResult.iterator().next(), is(instanceOf(Address.class))); + Set readResult = converter.read(Set.class, (org.bson.Document) result); + assertThat(readResult.size()).isEqualTo(1); + assertThat(readResult.iterator().next()).isInstanceOf(Address.class); } - /** - * @see DATAMONGO-402 - */ - @Test - public void readsMemberClassCorrectly() { + @Test // DATAMONGO-402, GH-3702 + void readsMemberClassCorrectly() { - DBObject dbObject = new BasicDBObject("inner", new BasicDBObject("value", "FOO!")); + org.bson.Document document = new org.bson.Document("inner", + new LinkedHashMap<>(new org.bson.Document("value", "FOO!"))); - Outer outer = converter.read(Outer.class, dbObject); - assertThat(outer.inner, is(notNullValue())); - assertThat(outer.inner.value, is("FOO!")); + Outer outer = converter.read(Outer.class, document); + assertThat(outer.inner).isNotNull(); + assertThat(outer.inner.value).isEqualTo("FOO!"); assertSyntheticFieldValueOf(outer.inner, outer); } - /** - * @see DATAMONGO-458 - */ - @Test - public void readEmptyCollectionIsModifiable() { + @Test // DATAMONGO-458 + void readEmptyCollectionIsModifiable() { - DBObject dbObject = new BasicDBObject("contactsSet", new BasicDBList()); - CollectionWrapper wrapper = converter.read(CollectionWrapper.class, dbObject); + org.bson.Document document = new org.bson.Document("contactsSet", new BasicDBList()); + CollectionWrapper wrapper = converter.read(CollectionWrapper.class, document); - assertThat(wrapper.contactsSet, is(notNullValue())); + assertThat(wrapper.contactsSet).isNotNull(); wrapper.contactsSet.add(new Contact() {}); } - /** - * @see DATAMONGO-424 - */ - @Test - public void readsPlainDBRefObject() { + @Test // DATAMONGO-424 + void readsPlainDBRefObject() { DBRef dbRef = new DBRef("foo", 2); - DBObject dbObject = new BasicDBObject("ref", dbRef); + org.bson.Document document = new org.bson.Document("ref", dbRef); - DBRefWrapper result = converter.read(DBRefWrapper.class, dbObject); - assertThat(result.ref, is(dbRef)); + DBRefWrapper result = converter.read(DBRefWrapper.class, document); + assertThat(result.ref).isEqualTo(dbRef); } - /** - * @see DATAMONGO-424 - */ - @Test - public void readsCollectionOfDBRefs() { + @Test // DATAMONGO-424 + void readsCollectionOfDBRefs() { DBRef dbRef = new DBRef("foo", 2); BasicDBList refs = new BasicDBList(); refs.add(dbRef); - DBObject dbObject = new BasicDBObject("refs", refs); + org.bson.Document document = new org.bson.Document("refs", refs); - DBRefWrapper result = converter.read(DBRefWrapper.class, dbObject); - assertThat(result.refs, hasSize(1)); - assertThat(result.refs, hasItem(dbRef)); + DBRefWrapper result = converter.read(DBRefWrapper.class, document); + assertThat(result.refs).hasSize(1); + assertThat(result.refs).contains(dbRef); } - /** - * @see DATAMONGO-424 - */ - @Test - public void readsDBRefMap() { + @Test // DATAMONGO-424 + void readsDBRefMap() { DBRef dbRef = mock(DBRef.class); - BasicDBObject refMap = new BasicDBObject("foo", dbRef); - DBObject dbObject = new BasicDBObject("refMap", refMap); + org.bson.Document refMap = new org.bson.Document("foo", dbRef); + org.bson.Document document = new org.bson.Document("refMap", refMap); - 
DBRefWrapper result = converter.read(DBRefWrapper.class, dbObject); + DBRefWrapper result = converter.read(DBRefWrapper.class, document); - assertThat(result.refMap.entrySet(), hasSize(1)); - assertThat(result.refMap.values(), hasItem(dbRef)); + assertThat(result.refMap.entrySet()).hasSize(1); + assertThat(result.refMap.values()).contains(dbRef); } - /** - * @see DATAMONGO-424 - */ - @Test + @Test // DATAMONGO-424 @SuppressWarnings({ "rawtypes", "unchecked" }) - public void resolvesDBRefMapValue() { + void resolvesDBRefMapValue() { - when(resolver.fetch(Mockito.any(DBRef.class))).thenReturn(new BasicDBObject()); + when(resolver.fetch(Mockito.any(DBRef.class))).thenReturn(new org.bson.Document()); DBRef dbRef = mock(DBRef.class); - BasicDBObject refMap = new BasicDBObject("foo", dbRef); - DBObject dbObject = new BasicDBObject("personMap", refMap); + org.bson.Document refMap = new org.bson.Document("foo", dbRef); + org.bson.Document document = new org.bson.Document("personMap", refMap); - DBRefWrapper result = converter.read(DBRefWrapper.class, dbObject); + DBRefWrapper result = converter.read(DBRefWrapper.class, document); - Matcher isPerson = instanceOf(Person.class); - - assertThat(result.personMap.entrySet(), hasSize(1)); - assertThat(result.personMap.values(), hasItem(isPerson)); + assertThat(result.personMap.entrySet()).hasSize(1); + assertThat(result.personMap.values()).anyMatch(Person.class::isInstance); } - /** - * @see DATAMONGO-462 - */ - @Test - public void writesURLsAsStringOutOfTheBox() throws Exception { + @Test // DATAMONGO-462 + void writesURLsAsStringOutOfTheBox() throws Exception { URLWrapper wrapper = new URLWrapper(); - wrapper.url = new URL("http://springsource.org"); - DBObject sink = new BasicDBObject(); + wrapper.url = new URL("https://springsource.org"); + org.bson.Document sink = new org.bson.Document(); converter.write(wrapper, sink); - assertThat(sink.get("url"), is((Object) "http://springsource.org")); + assertThat(sink.get("url")).isEqualTo("https://springsource.org"); } - /** - * @see DATAMONGO-462 - */ - @Test - public void readsURLFromStringOutOfTheBox() throws Exception { - DBObject dbObject = new BasicDBObject("url", "http://springsource.org"); - URLWrapper result = converter.read(URLWrapper.class, dbObject); - assertThat(result.url, is(new URL("http://springsource.org"))); + @Test // DATAMONGO-462 + void readsURLFromStringOutOfTheBox() throws Exception { + org.bson.Document document = new org.bson.Document("url", "https://springsource.org"); + URLWrapper result = converter.read(URLWrapper.class, document); + assertThat(result.url).isEqualTo(new URL("https://springsource.org")); } - /** - * @see DATAMONGO-485 - */ - @Test - public void writesComplexIdCorrectly() { + @Test // DATAMONGO-485 + void writesComplexIdCorrectly() { ComplexId id = new ComplexId(); id.innerId = 4711L; @@ -1189,138 +1172,132 @@ public void writesComplexIdCorrectly() { ClassWithComplexId entity = new ClassWithComplexId(); entity.complexId = id; - DBObject dbObject = new BasicDBObject(); - converter.write(entity, dbObject); + org.bson.Document document = new org.bson.Document(); + converter.write(entity, document); - Object idField = dbObject.get("_id"); - assertThat(idField, is(notNullValue())); - assertThat(idField, is(instanceOf(DBObject.class))); - assertThat(((DBObject) idField).get("innerId"), is((Object) 4711L)); + Object idField = document.get("_id"); + assertThat(idField).isNotNull(); + assertThat(idField).isInstanceOf(org.bson.Document.class); + 
+		assertThat(((org.bson.Document) idField).get("innerId")).isEqualTo(4711L);
 	}

-	/**
-	 * @see DATAMONGO-485
-	 */
-	@Test
-	public void readsComplexIdCorrectly() {
+	@Test // DATAMONGO-485
+	void readsComplexIdCorrectly() {

-		DBObject innerId = new BasicDBObject("innerId", 4711L);
-		DBObject entity = new BasicDBObject("_id", innerId);
+		org.bson.Document innerId = new org.bson.Document("innerId", 4711L);
+		org.bson.Document entity = new org.bson.Document("_id", innerId);

 		ClassWithComplexId result = converter.read(ClassWithComplexId.class, entity);

-		assertThat(result.complexId, is(notNullValue()));
-		assertThat(result.complexId.innerId, is(4711L));
+		assertThat(result.complexId).isNotNull();
+		assertThat(result.complexId.innerId).isEqualTo(4711L);
 	}

-	/**
-	 * @see DATAMONGO-489
-	 */
-	@Test
-	public void readsArraysAsMapValuesCorrectly() {
+	@Test // DATAMONGO-489
+	void readsArraysAsMapValuesCorrectly() {

 		BasicDBList list = new BasicDBList();
 		list.add("Foo");
 		list.add("Bar");

-		DBObject map = new BasicDBObject("key", list);
-		DBObject wrapper = new BasicDBObject("mapOfStrings", map);
+		org.bson.Document map = new org.bson.Document("key", list);
+		org.bson.Document wrapper = new org.bson.Document("mapOfStrings", map);

 		ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, wrapper);
-		assertThat(result.mapOfStrings, is(notNullValue()));
+		assertThat(result.mapOfStrings).isNotNull();

 		String[] values = result.mapOfStrings.get("key");
-		assertThat(values, is(notNullValue()));
-		assertThat(values, is(arrayWithSize(2)));
+		assertThat(values).isNotNull();
+		assertThat(values).hasSize(2);
 	}

-	/**
-	 * @see DATAMONGO-497
-	 */
-	@Test
-	public void readsEmptyCollectionIntoConstructorCorrectly() {
+	@Test // DATAMONGO-497
+	void readsEmptyCollectionIntoConstructorCorrectly() {

-		DBObject source = new BasicDBObject("attributes", new BasicDBList());
+		org.bson.Document source = new org.bson.Document("attributes", new BasicDBList());

 		TypWithCollectionConstructor result = converter.read(TypWithCollectionConstructor.class, source);
-		assertThat(result.attributes, is(notNullValue()));
+		assertThat(result.attributes).isNotNull();
+	}
+
+	@Test // DATAMONGO-2400
+	void writeJavaTimeValuesViaCodec() {
+
+		configureConverterWithNativeJavaTimeCodec();
+		TypeWithLocalDateTime source = new TypeWithLocalDateTime();
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target).containsEntry("date", source.date);
+	}
+
+	void configureConverterWithNativeJavaTimeCodec() {
+
+		converter = new MappingMongoConverter(resolver, mappingContext);
+
+		converter.setCustomConversions(MongoCustomConversions
+				.create(MongoCustomConversions.MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs));
+		converter.afterPropertiesSet();
 	}

 	private static void assertSyntheticFieldValueOf(Object target, Object expected) {

 		for (int i = 0; i < 10; i++) {
 			try {
-				assertThat(ReflectionTestUtils.getField(target, "this$" + i), is(expected));
+				assertThat(ReflectionTestUtils.getField(target, "this$" + i)).isEqualTo(expected);
 				return;
 			} catch (IllegalArgumentException e) {
 				// Suppress and try next
 			}
 		}

-		fail(String.format("Didn't find synthetic field on %s!", target));
+		fail(String.format("Didn't find synthetic field on %s", target));
 	}

-	/**
-	 * @see DATAMGONGO-508
-	 */
-	@Test
-	public void eagerlyReturnsDBRefObjectIfTargetAlreadyIsOne() {
+	@Test // DATAMONGO-508
+	void eagerlyReturnsDBRefObjectIfTargetAlreadyIsOne() {

 		DBRef dbRef = new DBRef("collection", "id");
 		MongoPersistentProperty property = mock(MongoPersistentProperty.class);

-		assertThat(converter.createDBRef(dbRef, property), is(dbRef));
+		assertThat(converter.createDBRef(dbRef, property)).isEqualTo(dbRef);
 	}

-	/**
-	 * @see DATAMONGO-523
-	 */
-	@Test
-	public void considersTypeAliasAnnotation() {
+	@Test // DATAMONGO-523, DATAMONGO-1509
+	void considersTypeAliasAnnotation() {

 		Aliased aliased = new Aliased();
 		aliased.name = "foo";

-		DBObject result = new BasicDBObject();
+		org.bson.Document result = new org.bson.Document();
 		converter.write(aliased, result);

-		Object type = result.get("_class");
-		assertThat(type, is(notNullValue()));
-		assertThat(type.toString(), is("_"));
+		assertTypeHint(result, "_");
 	}

-	/**
-	 * @see DATAMONGO-533
-	 */
-	@Test
-	public void marshalsThrowableCorrectly() {
+	@Test // DATAMONGO-533
+	void marshalsThrowableCorrectly() {

 		ThrowableWrapper wrapper = new ThrowableWrapper();
 		wrapper.throwable = new Exception();

-		DBObject dbObject = new BasicDBObject();
-		converter.write(wrapper, dbObject);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(wrapper, document);
 	}

-	/**
-	 * @see DATAMONGO-592
-	 */
-	@Test
-	public void recursivelyConvertsSpELReadValue() {
+	@Test // DATAMONGO-592
+	void recursivelyConvertsSpELReadValue() {

-		DBObject input = (DBObject) JSON
-				.parse("{ \"_id\" : { \"$oid\" : \"50ca271c4566a2b08f2d667a\" }, \"_class\" : \"com.recorder.TestRecorder2$ObjectContainer\", \"property\" : { \"property\" : 100 } }");
+		org.bson.Document input = org.bson.Document.parse(
+				"{ \"_id\" : { \"$oid\" : \"50ca271c4566a2b08f2d667a\" }, \"_class\" : \"com.recorder.TestRecorder2$ObjectContainer\", \"property\" : { \"property\" : 100 } }");

 		converter.read(ObjectContainer.class, input);
 	}

-	/**
-	 * @see DATAMONGO-724
-	 */
-	@Test
-	@SuppressWarnings("unchecked")
-	public void mappingConsidersCustomConvertersNotWritingTypeInformation() {
+	@Test // DATAMONGO-724
+	void mappingConsidersCustomConvertersNotWritingTypeInformation() {

 		Person person = new Person();
 		person.firstname = "Dave";

@@ -1331,24 +1308,25 @@ public void mappingConsidersCustomConvertersNotWritingTypeInformation() {
 		entity.mapOfObjects = new HashMap();
 		entity.mapOfObjects.put("foo", person);

-		CustomConversions conversions = new CustomConversions(Arrays.asList(new Converter<Person, DBObject>() {
+		CustomConversions conversions = new MongoCustomConversions(
+				Arrays.asList(new Converter<Person, org.bson.Document>() {

-			@Override
-			public DBObject convert(Person source) {
-				return new BasicDBObject().append("firstname", source.firstname)//
-						.append("_class", Person.class.getName());
-			}
+					@Override
+					public org.bson.Document convert(Person source) {
+						return new org.bson.Document().append("firstname", source.firstname)//
+								.append("_class", Person.class.getName());
+					}

-		}, new Converter<DBObject, Person>() {
+				}, new Converter<org.bson.Document, Person>() {

-			@Override
-			public Person convert(DBObject source) {
-				Person person = new Person();
-				person.firstname = source.get("firstname").toString();
-				person.lastname = "converter";
-				return person;
-			}
-		}));
+					@Override
+					public Person convert(org.bson.Document source) {
+						Person person = new Person();
+						person.firstname = source.get("firstname").toString();
+						person.lastname = "converter";
+						return person;
+					}
+				}));

 		MongoMappingContext context = new MongoMappingContext();
 		context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());

@@ -1358,39 +1336,37 @@ public Person convert(DBObject source) {
 		mongoConverter.setCustomConversions(conversions);
 		mongoConverter.afterPropertiesSet();

-		BasicDBObject dbObject = new BasicDBObject();
-		mongoConverter.write(entity, dbObject);
+		org.bson.Document document = new org.bson.Document();
+		mongoConverter.write(entity, document);

-		ClassWithMapProperty result = mongoConverter.read(ClassWithMapProperty.class, dbObject);
+		ClassWithMapProperty result = mongoConverter.read(ClassWithMapProperty.class, document);

-		assertThat(result.mapOfPersons, is(notNullValue()));
+		assertThat(result.mapOfPersons).isNotNull();
 		Person personCandidate = result.mapOfPersons.get("foo");
-		assertThat(personCandidate, is(notNullValue()));
-		assertThat(personCandidate.firstname, is("Dave"));
+		assertThat(personCandidate).isNotNull();
+		assertThat(personCandidate.firstname).isEqualTo("Dave");

-		assertThat(result.mapOfObjects, is(notNullValue()));
+		assertThat(result.mapOfObjects).isNotNull();
 		Object value = result.mapOfObjects.get("foo");
-		assertThat(value, is(notNullValue()));
-		assertThat(value, is(instanceOf(Person.class)));
-		assertThat(((Person) value).firstname, is("Dave"));
-		assertThat(((Person) value).lastname, is("converter"));
+		assertThat(value).isNotNull();
+		assertThat(value).isInstanceOf(Person.class);
+		assertThat(((Person) value).firstname).isEqualTo("Dave");
+		assertThat(((Person) value).lastname).isEqualTo("converter");
 	}

-	/**
-	 * @see DATAMONGO-743
-	 */
-	@Test
-	public void readsIntoStringsOutOfTheBox() {
+	@Test // DATAMONGO-743, DATAMONGO-2198
+	void readsIntoStringsOutOfTheBox() {

-		DBObject dbObject = new BasicDBObject("firstname", "Dave");
-		assertThat(converter.read(String.class, dbObject), is("{ \"firstname\" : \"Dave\"}"));
+		String target = converter.read(String.class, new org.bson.Document("firstname", "Dave"));
+
+		assertThat(target).startsWith("{");
+		assertThat(target).endsWith("}");
+		assertThat(target).contains("\"firstname\"");
+		assertThat(target).contains("\"Dave\"");
 	}

-	/**
-	 * @see DATAMONGO-766
-	 */
-	@Test
-	public void writesProjectingTypeCorrectly() {
+	@Test // DATAMONGO-766
+	void writesProjectingTypeCorrectly() {

 		NestedType nested = new NestedType();
 		nested.c = "C";

@@ -1400,21 +1376,17 @@ public void writesProjectingTypeCorrectly() {
 		type.foo = "bar";
 		type.a = nested;

-		BasicDBObject result = new BasicDBObject();
+		org.bson.Document result = new org.bson.Document();
 		converter.write(type, result);

-		assertThat(result.get("name"), is((Object) "name"));
-		DBObject aValue = DBObjectTestUtils.getAsDBObject(result, "a");
-		assertThat(aValue.get("b"), is((Object) "bar"));
-		assertThat(aValue.get("c"), is((Object) "C"));
+		assertThat(result.get("name")).isEqualTo((Object) "name");
+		org.bson.Document aValue = DocumentTestUtils.getAsDocument(result, "a");
+		assertThat(aValue.get("b")).isEqualTo((Object) "bar");
+		assertThat(aValue.get("c")).isEqualTo((Object) "C");
 	}

-	/**
-	 * @see DATAMONGO-812
-	 * @see DATAMONGO-893
-	 */
-	@Test
-	public void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects() {
+	@Test // DATAMONGO-812, DATAMONGO-893, DATAMONGO-1509
+	void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects() {

 		Address address = new Address();
 		address.city = "London";

@@ -1423,33 +1395,27 @@ public void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects(
 		Object result = converter.convertToMongoType(Collections.singletonList(address),
 				ClassTypeInformation.from(InterfaceType.class));

-		assertThat(result, is(instanceOf(BasicDBList.class)));
+		assertThat(result).isInstanceOf(List.class);

-		BasicDBList dbList = (BasicDBList) result;
-		assertThat(dbList, hasSize(1));
-		assertThat(getTypedValue(getAsDBObject(dbList, 0), "_class", String.class), equalTo(Address.class.getName()));
+		List dbList = (List) result;
+		assertThat(dbList).hasSize(1);
+		assertTypeHint(getAsDocument(dbList, 0), Address.class);
 	}

-	/**
-	 * @see DATAMONGO-812
-	 */
-	@Test
-	public void convertsListToBasicDBListWithoutTypeInformationForSimpleTypes() {
+	@Test // DATAMONGO-812
+	void convertsListToBasicDBListWithoutTypeInformationForSimpleTypes() {

 		Object result = converter.convertToMongoType(Collections.singletonList("foo"));

-		assertThat(result, is(instanceOf(BasicDBList.class)));
+		assertThat(result).isInstanceOf(List.class);

-		BasicDBList dbList = (BasicDBList) result;
-		assertThat(dbList, hasSize(1));
-		assertThat(dbList.get(0), instanceOf(String.class));
+		List dbList = (List) result;
+		assertThat(dbList).hasSize(1);
+		assertThat(dbList.get(0)).isInstanceOf(String.class);
 	}

-	/**
-	 * @see DATAMONGO-812
-	 */
-	@Test
-	public void convertsArrayToBasicDBListAndRetainsTypeInformationForComplexObjects() {
+	@Test // DATAMONGO-812, DATAMONGO-1509
+	void convertsArrayToBasicDBListAndRetainsTypeInformationForComplexObjects() {

 		Address address = new Address();
 		address.city = "London";

@@ -1457,81 +1423,67 @@ public void convertsArrayToBasicDBListAndRetainsTypeInformationForComplexObjects
 		Object result = converter.convertToMongoType(new Address[] { address }, ClassTypeInformation.OBJECT);

-		assertThat(result, is(instanceOf(BasicDBList.class)));
+		assertThat(result).isInstanceOf(List.class);

-		BasicDBList dbList = (BasicDBList) result;
-		assertThat(dbList, hasSize(1));
-		assertThat(getTypedValue(getAsDBObject(dbList, 0), "_class", String.class), equalTo(Address.class.getName()));
+		List dbList = (List) result;
+		assertThat(dbList).hasSize(1);
+		assertTypeHint(getAsDocument(dbList, 0), Address.class);
 	}

-	/**
-	 * @see DATAMONGO-812
-	 */
-	@Test
-	public void convertsArrayToBasicDBListWithoutTypeInformationForSimpleTypes() {
+	@Test // DATAMONGO-812
+	void convertsArrayToBasicDBListWithoutTypeInformationForSimpleTypes() {

 		Object result = converter.convertToMongoType(new String[] { "foo" });

-		assertThat(result, is(instanceOf(BasicDBList.class)));
+		assertThat(result).isInstanceOf(List.class);

-		BasicDBList dbList = (BasicDBList) result;
-		assertThat(dbList, hasSize(1));
-		assertThat(dbList.get(0), instanceOf(String.class));
+		List dbList = (List) result;
+		assertThat(dbList).hasSize(1);
+		assertThat(dbList.get(0)).isInstanceOf(String.class);
 	}

-	/**
-	 * @see DATAMONGO-833
-	 */
-	@Test
-	public void readsEnumSetCorrectly() {
+	@Test // DATAMONGO-833
+	void readsEnumSetCorrectly() {

 		BasicDBList enumSet = new BasicDBList();
 		enumSet.add("SECOND");
-		DBObject dbObject = new BasicDBObject("enumSet", enumSet);
+		org.bson.Document document = new org.bson.Document("enumSet", enumSet);

-		ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, dbObject);
+		ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document);

-		assertThat(result.enumSet, is(instanceOf(EnumSet.class)));
-		assertThat(result.enumSet.size(), is(1));
-		assertThat(result.enumSet, hasItem(SampleEnum.SECOND));
+		assertThat(result.enumSet).isInstanceOf(EnumSet.class);
+		assertThat(result.enumSet.size()).isEqualTo(1);
+		assertThat(result.enumSet).contains(SampleEnum.SECOND);
 	}

-	/**
-	 * @see DATAMONGO-833
-	 */
-	@Test
-	public void readsEnumMapCorrectly() {
+	@Test // DATAMONGO-833
+	void readsEnumMapCorrectly() {

-		BasicDBObject enumMap = new BasicDBObject("FIRST", "Dave");
-		ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, new BasicDBObject("enumMap", enumMap));
+		org.bson.Document enumMap = new org.bson.Document("FIRST", "Dave");
+		ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class,
+				new org.bson.Document("enumMap", enumMap));

-		assertThat(result.enumMap, is(instanceOf(EnumMap.class)));
-		assertThat(result.enumMap.size(), is(1));
-		assertThat(result.enumMap.get(SampleEnum.FIRST), is("Dave"));
+		assertThat(result.enumMap).isInstanceOf(EnumMap.class);
+		assertThat(result.enumMap.size()).isEqualTo(1);
+		assertThat(result.enumMap.get(SampleEnum.FIRST)).isEqualTo("Dave");
 	}

-	/**
-	 * @see DATAMONGO-887
-	 */
-	@Test
-	public void readsTreeMapCorrectly() {
+	@Test // DATAMONGO-887
+	void readsTreeMapCorrectly() {

-		DBObject person = new BasicDBObject("foo", "Dave");
-		DBObject treeMapOfPerson = new BasicDBObject("key", person);
-		DBObject document = new BasicDBObject("treeMapOfPersons", treeMapOfPerson);
+		org.bson.Document person = new org.bson.Document("foo", "Dave");
+		org.bson.Document treeMapOfPerson = new org.bson.Document("key", person);
+		org.bson.Document document = new org.bson.Document("treeMapOfPersons", treeMapOfPerson);

 		ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, document);

-		assertThat(result.treeMapOfPersons, is(notNullValue()));
-		assertThat(result.treeMapOfPersons.get("key"), is(notNullValue()));
-		assertThat(result.treeMapOfPersons.get("key").firstname, is("Dave"));
+		assertThat(result.treeMapOfPersons).isNotNull();
+		assertThat(result.treeMapOfPersons.get("key")).isNotNull();
+		assertThat(result.treeMapOfPersons.get("key").firstname).isEqualTo("Dave");
 	}

-	/**
-	 * @see DATAMONGO-887
-	 */
-	@Test
-	public void writesTreeMapCorrectly() {
+	@Test // DATAMONGO-887
+	void writesTreeMapCorrectly() {

 		Person person = new Person();
 		person.firstname = "Dave";

@@ -1540,333 +1492,270 @@ public void writesTreeMapCorrectly() {
 		source.treeMapOfPersons = new TreeMap();
 		source.treeMapOfPersons.put("key", person);

-		DBObject result = new BasicDBObject();
+		org.bson.Document result = new org.bson.Document();
 		converter.write(source, result);

-		DBObject map = getAsDBObject(result, "treeMapOfPersons");
-		DBObject entry = getAsDBObject(map, "key");
-		assertThat(entry.get("foo"), is((Object) "Dave"));
+		org.bson.Document map = getAsDocument(result, "treeMapOfPersons");
+		org.bson.Document entry = getAsDocument(map, "key");
+		assertThat(entry.get("foo")).isEqualTo("Dave");
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldWriteEntityWithGeoBoxCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldWriteEntityWithGeoBoxCorrectly() {

 		ClassWithGeoBox object = new ClassWithGeoBox();
 		object.box = new Box(new Point(1, 2), new Point(3, 4));

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		assertThat(dbo, is(notNullValue()));
-		assertThat(dbo.get("box"), is(instanceOf(DBObject.class)));
-		assertThat(dbo.get("box"), is((Object) new BasicDBObject().append("first", toDbObject(object.box.getFirst()))
-				.append("second", toDbObject(object.box.getSecond()))));
+		assertThat(document).isNotNull();
+		assertThat(document.get("box")).isInstanceOf(org.bson.Document.class);
+		assertThat(document.get("box")).isEqualTo((Object) new org.bson.Document()
+				.append("first", toDocument(object.box.getFirst())).append("second", toDocument(object.box.getSecond())));
 	}

-	private static DBObject toDbObject(Point point) {
-		return new BasicDBObject("x", point.getX()).append("y", point.getY());
+	private static org.bson.Document toDocument(Point point) {
+		return new org.bson.Document("x", point.getX()).append("y", point.getY());
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldReadEntityWithGeoBoxCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldReadEntityWithGeoBoxCorrectly() {

 		ClassWithGeoBox object = new ClassWithGeoBox();
 		object.box = new Box(new Point(1, 2), new Point(3, 4));

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		ClassWithGeoBox result = converter.read(ClassWithGeoBox.class, dbo);
+		ClassWithGeoBox result = converter.read(ClassWithGeoBox.class, document);

-		assertThat(result, is(notNullValue()));
-		assertThat(result.box, is(object.box));
+		assertThat(result).isNotNull();
+		assertThat(result.box).isEqualTo(object.box);
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldWriteEntityWithGeoPolygonCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldWriteEntityWithGeoPolygonCorrectly() {

 		ClassWithGeoPolygon object = new ClassWithGeoPolygon();
 		object.polygon = new Polygon(new Point(1, 2), new Point(3, 4), new Point(4, 5));

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		assertThat(dbo, is(notNullValue()));
+		assertThat(document).isNotNull();

-		assertThat(dbo.get("polygon"), is(instanceOf(DBObject.class)));
-		DBObject polygonDbo = (DBObject) dbo.get("polygon");
+		assertThat(document.get("polygon")).isInstanceOf(org.bson.Document.class);
+		org.bson.Document polygonDoc = (org.bson.Document) document.get("polygon");

 		@SuppressWarnings("unchecked")
-		List<DBObject> points = (List<DBObject>) polygonDbo.get("points");
+		List<org.bson.Document> points = (List<org.bson.Document>) polygonDoc.get("points");

-		assertThat(points, hasSize(3));
-		assertThat(points, Matchers.<DBObject> hasItems(toDbObject(object.polygon.getPoints().get(0)),
-				toDbObject(object.polygon.getPoints().get(1)), toDbObject(object.polygon.getPoints().get(2))));
+		assertThat(points).hasSize(3);
+		assertThat(points).contains(toDocument(object.polygon.getPoints().get(0)),
+				toDocument(object.polygon.getPoints().get(1)), toDocument(object.polygon.getPoints().get(2)));
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldReadEntityWithGeoPolygonCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldReadEntityWithGeoPolygonCorrectly() {

 		ClassWithGeoPolygon object = new ClassWithGeoPolygon();
 		object.polygon = new Polygon(new Point(1, 2), new Point(3, 4), new Point(4, 5));

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		ClassWithGeoPolygon result = converter.read(ClassWithGeoPolygon.class, dbo);
+		ClassWithGeoPolygon result = converter.read(ClassWithGeoPolygon.class, document);

-		assertThat(result, is(notNullValue()));
-		assertThat(result.polygon, is(object.polygon));
+		assertThat(result).isNotNull();
+		assertThat(result.polygon).isEqualTo(object.polygon);
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldWriteEntityWithGeoCircleCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldWriteEntityWithGeoCircleCorrectly() {

 		ClassWithGeoCircle object = new ClassWithGeoCircle();
 		Circle circle = new Circle(new Point(1, 2), 3);
 		Distance radius = circle.getRadius();
 		object.circle = circle;

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		assertThat(dbo, is(notNullValue()));
-		assertThat(dbo.get("circle"), is(instanceOf(DBObject.class)));
-		assertThat(
-				dbo.get("circle"),
-				is((Object) new BasicDBObject("center", new BasicDBObject("x", circle.getCenter().getX()).append("y", circle
-						.getCenter().getY())).append("radius", radius.getNormalizedValue()).append("metric",
-						radius.getMetric().toString())));
+		assertThat(document).isNotNull();
+		assertThat(document.get("circle")).isInstanceOf(org.bson.Document.class);
+		assertThat(document.get("circle")).isEqualTo((Object) new org.bson.Document("center",
+				new org.bson.Document("x", circle.getCenter().getX()).append("y", circle.getCenter().getY()))
+						.append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()));
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldReadEntityWithGeoCircleCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldReadEntityWithGeoCircleCorrectly() {

 		ClassWithGeoCircle object = new ClassWithGeoCircle();
 		object.circle = new Circle(new Point(1, 2), 3);

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		ClassWithGeoCircle result = converter.read(ClassWithGeoCircle.class, dbo);
+		ClassWithGeoCircle result = converter.read(ClassWithGeoCircle.class, document);

-		assertThat(result, is(notNullValue()));
-		assertThat(result.circle, is(result.circle));
+		assertThat(result).isNotNull();
+		assertThat(result.circle).isEqualTo(object.circle);
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldWriteEntityWithGeoSphereCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldWriteEntityWithGeoSphereCorrectly() {

 		ClassWithGeoSphere object = new ClassWithGeoSphere();
 		Sphere sphere = new Sphere(new Point(1, 2), 3);
 		Distance radius = sphere.getRadius();
 		object.sphere = sphere;

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		assertThat(dbo, is(notNullValue()));
-		assertThat(dbo.get("sphere"), is(instanceOf(DBObject.class)));
-		assertThat(
-				dbo.get("sphere"),
-				is((Object) new BasicDBObject("center", new BasicDBObject("x", sphere.getCenter().getX()).append("y", sphere
-						.getCenter().getY())).append("radius", radius.getNormalizedValue()).append("metric",
-						radius.getMetric().toString())));
+		assertThat(document).isNotNull();
+		assertThat(document.get("sphere")).isInstanceOf(org.bson.Document.class);
+		assertThat(document.get("sphere")).isEqualTo((Object) new org.bson.Document("center",
+				new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY()))
+						.append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()));
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldWriteEntityWithGeoSphereWithMetricDistanceCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldWriteEntityWithGeoSphereWithMetricDistanceCorrectly() {

 		ClassWithGeoSphere object = new ClassWithGeoSphere();
 		Sphere sphere = new Sphere(new Point(1, 2), new Distance(3, Metrics.KILOMETERS));
 		Distance radius = sphere.getRadius();
 		object.sphere = sphere;

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		assertThat(dbo, is(notNullValue()));
-		assertThat(dbo.get("sphere"), is(instanceOf(DBObject.class)));
-		assertThat(
-				dbo.get("sphere"),
-				is((Object) new BasicDBObject("center", new BasicDBObject("x", sphere.getCenter().getX()).append("y", sphere
-						.getCenter().getY())).append("radius", radius.getNormalizedValue()).append("metric",
-						radius.getMetric().toString())));
+		assertThat(document).isNotNull();
+		assertThat(document.get("sphere")).isInstanceOf(org.bson.Document.class);
+		assertThat(document.get("sphere")).isEqualTo((Object) new org.bson.Document("center",
+				new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY()))
+						.append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()));
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldReadEntityWithGeoSphereCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldReadEntityWithGeoSphereCorrectly() {

 		ClassWithGeoSphere object = new ClassWithGeoSphere();
 		object.sphere = new Sphere(new Point(1, 2), 3);

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		ClassWithGeoSphere result = converter.read(ClassWithGeoSphere.class, dbo);
+		ClassWithGeoSphere result = converter.read(ClassWithGeoSphere.class, document);

-		assertThat(result, is(notNullValue()));
-		assertThat(result.sphere, is(object.sphere));
+		assertThat(result).isNotNull();
+		assertThat(result.sphere).isEqualTo(object.sphere);
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	public void shouldWriteEntityWithGeoShapeCorrectly() {
+	@Test // DATAMONGO-858
+	void shouldWriteEntityWithGeoShapeCorrectly() {

 		ClassWithGeoShape object = new ClassWithGeoShape();
 		Sphere sphere = new Sphere(new Point(1, 2), 3);
 		Distance radius = sphere.getRadius();
 		object.shape = sphere;

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		assertThat(dbo, is(notNullValue()));
-		assertThat(dbo.get("shape"), is(instanceOf(DBObject.class)));
-		assertThat(
-				dbo.get("shape"),
-				is((Object) new BasicDBObject("center", new BasicDBObject("x", sphere.getCenter().getX()).append("y", sphere
-						.getCenter().getY())).append("radius", radius.getNormalizedValue()).append("metric",
-						radius.getMetric().toString())));
+		assertThat(document).isNotNull();
+		assertThat(document.get("shape")).isInstanceOf(org.bson.Document.class);
+		assertThat(document.get("shape")).isEqualTo((Object) new org.bson.Document("center",
+				new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY()))
+						.append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()));
 	}

-	/**
-	 * @DATAMONGO-858
-	 */
-	@Test
-	@Ignore
-	public void shouldReadEntityWithGeoShapeCorrectly() {
+	@Test // DATAMONGO-858
+	@Disabled
+	void shouldReadEntityWithGeoShapeCorrectly() {

 		ClassWithGeoShape object = new ClassWithGeoShape();
 		Sphere sphere = new Sphere(new Point(1, 2), 3);
 		object.shape = sphere;

-		DBObject dbo = new BasicDBObject();
-		converter.write(object, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(object, document);

-		ClassWithGeoShape result = converter.read(ClassWithGeoShape.class, dbo);
+		ClassWithGeoShape result = converter.read(ClassWithGeoShape.class, document);

-		assertThat(result, is(notNullValue()));
-		assertThat(result.shape, is((Shape) sphere));
+		assertThat(result).isNotNull();
+		assertThat(result.shape).isEqualTo(sphere);
 	}

-	/**
-	 * @see DATAMONGO-976
-	 */
-	@Test
-	public void shouldIgnoreTextScorePropertyWhenWriting() {
+	@Test // DATAMONGO-976
+	void shouldIgnoreTextScorePropertyWhenWriting() {

 		ClassWithTextScoreProperty source = new ClassWithTextScoreProperty();
 		source.score = Float.MAX_VALUE;

-		BasicDBObject dbo = new BasicDBObject();
-		converter.write(source, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(source, document);

-		assertThat(dbo.get("score"), nullValue());
+		assertThat(document.get("score")).isNull();
 	}

-	/**
-	 * @see DATAMONGO-976
-	 */
-	@Test
-	public void shouldIncludeTextScorePropertyWhenReading() {
+	@Test // DATAMONGO-976
+	void shouldIncludeTextScorePropertyWhenReading() {

-		ClassWithTextScoreProperty entity = converter
-				.read(ClassWithTextScoreProperty.class, new BasicDBObject("score", 5F));
-		assertThat(entity.score, equalTo(5F));
+		ClassWithTextScoreProperty entity = converter.read(ClassWithTextScoreProperty.class,
+				new org.bson.Document("score", 5F));
+		assertThat(entity.score).isEqualTo(5F);
 	}

-	/**
-	 * @see DATAMONGO-1001
-	 */
-	@Test
-	public void shouldWriteCglibProxiedClassTypeInformationCorrectly() {
+	@Test // DATAMONGO-1001, DATAMONGO-1509
+	void shouldWriteCglibProxiedClassTypeInformationCorrectly() {

 		ProxyFactory factory = new ProxyFactory();
 		factory.setTargetClass(GenericType.class);
 		factory.setProxyTargetClass(true);

 		GenericType proxied = (GenericType) factory.getProxy();
-		BasicDBObject dbo = new BasicDBObject();
-		converter.write(proxied, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(proxied, document);

-		assertThat(dbo.get("_class"), is((Object) GenericType.class.getName()));
+		assertTypeHint(document, GenericType.class);
 	}

-	/**
-	 * @see DATAMONGO-1001
-	 */
-	@Test
-	public void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() {
+	@Test // DATAMONGO-1001
+	void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() {

 		LazyLoadingProxy mock = mock(LazyLoadingProxy.class);

-		BasicDBObject dbo = new BasicDBObject();
-		converter.write(mock, dbo);
+		org.bson.Document document = new org.bson.Document();
+		converter.write(mock, document);

 		verify(mock, times(1)).getTarget();
 	}

-	/**
-	 * @see DATAMONGO-1034
-	 */
-	@Test
-	public void rejectsBasicDbListToBeConvertedIntoComplexType() {
+	@Test // DATAMONGO-1034
+	void rejectsBasicDbListToBeConvertedIntoComplexType() {

-		BasicDBList inner = new BasicDBList();
+		List<Object> inner = new ArrayList<>();
 		inner.add("key");
 		inner.add("value");

-		BasicDBList outer = new BasicDBList();
+		List<Object> outer = new ArrayList<>();
 		outer.add(inner);
 		outer.add(inner);

-		BasicDBObject source = new BasicDBObject("attributes", outer);
+		org.bson.Document source = new org.bson.Document("attributes", outer);

-		exception.expect(MappingException.class);
-		exception.expectMessage(Item.class.getName());
-		exception.expectMessage(BasicDBList.class.getName());
-
-		converter.read(Item.class, source);
+		assertThatExceptionOfType(MappingException.class).isThrownBy(() -> converter.read(Item.class, source));
 	}

-	/**
-	 * @see DATAMONGO-1058
-	 */
-	@Test
-	public void readShouldRespectExplicitFieldNameForDbRef() {
+	@Test // DATAMONGO-1058
+	void readShouldRespectExplicitFieldNameForDbRef() {

-		BasicDBObject source = new BasicDBObject();
+		org.bson.Document source = new org.bson.Document();
 		source.append("explict-name-for-db-ref", new DBRef("foo", "1"));

 		converter.read(ClassWithExplicitlyNamedDBRefProperty.class, source);

@@ -1875,164 +1764,136 @@ public void readShouldRespectExplicitFieldNameForDbRef() {
 				Mockito.any(DbRefResolverCallback.class), Mockito.any(DbRefProxyHandler.class));
 	}

-	/**
-	 * @see DATAMONGO-1050
-	 */
-	@Test
-	public void writeShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() {
+	@Test // DATAMONGO-1050
+	void writeShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() {

 		RootForClassWithExplicitlyRenamedIdField source = new RootForClassWithExplicitlyRenamedIdField();
 		source.id = "rootId";
 		source.nested = new ClassWithExplicitlyRenamedField();
 		source.nested.id = "nestedId";

-		DBObject sink = new BasicDBObject();
+		org.bson.Document sink = new org.bson.Document();
 		converter.write(source, sink);

-		assertThat((String) sink.get("_id"), is("rootId"));
-		assertThat((DBObject) sink.get("nested"), is(new BasicDBObjectBuilder().add("id", "nestedId").get()));
+		assertThat(sink.get("_id")).isEqualTo("rootId");
+		assertThat(sink.get("nested")).isEqualTo(new org.bson.Document().append("id", "nestedId"));
 	}

-	/**
-	 * @see DATAMONGO-1050
-	 */
-	@Test
-	public void readShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() {
+	@Test // DATAMONGO-1050
+	void readShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() {

-		DBObject source = new BasicDBObjectBuilder().add("_id", "rootId")
-				.add("nested", new BasicDBObject("id", "nestedId")).get();
+		org.bson.Document source = new org.bson.Document().append("_id", "rootId").append("nested",
+				new org.bson.Document("id", "nestedId"));

 		RootForClassWithExplicitlyRenamedIdField sink = converter.read(RootForClassWithExplicitlyRenamedIdField.class,
 				source);

-		assertThat(sink.id, is("rootId"));
-		assertThat(sink.nested, notNullValue());
-		assertThat(sink.nested.id, is("nestedId"));
+		assertThat(sink.id).isEqualTo("rootId");
+		assertThat(sink.nested).isNotNull();
+		assertThat(sink.nested.id).isEqualTo("nestedId");
 	}

-	/**
-	 * @see DATAMONGO-1050
-	 */
-	@Test
-	public void namedIdFieldShouldExtractValueFromUnderscoreIdField() {
+	@Test // DATAMONGO-1050
+	void namedIdFieldShouldExtractValueFromUnderscoreIdField() {

-		DBObject dbo = new BasicDBObjectBuilder().add("_id", "A").add("id", "B").get();
+		org.bson.Document document = new org.bson.Document().append("_id", "A").append("id", "B");

-		ClassWithNamedIdField withNamedIdField = converter.read(ClassWithNamedIdField.class, dbo);
+		ClassWithNamedIdField withNamedIdField = converter.read(ClassWithNamedIdField.class, document);

-		assertThat(withNamedIdField.id, is("A"));
+		assertThat(withNamedIdField.id).isEqualTo("A");
 	}

-	/**
-	 * @see DATAMONGO-1050
-	 */
-	@Test
-	public void explicitlyRenamedIfFieldShouldExtractValueFromIdField() {
+	@Test // DATAMONGO-1050
+	void explicitlyRenamedIfFieldShouldExtractValueFromIdField() {

-		DBObject dbo = new BasicDBObjectBuilder().add("_id", "A").add("id", "B").get();
+		org.bson.Document document = new org.bson.Document().append("_id", "A").append("id", "B");

 		ClassWithExplicitlyRenamedField withExplicitlyRenamedField = converter.read(ClassWithExplicitlyRenamedField.class,
-				dbo);
+				document);

-		assertThat(withExplicitlyRenamedField.id, is("B"));
+		assertThat(withExplicitlyRenamedField.id).isEqualTo("B");
 	}

-	/**
-	 * @see DATAMONGO-1050
-	 */
-	@Test
-	public void annotatedIdFieldShouldExtractValueFromUnderscoreIdField() {
+	@Test // DATAMONGO-1050
+	void annotatedIdFieldShouldExtractValueFromUnderscoreIdField() {

-		DBObject dbo = new BasicDBObjectBuilder().add("_id", "A").add("id", "B").get();
+		org.bson.Document document = new org.bson.Document().append("_id", "A").append("id", "B");

-		ClassWithAnnotatedIdField withAnnotatedIdField = converter.read(ClassWithAnnotatedIdField.class, dbo);
+		ClassWithAnnotatedIdField withAnnotatedIdField = converter.read(ClassWithAnnotatedIdField.class, document);

-		assertThat(withAnnotatedIdField.key, is("A"));
+		assertThat(withAnnotatedIdField.key).isEqualTo("A");
 	}

-	/**
-	 * @see DATAMONGO-1102
-	 */
-	@Test
-	public void convertsJava8DateTimeTypesToDateAndBack() {
+	@Test // DATAMONGO-1102
+	void convertsJava8DateTimeTypesToDateAndBack() {

 		TypeWithLocalDateTime source = new TypeWithLocalDateTime();
 		LocalDateTime reference = source.date;

-		BasicDBObject result = new BasicDBObject();
+		org.bson.Document result = new org.bson.Document();
 		converter.write(source, result);

-		assertThat(result.get("date"), is(instanceOf(Date.class)));
-		assertThat(converter.read(TypeWithLocalDateTime.class, result).date, is(reference));
+		assertThat(result.get("date")).isInstanceOf(Date.class);
+		assertThat(converter.read(TypeWithLocalDateTime.class, result).date)
+				.isEqualTo(reference.truncatedTo(ChronoUnit.MILLIS));
 	}

-	/**
-	 * @see DATAMONGO-1128
-	 */
-	@Test
-	public void writesOptionalsCorrectly() {
+	@Test // DATAMONGO-1128
+	@Disabled("really we should find a solution for this")
+	void writesOptionalsCorrectly() {

 		TypeWithOptional type = new TypeWithOptional();
 		type.localDateTime = Optional.of(LocalDateTime.now());

-		DBObject result = new BasicDBObject();
+		org.bson.Document result = new org.bson.Document();

 		converter.write(type, result);

-		assertThat(getAsDBObject(result, "string"), is((DBObject) new BasicDBObject()));
+		assertThat(getAsDocument(result, "string")).isEqualTo(new org.bson.Document());

-		DBObject localDateTime = getAsDBObject(result, "localDateTime");
-		assertThat(localDateTime.get("value"), is(instanceOf(Date.class)));
+		org.bson.Document localDateTime = getAsDocument(result, "localDateTime");
+		assertThat(localDateTime.get("value")).isInstanceOf(Date.class);
 	}

-	/**
-	 * @see DATAMONGO-1128
-	 */
-	@Test
-	public void readsOptionalsCorrectly() {
+	@Test // DATAMONGO-1128
+	@Disabled("Broken by DATAMONGO-1992 - In fact, storing Optional fields seems an anti-pattern.")
+	void readsOptionalsCorrectly() {

 		LocalDateTime now = LocalDateTime.now();
 		Date reference = Date.from(now.atZone(systemDefault()).toInstant());

-		BasicDBObject optionalOfLocalDateTime = new BasicDBObject("value", reference);
-		DBObject result = new BasicDBObject("localDateTime", optionalOfLocalDateTime);
+		org.bson.Document optionalOfLocalDateTime = new org.bson.Document("value", reference);
+		org.bson.Document result = new org.bson.Document("localDateTime", optionalOfLocalDateTime);

 		TypeWithOptional read = converter.read(TypeWithOptional.class, result);

-		assertThat(read.string, is(Optional.<String> empty()));
-		assertThat(read.localDateTime, is(Optional.of(now)));
+		assertThat(read.string).isEmpty();
+		assertThat(read.localDateTime).isEqualTo(Optional.of(now));
 	}

-	/**
-	 * @see DATAMONGO-1118
-	 */
-	@Test
-	@SuppressWarnings("unchecked")
-	public void convertsMapKeyUsingCustomConverterForAndBackwards() {
+	@Test // DATAMONGO-1118
+	void convertsMapKeyUsingCustomConverterForAndBackwards() {

 		MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
-		converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter(),
-				new StringToFooNumConverter())));
+		converter.setCustomConversions(
+				new MongoCustomConversions(Arrays.asList(new FooBarEnumToStringConverter(), new StringToFooNumConverter())));
 		converter.afterPropertiesSet();

 		ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
 		source.map = new HashMap();
 		source.map.put(FooBarEnum.FOO, "wohoo");

-		DBObject target = new BasicDBObject();
+		org.bson.Document target = new org.bson.Document();
 		converter.write(source, target);

-		assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map, is(source.map));
+		assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map).isEqualTo(source.map);
 	}

-	/**
-	 * @see DATAMONGO-1118
-	 */
-	@Test
-	public void writesMapKeyUsingCustomConverter() {
+	@Test // DATAMONGO-1118
+	void writesMapKeyUsingCustomConverter() {

 		MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
-		converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter())));
+		converter.setCustomConversions(new MongoCustomConversions(Arrays.asList(new FooBarEnumToStringConverter())));
 		converter.afterPropertiesSet();

 		ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
@@ -2040,376 +1901,2766 @@ public void writesMapKeyUsingCustomConverter() {
 		source.map.put(FooBarEnum.FOO, "spring");
 		source.map.put(FooBarEnum.BAR, "data");

-		DBObject target = new BasicDBObject();
+		org.bson.Document target = new org.bson.Document();
 		converter.write(source, target);

-		DBObject map = DBObjectTestUtils.getAsDBObject(target, "map");
+		org.bson.Document map = DocumentTestUtils.getAsDocument(target, "map");

-		assertThat(map.containsField("foo-enum-value"), is(true));
-		assertThat(map.containsField("bar-enum-value"), is(true));
+		assertThat(map.containsKey("foo-enum-value")).isTrue();
+		assertThat(map.containsKey("bar-enum-value")).isTrue();
 	}

-	/**
-	 * @see DATAMONGO-1118
-	 */
-	@Test
-	public void readsMapKeyUsingCustomConverter() {
+	@Test // DATAMONGO-1118
+	void readsMapKeyUsingCustomConverter() {

 		MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
-		converter.setCustomConversions(new CustomConversions(Arrays.asList(new StringToFooNumConverter())));
+		converter.setCustomConversions(new MongoCustomConversions(Arrays.asList(new StringToFooNumConverter())));
 		converter.afterPropertiesSet();

-		DBObject source = new BasicDBObject("map", new BasicDBObject("foo-enum-value", "spring"));
+		org.bson.Document source = new org.bson.Document("map", new org.bson.Document("foo-enum-value", "spring"));

 		ClassWithMapUsingEnumAsKey target = converter.read(ClassWithMapUsingEnumAsKey.class, source);

-		assertThat(target.map.get(FooBarEnum.FOO), is("spring"));
+		assertThat(target.map.get(FooBarEnum.FOO)).isEqualTo("spring");
 	}

-	static class GenericType<T> {
-		T content;
+	@Test // DATAMONGO-1471
+	void readsDocumentWithPrimitiveIdButNoValue() {
+		assertThat(converter.read(ClassWithIntId.class, new org.bson.Document())).isNotNull();
 	}

-	static class ClassWithEnumProperty {
+	@Test // DATAMONGO-1497
+	void readsPropertyFromNestedFieldCorrectly() {

-		SampleEnum sampleEnum;
-		List<SampleEnum> enums;
-		EnumSet<SampleEnum> enumSet;
-		EnumMap<SampleEnum, String> enumMap;
+		org.bson.Document source = new org.bson.Document("nested", new org.bson.Document("sample", "value"));
+		TypeWithPropertyInNestedField result = converter.read(TypeWithPropertyInNestedField.class, source);
+
+		assertThat(result.sample).isEqualTo("value");
 	}

-	static enum SampleEnum {
-		FIRST {
-			@Override
-			void method() {}
-		},
-		SECOND {
-			@Override
-			void method() {
+	@Test // DATAMONGO-1525
+	void readsEmptyEnumSet() {

-			}
-		};
+		org.bson.Document source = new org.bson.Document("enumSet", Collections.emptyList());

-		abstract void method();
+		assertThat(converter.read(ClassWithEnumProperty.class, source).enumSet).isEqualTo(EnumSet.noneOf(SampleEnum.class));
 	}

-	static interface InterfaceType {
+	@Test // DATAMONGO-1757
+	void failsReadingDocumentIntoSimpleType() {

-	}
+		org.bson.Document nested = new org.bson.Document("key", "value");
+		org.bson.Document source = new org.bson.Document("map", new org.bson.Document("key", nested));

-	static class Address implements InterfaceType {
-		String street;
-		String city;
+		assertThatExceptionOfType(MappingException.class)
+				.isThrownBy(() -> converter.read(TypeWithMapOfLongValues.class, source));
 	}

-	interface Contact {
+	@Test // DATAMONGO-1831
+	void shouldConvertArrayInConstructorCorrectly() {
+
+		org.bson.Document source = new org.bson.Document("array", Collections.emptyList());
+
+		assertThat(converter.read(WithArrayInConstructor.class, source).array).isEmpty();
 	}

-	static class Person implements Contact {
+	@Test // DATAMONGO-1831
+	void shouldConvertNullForArrayInConstructorCorrectly() {

-		@Id String id;
+		org.bson.Document source = new org.bson.Document();

-		LocalDate birthDate;
+		assertThat(converter.read(WithArrayInConstructor.class, source).array).isNull();
+	}

-		@Field("foo") String firstname;
-		String lastname;
+	@Test // DATAMONGO-1898
+	void writesInterfaceBackedEnumsToSimpleNameByDefault() {

-		Set<Address> addresses;
+		org.bson.Document document = new org.bson.Document();

-		public Person() {
+		DocWithInterfacedEnum source = new DocWithInterfacedEnum();
+		source.property = InterfacedEnum.INSTANCE;

-		}
+		converter.write(source, document);

-		@PersistenceConstructor
-		public Person(Set<Address> addresses) {
-			this.addresses = addresses;
-		}
+		assertThat(document) //
+				.hasSize(2) //
+				.hasEntrySatisfying("_class", __ -> {}) //
+				.hasEntrySatisfying("property", value -> InterfacedEnum.INSTANCE.name().equals(value));
 	}

-	static class ClassWithSortedMap {
-		SortedMap<String, String> map;
-	}
+	@Test // DATAMONGO-1898
+	void rejectsConversionFromStringToEnumBackedInterface() {

-	static class ClassWithMapProperty {
-		Map<Locale, String> map;
-		Map<String, List<String>> mapOfLists;
-		Map<String, Object> mapOfObjects;
-		Map<String, String[]> mapOfStrings;
-		Map<String, Person> mapOfPersons;
-		TreeMap<String, Person> treeMapOfPersons;
-	}
+		org.bson.Document document = new org.bson.Document("property", InterfacedEnum.INSTANCE.name());

-	static class ClassWithNestedMaps {
-		Map<String, Map<String, Map<String, String>>> nestedMaps;
+		assertThatExceptionOfType(ConverterNotFoundException.class) //
+				.isThrownBy(() -> converter.read(DocWithInterfacedEnum.class, document));
 	}

-	static class BirthDateContainer {
-		LocalDate birthDate;
-	}
+	@Test // DATAMONGO-1898
+	void readsInterfacedEnumIfConverterIsRegistered() {

-	static class BigDecimalContainer {
-		BigDecimal value;
-		Map<String, BigDecimal> map;
-		List<BigDecimal> collection;
-	}
+		org.bson.Document document = new org.bson.Document("property", InterfacedEnum.INSTANCE.name());

-	static class CollectionWrapper {
-		List<Contact> contacts;
-		List<List<String>> strings;
-		List<Map<String, Locale>> listOfMaps;
-		Set<Contact> contactsSet;
-	}
+		Converter<String, SomeInterface> enumConverter = new Converter<String, SomeInterface>() {

-	static class LocaleWrapper {
-		Locale locale;
-	}
+			@Override
+			public SomeInterface convert(String source) {
+				return InterfacedEnum.valueOf(source);
+			}
+		};

-	static class ClassWithBigIntegerId {
-		@Id BigInteger id;
+		converter.setCustomConversions(new MongoCustomConversions(Collections.singletonList(enumConverter)));
+		converter.afterPropertiesSet();
+
+		DocWithInterfacedEnum result = converter.read(DocWithInterfacedEnum.class, document);
+
+		assertThat(result.property).isEqualTo(InterfacedEnum.INSTANCE);
 	}

-	static class A<T> {
+	@Test // DATAMONGO-1904
+	void readsNestedArraysCorrectly() {

-		String valueType;
-		T value;
+		List<List<List<Float>>> floats = Collections.singletonList(Collections.singletonList(Arrays.asList(1.0f, 2.0f)));

-		public A(T value) {
-			this.valueType = value.getClass().getName();
-			this.value = value;
-		}
-	}
+		org.bson.Document document = new org.bson.Document("nestedFloats", floats);

-	static class ClassWithIntId {
+		WithNestedLists result = converter.read(WithNestedLists.class, document);

-		@Id int id;
+		assertThat(result.nestedFloats).hasDimensions(1, 1).isEqualTo(new float[][][] { { { 1.0f, 2.0f } } });
 	}

-	static class DefaultedConstructorArgument {
+	@Test // DATAMONGO-1992
+	void readsImmutableObjectCorrectly() {

-		String foo;
-		int bar;
-		double foobar;
+		org.bson.Document document = new org.bson.Document("_id", "foo");

-		DefaultedConstructorArgument(String foo, @Value("#root.something ?: -1") int bar, double foobar) {
-			this.foo = foo;
-			this.bar = bar;
-			this.foobar = foobar;
-		}
-	}
+		ImmutableObject result = converter.read(ImmutableObject.class, document);

-	static class Item {
-		List<Attribute> attributes;
+		assertThat(result.id).isEqualTo("foo");
+		assertThat(result.witherUsed).isTrue();
 	}

-	static class Attribute {
-		String key;
-		Object value;
-	}
+	@Test // DATAMONGO-2026
+	void readsImmutableObjectWithConstructorIdPropertyCorrectly() {

-	static class Outer {
+		org.bson.Document source = new org.bson.Document("_id", "spring").append("value", "data");

-		class Inner {
-			String value;
-		}
+		ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod target = converter
+				.read(ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod.class, source);

-		Inner inner;
+		assertThat(target.id).isEqualTo("spring");
+		assertThat(target.value).isEqualTo("data");
 	}

-	static class DBRefWrapper {
+	@Test // DATAMONGO-2011
+	void readsNestedListsToObjectCorrectly() {

-		DBRef ref;
-		List<DBRef> refs;
-		Map<String, DBRef> refMap;
-		Map<String, Person> personMap;
-	}
+		List<String> values = Arrays.asList("ONE", "TWO");
+		org.bson.Document source = new org.bson.Document("value", Collections.singletonList(values));

-	static class URLWrapper {
-		URL url;
+		assertThat(converter.read(Attribute.class, source).value).isInstanceOf(List.class);
 	}

-	static class ClassWithComplexId {
+	@Test // DATAMONGO-2043
+	void omitsTypeHintWhenWritingSimpleTypes() {

-		@Id ComplexId complexId;
-	}
+		org.bson.Document target = new org.bson.Document();
+		converter.write(new org.bson.Document("value", "FitzChivalry"), target);

-	static class ComplexId {
-		Long innerId;
+		assertThat(target).doesNotContainKeys("_class");
 	}

-	static class TypWithCollectionConstructor {
-
-		List<Attribute> attributes;
+	@Test // DATAMONGO-1798
+	void convertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsObjectId() {

-		public TypWithCollectionConstructor(List<Attribute> attributes) {
-			this.attributes = attributes;
-		}
+		ObjectId source = new ObjectId();
+		assertThat(converter.convertId(source.toHexString(), ObjectId.class)).isEqualTo(source);
 	}

-	@TypeAlias("_")
-	static class Aliased {
-		String name;
+	@Test // DATAMONGO-1798
+	void donNotConvertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsString() {
+
+		ObjectId source = new ObjectId();
+		assertThat(converter.convertId(source.toHexString(), String.class)).isEqualTo(source.toHexString());
 	}

-	static class ThrowableWrapper {
+	@Test // DATAMONGO-1798
+	void donNotConvertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsObject() {

-		Throwable throwable;
+		ObjectId source = new ObjectId();
+		assertThat(converter.convertId(source.toHexString(), Object.class)).isEqualTo(source.toHexString());
+	}
+
+	@Test // DATAMONGO-2135
+	void addsEqualObjectsToCollection() {
+
+		org.bson.Document itemDocument = new org.bson.Document("itemKey", "123");
+		org.bson.Document orderDocument = new org.bson.Document("items",
+				Arrays.asList(itemDocument, itemDocument, itemDocument));
+
+		Order order = converter.read(Order.class, orderDocument);
+
+		assertThat(order.items).hasSize(3);
+	}
+
+	@Test // DATAMONGO-1849
+	void mapsValueToExplicitTargetType() {
+
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.script = "if (a > b) a else b";
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target.get("script")).isEqualTo(new Code(source.script));
+	}
+
+	@Test // DATAMONGO-2328
+	void readsScriptAsStringWhenAnnotatedWithFieldTargetType() {
+
+		String reference = "if (a > b) a else b";
+		WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class,
+				new org.bson.Document("script", new Code(reference)));
+
+		assertThat(target.script).isEqualTo(reference);
+	}
+
+	@Test // DATAMONGO-1849
+	void mapsCollectionValueToExplicitTargetType() {
+
+		String script = "if (a > b) a else b";
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.scripts = Collections.singletonList(script);
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target.get("scripts", List.class)).containsExactly(new Code(script));
+	}
+
+	@Test // DATAMONGO-1849
+	void mapsBigDecimalToDecimal128WhenAnnotatedWithFieldTargetType() {
+
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.bigDecimal = BigDecimal.valueOf(3.14159D);
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target.get("bigDecimal")).isEqualTo(new Decimal128(source.bigDecimal));
+	}
+
+	@Test // DATAMONGO-2328
+	void mapsDateToLongWhenAnnotatedWithFieldTargetType() {
+
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.dateAsLong = new Date();
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target.get("dateAsLong")).isEqualTo(source.dateAsLong.getTime());
+	}
+
+	@Test // DATAMONGO-2328
+	void readsLongAsDateWhenAnnotatedWithFieldTargetType() {
+
+		Date reference = new Date();
+		WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class,
+				new org.bson.Document("dateAsLong", reference.getTime()));
+
+		assertThat(target.dateAsLong).isEqualTo(reference);
+	}
+
+	@Test // DATAMONGO-2328
+	void mapsLongToDateWhenAnnotatedWithFieldTargetType() {
+
+		Date date = new Date();
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.longAsDate = date.getTime();
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target.get("longAsDate")).isEqualTo(date);
+	}
+
+	@Test // DATAMONGO-2328
+	void readsDateAsLongWhenAnnotatedWithFieldTargetType() {
+
+		Date reference = new Date();
+		WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class,
+				new org.bson.Document("longAsDate", reference));
+
+		assertThat(target.longAsDate).isEqualTo(reference.getTime());
+	}
+
+	@Test // DATAMONGO-2328
+	void mapsStringAsBooleanWhenAnnotatedWithFieldTargetType() {
+
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.stringAsBoolean = "true";
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		assertThat(target.get("stringAsBoolean")).isEqualTo(true);
+	}
+
+	@Test // DATAMONGO-2328
+	void readsBooleanAsStringWhenAnnotatedWithFieldTargetType() {
+
+		WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class,
+				new org.bson.Document("stringAsBoolean", true));
+
+		assertThat(target.stringAsBoolean).isEqualTo("true");
+	}
+
+	@Test // DATAMONGO-2328
+	void mapsDateAsObjectIdWhenAnnotatedWithFieldTargetType() {
+
+		WithExplicitTargetTypes source = new WithExplicitTargetTypes();
+		source.dateAsObjectId = new Date();
+
+		org.bson.Document target = new org.bson.Document();
+		converter.write(source, target);
+
+		// need to compare the timestamp as ObjectId has an internal counter
+		assertThat(target.get("dateAsObjectId", ObjectId.class).getTimestamp())
+				.isEqualTo(new ObjectId(source.dateAsObjectId).getTimestamp());
+	}
+
+	@Test // DATAMONGO-2328
+	void readsObjectIdAsDateWhenAnnotatedWithFieldTargetType() {
+
+		ObjectId reference = new ObjectId();
+		WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class,
+				new org.bson.Document("dateAsObjectId", reference));
+
+		assertThat(target.dateAsObjectId).isEqualTo(new Date(reference.getTimestamp()));
+	}
+
+	@Test // DATAMONGO-2410
+	void shouldAllowReadingBackDbObject() {
+
+		assertThat(converter.read(BasicDBObject.class, new org.bson.Document("property", "value")))
+				.isEqualTo(new BasicDBObject("property", "value"));
+		assertThat(converter.read(DBObject.class, new org.bson.Document("property", "value")))
+				.isEqualTo(new BasicDBObject("property", "value"));
+	}
+
+	@Test // DATAMONGO-2479
+	void entityCallbacksAreNotSetByDefault() {
{ + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNull(); + } + + @Test // DATAMONGO-2479 + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + + ApplicationContext ctx = new StaticApplicationContext(); + converter.setApplicationContext(ctx); + + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNotNull(); + } + + @Test // DATAMONGO-2479 + void setterForEntityCallbackOverridesContextInitializedOnes() { + + ApplicationContext ctx = new StaticApplicationContext(); + converter.setApplicationContext(ctx); + + EntityCallbacks callbacks = EntityCallbacks.create(); + converter.setEntityCallbacks(callbacks); + + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2479 + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + + EntityCallbacks callbacks = EntityCallbacks.create(); + ApplicationContext ctx = new StaticApplicationContext(); + + converter.setEntityCallbacks(callbacks); + converter.setApplicationContext(ctx); + + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2479 + void resolveDBRefMapValueShouldInvokeCallbacks() { + + AfterConvertCallback afterConvertCallback = spy(new ReturningAfterConvertCallback()); + converter.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + when(resolver.fetch(Mockito.any(DBRef.class))).thenReturn(new org.bson.Document()); + DBRef dbRef = mock(DBRef.class); + + org.bson.Document refMap = new org.bson.Document("foo", dbRef); + org.bson.Document document = new org.bson.Document("personMap", refMap); + + DBRefWrapper result = converter.read(DBRefWrapper.class, document); + + verify(afterConvertCallback).onAfterConvert(eq(result.personMap.get("foo")), eq(new org.bson.Document()), any()); + } + + @Test // DATAMONGO-2300 + void readAndConvertDBRefNestedByMapCorrectly() { + + org.bson.Document cluster = new org.bson.Document("_id", 100L); + DBRef dbRef = new DBRef("clusters", 100L); + + org.bson.Document data = new org.bson.Document("_id", 3L); + data.append("cluster", dbRef); + + MappingMongoConverter spyConverter = spy(converter); + Mockito.doReturn(cluster).when(spyConverter).readRef(dbRef); + + Map result = spyConverter.readMap(spyConverter.getConversionContext(ObjectPath.ROOT), data, + ClassTypeInformation.MAP); + + assertThat(((Map) result.get("cluster")).get("_id")).isEqualTo(100L); + } + + @Test // GH-3546 + void readFlattensNestedDocumentToStringIfNecessary() { + + org.bson.Document source = new org.bson.Document("s", + new org.bson.Document("json", "string").append("_id", UUID.randomUUID())); + + Address target = converter.read(Address.class, source); + assertThat(target.street).isNotNull(); + } + + @Test // DATAMONGO-1902 + void writeFlattensUnwrappedType() { + + WithNullableUnwrapped source = new WithNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new EmbeddableType(); + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.transientValue = "must-not-be-written"; + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("_id", "id-1") // + .containsEntry("stringValue", "string-val") // + .containsEntry("listValue", Arrays.asList("list-val-1", "list-val-2")) // 
+ .containsEntry("with-at-field-annotation", "@Field") // + .doesNotContainKey("embeddableValue") // + .doesNotContainKey("transientValue"); + } + + @Test // DATAMONGO-1902 + void writePrefixesUnwrappedType() { + + WithPrefixedNullableUnwrapped source = new WithPrefixedNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new EmbeddableType(); + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.transientValue = "must-not-be-written"; + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("_id", "id-1") // + .containsEntry("prefix-stringValue", "string-val") // + .containsEntry("prefix-listValue", Arrays.asList("list-val-1", "list-val-2")) // + .containsEntry("prefix-with-at-field-annotation", "@Field") // + .doesNotContainKey("embeddableValue") // + .doesNotContainKey("transientValue") // + .doesNotContainKey("prefix-transientValue"); + } + + @Test // DATAMONGO-1902 + void writeNullUnwrappedType() { + + WithNullableUnwrapped source = new WithNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = null; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target) // + .doesNotContainKey("prefix-stringValue").doesNotContainKey("prefix-listValue") + .doesNotContainKey("embeddableValue"); + } + + @Test // DATAMONGO-1902 + void writeDeepNestedUnwrappedType() { + + WrapperAroundWithUnwrapped source = new WrapperAroundWithUnwrapped(); + source.someValue = "root-level-value"; + source.nullableEmbedded = new WithNullableUnwrapped(); + source.nullableEmbedded.id = "id-1"; + source.nullableEmbedded.embeddableValue = new EmbeddableType(); + source.nullableEmbedded.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.nullableEmbedded.embeddableValue.stringValue = "string-val"; + source.nullableEmbedded.embeddableValue.transientValue = "must-not-be-written"; + source.nullableEmbedded.embeddableValue.atFieldAnnotatedValue = "@Field"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("someValue", "root-level-value") // + .containsEntry("nullableEmbedded", new org.bson.Document("_id", "id-1").append("stringValue", "string-val") // + .append("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("with-at-field-annotation", "@Field")); // + } + + @Test // DATAMONGO-1902 + void readUnwrappedType() { + + org.bson.Document source = new org.bson.Document("_id", "id-1") // + .append("stringValue", "string-val") // + .append("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("with-at-field-annotation", "@Field"); + + EmbeddableType embeddableValue = new EmbeddableType(); + embeddableValue.stringValue = "string-val"; + embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + embeddableValue.atFieldAnnotatedValue = "@Field"; + + WithNullableUnwrapped target = converter.read(WithNullableUnwrapped.class, source); + assertThat(target.embeddableValue).isEqualTo(embeddableValue); + } + + @Test // DATAMONGO-1902 + void readPrefixedUnwrappedType() { + + org.bson.Document source = new org.bson.Document("_id", "id-1") // + .append("prefix-stringValue", "string-val") // + .append("prefix-listValue", Arrays.asList("list-val-1", "list-val-2")) // + 
.append("prefix-with-at-field-annotation", "@Field"); + + EmbeddableType embeddableValue = new EmbeddableType(); + embeddableValue.stringValue = "string-val"; + embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + embeddableValue.atFieldAnnotatedValue = "@Field"; + + WithPrefixedNullableUnwrapped target = converter.read(WithPrefixedNullableUnwrapped.class, source); + assertThat(target.embeddableValue).isEqualTo(embeddableValue); + } + + @Test // DATAMONGO-1902 + void readNullableUnwrappedTypeWhenSourceDoesNotContainValues() { + + org.bson.Document source = new org.bson.Document("_id", "id-1"); + + WithNullableUnwrapped target = converter.read(WithNullableUnwrapped.class, source); + assertThat(target.embeddableValue).isNull(); + } + + @Test // DATAMONGO-1902 + void readEmptyUnwrappedTypeWhenSourceDoesNotContainValues() { + + org.bson.Document source = new org.bson.Document("_id", "id-1"); + + WithEmptyUnwrappedType target = converter.read(WithEmptyUnwrappedType.class, source); + assertThat(target.embeddableValue).isNotNull(); + } + + @Test // DATAMONGO-1902 + void readDeepNestedUnwrappedType() { + + org.bson.Document source = new org.bson.Document("someValue", "root-level-value").append("nullableEmbedded", + new org.bson.Document("_id", "id-1").append("stringValue", "string-val") // + .append("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("with-at-field-annotation", "@Field")); + + WrapperAroundWithUnwrapped target = converter.read(WrapperAroundWithUnwrapped.class, source); + + EmbeddableType embeddableValue = new EmbeddableType(); + embeddableValue.stringValue = "string-val"; + embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + embeddableValue.atFieldAnnotatedValue = "@Field"; + + assertThat(target.someValue).isEqualTo("root-level-value"); + assertThat(target.nullableEmbedded).isNotNull(); + assertThat(target.nullableEmbedded.embeddableValue).isEqualTo(embeddableValue); + } + + @Test // DATAMONGO-1902 + void readUnwrappedTypeWithComplexValue() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("address", + new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham")); + + WithNullableUnwrapped target = converter.read(WithNullableUnwrapped.class, source); + + Address expected = new Address(); + expected.city = "Gotham"; + expected.street = "1007 Mountain Drive"; + + assertThat(target.embeddableValue.address) // + .isEqualTo(expected); + } + + @Test // GH-4491 + void readUnwrappedTypeWithComplexValueUsingConstructor() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("stringValue", "hello").append("address", + new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham")); + + WithUnwrappedConstructor target = converter.read(WithUnwrappedConstructor.class, source); + + Address expected = new Address(); + expected.city = "Gotham"; + expected.street = "1007 Mountain Drive"; + + assertThat(target.embeddableValue.stringValue) // + .isEqualTo("hello"); + assertThat(target.embeddableValue.address) // + .isEqualTo(expected); + } + + @Test // GH-4491 + void readUnwrappedTypeWithComplexValueUsingConstructorWhenUnwrappedPropertiesNotPresent() { + + org.bson.Document source = new org.bson.Document("_id", "id-1"); + + WithUnwrappedConstructor target = converter.read(WithUnwrappedConstructor.class, source); + + assertThat(target.id).isEqualTo("id-1"); + assertThat(target.embeddableValue).isNotNull(); // it's defined as Empty + 
assertThat(target.embeddableValue.stringValue) // + .isNull(); + assertThat(target.embeddableValue.address) // + .isNull(); + } + + @Test // DATAMONGO-1902 + void writeUnwrappedTypeWithComplexValue() { + + WithNullableUnwrapped source = new WithNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new EmbeddableType(); + source.embeddableValue.address = new Address(); + source.embeddableValue.address.city = "Gotham"; + source.embeddableValue.address.street = "1007 Mountain Drive"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target) // + .containsEntry("address", new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham")) // + .doesNotContainKey("street") // + .doesNotContainKey("city"); // + + // use exact key matching, do not dive into nested documents + Assertions.assertThat(target) // + .doesNotContainKey("address.s") // + .doesNotContainKey("address.city"); + } + + @Test // GH-3580 + void shouldFallbackToConfiguredCustomConversionTargetOnRead() { + + GenericTypeConverter genericTypeConverter = spy(new GenericTypeConverter()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(genericTypeConverter); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("_class", SubTypeOfGenericType.class.getName()).append("value", + "v1"); + GenericType target = converter.read(GenericType.class, source); + + assertThat(target).isInstanceOf(GenericType.class); + assertThat(target.content).isEqualTo("v1"); + + verify(genericTypeConverter).convert(eq(source)); + } + + @Test // GH-3580 + void shouldUseMostConcreteCustomConversionTargetOnRead() { + + GenericTypeConverter genericTypeConverter = spy(new GenericTypeConverter()); + SubTypeOfGenericTypeConverter subTypeOfGenericTypeConverter = spy(new SubTypeOfGenericTypeConverter()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(genericTypeConverter); + it.registerConverter(subTypeOfGenericTypeConverter); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("_class", SubTypeOfGenericType.class.getName()).append("value", + "v1"); + GenericType target = converter.read(GenericType.class, source); + + assertThat(target).isInstanceOf(SubTypeOfGenericType.class); + assertThat(target.content).isEqualTo("v1_s"); + + verify(genericTypeConverter, never()).convert(any()); + verify(subTypeOfGenericTypeConverter).convert(eq(source)); + } + + @Test // GH-3660 + void usesCustomConverterForMapTypesOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeImplementingMap source = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("1st", "one").containsEntry("2nd", 2); + } + + @Test // GH-3660 + void usesCustomConverterForTypesImplementingMapOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + 
})); + converter.afterPropertiesSet(); + + TypeImplementingMap source = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("1st", "one").containsEntry("2nd", 2); + } + + @Test // GH-3660 + void usesCustomConverterForTypesImplementingMapOnRead() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new DocumentToTypeImplementingMapConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("1st", "one").append("2nd", 2).append("_class", + TypeImplementingMap.class.getName()); + + TypeImplementingMap target = converter.read(TypeImplementingMap.class, source); + + assertThat(target).isEqualTo(new TypeImplementingMap("one", 2)); + } + + @Test // GH-3660 + void usesCustomConverterForPropertiesUsingTypesThatImplementMapOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeWrappingTypeImplementingMap source = new TypeWrappingTypeImplementingMap(); + source.typeImplementingMap = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("typeImplementingMap", new org.bson.Document("1st", "one").append("2nd", 2)); + } + + @Test // GH-3660 + void usesCustomConverterForPropertiesUsingTypesImplementingMapOnRead() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new DocumentToTypeImplementingMapConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("typeImplementingMap", + new org.bson.Document("1st", "one").append("2nd", 2)) + .append("_class", TypeWrappingTypeImplementingMap.class.getName()); + + TypeWrappingTypeImplementingMap target = converter.read(TypeWrappingTypeImplementingMap.class, source); + + assertThat(target.typeImplementingMap).isEqualTo(new TypeImplementingMap("one", 2)); + } + + @Test // GH-3407 + void shouldWriteNullPropertyCorrectly() { + + WithFieldWrite fieldWrite = new WithFieldWrite(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlways", null).doesNotContainKey("writeNonNull"); + assertThat(document).containsEntry("writeAlwaysPersonDBRef", null).doesNotContainKey("writeNonNullPersonDBRef"); + } + + @Test // GH-4710 + void shouldWriteSimplePropertyCorrectlyAfterConversionReturnsNull() { + + MongoCustomConversions conversions = new MongoCustomConversions(ConverterBuilder + .writing(Integer.class, String.class, it -> null).andReading(it -> null).getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlways = 10; + fieldWrite.writeNonNull = 20; + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlways", null).doesNotContainKey("writeNonNull"); + } + 
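+ // GH-4710: when a writing Converter yields null, Field.Write.ALWAYS keeps the key with a null value while Field.Write.NON_NULL drops the key entirely.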
+ @Test // GH-4710 + void shouldWriteComplexPropertyCorrectlyAfterConversionReturnsNull() { + + MongoCustomConversions conversions = new MongoCustomConversions(ConverterBuilder + .writing(Person.class, String.class, it -> null).andReading(it -> null).getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlwaysPerson = new Person(); + fieldWrite.writeNonNullPerson = new Person(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlwaysPerson", null).doesNotContainKey("writeNonNullPerson"); + } + + @Test // GH-4710 + void shouldDelegateWriteOfDBRefToCustomConversionIfConfigured() { + + MongoCustomConversions conversions = new MongoCustomConversions( + ConverterBuilder.writing(Person.class, DBRef.class, it -> new DBRef("persons", "n/a")).andReading(it -> null) + .getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlwaysPersonDBRef = new Person(); + fieldWrite.writeNonNullPersonDBRef = new Person(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlwaysPersonDBRef", new DBRef("persons", "n/a")); + } + + @Test // GH-4710 + void shouldDelegateWriteOfDBRefToCustomConversionIfConfiguredAndCheckNulls() { + + MongoCustomConversions conversions = new MongoCustomConversions(ConverterBuilder + .writing(Person.class, DBRef.class, it -> null).andReading(it -> null).getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlwaysPersonDBRef = new Person(); + fieldWrite.writeNonNullPersonDBRef = new Person(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlwaysPersonDBRef", null).doesNotContainKey("writeNonNullPersonDBRef"); + } + + @Test // GH-4710 + void shouldApplyNullConversionToPropertyValueConverters() { + + MongoCustomConversions conversions = new MongoCustomConversions( + MongoCustomConversions.MongoConverterConfigurationAdapter.from(Collections.emptyList()) + .configurePropertyConversions(registrar -> { + registrar.registerConverter(Person.class, "firstname", new MongoValueConverter<String, String>() { + @Override + public String readNull(MongoConversionContext context) { + return "NULL"; + } + + @Override + public String writeNull(MongoConversionContext context) { + return "NULL"; + } + + @Override + public String read(String value, MongoConversionContext context) { + return ""; + } + + @Override + public String write(String value, MongoConversionContext context) { + return ""; + } + }); + })); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + org.bson.Document document = new org.bson.Document(); + converter.write(new Person(), document); + +
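+ // Person.firstname maps to the "foo" key via @Field("foo"); writeNull(...) above supplies the "NULL" marker written for the absent value: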
assertThat(document).containsEntry("foo", "NULL"); + + document = new org.bson.Document("foo", null); + Person result = converter.read(Person.class, document); + + assertThat(result.firstname).isEqualTo("NULL"); + } + + @Test // GH-3686 + void readsCollectionContainingNullValue() { + + org.bson.Document source = new org.bson.Document("items", + Arrays.asList(new org.bson.Document("itemKey", "i1"), null, new org.bson.Document("itemKey", "i3"))); + + Order target = converter.read(Order.class, source); + + assertThat(target.items).map(it -> it != null ? it.itemKey : null).containsExactly("i1", null, "i3"); + } + + @Test // GH-3686 + void readsArrayContainingNullValue() { + + org.bson.Document source = new org.bson.Document("arrayOfStrings", Arrays.asList("i1", null, "i3")); + + WithArrays target = converter.read(WithArrays.class, source); + + assertThat(target.arrayOfStrings).containsExactly("i1", null, "i3"); + } + + @Test // GH-3686 + void readsMapContainingNullValue() { + + org.bson.Document source = new org.bson.Document("mapOfObjects", + new org.bson.Document("item1", "i1").append("item2", null).append("item3", "i3")); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects).containsEntry("item1", "i1").containsEntry("item2", null).containsEntry("item3", + "i3"); + } + + @Test // GH-3670 + void appliesCustomConverterEvenToSimpleTypes() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new MongoSimpleTypeConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("content", new Binary(new byte[] { 0x00, 0x42 })); + + GenericType target = converter.read(GenericType.class, source); + assertThat(target.content).isInstanceOf(byte[].class); + } + + @Test // GH-3702 + void readsRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("raw", + new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.raw).isInstanceOf(org.bson.Document.class) + .isEqualTo(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3702 + void readsListOfRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("listOfRaw", + Arrays.asList(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1)))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.listOfRaw) + .containsExactly(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3692 + void readsMapThatDoesNotComeAsDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("mapOfObjects", + Collections.singletonMap("simple", 1)); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects).containsEntry("simple", 1); + } + + @Test // GH-3851 + void associationMappingShouldFallBackToDefaultIfNoAtReferenceAnnotationPresent/* as done via jmolecules */() { + + UUID id = UUID.randomUUID(); + Person sourceValue = new Person(); + sourceValue.id = id.toString(); + + DocumentAccessor accessor = new DocumentAccessor(new 
org.bson.Document()); + MongoPersistentProperty persistentProperty = mock(MongoPersistentProperty.class); + when(persistentProperty.isAssociation()).thenReturn(true); + when(persistentProperty.getMongoField()).thenReturn(MongoField.fromKey("pName")); + doReturn(TypeInformation.of(Person.class)).when(persistentProperty).getTypeInformation(); + doReturn(Person.class).when(persistentProperty).getType(); + doReturn(Person.class).when(persistentProperty).getRawType(); + + converter.writePropertyInternal(sourceValue, accessor, persistentProperty, null); + + assertThat(accessor.getDocument()) + .isEqualTo(new org.bson.Document("pName", new org.bson.Document("_id", id.toString()))); + } + + @Test // GH-2860 + void projectShouldReadSimpleInterfaceProjection() { + + org.bson.Document source = new org.bson.Document("birthDate", + Date.from(LocalDate.of(1999, 12, 1).atStartOfDay(systemDefault()).toInstant())).append("foo", "Walter"); + + EntityProjectionIntrospector discoverer = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<PersonProjection, Person> projection = discoverer.introspect(PersonProjection.class, Person.class); + PersonProjection person = converter.project(projection, source); + + assertThat(person.getBirthDate()).isEqualTo(LocalDate.of(1999, 12, 1)); + assertThat(person.getFirstname()).isEqualTo("Walter"); + } + + @Test // GH-2860 + void projectShouldReadSimpleDtoProjection() { + + org.bson.Document source = new org.bson.Document("birthDate", + Date.from(LocalDate.of(1999, 12, 1).atStartOfDay(systemDefault()).toInstant())).append("foo", "Walter"); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<PersonDto, Person> projection = introspector.introspect(PersonDto.class, Person.class); + PersonDto person = converter.project(projection, source); + + assertThat(person.getBirthDate()).isEqualTo(LocalDate.of(1999, 12, 1)); + assertThat(person.getFirstname()).isEqualTo("Walter"); + } + + @Test // GH-2860 + void projectShouldReadNestedProjection() { + + org.bson.Document source = new org.bson.Document("addresses", + Collections.singletonList(new org.bson.Document("s", "hwy"))); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<WithNestedProjection, Person> projection = introspector.introspect(WithNestedProjection.class, + Person.class); + WithNestedProjection person = converter.project(projection, source); + + assertThat(person.getAddresses()).extracting(AddressProjection::getStreet).hasSize(1).containsOnly("hwy"); + } + + @Test // GH-4609 + void projectShouldReadNestedInterfaceProjection() { + + org.bson.Document source = new org.bson.Document("foo", "spring").append("address", + new org.bson.Document("s", "data").append("city", "mongodb")); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<WithNestedInterfaceProjection, Person> projection = introspector + .introspect(WithNestedInterfaceProjection.class, Person.class); + WithNestedInterfaceProjection person = converter.project(projection, source); + + assertThat(person.getFirstname()).isEqualTo("spring"); + assertThat(person.getAddress().getStreet()).isEqualTo("data"); + } + + @Test // GH-4609 + void projectShouldReadNestedDtoProjection() { + + org.bson.Document source = new org.bson.Document("foo", "spring").append("address", + new org.bson.Document("s", "data").append("city", "mongodb")); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<WithNestedDtoProjection, Person> projection = introspector + .introspect(WithNestedDtoProjection.class, Person.class); + WithNestedDtoProjection person = converter.project(projection, source); + + assertThat(person.getFirstname()).isEqualTo("spring"); + assertThat(person.getAddress().getStreet()).isEqualTo("data"); + } + + @Test // GH-4626 + void projectShouldReadDtoProjectionPropertiesOnlyOnce() { + + ByteBuffer number = ByteBuffer.allocate(8); + number.putDouble(1.2d); + number.flip(); + + org.bson.Document source = new org.bson.Document("number", number); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<DoubleHolderDto, WithDoubleHolder> projection = introspector.introspect(DoubleHolderDto.class, + WithDoubleHolder.class); + DoubleHolderDto result = converter.project(projection, source); + + assertThat(result.number.number).isCloseTo(1.2, Percentage.withPercentage(1)); + } + + @Test // GH-2860 + void projectShouldReadProjectionWithNestedEntity() { + + org.bson.Document source = new org.bson.Document("addresses", + Collections.singletonList(new org.bson.Document("s", "hwy"))); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection<ProjectionWithNestedEntity, Person> projection = introspector + .introspect(ProjectionWithNestedEntity.class, Person.class); + ProjectionWithNestedEntity person = converter.project(projection, source); + + assertThat(person.getAddresses()).extracting(Address::getStreet).hasSize(1).containsOnly("hwy"); + } + + @Test // GH-3998 + void shouldReadOpenProjection() { + + org.bson.Document author = new org.bson.Document("firstName", "Walter").append("lastName", "White"); + org.bson.Document book = new org.bson.Document("_id", "foo").append("name", "my-book").append("author", author); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + BookProjection projection = converter.project(introspector.introspect(BookProjection.class, Book.class), book); + + assertThat(projection.getName()).isEqualTo("my-book by Walter White"); + } + + @Test // GH-4120
+ void shouldReadDtoProjection() { + + org.bson.Document author = new org.bson.Document("firstName", "Walter").append("lastName", "White"); + org.bson.Document book = new org.bson.Document("_id", "foo").append("name", "my-book").append("author", author); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + AuthorOnly projection = converter.project(introspector.introspect(AuthorOnly.class, Book.class), book); + + assertThat(projection.getAuthor().getFirstName()).isEqualTo("Walter"); + assertThat(projection.getAuthor().getLastName()).isEqualTo("White"); + } + + @Test // GH-3596 + void simpleConverter() { + + WithValueConverters wvc = new WithValueConverters(); + wvc.converterWithDefaultCtor = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target).containsEntry("converterWithDefaultCtor", new org.bson.Document("foo", "spring")); + + WithValueConverters read = converter.read(WithValueConverters.class, target); + assertThat(read.converterWithDefaultCtor).startsWith("spring"); + } + + @Test // GH-3596 + void enumConverter() { + + WithValueConverters wvc = new WithValueConverters(); + wvc.converterEnum = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target).containsEntry("converterEnum", new org.bson.Document("bar", "spring")); + + WithValueConverters read = converter.read(WithValueConverters.class, target); + assertThat(read.converterEnum).isEqualTo("spring"); + } + + @Test // GH-3596 + void beanConverter() { + + DefaultListableBeanFactory defaultListableBeanFactory = new DefaultListableBeanFactory(); + defaultListableBeanFactory.registerBeanDefinition("someDependency", + BeanDefinitionBuilder.rootBeanDefinition(SomeDependency.class).getBeanDefinition()); + + converter = new MappingMongoConverter(resolver, mappingContext); + + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerPropertyValueConverterFactory( + PropertyValueConverterFactory.beanFactoryAware(defaultListableBeanFactory)); + })); + converter.afterPropertiesSet(); + + WithContextValueConverters wvc = new WithContextValueConverters(); + wvc.converterBean = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target.get("converterBean", org.bson.Document.class)).satisfies(it -> { + assertThat(it).containsKey("ooo"); + assertThat((String) it.get("ooo")).startsWith("spring - "); + }); + + WithContextValueConverters read = converter.read(WithContextValueConverters.class, target); + assertThat(read.converterBean).startsWith("spring -"); + } + + @Test // GH-3596 + void pathConfiguredConverter/*no annotation required*/() { + + converter = new MappingMongoConverter(resolver, mappingContext); + + converter.setCustomConversions(MongoCustomConversions.create(it -> { + + it.configurePropertyConversions(registrar -> { + registrar.registerConverter(WithValueConverters.class, "viaRegisteredConverter", + new PropertyValueConverter<String, org.bson.Document, MongoConversionContext>() { + + @Nullable + @Override + public String read(@Nullable org.bson.Document nativeValue, MongoConversionContext context) { + return nativeValue.getString("bar"); + } + + @Nullable + @Override + public org.bson.Document write(@Nullable String domainValue,
MongoConversionContext context) { + return new org.bson.Document("bar", domainValue); + } + }); + }); + })); + + WithValueConverters wvc = new WithValueConverters(); + wvc.viaRegisteredConverter = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target).containsEntry("viaRegisteredConverter", new org.bson.Document("bar", "spring")); + + WithValueConverters read = converter.read(WithValueConverters.class, target); + assertThat(read.viaRegisteredConverter).isEqualTo("spring"); + } + + @Test // GH-4098 + void resolvesCyclicNonAssociationValueFromSource/* and does not attempt to be smart and look up id values in context */() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("value", "v1").append("cycle", + new org.bson.Document("_id", "id-1").append("value", "v2")); + + assertThat(converter.read(Cyclic.class, source).cycle.value).isEqualTo("v2"); + } + + @Test // GH-4371 + void shouldConvertTypesToStringTargetType() { + + org.bson.Document source = org.bson.Document.parse(""" + { + city : ["Gotham", "Metropolis"] + } + """); + + assertThat(converter.read(Address.class, source).city).isEqualTo("Gotham,Metropolis"); + } + + @Test // GH-2350 + void shouldConvertBsonUndefinedToNull() { + + org.bson.Document source = new org.bson.Document("s", "hallway drive").append("city", new BsonUndefined()); + assertThat(converter.read(Address.class, source).city).isNull(); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnWriteIfFieldTypeIsKey() { + + WithPropertyHavingDotsInFieldName source = new WithPropertyHavingDotsInFieldName(); + source.value = "A"; + + assertThat(write(source)).containsEntry("field.name.with.dots", "A"); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnReadIfFieldTypeIsKey() { + + org.bson.Document source = new org.bson.Document("field.name.with.dots", "A"); + + WithPropertyHavingDotsInFieldName target = converter.read(WithPropertyHavingDotsInFieldName.class, source); + assertThat(target.value).isEqualTo("A"); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnWriteOfNestedPropertyIfFieldTypeIsKey() { + + WrapperForTypeWithPropertyHavingDotsInFieldName source = new WrapperForTypeWithPropertyHavingDotsInFieldName(); + source.nested = new WithPropertyHavingDotsInFieldName(); + source.nested.value = "A"; + + assertThat(write(source).get("nested", org.bson.Document.class)).containsEntry("field.name.with.dots", "A"); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnReadOfNestedIfFieldTypeIsKey() { + + org.bson.Document source = new org.bson.Document("nested", new org.bson.Document("field.name.with.dots", "A")); + + WrapperForTypeWithPropertyHavingDotsInFieldName target = converter + .read(WrapperForTypeWithPropertyHavingDotsInFieldName.class, source); + assertThat(target.nested).isNotNull(); + assertThat(target.nested.value).isEqualTo("A"); + } + + @Test // GH-4464 + void writeShouldAllowDotsInMapKeyNameIfConfigured() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + Person person = new Person(); + person.firstname = "bart"; + person.lastname = "simpson"; + + ClassWithMapProperty source = new ClassWithMapProperty(); + source.mapOfPersons = Map.of("map.key.with.dots", person); + + assertThat(write(source).get("mapOfPersons", org.bson.Document.class)).containsKey("map.key.with.dots"); + } + + @Test // GH-4464 + void 
readShouldAllowDotsInMapKeyNameIfConfigured() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + Person person = new Person(); + person.firstname = "bart"; + person.lastname = "simpson"; + + org.bson.Document source = new org.bson.Document("mapOfPersons", + new org.bson.Document("map.key.with.dots", write(person))); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfPersons).containsEntry("map.key.with.dots", person); + } + + @ValueSource(classes = { ComplexIdAndNoAnnotation.class, ComplexIdAndIdAnnotation.class, + ComplexIdAndMongoIdAnnotation.class, ComplexIdAndFieldAnnotation.class }) + @ParameterizedTest // GH-4524 + void projectShouldReadComplexIdType(Class<?> projectionTargetType) { + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + ComplexId idValue = ComplexId.of(101L); + org.bson.Document source = new org.bson.Document("_id", new org.bson.Document("innerId", idValue.innerId)) + .append("value", "abc").append("_class", ComplexIdAndNoAnnotation.class.getName()); + + EntityProjection<?, ComplexIdAndNoAnnotation> projection = introspector.introspect(projectionTargetType, + ComplexIdAndNoAnnotation.class); + + assertThat(converter.project(projection, source)) // + .isInstanceOf(projectionTargetType) // + .extracting("id").isEqualTo(idValue); + } + + @Test // GH-4877 + void shouldReadNonIdFieldCalledIdFromSource() { + + WithRenamedIdPropertyAndAnotherPropertyNamedId source = new WithRenamedIdPropertyAndAnotherPropertyNamedId(); + source.abc = "actual-id-value"; + source.id = "just-a-field"; + + org.bson.Document document = write(source); + assertThat(document).containsEntry("_id", source.abc).containsEntry("id", source.id); + + WithRenamedIdPropertyAndAnotherPropertyNamedId target = converter + .read(WithRenamedIdPropertyAndAnotherPropertyNamedId.class, document); + assertThat(target.abc).isEqualTo(source.abc); + assertThat(target.id).isEqualTo(source.id); + } + + @Test // GH-4706 + void shouldWriteVectorValues() { + + WithVector source = new WithVector(); + source.embeddings = Vector.of(1.1d, 2.2d, 3.3d); + + org.bson.Document document = write(source); + assertThat(document.getList("embeddings", BsonDouble.class)).hasSize(3); + } + + @Test // GH-4706 + void shouldReadVectorValues() { + + org.bson.Document document = new org.bson.Document("embeddings", List.of(1.1d, 2.2d, 3.3d)); + WithVector withVector = converter.read(WithVector.class, document); + assertThat(withVector.embeddings.toDoubleArray()).contains(1.1d, 2.2d, 3.3d); + } + + @Test // GH-4706 + void writesByteArrayAsIsIfNoFieldInstructionsGiven() { + + WithArrays source = new WithArrays(); + source.arrayOfPrimitiveBytes = new byte[] { 0, 1, 2 }; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("arrayOfPrimitiveBytes", byte[].class)).isSameAs(source.arrayOfPrimitiveBytes); + } + + @Test // GH-3444 + void convertsBigIntegerToDecimal128IfFieldTypeIndicatesConversion() { + + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.bigInteger = BigInteger.valueOf(101); + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + +
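+ // @Field(targetType = FieldType.DECIMAL128) on WithExplicitTargetTypes.bigInteger forces the BigInteger into MongoDB's Decimal128 representation: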
assertThat(target.get("bigInteger")).isEqualTo(new Decimal128(source.bigInteger.longValueExact())); + } + + @Test // GH-3444 + void usesDecimal128NumericFormat() { + + MappingMongoConverter converter = createConverter(MongoCustomConversions.BigDecimalRepresentation.DECIMAL128); + + BigDecimalContainer container = new BigDecimalContainer(); + container.value = BigDecimal.valueOf(2.5d); + container.map = Collections.singletonMap("foo", container.value); + + org.bson.Document document = new org.bson.Document(); + converter.write(container, document); + + assertThat(document.get("value")).isInstanceOf(Decimal128.class); + assertThat(((org.bson.Document) document.get("map")).get("foo")).isInstanceOf(Decimal128.class); + } + + @Test // GH-3444 + void usesStringNumericFormat() { + + MappingMongoConverter converter = createConverter(MongoCustomConversions.BigDecimalRepresentation.STRING); + + BigDecimalContainer container = new BigDecimalContainer(); + container.value = BigDecimal.valueOf(2.5d); + container.map = Collections.singletonMap("foo", container.value); + + org.bson.Document document = new org.bson.Document(); + converter.write(container, document); + + assertThat(document).containsEntry("value", "2.5"); + assertThat(document).containsEntry("map.foo", "2.5"); + } + + private MappingMongoConverter createConverter( + MongoCustomConversions.BigDecimalRepresentation bigDecimalRepresentation) { + + MongoCustomConversions conversions = MongoCustomConversions.create( + it -> it.registerConverter(new ByteBufferToDoubleHolderConverter()).bigDecimal(bigDecimalRepresentation)); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(context); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + mappingContext.getPersistentEntity(Address.class); + + MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + return converter; + } + + org.bson.Document write(Object source) { + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + return target; + } + + static class WithVector { + + Vector embeddings; + } + + static class GenericType { + T content; + } + + static class ClassWithEnumProperty { + + SampleEnum sampleEnum; + List enums; + EnumSet enumSet; + EnumMap enumMap; + } + + enum SampleEnum { + FIRST { + @Override + void method() {} + }, + SECOND { + @Override + void method() {} + }; + + abstract void method(); + } + + interface InterfaceType { + + } + + static class Address implements InterfaceType { + + @Field("s") String street; + String city; + + public String getStreet() { + return street; + } + + public String getCity() { + return city; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(street, address.street) && Objects.equals(city, address.city); + } + + @Override + public int hashCode() { + return Objects.hash(street, city); + } + } + + interface Contact { + + } + + static class Person implements Contact { + + @Id String id; + + Date birthDate; + + @Field("foo") String firstname; + String lastname; + + Set
                    addresses; + Address address; + + Person() { + + } + + @PersistenceConstructor + public Person(Set
                    addresses) { + this.addresses = addresses; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(birthDate, person.birthDate) + && Objects.equals(firstname, person.firstname) && Objects.equals(lastname, person.lastname) + && Objects.equals(addresses, person.addresses); + } + + @Override + public int hashCode() { + return Objects.hash(id, birthDate, firstname, lastname, addresses); + } + } + + interface PersonProjection { + + LocalDate getBirthDate(); + + String getFirstname(); + } + + interface WithNestedProjection { + + Set getAddresses(); + } + + interface WithNestedInterfaceProjection { + String getFirstname(); + + AddressProjection getAddress(); + } + + interface WithNestedDtoProjection { + String getFirstname(); + + AddressDto getAddress(); + } + + interface ProjectionWithNestedEntity { + + Set
                    getAddresses(); + } + + interface AddressProjection { + + String getStreet(); + } + + class AddressDto { + + String street; + + public String getStreet() { + return street; + } + + public void setStreet(String street) { + this.street = street; + } + } + + static class PersonDto { + + LocalDate birthDate; + + @Field("foo") String firstname; + String lastname; + + public PersonDto(LocalDate birthDate, String firstname, String lastname) { + this.birthDate = birthDate; + this.firstname = firstname; + this.lastname = lastname; + } + + public LocalDate getBirthDate() { + return birthDate; + } + + public String getFirstname() { + return firstname; + } + + public String getLastname() { + return lastname; + } + } + + static class ClassWithSortedMap { + SortedMap map; + } + + static class ClassWithMapProperty { + Map map; + Map autoInitMap = Collections.singletonMap("spring", "data"); + Map> mapOfLists; + Map mapOfObjects; + Map mapOfStrings; + Map mapOfPersons; + TreeMap treeMapOfPersons; + + public Map getMap() { + return map; + } + + public Map getAutoInitMap() { + return this.autoInitMap; + } + } + + static class ClassWithNestedMaps { + Map>> nestedMaps; + } + + static class BirthDateContainer { + Date birthDate; + } + + static class BigDecimalContainer { + BigDecimal value; + Map map; + List collection; + } + + static class CollectionWrapper { + List contacts; + List> strings; + List> listOfMaps; + Set contactsSet; + List autoInitList = Collections.singletonList("spring"); + + public List getContacts() { + return contacts; + } + + public Set getContactsSet() { + return contactsSet; + } + + public List getAutoInitList() { + return autoInitList; + } + } + + static class LocaleWrapper { + Locale locale; + } + + static class ClassWithBigIntegerId { + @Id BigInteger id; + } + + static class A { + + String valueType; + T value; + + A(T value) { + this.valueType = value.getClass().getName(); + this.value = value; + } + } + + static class ClassWithIntId { + + @Id int id; + } + + static class DefaultedConstructorArgument { + + String foo; + int bar; + double foobar; + + DefaultedConstructorArgument(String foo, @Value("#root.something ?: -1") int bar, double foobar) { + this.foo = foo; + this.bar = bar; + this.foobar = foobar; + } + } + + static class Item { + List attributes; + } + + static class Attribute { + String key; + Object value; + } + + static class Outer { + + class Inner { + String value; + } + + Inner inner; + } + + static class DBRefWrapper { + + DBRef ref; + List refs; + Map refMap; + Map personMap; + } + + static class URLWrapper { + URL url; + } + + static class ClassWithComplexId { + + @Id ComplexId complexId; + } + + static class ComplexId { + + Long innerId; + + static ComplexId of(Long value) { + + ComplexId id = new ComplexId(); + id.innerId = value; + return id; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComplexId complexId = (ComplexId) o; + return Objects.equals(innerId, complexId.innerId); + } + + @Override + public int hashCode() { + return Objects.hash(innerId); + } + } + + static class TypWithCollectionConstructor { + + List attributes; + + public TypWithCollectionConstructor(List attributes) { + this.attributes = attributes; + } + } + + @TypeAlias("_") + static class Aliased { + String name; + } + + static class ThrowableWrapper { + + Throwable throwable; } @Document static class PrimitiveContainer { - @Field("property") private final 
int m_property; + @Field("property") private int m_property; + + @PersistenceConstructor + public PrimitiveContainer(@Value("#root.property") int a_property) { + m_property = a_property; + } + + public int property() { + return m_property; + } + } + + @Document + static class ObjectContainer { + + @Field("property") private PrimitiveContainer m_property; + + @PersistenceConstructor + public ObjectContainer(@Value("#root.property") PrimitiveContainer a_property) { + m_property = a_property; + } + + public PrimitiveContainer property() { + return m_property; + } + } + + class ClassWithGeoBox { + + Box box; + } + + class ClassWithGeoCircle { + + Circle circle; + } + + class ClassWithGeoSphere { + + Sphere sphere; + } + + class ClassWithGeoPolygon { + + Polygon polygon; + } + + class ClassWithGeoShape { + + Shape shape; + } + + class ClassWithTextScoreProperty { + + @TextScore Float score; + } + + class ClassWithExplicitlyNamedDBRefProperty { + + @Field("explict-name-for-db-ref") // + @org.springframework.data.mongodb.core.mapping.DBRef // + ClassWithIntId dbRefProperty; + + public ClassWithIntId getDbRefProperty() { + return dbRefProperty; + } + } + + static class RootForClassWithExplicitlyRenamedIdField { + + @Id String id; + ClassWithExplicitlyRenamedField nested; + } + + static class ClassWithExplicitlyRenamedField { + + @Field("id") String id; + } + + static class RootForClassWithNamedIdField { + + String id; + ClassWithNamedIdField nested; + } + + static class ClassWithNamedIdField { + + String id; + } + + static class ClassWithAnnotatedIdField { + + @Id String key; + } + + static class TypeWithLocalDateTime { + + LocalDateTime date; + + TypeWithLocalDateTime() { + this.date = LocalDateTime.now(); + } + } + + static class TypeWithOptional { + + Optional<String> string = Optional.empty(); + Optional<LocalDateTime> localDateTime = Optional.empty(); + } + + static class ClassWithMapUsingEnumAsKey { + + enum FooBarEnum { + FOO, BAR + } + + Map<FooBarEnum, String> map; + } + + @WritingConverter + static class FooBarEnumToStringConverter implements Converter<FooBarEnum, String> { + + @Override + public String convert(FooBarEnum source) { + + if (source == null) { + return null; + } + + return FooBarEnum.FOO.equals(source) ?
"foo-enum-value" : "bar-enum-value"; + } + } + + @ReadingConverter + static class StringToFooNumConverter implements Converter { + + @Override + public FooBarEnum convert(String source) { + + if (source == null) { + return null; + } + + if ("foo-enum-value".equals(source)) { + return FooBarEnum.FOO; + } + if ("bar-enum-value".equals(source)) { + return FooBarEnum.BAR; + } + + throw new ConversionNotSupportedException(source, String.class, null); + } + } + + static class TypeWithPropertyInNestedField { + @Field("nested.sample") String sample; + } + + static class TypeWithMapOfLongValues { + Map map; + } + + static class WithArrayInConstructor { + + final String[] array; + + public WithArrayInConstructor(String[] array) { + this.array = array; + } + } + + static class WithArrays { + String[] arrayOfStrings; + byte[] arrayOfPrimitiveBytes; + } + + // DATAMONGO-1898 + + // DATACMNS-1278 + static interface SomeInterface {} + + static enum InterfacedEnum implements SomeInterface { + INSTANCE; + } + + static class DocWithInterfacedEnum { + SomeInterface property; + } + + // DATAMONGO-1904 + + static class WithNestedLists { + float[][][] nestedFloats; + } + + static class ImmutableObject { + + final String id; + final String name; + final boolean witherUsed; + + private ImmutableObject(String id) { + this.id = id; + this.name = null; + this.witherUsed = false; + } + + private ImmutableObject(String id, String name, boolean witherUsed) { + this.id = id; + this.name = name; + this.witherUsed = witherUsed; + } + + public ImmutableObject() { + this.id = null; + this.name = null; + witherUsed = false; + } + + public ImmutableObject withId(String id) { + return new ImmutableObject(id, name, true); + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public boolean isWitherUsed() { + return witherUsed; + } + } + + static class ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod { + + final @Id String id; + String value; + + public ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod(String id) { + this.id = id; + } + } + + // DATAMONGO-2135 + static class SomeItem { + + String itemKey; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeItem someItem = (SomeItem) o; + return Objects.equals(itemKey, someItem.itemKey); + } + + @Override + public int hashCode() { + return Objects.hash(itemKey); + } + } + + static class Order { + Collection items = new ArrayList<>(); + } + + static class WithExplicitTargetTypes { + + @Field(targetType = FieldType.SCRIPT) // + String script; + + @Field(targetType = FieldType.SCRIPT) // + List scripts; + + @Field(targetType = FieldType.DECIMAL128) // + BigDecimal bigDecimal; + + @Field(targetType = FieldType.DECIMAL128) + BigInteger bigInteger; + + @Field(targetType = FieldType.INT64) // + Date dateAsLong; + + @Field(targetType = FieldType.DATE_TIME) // + Long longAsDate; + + @Field(targetType = FieldType.BOOLEAN) // + String stringAsBoolean; + + @Field(targetType = FieldType.OBJECT_ID) // + Date dateAsObjectId; + } + + static class WrapperAroundWithUnwrapped { + + String someValue; + WithNullableUnwrapped nullableEmbedded; + WithEmptyUnwrappedType emptyEmbedded; + WithPrefixedNullableUnwrapped prefixedEmbedded; + } + + static class WithNullableUnwrapped { + + String id; + + @Unwrapped.Nullable EmbeddableType embeddableValue; + } + + static class WithUnwrappedConstructor { + + private final String id; + + 
private final @Unwrapped.Empty EmbeddableType embeddableValue; + + public WithUnwrappedConstructor(String id, EmbeddableType embeddableValue) { + this.id = id; + this.embeddableValue = embeddableValue; + } + } + + static class WithPrefixedNullableUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") EmbeddableType embeddableValue; + } + + static class WithEmptyUnwrappedType { + + String id; + + @Unwrapped.Empty EmbeddableType embeddableValue; + } + + static class EmbeddableType { + + String stringValue; + List<String> listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Transient // + String transientValue; + + Address address; + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EmbeddableType that = (EmbeddableType) o; + return Objects.equals(stringValue, that.stringValue) && Objects.equals(listValue, that.listValue) + && Objects.equals(atFieldAnnotatedValue, that.atFieldAnnotatedValue) + && Objects.equals(transientValue, that.transientValue) && Objects.equals(address, that.address); + } + + @Override + public int hashCode() { + return Objects.hash(stringValue, listValue, atFieldAnnotatedValue, transientValue, address); + } + } + + static class ReturningAfterConvertCallback implements AfterConvertCallback<Person> { + + @Override + public Person onAfterConvert(Person entity, org.bson.Document document, String collection) { - @PersistenceConstructor - public PrimitiveContainer(@Value("#root.property") int a_property) { - m_property = a_property; + return entity; } + } - public int property() { - return m_property; + static class SubTypeOfGenericType extends GenericType<String> { + + } + + @ReadingConverter + static class GenericTypeConverter implements Converter<org.bson.Document, GenericType<?>> { + + @Override + public GenericType<?> convert(org.bson.Document source) { + + GenericType<Object> target = new GenericType<>(); + target.content = source.get("value"); + return target; } } - @Document - static class ObjectContainer { + @ReadingConverter + static class SubTypeOfGenericTypeConverter implements Converter<org.bson.Document, SubTypeOfGenericType> { - @Field("property") private final PrimitiveContainer m_property; + @Override + public SubTypeOfGenericType convert(org.bson.Document source) { - @PersistenceConstructor - public ObjectContainer(@Value("#root.property") PrimitiveContainer a_property) { - m_property = a_property; + SubTypeOfGenericType target = new SubTypeOfGenericType(); + target.content = source.getString("value") + "_s"; + return target; } + } - public PrimitiveContainer property() { - return m_property; + @WritingConverter + static class TypeImplementingMapToDocumentConverter implements Converter<TypeImplementingMap, org.bson.Document> { + + @Nullable + @Override + public org.bson.Document convert(TypeImplementingMap source) { + return new org.bson.Document("1st", source.val1).append("2nd", source.val2); } } - class ClassWithGeoBox { + @ReadingConverter + static class DocumentToTypeImplementingMapConverter implements Converter<org.bson.Document, TypeImplementingMap> { - Box box; + @Nullable + @Override + public TypeImplementingMap convert(org.bson.Document source) { + return new TypeImplementingMap(source.getString("1st"), source.getInteger("2nd")); + } } - class ClassWithGeoCircle { + @ReadingConverter + public static class MongoSimpleTypeConverter implements Converter<Binary, byte[]> { - Circle circle; + @Override + public byte[] convert(Binary source) { + return source.getData(); + } } - class ClassWithGeoSphere { + static class TypeWrappingTypeImplementingMap { - Sphere sphere; + String id; + TypeImplementingMap
typeImplementingMap; } - class ClassWithGeoPolygon { + static class TypeImplementingMap implements Map<String, String> { - Polygon polygon; + String val1; + int val2; + + TypeImplementingMap(String val1, int val2) { + this.val1 = val1; + this.val2 = val2; + } + + @Override + public int size() { + return 0; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public boolean containsKey(Object key) { + return false; + } + + @Override + public boolean containsValue(Object value) { + return false; + } + + @Override + public String get(Object key) { + return null; + } + + @Nullable + @Override + public String put(String key, String value) { + return null; + } + + @Override + public String remove(Object key) { + return null; + } + + @Override + public void putAll(@NonNull Map<? extends String, ? extends String> m) { + + } + + @Override + public void clear() { + + } + + @NonNull + @Override + public Set<String> keySet() { + return null; + } + + @NonNull + @Override + public Collection<String> values() { + return null; + } + + @NonNull + @Override + public Set<Entry<String, String>> entrySet() { + return null; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TypeImplementingMap that = (TypeImplementingMap) o; + return val2 == that.val2 && Objects.equals(val1, that.val1); + } + + @Override + public int hashCode() { + return Objects.hash(val1, val2); + } } - class ClassWithGeoShape { + static class WithRawDocumentProperties { - Shape shape; + String id; + org.bson.Document raw; + List<org.bson.Document> listOfRaw; } - class ClassWithTextScoreProperty { + static class WithFieldWrite { + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Integer writeNonNull; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Integer writeAlways; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Person writeNonNullPerson; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Person writeAlwaysPerson; + + @org.springframework.data.mongodb.core.mapping.DBRef + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Person writeNonNullPersonDBRef; + + @org.springframework.data.mongodb.core.mapping.DBRef + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Person writeAlwaysPersonDBRef; - @TextScore Float score; } - class ClassWithExplicitlyNamedDBRefProperty { + static class WithValueConverters { - @Field("explict-name-for-db-ref")// - @org.springframework.data.mongodb.core.mapping.DBRef// - ClassWithIntId dbRefProperty; + @ValueConverter(Converter1.class) String converterWithDefaultCtor; - public ClassWithIntId getDbRefProperty() { - return dbRefProperty; + @ValueConverter(Converter2.class) String converterEnum; + + String viaRegisteredConverter; + } + + static class WithContextValueConverters { + + @ValueConverter(Converter3.class) String converterBean; + + String viaRegisteredConverter; + } + + static class Converter3 implements MongoValueConverter<Object, org.bson.Document> { + + private final SomeDependency someDependency; + + public Converter3(@Autowired SomeDependency someDependency) { + this.someDependency = someDependency; + }
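+ // Converter3 is looked up via PropertyValueConverterFactory.beanFactoryAware(...), which satisfies the @Autowired constructor from the registered BeanFactory (see beanConverter() above):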
+ + @Override + public Object read(org.bson.Document value, MongoConversionContext context) { + return value.get("ooo"); + } + + @Override + public org.bson.Document write(Object value, MongoConversionContext context) { + return new org.bson.Document("ooo", value + " - " + someDependency.toString()); } } - static class RootForClassWithExplicitlyRenamedIdField { + static class SomeDependency { - @Id String id; - ClassWithExplicitlyRenamedField nested; } - static class ClassWithExplicitlyRenamedField { + enum Converter2 implements MongoValueConverter<String, org.bson.Document> { - @Field("id") String id; + INSTANCE; + + @Nullable + @Override + public String read(@Nullable org.bson.Document value, MongoConversionContext context) { + return value.getString("bar"); + } + + @Nullable + @Override + public org.bson.Document write(@Nullable String value, MongoConversionContext context) { + return new org.bson.Document("bar", value); + } } - static class RootForClassWithNamedIdField { + static class Converter1 implements MongoValueConverter<String, org.bson.Document> { - String id; - ClassWithNamedIdField nested; + @Nullable + @Override + public String read(@Nullable org.bson.Document value, MongoConversionContext context) { + return value.getString("foo"); + } + + @Nullable + @Override + public org.bson.Document write(@Nullable String value, MongoConversionContext context) { + return new org.bson.Document("foo", value); + } } - static class ClassWithNamedIdField { + interface BookProjection { - String id; + @Value("#{target.name + ' by ' + target.author.firstName + ' ' + target.author.lastName}") + String getName(); } - static class ClassWithAnnotatedIdField { + static class AuthorOnly { - @Id String key; + final AuthorNameOnly author; + + public AuthorOnly(AuthorNameOnly author) { + this.author = author; + } + + public AuthorNameOnly getAuthor() { + return author; + } } - static class TypeWithLocalDateTime { + static class AuthorNameOnly { - LocalDateTime date; + final String firstName; - TypeWithLocalDateTime() { - this.date = LocalDateTime.now(); + final String lastName; + + public AuthorNameOnly(String firstName, String lastName) { + this.firstName = firstName; + this.lastName = lastName; + } + + public String getFirstName() { + return firstName; + } + + public String getLastName() { + return lastName; } } - static class TypeWithOptional { + static class Book { - Optional<String> string = Optional.empty(); - Optional<LocalDateTime> localDateTime = Optional.empty(); + @Id String id; + + String name; + + Author author = new Author(); + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Author getAuthor() { + return author; + } + + public void setAuthor(Author author) { + this.author = author; + } } - static class ClassWithMapUsingEnumAsKey { + static class Author { - static enum FooBarEnum { - FOO, BAR; + @Id String id; + + String firstName; + + String lastName; + + public String getId() { + return id; } - Map<FooBarEnum, String> map; + public void setId(String id) { + this.id = id; + } + + public String getFirstName() { + return firstName; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public String getLastName() { + return lastName; + } + + public void setLastName(String lastName) { + this.lastName = lastName; + } } - @WritingConverter - static class FooBarEnumToStringConverter implements Converter<FooBarEnum, String> { + static class Cyclic { - @Override - public String convert(FooBarEnum source) {
+ @Id String id; + String value; + Cyclic cycle; - if (source == null) { - return null; + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public Cyclic getCycle() { + return cycle; + } + + public void setCycle(Cyclic cycle) { + this.cycle = cycle; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } + Cyclic cyclic = (Cyclic) o; + return Objects.equals(id, cyclic.id) && Objects.equals(value, cyclic.value) + && Objects.equals(cycle, cyclic.cycle); + } - return FooBarEnum.FOO.equals(source) ? "foo-enum-value" : "bar-enum-value"; + @Override + public int hashCode() { + return Objects.hash(id, value, cycle); } } + static class WrapperForTypeWithPropertyHavingDotsInFieldName { + + WithPropertyHavingDotsInFieldName nested; + } + + static class WithPropertyHavingDotsInFieldName { + + @Field(name = "field.name.with.dots", nameType = Type.KEY) String value; + } + + static class ComplexIdAndFieldAnnotation { + + @Field("_id") // + ComplexId id; + String value; + } + + static class ComplexIdAndMongoIdAnnotation { + + @MongoId // + ComplexId id; + String value; + } + + static class ComplexIdAndIdAnnotation { + + @Id // + ComplexId id; + String value; + } + + static class ComplexIdAndNoAnnotation { + + ComplexId id; + String value; + } + @ReadingConverter - static class StringToFooNumConverter implements Converter<String, FooBarEnum> { + static class ByteBufferToDoubleHolderConverter implements Converter<ByteBuffer, DoubleHolder> { @Override - public FooBarEnum convert(String source) { + public DoubleHolder convert(ByteBuffer source) { + return new DoubleHolder(source.getDouble()); + } + } - if (source == null) { - return null; - } + record DoubleHolder(double number) { - if ("foo-enum-value".equals(source)) { - return FooBarEnum.FOO; - } - if ("bar-enum-value".equals(source)) { - return FooBarEnum.BAR; - } + } - throw new ConversionNotSupportedException(source, String.class, null); + static class WithDoubleHolder { + DoubleHolder number; + } + + static class DoubleHolderDto { + DoubleHolder number; + + public DoubleHolderDto(DoubleHolder number) { + this.number = number; + } } + + static class WithRenamedIdPropertyAndAnotherPropertyNamedId { + + @Id String abc; + String id; + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java index d20d0a9b11..b57ab35ea1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,56 +15,293 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.Arrays; +import java.util.Objects; import java.util.UUID; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mongodb.core.MongoOperations; +import org.bson.BinaryVector; +import org.bson.types.Binary; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoVector; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.util.ObjectUtils; /** * Integration tests for {@link MongoConverters}. 
- * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class MongoConvertersIntegrationTests { - static final String COLLECTION = "_sample"; + static final String COLLECTION = "converter-tests"; - @Autowired - MongoOperations template; + @Template // + static MongoTestTemplate template; - @Before + @BeforeEach public void setUp() { - template.dropCollection(COLLECTION); + template.flush(COLLECTION); } - @Test + @Test // DATAMONGO-422 public void writesUUIDBinaryCorrectly() { Wrapper wrapper = new Wrapper(); wrapper.uuid = UUID.randomUUID(); template.save(wrapper); - assertThat(wrapper.id, is(notNullValue())); + assertThat(wrapper.id).isNotNull(); Wrapper result = template.findOne(Query.query(Criteria.where("id").is(wrapper.id)), Wrapper.class); - assertThat(result.uuid, is(wrapper.uuid)); + assertThat(result.uuid).isEqualTo(wrapper.uuid); + } + + @Test // DATAMONGO-1802 + public void shouldConvertBinaryDataOnRead() { + + WithBinaryDataInArray wbd = new WithBinaryDataInArray(); + wbd.data = "calliope-mini".getBytes(); + + template.save(wbd); + + assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataInArray.class)).isEqualTo(wbd); + } + + @Test // DATAMONGO-1802 + public void shouldConvertEmptyBinaryDataOnRead() { + + WithBinaryDataInArray wbd = new WithBinaryDataInArray(); + wbd.data = new byte[0]; + + template.save(wbd); + + assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataInArray.class)).isEqualTo(wbd); + } + + @Test // DATAMONGO-1802 + public void shouldReadBinaryType() { + + WithBinaryDataType wbd = new WithBinaryDataType(); + wbd.data = new Binary("calliope-mini".getBytes()); + + template.save(wbd); + + assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataType.class)).isEqualTo(wbd); + } + + @Test // GH-4706 + public void shouldReadAndWriteVectors() { + + WithVectors source = new WithVectors(); + source.vector = Vector.of(1.1, 2.2, 3.3); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4706 + public void shouldReadAndWriteFloatVectors() { + + WithVectors source = new WithVectors(); + source.vector = Vector.of(1.1f, 2.2f, 3.3f); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + // top-level arrays are converted into doubles by MongoDB with all their conversion imprecisions + assertThat(loaded.vector.getClass().getName()).contains("DoubleVector"); + assertThat(loaded.vector).isNotEqualTo(source.vector); } + @Test // GH-4706 + public void shouldReadAndWriteBinFloat32Vectors() { + + WithVectors source = new WithVectors(); + source.binVector = BinaryVector.floatVector(new float[] { 1.1f, 2.2f, 3.3f }); + source.vector = MongoVector.of(source.binVector); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + assertThat(loaded.vector).isEqualTo(source.vector); + assertThat(loaded.binVector).isEqualTo(source.binVector); + } + + @Test // GH-4706 + public void shouldReadAndWriteBinInt8Vectors() { + + WithVectors source = new WithVectors(); + source.binVector = BinaryVector.int8Vector(new byte[] { 1, 2, 3 }); + source.vector = MongoVector.of(source.binVector); + + 
template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + assertThat(loaded.vector).isEqualTo(source.vector); + assertThat(loaded.binVector).isEqualTo(source.binVector); + } + + @Test // GH-4706 + public void shouldReadAndWriteBinPackedVectors() { + + WithVectors source = new WithVectors(); + source.binVector = BinaryVector.packedBitVector(new byte[] { 1, 2, 3 }, (byte) 1); + source.vector = MongoVector.of(source.binVector); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + assertThat(loaded.vector).isEqualTo(source.vector); + assertThat(loaded.binVector).isEqualTo(source.binVector); + } + + @Document(COLLECTION) static class Wrapper { String id; UUID uuid; } + + @Document(COLLECTION) + static class WithVectors { + + ObjectId id; + Vector vector; + BinaryVector binVector; + + @Override + public boolean equals(Object o) { + if (!(o instanceof WithVectors that)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(id, that.id)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(vector, that.vector)) { + return false; + } + return ObjectUtils.nullSafeEquals(binVector, that.binVector); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHash(id, vector, binVector); + } + } + + @Document(COLLECTION) + static class WithBinaryDataInArray { + + @Id String id; + byte[] data; + + public String getId() { + return this.id; + } + + public byte[] getData() { + return this.data; + } + + public void setId(String id) { + this.id = id; + } + + public void setData(byte[] data) { + this.data = data; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithBinaryDataInArray that = (WithBinaryDataInArray) o; + return Objects.equals(id, that.id) && Arrays.equals(data, that.data); + } + + @Override + public int hashCode() { + int result = Objects.hash(id); + result = 31 * result + Arrays.hashCode(data); + return result; + } + + public String toString() { + return "MongoConvertersIntegrationTests.WithBinaryDataInArray(id=" + this.getId() + ", data=" + + java.util.Arrays.toString(this.getData()) + ")"; + } + } + + @Document(COLLECTION) + static class WithBinaryDataType { + + @Id String id; + Binary data; + + public WithBinaryDataType() {} + + public String getId() { + return this.id; + } + + public Binary getData() { + return this.data; + } + + public void setId(String id) { + this.id = id; + } + + public void setData(Binary data) { + this.data = data; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithBinaryDataType that = (WithBinaryDataType) o; + return Objects.equals(id, that.id) && Objects.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(id, data); + } + + public String toString() { + return "MongoConvertersIntegrationTests.WithBinaryDataType(id=" + this.getId() + ", data=" + this.getData() + ")"; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java index 053c7bce44..72cd807d3b 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,129 +15,171 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.math.BigDecimal; +import java.net.URI; +import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.Currency; - -import org.junit.Test; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +import org.assertj.core.data.TemporalUnitLessThanOffset; +import org.bson.BsonTimestamp; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; +import org.springframework.data.mongodb.core.convert.MongoConverters.AtomicIntegerToIntegerConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.AtomicLongToLongConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.BsonTimestampToInstantConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.CurrencyToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.DocumentToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.IntegerToAtomicIntegerConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.LongToAtomicLongConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToCurrencyConverter; import org.springframework.data.mongodb.core.geo.Sphere; -import com.mongodb.DBObject; - /** * Unit tests for {@link MongoConverters}. 
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl */ -public class MongoConvertersUnitTests { +class MongoConvertersUnitTests { @Test - public void convertsBigDecimalToStringAndBackCorrectly() { + void convertsBigDecimalToStringAndBackCorrectly() { BigDecimal bigDecimal = BigDecimal.valueOf(254, 1); String value = BigDecimalToStringConverter.INSTANCE.convert(bigDecimal); - assertThat(value, is("25.4")); + assertThat(value).isEqualTo("25.4"); BigDecimal reference = StringToBigDecimalConverter.INSTANCE.convert(value); - assertThat(reference, is(bigDecimal)); + assertThat(reference).isEqualTo(bigDecimal); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsBoxToDbObjectAndBackCorrectly() { + @Test // DATAMONGO-858 + void convertsBoxToDocumentAndBackCorrectly() { Box box = new Box(new Point(1, 2), new Point(3, 4)); - DBObject dbo = GeoConverters.BoxToDbObjectConverter.INSTANCE.convert(box); - Shape shape = GeoConverters.DbObjectToBoxConverter.INSTANCE.convert(dbo); + Document document = GeoConverters.BoxToDocumentConverter.INSTANCE.convert(box); + Shape shape = GeoConverters.DocumentToBoxConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) box)); + assertThat(shape).isEqualTo(box); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsCircleToDbObjectAndBackCorrectly() { + @Test // DATAMONGO-858 + void convertsCircleToDocumentAndBackCorrectly() { Circle circle = new Circle(new Point(1, 2), 3); - DBObject dbo = GeoConverters.CircleToDbObjectConverter.INSTANCE.convert(circle); - Shape shape = GeoConverters.DbObjectToCircleConverter.INSTANCE.convert(dbo); + Document document = GeoConverters.CircleToDocumentConverter.INSTANCE.convert(circle); + Shape shape = GeoConverters.DocumentToCircleConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) circle)); + assertThat(shape).isEqualTo(circle); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsPolygonToDbObjectAndBackCorrectly() { + @Test // DATAMONGO-858 + void convertsPolygonToDocumentAndBackCorrectly() { Polygon polygon = new Polygon(new Point(1, 2), new Point(2, 3), new Point(3, 4), new Point(5, 6)); - DBObject dbo = GeoConverters.PolygonToDbObjectConverter.INSTANCE.convert(polygon); - Shape shape = GeoConverters.DbObjectToPolygonConverter.INSTANCE.convert(dbo); + Document document = GeoConverters.PolygonToDocumentConverter.INSTANCE.convert(polygon); + Shape shape = GeoConverters.DocumentToPolygonConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) polygon)); + assertThat(shape).isEqualTo(polygon); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsSphereToDbObjectAndBackCorrectly() { + @Test // DATAMONGO-858 + void convertsSphereToDocumentAndBackCorrectly() { Sphere sphere = new Sphere(new Point(1, 2), 3); - DBObject dbo = GeoConverters.SphereToDbObjectConverter.INSTANCE.convert(sphere); - org.springframework.data.geo.Shape shape = GeoConverters.DbObjectToSphereConverter.INSTANCE.convert(dbo); + Document document = GeoConverters.SphereToDocumentConverter.INSTANCE.convert(sphere); + org.springframework.data.geo.Shape shape = GeoConverters.DocumentToSphereConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) sphere)); + assertThat(shape).isEqualTo(sphere); } - /** - * @see DATAMONGO-858 - */ - @Test - public void convertsPointToListAndBackCorrectly() { + @Test // DATAMONGO-858 + void 
convertsPointToListAndBackCorrectly() { Point point = new Point(1, 2); - DBObject dbo = GeoConverters.PointToDbObjectConverter.INSTANCE.convert(point); - org.springframework.data.geo.Point converted = GeoConverters.DbObjectToPointConverter.INSTANCE.convert(dbo); + Document document = GeoConverters.PointToDocumentConverter.INSTANCE.convert(point); + org.springframework.data.geo.Point converted = GeoConverters.DocumentToPointConverter.INSTANCE.convert(document); - assertThat(converted, is((org.springframework.data.geo.Point) point)); + assertThat(converted).isEqualTo(point); } - /** - * @see DATAMONGO-1372 - */ - @Test - public void convertsCurrencyToStringCorrectly() { - assertThat(CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD")), is("USD")); + @Test // DATAMONGO-1372 + void convertsCurrencyToStringCorrectly() { + assertThat(CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD"))).isEqualTo("USD"); } - /** - * @see DATAMONGO-1372 - */ - @Test - public void convertsStringToCurrencyCorrectly() { - assertThat(StringToCurrencyConverter.INSTANCE.convert("USD"), is(Currency.getInstance("USD"))); + @Test // DATAMONGO-1372 + void convertsStringToCurrencyCorrectly() { + assertThat(StringToCurrencyConverter.INSTANCE.convert("USD")).isEqualTo(Currency.getInstance("USD")); + } + + @Test // DATAMONGO-1416 + void convertsAtomicLongToLongCorrectly() { + assertThat(AtomicLongToLongConverter.INSTANCE.convert(new AtomicLong(100L))).isEqualTo(100L); + } + + @Test // DATAMONGO-1416 + void convertsAtomicIntegerToIntegerCorrectly() { + assertThat(AtomicIntegerToIntegerConverter.INSTANCE.convert(new AtomicInteger(100))).isEqualTo(100); + } + + @Test // DATAMONGO-1416 + void convertsLongToAtomicLongCorrectly() { + assertThat(LongToAtomicLongConverter.INSTANCE.convert(100L)).isInstanceOf(AtomicLong.class); + } + + @Test // DATAMONGO-1416 + void convertsIntegerToAtomicIntegerCorrectly() { + assertThat(IntegerToAtomicIntegerConverter.INSTANCE.convert(100)).isInstanceOf(AtomicInteger.class); + } + + @Test // DATAMONGO-2113 + void convertsBsonTimestampToInstantCorrectly() { + + assertThat(BsonTimestampToInstantConverter.INSTANCE.convert(new BsonTimestamp(6615900307735969796L))) + .isCloseTo(Instant.ofEpochSecond(1540384327), new TemporalUnitLessThanOffset(100, ChronoUnit.MILLIS)); + } + + @Test // DATAMONGO-2210 + void convertsUrisToString() { + + MongoCustomConversions conversions = new MongoCustomConversions(); + + assertThat(conversions.getSimpleTypeHolder().isSimpleType(URI.class)).isTrue(); + + ConfigurableConversionService conversionService = new DefaultConversionService(); + conversions.registerConvertersIn(conversionService); + + assertThat(conversionService.convert(URI.create("/segment"), String.class)).isEqualTo("/segment"); + assertThat(conversionService.convert("/segment", URI.class)).isEqualTo(URI.create("/segment")); + } + + @Test // GH-3546 + void convertsDocumentWithUUidToString() { + + UUID uuid = UUID.randomUUID(); + assertThat(DocumentToStringConverter.INSTANCE.convert(new Document("_id", uuid))) + .isEqualTo("{\"_id\": \"" + uuid.toString() + "\"}"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoCustomConversionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoCustomConversionsUnitTests.java new file mode 100644 index 0000000000..9382b835ea --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoCustomConversionsUnitTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.time.ZonedDateTime; +import java.util.Collections; +import java.util.Date; + +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.core.convert.QueryMapperUnitTests.Foo; + +/** + * Unit tests for {@link MongoCustomConversions}. + * + * @author Christoph Strobl + */ +class MongoCustomConversionsUnitTests { + + @Test // DATAMONGO-2349 + void nonAnnotatedConverterForJavaTimeTypeShouldOnlyBeRegisteredAsReadingConverter() { + + MongoCustomConversions conversions = new MongoCustomConversions( + Collections.singletonList(new DateToZonedDateTimeConverter())); + + assertThat(conversions.hasCustomReadTarget(Date.class, ZonedDateTime.class)).isTrue(); + assertThat(conversions.hasCustomWriteTarget(Date.class)).isFalse(); + } + + @Test // GH-3596 + void propertyValueConverterRegistrationWorksAsExpected() { + + PersistentProperty persistentProperty = mock(PersistentProperty.class); + PersistentEntity owner = mock(PersistentEntity.class); + when(persistentProperty.getName()).thenReturn("name"); + when(persistentProperty.getOwner()).thenReturn(owner); + when(owner.getType()).thenReturn(Foo.class); + + MongoCustomConversions conversions = MongoCustomConversions.create(config -> { + + config.configurePropertyConversions( + registry -> registry.registerConverter(Foo.class, "name", mock(PropertyValueConverter.class))); + }); + + assertThat(conversions.getPropertyValueConversions().hasValueConverter(persistentProperty)).isTrue(); + } + + @Test // GH-4390 + void doesNotReturnConverterForNativeTimeTimeIfUsingDriverCodec() { + + MongoCustomConversions conversions = MongoCustomConversions.create(config -> { + config.useNativeDriverJavaTimeCodecs(); + }); + + assertThat(conversions.getCustomWriteTarget(Date.class)).isEmpty(); + } + + static class DateToZonedDateTimeConverter implements Converter<Date, ZonedDateTime> { + + @Override + public ZonedDateTime convert(Date source) { + return ZonedDateTime.now(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java index 7f09c500eb..16b198004c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java +++
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,184 +15,144 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.springframework.data.domain.Example.*; import static org.springframework.data.domain.ExampleMatcher.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.regex.Pattern; -import org.bson.BSONObject; -import org.hamcrest.core.Is; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.bson.conversions.Bson; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Example; -import org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers; -import org.springframework.data.domain.ExampleMatcher.StringMatcher; +import org.springframework.data.domain.ExampleMatcher; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.convert.QueryMapperUnitTests.ClassWithGeoTypes; import org.springframework.data.mongodb.core.convert.QueryMapperUnitTests.WithDBRef; +import org.springframework.data.mongodb.core.index.Indexed; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.test.util.IsBsonObject; - -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.UntypedExampleMatcher; +import org.springframework.data.util.TypeInformation; /** * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class MongoExampleMapperUnitTests { - - MongoExampleMapper mapper; - MongoMappingContext context; - MappingMongoConverter converter; +@ExtendWith(MockitoExtension.class) +class MongoExampleMapperUnitTests { - @Mock MongoDbFactory factory; + private MongoExampleMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; - @Before - public void setUp() { + 
@BeforeEach + void setUp() { this.context = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.afterPropertiesSet(); this.mapper = new MongoExampleMapper(converter); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { FlatDocument probe = new FlatDocument(); probe.id = "steelheart"; - IsBsonObject<BSONObject> expected = isBsonObject().containing("_id", "steelheart"); - - assertThat(mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("_id", "steelheart"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { FlatDocument probe = new FlatDocument(); probe.id = "steelheart"; probe.stringValue = "firefight"; probe.intValue = 100; - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("_id", "steelheart").// - containing("stringValue", "firefight").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("_id", "steelheart") // + .containsEntry("stringValue", "firefight") // + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; probe.intValue = 100; - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue", "firefight").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) // + .containsEntry("stringValue", "firefight") // + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { FlatDocument probe = new FlatDocument(); probe.listOfString = Arrays.asList("Prof", "Tia", "David"); - BasicDBList list = new BasicDBList(); - list.addAll(Arrays.asList("Prof", "Tia", "David")); - - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("listOfString", list); + List<String> list = (Arrays.asList("Prof", "Tia", "David")); - assertThat(mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("listOfString", list); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() { + @Test // DATAMONGO-1245 + void
exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "Mitosis"; - IsBsonObject<BSONObject> expected = isBsonObject().containing("custom_field_name", "Mitosis"); - - assertThat(mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("custom_field_name", "Mitosis"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void typedExampleShouldContainTypeRestriction() { + @Test // DATAMONGO-1245 + void typedExampleShouldContainTypeRestriction() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); probe.flatDoc.stringValue = "conflux"; - DBObject dbo = mapper.getMappedExample(Example.of(probe), context.getPersistentEntity(WrapperDocument.class)); + org.bson.Document document = mapper.getMappedExample(Example.of(probe), + context.getRequiredPersistentEntity(WrapperDocument.class)); - assertThat(dbo, - isBsonObject().containing("_class", new BasicDBObject("$in", new String[] { probe.getClass().getName() }))); + assertThat(document).containsEntry("_class", + new org.bson.Document("$in", Collections.singletonList(probe.getClass().getName()))); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatchMode() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatchMode() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); probe.flatDoc.stringValue = "conflux"; - IsBsonObject<BSONObject> expected = isBsonObject().containing("flatDoc\\.stringValue", "conflux"); - - assertThat(mapper.getMappedExample(of(probe), context.getPersistentEntity(WrapperDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc\\.stringValue", "conflux"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictMatchMode() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictMatchMode() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -200,15 +160,12 @@ public void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictM Example<WrapperDocument> example = Example.of(probe, matching().withIncludeNullValues()); - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(WrapperDocument.class)), // - isBsonObject().containing("flatDoc.stringValue", "conflux")); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc.stringValue", "conflux"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarting() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarting() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -216,18 +173,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarti Example<FlatDocument> example = Example.of(probe, matching().withStringMatcher(StringMatcher.STARTING)); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue.$regex", "^firefight").// - containing("intValue", 100); -
assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "^firefight")// + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMatchModeIsStarting() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMatchModeIsStarting() { FlatDocument probe = new FlatDocument(); probe.stringValue = "fire.ight"; @@ -235,18 +187,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMat Example<FlatDocument> example = Example.of(probe, matching().withStringMatcher(StringMatcher.STARTING)); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue.$regex", "^" + Pattern.quote("fire.ight")).// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "^" + Pattern.quote("fire.ight"))// + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -254,18 +201,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding Example<FlatDocument> example = Example.of(probe, matching().withStringMatcher(StringMatcher.ENDING)); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue.$regex", "firefight$").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "firefight$") // + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -273,18 +215,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() Example<FlatDocument> example = Example.of(probe, matching().withStringMatcher(StringMatcher.REGEX)); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue.$regex", "firefight").// - containing("custom_field_name.$regex", "^(cat|dog).*shelter\\d?"); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "firefight") // + .containsEntry("custom_field_name.$regex", "^(cat|dog).*shelter\\d?"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMatchModeSet() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMatchModeSet() { FlatDocument probe = new
FlatDocument(); probe.stringValue = "firefight"; @@ -292,18 +229,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMat Example<FlatDocument> example = Example.of(probe, matching().withStringMatcher(StringMatcher.ENDING).withIgnoreCase()); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue", new BasicDBObject("$regex", "firefight$").append("$options", "i")).// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue", new org.bson.Document("$regex", "firefight$").append("$options", "i")) // + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -311,65 +243,55 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { Example<FlatDocument> example = Example.of(probe, matching().withIgnoreCase()); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue", new BasicDBObject("$regex", Pattern.quote("firefight")).append("$options", "i")).// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue", + new org.bson.Document("$regex", Pattern.quote("firefight")).append("$options", "i")) // + .containsEntry("intValue", 100); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedWhenContainingDBRef() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedWhenContainingDBRef() { FlatDocument probe = new FlatDocument(); probe.stringValue = "steelheart"; probe.referenceDocument = new ReferenceDocument(); probe.referenceDocument.id = "200"; - DBObject dbo = mapper.getMappedExample(of(probe), context.getPersistentEntity(WithDBRef.class)); - com.mongodb.DBRef reference = getTypedValue(dbo, "referenceDocument", com.mongodb.DBRef.class); + org.bson.Document document = mapper.getMappedExample(of(probe), + context.getRequiredPersistentEntity(WithDBRef.class)); + com.mongodb.DBRef reference = getTypedValue(document, "referenceDocument", com.mongodb.DBRef.class); - assertThat(reference.getId(), Is.<Object> is("200"));
- assertThat(reference.getCollectionName(), is("refDoc")); + assertThat(reference.getId()).isEqualTo("200"); + assertThat(reference.getCollectionName()).isEqualTo("refDoc"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedWhenDBRefIsNull() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedWhenDBRefIsNull() { FlatDocument probe = new FlatDocument(); probe.stringValue = "steelheart"; - DBObject dbo = mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)); + org.bson.Document document = mapper.getMappedExample(of(probe), + context.getRequiredPersistentEntity(FlatDocument.class)); - assertThat(dbo, isBsonObject().containing("stringValue", "steelheart")); + assertThat(document).containsEntry("stringValue", "steelheart"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { ClassWithGeoTypes probe = new ClassWithGeoTypes(); probe.legacyPoint = new Point(10D, 20D); - DBObject dbo = mapper.getMappedExample(of(probe), context.getPersistentEntity(WithDBRef.class)); + org.bson.Document document = mapper.getMappedExample(of(probe), + context.getRequiredPersistentEntity(WithDBRef.class)); - assertThat(dbo.get("legacyPoint.x"), Is.<Object> is(10D)); - assertThat(dbo.get("legacyPoint.y"), Is.<Object> is(20D)); + assertThat(document.get("legacyPoint.x")).isEqualTo(10D); + assertThat(document.get("legacyPoint.y")).isEqualTo(20D); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void mappingShouldExcludeFieldWithCustomNameCorrectly() { + @Test // DATAMONGO-1245 + void mappingShouldExcludeFieldWithCustomNameCorrectly() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "foo"; @@ -378,18 +300,13 @@ public void mappingShouldExcludeFieldWithCustomNameCorrectly() { Example<FlatDocument> example = Example.of(probe, matching().withIgnorePaths("customNamedField")); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue", "string").// - containing("intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue", "string") // + .containsEntry("intValue", 10); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void mappingShouldExcludeFieldCorrectly() { + @Test // DATAMONGO-1245 + void mappingShouldExcludeFieldCorrectly() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "foo"; @@ -398,18 +315,13 @@ public void mappingShouldExcludeFieldCorrectly() { Example<FlatDocument> example = Example.of(probe, matching().withIgnorePaths("stringValue")); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("custom_field_name", "foo").// - containing("intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("custom_field_name", "foo") // + .containsEntry("intValue", 10); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void mappingShouldExcludeNestedFieldCorrectly() { + @Test // DATAMONGO-1245 + void mappingShouldExcludeNestedFieldCorrectly() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -419,18 +331,13 @@ public void
mappingShouldExcludeNestedFieldCorrectly() { Example<WrapperDocument> example = Example.of(probe, matching().withIgnorePaths("flatDoc.stringValue")); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("flatDoc\\.custom_field_name", "foo").// - containing("flatDoc\\.intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(WrapperDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc\\.custom_field_name", "foo")// + .containsEntry("flatDoc\\.intValue", 10); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { + @Test // DATAMONGO-1245 + void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -440,18 +347,13 @@ public void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { Example<WrapperDocument> example = Example.of(probe, matching().withIgnorePaths("flatDoc.customNamedField")); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("flatDoc\\.stringValue", "string").// - containing("flatDoc\\.intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(WrapperDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc\\.stringValue", "string") // + .containsEntry("flatDoc\\.intValue", 10); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMatcher() { + @Test // DATAMONGO-1245 + void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMatcher() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -459,27 +361,147 @@ public void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMa Example<FlatDocument> example = Example.of(probe, matching().withMatcher("stringValue", GenericPropertyMatchers.contains())); - IsBsonObject<BSONObject> expected = isBsonObject().// - containing("stringValue.$regex", ".*firefight.*").// - containing("custom_field_name", "steelheart"); - - assertThat(mapper.getMappedExample(example, context.getPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", ".*firefight.*") // + .containsEntry("custom_field_name", "steelheart"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void mappingShouldIncludePropertiesFromHierarchicalDocument() { + @Test // DATAMONGO-1245 + void mappingShouldIncludePropertiesFromHierarchicalDocument() { HierachicalDocument probe = new HierachicalDocument(); probe.stringValue = "firefight"; probe.customNamedField = "steelheart"; probe.anotherStringValue = "calamity"; - DBObject dbo = mapper.getMappedExample(of(probe), context.getPersistentEntity(FlatDocument.class)); + org.bson.Document document = mapper.getMappedExample(of(probe), + context.getRequiredPersistentEntity(FlatDocument.class)); + + assertThat(document).containsEntry("anotherStringValue", "calamity"); + } + + @Test // DATAMONGO-1459 + void mapsAnyMatchingExampleCorrectly() { + + FlatDocument probe = new FlatDocument(); + probe.stringValue = "firefight"; + probe.customNamedField = "steelheart"; + + Example<FlatDocument> example = Example.of(probe, ExampleMatcher.matchingAny()); +
assertThat(mapper.getMappedExample(example)).containsKeys("$or", "_class"); + } + + @Test // DATAMONGO-1768 + void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { + + WrapperDocument probe = new WrapperDocument(); + probe.flatDoc = new FlatDocument(); + probe.flatDoc.stringValue = "conflux"; + + org.bson.Document document = mapper + .getMappedExample(Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_class"))); + + assertThat(document).doesNotContainKey("_class"); + } + + @Test // DATAMONGO-1768 + void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPathWhenUsingCustomTypeMapper() { + + WrapperDocument probe = new WrapperDocument(); + probe.flatDoc = new FlatDocument(); + probe.flatDoc.stringValue = "conflux"; + + MappingMongoConverter mappingMongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + mappingMongoConverter.setTypeMapper(new DefaultMongoTypeMapper() { + + @Override + public boolean isTypeKey(String key) { + return "_foo".equals(key); + } + + @Override + public void writeTypeRestrictions(org.bson.Document result, Set<Class<?>> restrictedTypes) { + result.put("_foo", "bar"); + } + + @Override + public void writeType(TypeInformation<?> info, Bson sink) { + ((org.bson.Document) sink).put("_foo", "bar"); + + } + }); + mappingMongoConverter.afterPropertiesSet(); - assertThat(dbo, isBsonObject().containing("anotherStringValue", "calamity")); + org.bson.Document document = new MongoExampleMapper(mappingMongoConverter) + .getMappedExample(Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_foo"))); + + assertThat(document).doesNotContainKeys("_class", "_foo"); + } + + @Test // DATAMONGO-1768 + void untypedExampleShouldNotInferTypeRestriction() { + + WrapperDocument probe = new WrapperDocument(); + probe.flatDoc = new FlatDocument(); + probe.flatDoc.stringValue = "conflux"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).doesNotContainKey("_class"); + } + + @Test // DATAMONGO-1902 + void mapsUnwrappedType() { + + WithUnwrapped probe = new WithUnwrapped(); + probe.unwrappedValue = new UnwrappableType(); + probe.unwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.unwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("stringValue", "string-value").containsEntry("with-at-field-annotation", + "@Field"); + } + + @Test // DATAMONGO-1902 + void mapsPrefixedUnwrappedType() { + + WithUnwrapped probe = new WithUnwrapped(); + probe.prefixedUnwrappedValue = new UnwrappableType(); + probe.prefixedUnwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.prefixedUnwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("prefix-stringValue", "string-value") + .containsEntry("prefix-with-at-field-annotation", "@Field"); + } + + @Test // DATAMONGO-1902 + void mapsNestedUnwrappedType() { + + WrapperAroundWithUnwrapped probe = new WrapperAroundWithUnwrapped(); + probe.withUnwrapped = new WithUnwrapped(); + probe.withUnwrapped.unwrappedValue = new UnwrappableType(); + probe.withUnwrapped.unwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.withUnwrapped.unwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe,
UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("withUnwrapped.stringValue", "string-value") + .containsEntry("withUnwrapped.with-at-field-annotation", "@Field"); + } + + @Test // DATAMONGO-1902 + void mapsNestedPrefixedUnwrappedType() { + + WrapperAroundWithUnwrapped probe = new WrapperAroundWithUnwrapped(); + probe.withUnwrapped = new WithUnwrapped(); + probe.withUnwrapped.prefixedUnwrappedValue = new UnwrappableType(); + probe.withUnwrapped.prefixedUnwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.withUnwrapped.prefixedUnwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("withUnwrapped.prefix-stringValue", "string-value") + .containsEntry("withUnwrapped.prefix-with-at-field-annotation", "@Field"); } static class FlatDocument { @@ -503,10 +525,35 @@ static class WrapperDocument { FlatDocument flatDoc; } - @Document(collection = "refDoc") + @Document("refDoc") static class ReferenceDocument { @Id String id; String value; } + + @Document + static class WrapperAroundWithUnwrapped { + + String id; + WithUnwrapped withUnwrapped; + } + + @Document + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + @Unwrapped.Nullable("prefix-") UnwrappableType prefixedUnwrappedValue; + } + + static class UnwrappableType { + + @Indexed String stringValue; + + @Indexed // + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java new file mode 100644 index 0000000000..558c0d65ec --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java @@ -0,0 +1,209 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; + +/** + * Unit tests for {@link MongoJsonSchemaMapper}. 
+ * + * @author Christoph Strobl + */ +public class MongoJsonSchemaMapperUnitTests { + + MongoJsonSchemaMapper mapper; + + Document addressProperty = new Document("type", "object").append("required", Arrays.asList("street", "postCode")) + .append("properties", + new Document("street", new Document("type", "string")).append("postCode", new Document("type", "string"))); + + Document mappedAddressProperty = new Document("type", "object") + .append("required", Arrays.asList("street", "post_code")).append("properties", + new Document("street", new Document("type", "string")).append("post_code", new Document("type", "string"))); + + Document nameProperty = new Document("type", "string"); + Document gradePointAverageProperty = new Document("bsonType", "double"); + Document yearProperty = new Document("bsonType", "int").append("minimum", 2017).append("maximum", 3017) + .append("exclusiveMaximum", true); + + Document properties = new Document("name", nameProperty) // + .append("gradePointAverage", gradePointAverageProperty) // + .append("year", yearProperty); + + Document mappedProperties = new Document("name", new Document(nameProperty)) // + .append("gpa", new Document(gradePointAverageProperty)) // + .append("year", new Document(yearProperty)); + + List<String> requiredProperties = Arrays.asList("name", "gradePointAverage"); + List<String> mappedRequiredProperties = Arrays.asList("name", "gpa"); + + Document $jsonSchema = new Document("type", "object") // + .append("required", requiredProperties) // + .append("properties", properties); + + Document mapped$jsonSchema = new Document("type", "object") // + .append("required", mappedRequiredProperties) // + .append("properties", mappedProperties); + + Document sourceSchemaDocument = new Document("$jsonSchema", $jsonSchema); + Document mappedSchemaDocument = new Document("$jsonSchema", mapped$jsonSchema); + + String complexSchemaJsonString = "{ $jsonSchema: {" + // + " type: \"object\"," + // + " required: [ \"name\", \"year\", \"major\", \"gpa\" ]," + // + " properties: {" + // + " name: {" + // + " type: \"string\"," + // + " description: \"must be a string and is required\"" + // + " }," + // + " gender: {" + // + " type: \"string\"," + // + " description: \"must be a string and is not required\"" + // + " }," + // + " year: {" + // + " bsonType: \"int\"," + // + " minimum: 2017," + // + " maximum: 3017," + // + " exclusiveMaximum: true," + // + " description: \"must be an integer in [ 2017, 3017 ] and is required\"" + // + " }," + // + " major: {" + // + " type: \"string\"," + // + " enum: [ \"Math\", \"English\", \"Computer Science\", \"History\", null ]," + // + " description: \"can only be one of the enum values and is required\"" + // + " }," + // + " gpa: {" + // + " bsonType: \"double\"," + // + " description: \"must be a double and is required\"" + // + " }" + // + " }" + // + " } }"; + + @BeforeEach + public void setUp() { + mapper = new MongoJsonSchemaMapper(new MappingMongoConverter(mock(DbRefResolver.class), new MongoMappingContext())); + } + + @Test // DATAMONGO-1835 + public void noNullSchemaAllowed() { + + assertThatIllegalArgumentException().isThrownBy(() -> mapper.mapSchema(null, Object.class)); + } + + @Test // DATAMONGO-1835 + public void noNullDomainTypeAllowed() { + + assertThatIllegalArgumentException() + .isThrownBy(() -> mapper.mapSchema(new Document("$jsonSchema", new Document()), null)); + } + + @Test // DATAMONGO-1835 + public void schemaDocumentMustContain$jsonSchemaField() { + assertThatIllegalArgumentException() + .isThrownBy(() -> 
mapper.mapSchema(new Document("foo", new Document()), Object.class)); + } + + @Test // DATAMONGO-1835 + public void objectTypeSkipsFieldMapping() { + assertThat(mapper.mapSchema(sourceSchemaDocument, Object.class)).isEqualTo(sourceSchemaDocument); + } + + @Test // DATAMONGO-1835 + public void mapSchemaProducesNewDocument() { + assertThat(mapper.mapSchema(sourceSchemaDocument, Object.class)).isNotSameAs(sourceSchemaDocument); + } + + @Test // DATAMONGO-1835 + public void mapSchemaMapsPropertiesToFieldNames() { + assertThat(mapper.mapSchema(sourceSchemaDocument, Student.class)).isEqualTo(mappedSchemaDocument); + } + + @Test // DATAMONGO-1835 + public void mapSchemaLeavesSourceDocumentUntouched() { + + Document source = Document.parse(complexSchemaJsonString); + mapper.mapSchema(source, Student.class); + + assertThat(source).isEqualTo(Document.parse(complexSchemaJsonString)); + } + + @Test // DATAMONGO-1835 + public void mapsNestedPropertiesCorrectly() { + + Document schema = new Document("$jsonSchema", new Document("type", "object") // + .append("properties", new Document(properties).append("address", addressProperty))); + + Document expectedSchema = new Document("$jsonSchema", new Document("type", "object") // + .append("properties", new Document(mappedProperties).append("address", mappedAddressProperty))); + + assertThat(mapper.mapSchema(schema, Student.class)).isEqualTo(expectedSchema); + } + + @Test // DATAMONGO-1835 + public void constructReferenceSchemaCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("name", "year", "major", "gradePointAverage").description("") // + .properties(string("name").description("must be a string and is required"), // + string("gender").description("must be a string and is not required"), // + int32("year").description("must be an integer in [ 2017, 3017 ] and is required").gte(2017).lt(3017), // + string("major").description("can only be one of the enum values and is required").possibleValues("Math", + "English", "Computer Science", "History", null), // + float64("gradePointAverage").description("must be a double and is required") // + ).build(); + + assertThat(mapper.mapSchema(schema.toDocument(), Student.class)).isEqualTo(Document.parse(complexSchemaJsonString)); + } + + static class Student { + + String name; + Gender gender; + Integer year; + String major; + + @Field("gpa") // + Double gradePointAverage; + Address address; + } + + static class Address { + + String city; + String street; + + @Field("post_code") // + String postCode; + } + + static enum Gender { + M, F + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java index c7acd2f0fa..fb19ecf3b6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,120 +15,92 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; +import org.bson.Document; import org.bson.types.Code; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.core.convert.converter.Converter; -import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToNamedMongoScriptCoverter; -import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDBObjectConverter; -import org.springframework.data.mongodb.core.convert.NamedMongoScriptConvertsUnitTests.DboToNamedMongoScriptConverterUnitTests; -import org.springframework.data.mongodb.core.convert.NamedMongoScriptConvertsUnitTests.NamedMongoScriptToDboConverterUnitTests; +import org.springframework.data.mongodb.core.convert.MongoConverters.DocumentToNamedMongoScriptConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDocumentConverter; +import org.springframework.data.mongodb.core.convert.NamedMongoScriptConvertsUnitTests.DocumentToNamedMongoScriptConverterUnitTests; +import org.springframework.data.mongodb.core.convert.NamedMongoScriptConvertsUnitTests.NamedMongoScriptToDocumentConverterUnitTests; import org.springframework.data.mongodb.core.script.NamedMongoScript; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; - /** * Unit tests for {@link Converter} implementations for {@link NamedMongoScript}. 
- * + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 */ @RunWith(Suite.class) -@SuiteClasses({ NamedMongoScriptToDboConverterUnitTests.class, DboToNamedMongoScriptConverterUnitTests.class }) +@SuiteClasses({ NamedMongoScriptToDocumentConverterUnitTests.class, + DocumentToNamedMongoScriptConverterUnitTests.class }) public class NamedMongoScriptConvertsUnitTests { static final String FUNCTION_NAME = "echo"; static final String JS_FUNCTION = "function(x) { return x; }"; static final NamedMongoScript ECHO_SCRIPT = new NamedMongoScript(FUNCTION_NAME, JS_FUNCTION); - static final DBObject FUNCTION = new BasicDBObjectBuilder().add("_id", FUNCTION_NAME) - .add("value", new Code(JS_FUNCTION)).get(); + static final Document FUNCTION = new org.bson.Document().append("_id", FUNCTION_NAME).append("value", + new Code(JS_FUNCTION)); /** * @author Christoph Strobl */ - public static class NamedMongoScriptToDboConverterUnitTests { + public static class NamedMongoScriptToDocumentConverterUnitTests { - NamedMongoScriptToDBObjectConverter converter = NamedMongoScriptToDBObjectConverter.INSTANCE; - - /** - * @see DATAMONGO-479 - */ - @Test - public void convertShouldReturnEmptyDboWhenScriptIsNull() { - assertThat(converter.convert(null), is((DBObject) new BasicDBObject())); - } + NamedMongoScriptToDocumentConverter converter = NamedMongoScriptToDocumentConverter.INSTANCE; - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void convertShouldConvertScriptNameCorreclty() { - DBObject dbo = converter.convert(ECHO_SCRIPT); + Document document = converter.convert(ECHO_SCRIPT); - Object id = dbo.get("_id"); - assertThat(id, is(instanceOf(String.class))); - assertThat(id, is((Object) FUNCTION_NAME)); + Object id = document.get("_id"); + assertThat(id).isInstanceOf(String.class).isEqualTo(FUNCTION_NAME); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void convertShouldConvertScriptCodeCorreclty() { - DBObject dbo = converter.convert(ECHO_SCRIPT); + Document document = converter.convert(ECHO_SCRIPT); - Object code = dbo.get("value"); - assertThat(code, is(instanceOf(Code.class))); - assertThat(code, is((Object) new Code(JS_FUNCTION))); + Object code = document.get("value"); + assertThat(code).isInstanceOf(Code.class).isEqualTo(new Code(JS_FUNCTION)); } } /** * @author Christoph Strobl */ - public static class DboToNamedMongoScriptConverterUnitTests { + public static class DocumentToNamedMongoScriptConverterUnitTests { - DBObjectToNamedMongoScriptCoverter converter = DBObjectToNamedMongoScriptCoverter.INSTANCE; + DocumentToNamedMongoScriptConverter converter = DocumentToNamedMongoScriptConverter.INSTANCE; - /** - * @see DATAMONGO-479 - */ - @Test - public void convertShouldReturnNullIfSourceIsNull() { - assertThat(converter.convert(null), is(nullValue())); + @Test // DATAMONGO-479, DATAMONGO-2385 + public void convertShouldReturnNullIfSourceIsEmpty() { + assertThat(converter.convert(new Document())).isNull(); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void convertShouldConvertIdCorreclty() { NamedMongoScript script = converter.convert(FUNCTION); - assertThat(script.getName(), is(FUNCTION_NAME)); + assertThat(script.getName()).isEqualTo(FUNCTION_NAME); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void convertShouldConvertScriptValueCorreclty() { NamedMongoScript script = converter.convert(FUNCTION); - assertThat(script.getCode(), is(notNullValue())); - assertThat(script.getCode(), 
is(JS_FUNCTION)); + assertThat(script.getCode()).isEqualTo(JS_FUNCTION); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java index efce959f5e..9df15a674a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collection; @@ -28,6 +27,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; + import org.springframework.data.mongodb.core.convert.MongoConverters.NumberToNumberConverterFactory; /** @@ -52,11 +52,9 @@ public static Collection<Object[]> parameters() { return Arrays.<Object[]> asList(longToInt, atomicIntToInt, atomicIntToDouble, atomicLongToInt, atomicLongToLong); } - /** - * @see DATAMONGO-1288 - */ - @Test + @Test // DATAMONGO-1288 public void convertsToTargetTypeCorrectly() { - assertThat(NumberToNumberConverterFactory.INSTANCE.getConverter(expected.getClass()).convert(source), is(expected)); + assertThat(NumberToNumberConverterFactory.INSTANCE.getConverter(expected.getClass()).convert(source)) + .isEqualTo(expected); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java new file mode 100644 index 0000000000..b772772444 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java @@ -0,0 +1,109 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.util.ClassTypeInformation; + +/** + * Unit tests for {@link ObjectPath}. + * + * @author Christoph Strobl + */ +public class ObjectPathUnitTests { + + MongoPersistentEntity<?> one; + MongoPersistentEntity<?> two; + MongoPersistentEntity<?> three; + + @BeforeEach + public void setUp() { + + one = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(EntityOne.class)); + two = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(EntityTwo.class)); + three = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(EntityThree.class)); + } + + @Test // DATAMONGO-1703 + public void getPathItemShouldReturnMatch() { + + ObjectPath path = ObjectPath.ROOT.push(new EntityOne(), one, "id-1"); + + assertThat(path.getPathItem("id-1", "one", EntityOne.class)).isNotNull(); + } + + @Test // DATAMONGO-1703 + public void getPathItemShouldReturnNullWhenNoTypeMatchFound() { + + ObjectPath path = ObjectPath.ROOT.push(new EntityOne(), one, "id-1"); + + assertThat(path.getPathItem("id-1", "one", EntityThree.class)).isNull(); + } + + @Test // DATAMONGO-1703 + public void getPathItemShouldReturnCachedItemWhenIdAndCollectionMatchAndIsAssignable() { + + ObjectPath path = ObjectPath.ROOT.push(new EntityTwo(), one, "id-1"); + + assertThat(path.getPathItem("id-1", "one", EntityOne.class)).isNotNull(); + } + + @Test // DATAMONGO-1703 + public void getPathItemShouldReturnNullWhenIdAndCollectionMatchButNotAssignable() { + + ObjectPath path = ObjectPath.ROOT.push(new EntityOne(), one, "id-1"); + + assertThat(path.getPathItem("id-1", "one", EntityTwo.class)).isNull(); + } + + @Test // DATAMONGO-1703 + public void getPathItemShouldReturnNullWhenIdAndCollectionMatchAndAssignableToInterface() { + + ObjectPath path = ObjectPath.ROOT.push(new EntityThree(), one, "id-1"); + + assertThat(path.getPathItem("id-1", "one", ValueInterface.class)).isNotNull(); + } + + @Test // DATAMONGO-2267 + public void collectionLookupShouldBeLazy/* because we may need to resolve SpEL which can be pretty expensive */() { + + MongoPersistentEntity<?> spied = spy(one); + ObjectPath path = ObjectPath.ROOT.push(new EntityThree(), spied, "id-1"); + + verify(spied, never()).getCollection(); + + path.getPathItem("id-1", "foo", EntityTwo.class); + + verify(spied).getCollection(); + } + + @Document("one") + static class EntityOne {} + + static class EntityTwo extends EntityOne {} + + interface ValueInterface {} + + @Document("three") + static class EntityThree implements ValueInterface {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java old mode 100644 new mode 100755 index bc6b8272ac..72d0055389 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,326 +15,396 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static org.springframework.data.mongodb.core.aggregation.AggregationExpressionCriteria.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.TreeMap; +import java.util.regex.Pattern; +import org.bson.BsonRegularExpression; +import org.bson.conversions.Bson; +import org.bson.types.Code; import org.bson.types.ObjectId; -import org.hamcrest.core.Is; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.convert.WritingConverter; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.geo.GeoJsonPolygon; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; 
import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.TextScore; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.TextQuery; -import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; -import com.mongodb.QueryBuilder; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.model.Filters; /** * Unit tests for {@link QueryMapper}. - * + * * @author Oliver Gierke * @author Patryk Wasik * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch + * @author David Julia + * @author Gyungrai Wang */ -@RunWith(MockitoJUnitRunner.class) public class QueryMapperUnitTests { - QueryMapper mapper; - MongoMappingContext context; - MappingMongoConverter converter; + private QueryMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; - @Mock MongoDbFactory factory; - - @Before - public void setUp() { + @BeforeEach + void beforeEach() { + MongoCustomConversions conversions = new MongoCustomConversions(); this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); this.mapper = new QueryMapper(converter); } @Test - public void translatesIdPropertyIntoIdKey() { + void translatesIdPropertyIntoIdKey() { - DBObject query = new BasicDBObject("foo", "value"); - MongoPersistentEntity<?> entity = context.getPersistentEntity(Sample.class); + org.bson.Document query = new org.bson.Document("foo", "value"); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(Sample.class); - DBObject result = mapper.getMappedObject(query, entity); - assertThat(result.get("_id"), is(notNullValue())); - assertThat(result.get("foo"), is(nullValue())); + org.bson.Document result = mapper.getMappedObject(query, entity); + assertThat(result).containsKey("_id"); + assertThat(result).doesNotContainKey("foo"); } @Test - public void convertsStringIntoObjectId() { + void convertsStringIntoObjectId() { - DBObject query = new BasicDBObject("_id", new ObjectId().toString()); - DBObject result = mapper.getMappedObject(query, context.getPersistentEntity(IdWrapper.class)); - assertThat(result.get("_id"), is(instanceOf(ObjectId.class))); + org.bson.Document query = new org.bson.Document("_id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(query, context.getPersistentEntity(IdWrapper.class)); + assertThat(result.get("_id")).isInstanceOf(ObjectId.class); } @Test - public void handlesBigIntegerIdsCorrectly() { + void handlesBigIntegerIdsCorrectly() { - DBObject dbObject = new BasicDBObject("id", new BigInteger("1")); - DBObject result = mapper.getMappedObject(dbObject, context.getPersistentEntity(IdWrapper.class)); - assertThat(result.get("_id"), is((Object) "1")); + org.bson.Document document = new org.bson.Document("id", new BigInteger("1")); + 
org.bson.Document result = mapper.getMappedObject(document, context.getPersistentEntity(IdWrapper.class)); + assertThat(result).containsEntry("_id", "1"); } @Test - public void handlesObjectIdCapableBigIntegerIdsCorrectly() { + void handlesObjectIdCapableBigIntegerIdsCorrectly() { ObjectId id = new ObjectId(); - DBObject dbObject = new BasicDBObject("id", new BigInteger(id.toString(), 16)); - DBObject result = mapper.getMappedObject(dbObject, context.getPersistentEntity(IdWrapper.class)); - assertThat(result.get("_id"), is((Object) id)); + org.bson.Document document = new org.bson.Document("id", new BigInteger(id.toString(), 16)); + org.bson.Document result = mapper.getMappedObject(document, context.getPersistentEntity(IdWrapper.class)); + assertThat(result).containsEntry("_id", id); } - /** - * @see DATAMONGO-278 - */ - @Test - public void translates$NeCorrectly() { + @Test // GH-4490 + void translates$GtCorrectly() { + + Criteria criteria = where("id").gt(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$gt")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translates$GteCorrectly() { + + Criteria criteria = where("id").gte(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$gte")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translates$LteCorrectly() { + + Criteria criteria = where("id").lte(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$lte")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translates$LtCorrectly() { + + Criteria criteria = where("id").lt(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$lt")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translatesMultipleCompareOperatorsCorrectly() { + + Criteria criteria = where("id").lt(new ObjectId().toString()).lte(new ObjectId().toString()) + .gt(new ObjectId().toString()).gte(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + 
context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$lt")).isInstanceOf(ObjectId.class); + assertThat(document.get("$lte")).isInstanceOf(ObjectId.class); + assertThat(document.get("$gt")).isInstanceOf(ObjectId.class); + assertThat(document.get("$gte")).isInstanceOf(ObjectId.class); + } + + @Test // DATAMONGO-278 + void translates$NeCorrectly() { Criteria criteria = where("foo").ne(new ObjectId().toString()); - DBObject result = mapper.getMappedObject(criteria.getCriteriaObject(), context.getPersistentEntity(Sample.class)); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(Sample.class)); Object object = result.get("_id"); - assertThat(object, is(instanceOf(DBObject.class))); - DBObject dbObject = (DBObject) object; - assertThat(dbObject.get("$ne"), is(instanceOf(ObjectId.class))); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$ne")).isInstanceOf(ObjectId.class); } - /** - * @see DATAMONGO-326 - */ - @Test - public void handlesEnumsCorrectly() { + @Test // DATAMONGO-326 + void handlesEnumsCorrectly() { Query query = query(where("foo").is(Enum.INSTANCE)); - DBObject result = mapper.getMappedObject(query.getQueryObject(), null); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); Object object = result.get("foo"); - assertThat(object, is(instanceOf(String.class))); + assertThat(object).isInstanceOf(String.class); } @Test - public void handlesEnumsInNotEqualCorrectly() { + void handlesEnumsInNotEqualCorrectly() { Query query = query(where("foo").ne(Enum.INSTANCE)); - DBObject result = mapper.getMappedObject(query.getQueryObject(), null); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); Object object = result.get("foo"); - assertThat(object, is(instanceOf(DBObject.class))); + assertThat(object).isInstanceOf(org.bson.Document.class); - Object ne = ((DBObject) object).get("$ne"); - assertThat(ne, is(instanceOf(String.class))); - assertThat(ne.toString(), is(Enum.INSTANCE.name())); + Object ne = ((org.bson.Document) object).get("$ne"); + assertThat(ne).isInstanceOf(String.class).hasToString(Enum.INSTANCE.name()); } @Test - public void handlesEnumsIn$InCorrectly() { + void handlesEnumsIn$InCorrectly() { Query query = query(where("foo").in(Enum.INSTANCE)); - DBObject result = mapper.getMappedObject(query.getQueryObject(), null); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); Object object = result.get("foo"); - assertThat(object, is(instanceOf(DBObject.class))); + assertThat(object).isInstanceOf(org.bson.Document.class); - Object in = ((DBObject) object).get("$in"); - assertThat(in, is(instanceOf(BasicDBList.class))); + Object in = ((org.bson.Document) object).get("$in"); + assertThat(in).isInstanceOf(List.class); - BasicDBList list = (BasicDBList) in; - assertThat(list.size(), is(1)); - assertThat(list.get(0), is(instanceOf(String.class))); - assertThat(list.get(0).toString(), is(Enum.INSTANCE.name())); + List<Object> list = (List<Object>) in; + assertThat(list).hasSize(1); + assertThat(list.get(0)).isInstanceOf(String.class).hasToString(Enum.INSTANCE.name()); } - /** - * @see DATAMONGO-373 - */ - @Test - public void 
handlesNativelyBuiltQueryCorrectly() { + @Test // DATAMONGO-373 + void handlesNativelyBuiltQueryCorrectly() { - DBObject query = new QueryBuilder().or(new BasicDBObject("foo", "bar")).get(); - mapper.getMappedObject(query, null); + Bson query = new BasicDBObject(Filters.or(new BasicDBObject("foo", "bar")).toBsonDocument(org.bson.Document.class, + MongoClientSettings.getDefaultCodecRegistry())); + mapper.getMappedObject(query, Optional.empty()); } - /** - * @see DATAMONGO-369 - */ - @Test - public void handlesAllPropertiesIfDBObject() { + @Test // DATAMONGO-369 + void handlesAllPropertiesIfDocument() { - DBObject query = new BasicDBObject(); - query.put("foo", new BasicDBObject("$in", Arrays.asList(1, 2))); + org.bson.Document query = new org.bson.Document(); + query.put("foo", new org.bson.Document("$in", Arrays.asList(1, 2))); query.put("bar", new Person()); - DBObject result = mapper.getMappedObject(query, null); - assertThat(result.get("bar"), is(notNullValue())); + org.bson.Document result = mapper.getMappedObject(query, Optional.empty()); + assertThat(result).containsKey("bar"); } - /** - * @see DATAMONGO-429 - */ - @Test - public void transformsArraysCorrectly() { + @Test // DATAMONGO-429 + void transformsArraysCorrectly() { Query query = new BasicQuery("{ 'tags' : { '$all' : [ 'green', 'orange']}}"); - DBObject result = mapper.getMappedObject(query.getQueryObject(), null); - assertThat(result, is(query.getQueryObject())); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); + assertThat(result.toJson()).isEqualTo(query.getQueryObject().toJson()); } @Test - public void doesHandleNestedFieldsWithDefaultIdNames() { + void doesHandleNestedFieldsWithDefaultIdNames() { - BasicDBObject dbObject = new BasicDBObject("id", new ObjectId().toString()); - dbObject.put("nested", new BasicDBObject("id", new ObjectId().toString())); + org.bson.Document document = new org.bson.Document("id", new ObjectId().toString()); + document.put("nested", new org.bson.Document("id", new ObjectId().toString())); - MongoPersistentEntity<?> entity = context.getPersistentEntity(ClassWithDefaultId.class); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(ClassWithDefaultId.class); - DBObject result = mapper.getMappedObject(dbObject, entity); - assertThat(result.get("_id"), is(instanceOf(ObjectId.class))); - assertThat(((DBObject) result.get("nested")).get("_id"), is(instanceOf(ObjectId.class))); + org.bson.Document result = mapper.getMappedObject(document, entity); + assertThat(result.get("_id")).isInstanceOf(ObjectId.class); + assertThat(((org.bson.Document) result.get("nested")).get("_id")).isInstanceOf(ObjectId.class); } - /** - * @see DATAMONGO-493 - */ - @Test - public void doesNotTranslateNonIdPropertiesFor$NeCriteria() { + @Test // DATAMONGO-493 + void doesNotTranslateNonIdPropertiesFor$NeCriteria() { ObjectId accidentallyAnObjectId = new ObjectId(); Query query = Query .query(Criteria.where("id").is("id_value").and("publishers").ne(accidentallyAnObjectId.toString())); - DBObject dbObject = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(UserEntity.class)); - assertThat(dbObject.get("publishers"), is(instanceOf(DBObject.class))); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(UserEntity.class)); + assertThat(document.get("publishers")).isInstanceOf(org.bson.Document.class); - DBObject publishers = (DBObject) dbObject.get("publishers"); - 
assertThat(publishers.containsField("$ne"), is(true)); - assertThat(publishers.get("$ne"), is(instanceOf(String.class))); + org.bson.Document publishers = (org.bson.Document) document.get("publishers"); + assertThat(publishers).containsKey("$ne"); + assertThat(publishers.get("$ne")).isInstanceOf(String.class); } - /** - * @see DATAMONGO-494 - */ - @Test - public void usesEntityMetadataInOr() { + @Test // DATAMONGO-494 + void usesEntityMetadataInOr() { Query query = query(new Criteria().orOperator(where("foo").is("bar"))); - DBObject result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Sample.class)); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(Sample.class)); - assertThat(result.keySet(), hasSize(1)); - assertThat(result.keySet(), hasItem("$or")); + assertThat(result.keySet()).hasSize(1).containsOnly("$or"); - BasicDBList ors = getAsDBList(result, "$or"); - assertThat(ors, hasSize(1)); - DBObject criterias = getAsDBObject(ors, 0); - assertThat(criterias.keySet(), hasSize(1)); - assertThat(criterias.get("_id"), is(notNullValue())); - assertThat(criterias.get("foo"), is(nullValue())); + List<Object> ors = getAsDBList(result, "$or"); + assertThat(ors).hasSize(1); + org.bson.Document criterias = getAsDocument(ors, 0); + assertThat(criterias.keySet()).hasSize(1).doesNotContain("foo"); + assertThat(criterias).containsKey("_id"); } @Test - public void translatesPropertyReferenceCorrectly() { + void translatesPropertyReferenceCorrectly() { Query query = query(where("field").is(new CustomizedField())); - DBObject result = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(result.containsField("foo"), is(true)); - assertThat(result.keySet().size(), is(1)); + assertThat(result).containsKey("foo").hasSize(1); } @Test - public void translatesNestedPropertyReferenceCorrectly() { + void translatesNestedPropertyReferenceCorrectly() { Query query = query(where("field.field").is(new CustomizedField())); - DBObject result = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(result.containsField("foo.foo"), is(true)); - assertThat(result.keySet().size(), is(1)); + assertThat(result).containsKey("foo.foo"); + assertThat(result.keySet()).hasSize(1); } @Test - public void returnsOriginalKeyIfNoPropertyReference() { + void returnsOriginalKeyIfNoPropertyReference() { Query query = query(where("bar").is(new CustomizedField())); - DBObject result = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(result.containsField("bar"), is(true)); - assertThat(result.keySet().size(), is(1)); + assertThat(result).containsKey("bar"); + assertThat(result.keySet()).hasSize(1); } @Test - public void convertsAssociationCorrectly() { + void convertsAssociationCorrectly() { Reference reference = new Reference(); reference.id = 5L; Query query = query(where("reference").is(reference)); - DBObject object = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRef.class)); + org.bson.Document object = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDBRef.class)); Object referenceObject = 
object.get("reference"); - assertThat(referenceObject, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(referenceObject).isInstanceOf(com.mongodb.DBRef.class); } @Test - public void convertsNestedAssociationCorrectly() { + void convertsNestedAssociationCorrectly() { Reference reference = new Reference(); reference.id = 5L; Query query = query(where("withDbRef.reference").is(reference)); - DBObject object = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document object = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRefWrapper.class)); Object referenceObject = object.get("withDbRef.reference"); - assertThat(referenceObject, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(referenceObject).isInstanceOf(com.mongodb.DBRef.class); } @Test - public void convertsInKeywordCorrectly() { + void convertsInKeywordCorrectly() { Reference first = new Reference(); first.id = 5L; @@ -343,246 +413,287 @@ public void convertsInKeywordCorrectly() { second.id = 6L; Query query = query(where("reference").in(first, second)); - DBObject result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRef.class)); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDBRef.class)); - DBObject reference = DBObjectTestUtils.getAsDBObject(result, "reference"); + org.bson.Document reference = DocumentTestUtils.getAsDocument(result, "reference"); - BasicDBList inClause = getAsDBList(reference, "$in"); - assertThat(inClause, hasSize(2)); - assertThat(inClause.get(0), is(instanceOf(com.mongodb.DBRef.class))); - assertThat(inClause.get(1), is(instanceOf(com.mongodb.DBRef.class))); + List<Object> inClause = getAsDBList(reference, "$in"); + assertThat(inClause).hasSize(2); + assertThat(inClause.get(0)).isInstanceOf(com.mongodb.DBRef.class); + assertThat(inClause.get(1)).isInstanceOf(com.mongodb.DBRef.class); } - /** - * @see DATAMONGO-570 - */ - @Test - public void correctlyConvertsNullReference() { + @Test // DATAMONGO-570 + void correctlyConvertsNullReference() { Query query = query(where("reference").is(null)); - DBObject object = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRef.class)); + org.bson.Document object = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDBRef.class)); - assertThat(object.get("reference"), is(nullValue())); + assertThat(object.get("reference")).isNull(); } - /** - * @see DATAMONGO-629 - */ - @Test - public void doesNotMapIdIfNoEntityMetadataAvailable() { + @Test // DATAMONGO-629 + void doesNotMapIdIfNoEntityMetadataAvailable() { String id = new ObjectId().toString(); Query query = query(where("id").is(id)); - DBObject object = mapper.getMappedObject(query.getQueryObject(), null); + org.bson.Document object = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(object.containsField("id"), is(true)); - assertThat(object.get("id"), is((Object) id)); - assertThat(object.containsField("_id"), is(false)); + assertThat(object).containsKey("id"); + assertThat(object).containsEntry("id", id); + assertThat(object).doesNotContainKey("_id"); } - /** - * @see DATAMONGO-677 - */ - @Test - public void handleMapWithDBRefCorrectly() { + @Test // DATAMONGO-677 + void handleMapWithDBRefCorrectly() { - DBObject mapDbObject = new BasicDBObject(); - mapDbObject.put("test", new com.mongodb.DBRef("test", "test")); - DBObject dbObject = new BasicDBObject(); - dbObject.put("mapWithDBRef", 
mapDbObject); + org.bson.Document mapDocument = new org.bson.Document(); + mapDocument.put("test", new com.mongodb.DBRef("test", "test")); + org.bson.Document document = new org.bson.Document(); + document.put("mapWithDBRef", mapDocument); - DBObject mapped = mapper.getMappedObject(dbObject, context.getPersistentEntity(WithMapDBRef.class)); + org.bson.Document mapped = mapper.getMappedObject(document, context.getPersistentEntity(WithMapDBRef.class)); - assertThat(mapped.containsField("mapWithDBRef"), is(true)); - assertThat(mapped.get("mapWithDBRef"), instanceOf(BasicDBObject.class)); - assertThat(((BasicDBObject) mapped.get("mapWithDBRef")).containsField("test"), is(true)); - assertThat(((BasicDBObject) mapped.get("mapWithDBRef")).get("test"), instanceOf(com.mongodb.DBRef.class)); + assertThat(mapped).containsKey("mapWithDBRef"); + assertThat(mapped.get("mapWithDBRef")).isInstanceOf(org.bson.Document.class); + assertThat(((org.bson.Document) mapped.get("mapWithDBRef"))).containsKey("test"); + assertThat(((org.bson.Document) mapped.get("mapWithDBRef")).get("test")).isInstanceOf(com.mongodb.DBRef.class); } @Test - public void convertsUnderscoreIdValueWithoutMetadata() { + void convertsUnderscoreIdValueWithoutMetadata() { - DBObject dbObject = new BasicDBObject().append("_id", new ObjectId().toString()); + org.bson.Document document = new org.bson.Document().append("_id", new ObjectId().toString()); - DBObject mapped = mapper.getMappedObject(dbObject, null); - assertThat(mapped.containsField("_id"), is(true)); - assertThat(mapped.get("_id"), is(instanceOf(ObjectId.class))); + org.bson.Document mapped = mapper.getMappedObject(document, Optional.empty()); + assertThat(mapped).containsKey("_id"); + assertThat(mapped.get("_id")).isInstanceOf(ObjectId.class); } - /** - * @see DATAMONGO-705 - */ - @Test - public void convertsDBRefWithExistsQuery() { + @Test // DATAMONGO-705 + void convertsDBRefWithExistsQuery() { Query query = query(where("reference").exists(false)); - BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(WithDBRef.class); - DBObject mappedObject = mapper.getMappedObject(query.getQueryObject(), entity); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(WithDBRef.class); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), entity); - DBObject reference = getAsDBObject(mappedObject, "reference"); - assertThat(reference.containsField("$exists"), is(true)); - assertThat(reference.get("$exists"), is((Object) false)); + org.bson.Document reference = getAsDocument(mappedObject, "reference"); + assertThat(reference).containsKey("$exists"); + assertThat(reference).containsEntry("$exists", false); } - /** - * @see DATAMONGO-706 - */ - @Test - public void convertsNestedDBRefsCorrectly() { + @Test // DATAMONGO-706 + void convertsNestedDBRefsCorrectly() { Reference reference = new Reference(); reference.id = 5L; Query query = query(where("someString").is("foo").andOperator(where("reference").in(reference))); - BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(WithDBRef.class); - DBObject mappedObject = mapper.getMappedObject(query.getQueryObject(), entity); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(WithDBRef.class); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), entity); - assertThat(mappedObject.get("someString"), is((Object) "foo")); + assertThat(mappedObject).containsEntry("someString", "foo"); - BasicDBList andClause = getAsDBList(mappedObject, "$and"); - 
assertThat(andClause, hasSize(1)); + List<Object> andClause = getAsDBList(mappedObject, "$and"); + assertThat(andClause).hasSize(1); - BasicDBList inClause = getAsDBList(getAsDBObject(getAsDBObject(andClause, 0), "reference"), "$in"); - assertThat(inClause, hasSize(1)); - assertThat(inClause.get(0), is(instanceOf(com.mongodb.DBRef.class))); + List<Object> inClause = getAsDBList(getAsDocument(getAsDocument(andClause, 0), "reference"), "$in"); + assertThat(inClause).hasSize(1); + assertThat(inClause.get(0)).isInstanceOf(com.mongodb.DBRef.class); } - /** - * @see DATAMONGO-752 - */ - @Test - public void mapsSimpleValuesStartingWith$Correctly() { + @Test // GH-3853 + void convertsDocumentReferenceOnIdPropertyCorrectly() { + + Sample reference = new Sample(); + reference.foo = "s1"; + + Query query = query(where("sample").is(reference)); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("sample", "s1"); + } + + @Test // GH-4033 + void convertsNestedPathToIdPropertyOfDocumentReferenceCorrectly() { + + Query query = query(where("sample.foo").is("s1")); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("sample", "s1"); + } + + @Test // GH-4033 + void convertsNestedPathToIdPropertyOfDocumentReferenceCorrectlyWhenItShouldBeConvertedToObjectId() { + + ObjectId id = new ObjectId(); + Query query = query(where("sample.foo").is(id.toHexString())); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery.get("sample")).satisfies(it -> { + + assertThat(it).isInstanceOf(ObjectId.class); + assertThat(((ObjectId) it).toHexString()).isEqualTo(id.toHexString()); + }); + } + + @Test // GH-3853 + void convertsListDocumentReferenceOnIdPropertyCorrectly() { + + Sample reference = new Sample(); + reference.foo = "s1"; + + Query query = query(where("samples").is(Arrays.asList(reference))); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("samples", Arrays.asList("s1")); + } + + @Test // GH-3853 + void convertsDocumentReferenceOnNonIdPropertyCorrectly() { + + Customer reference = new Customer(); + reference.id = new ObjectId(); + reference.name = "c1"; + + Query query = query(where("customer").is(reference)); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("customer", "c1"); + } + + @Test // GH-3853 + void convertsListDocumentReferenceOnNonIdPropertyCorrectly() { + + Customer reference = new Customer(); + reference.id = new ObjectId(); + reference.name = "c1"; + + Query query = query(where("customers").is(Arrays.asList(reference))); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("customers", Arrays.asList("c1")); + } + + @Test // DATAMONGO-752 + void mapsSimpleValuesStartingWith$Correctly() { Query query = query(where("myvalue").is("$334")); - DBObject result = 
mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(result.keySet(), hasSize(1)); - assertThat(result.get("myvalue"), is((Object) "$334")); + assertThat(result.keySet()).hasSize(1); + assertThat(result).containsEntry("myvalue", "$334"); } - /** - * @see DATAMONGO-752 - */ - @Test - public void mapsKeywordAsSimpleValuesCorrectly() { + @Test // DATAMONGO-752 + void mapsKeywordAsSimpleValuesCorrectly() { Query query = query(where("myvalue").is("$center")); - DBObject result = mapper.getMappedObject(query.getQueryObject(), null); + org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(result.keySet(), hasSize(1)); - assertThat(result.get("myvalue"), is((Object) "$center")); + assertThat(result.keySet()).hasSize(1); + assertThat(result).containsEntry("myvalue", "$center"); } - /** - * @DATAMONGO-805 - */ - @Test - public void shouldExcludeDBRefAssociation() { + @Test // DATAMONGO-805 + void shouldExcludeDBRefAssociation() { Query query = query(where("someString").is("foo")); query.fields().exclude("reference"); - BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(WithDBRef.class); - DBObject queryResult = mapper.getMappedObject(query.getQueryObject(), entity); - DBObject fieldsResult = mapper.getMappedObject(query.getFieldsObject(), entity); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(WithDBRef.class); + org.bson.Document queryResult = mapper.getMappedObject(query.getQueryObject(), entity); + org.bson.Document fieldsResult = mapper.getMappedObject(query.getFieldsObject(), entity); - assertThat(queryResult.get("someString"), is((Object) "foo")); - assertThat(fieldsResult.get("reference"), is((Object) 0)); + assertThat(queryResult).containsEntry("someString", "foo"); + assertThat(fieldsResult).containsEntry("reference", 0); } - /** - * @see DATAMONGO-686 - */ - @Test - public void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() { + @Test // DATAMONGO-686 + void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() { - BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(Sample.class); + MongoPersistentEntity<?> persistentEntity = context.getRequiredPersistentEntity(Sample.class); String idPropertyName = persistentEntity.getIdProperty().getName(); - DBObject queryObject = query(where(idPropertyName).in("42")).getQueryObject(); + org.bson.Document queryObject = query(where(idPropertyName).in("42")).getQueryObject(); - Object idValuesBefore = getAsDBObject(queryObject, idPropertyName).get("$in"); + Object idValuesBefore = getAsDocument(queryObject, idPropertyName).get("$in"); mapper.getMappedObject(queryObject, persistentEntity); - Object idValuesAfter = getAsDBObject(queryObject, idPropertyName).get("$in"); + Object idValuesAfter = getAsDocument(queryObject, idPropertyName).get("$in"); - assertThat(idValuesAfter, is(idValuesBefore)); + assertThat(idValuesAfter).isEqualTo(idValuesBefore); } - /** - * @see DATAMONGO-821 - */ - @Test - public void queryMapperShouldNotTryToMapDBRefListPropertyIfNestedInsideDBObjectWithinDBObject() { + @Test // DATAMONGO-821 + void queryMapperShouldNotTryToMapDBRefListPropertyIfNestedInsideDocumentWithinDocument() { - DBObject queryObject = query( - where("referenceList").is(new BasicDBObject("$nested", new BasicDBObject("$keys", 0L)))).getQueryObject(); + org.bson.Document queryObject = query( + where("referenceList").is(new org.bson.Document("$nested", new org.bson.Document("$keys", 
0L)))) + .getQueryObject(); - DBObject mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRefList.class)); - DBObject referenceObject = getAsDBObject(mappedObject, "referenceList"); - DBObject nestedObject = getAsDBObject(referenceObject, "$nested"); + org.bson.Document mappedObject = mapper.getMappedObject(queryObject, + context.getPersistentEntity(WithDBRefList.class)); + org.bson.Document referenceObject = getAsDocument(mappedObject, "referenceList"); + org.bson.Document nestedObject = getAsDocument(referenceObject, "$nested"); - assertThat(nestedObject, is((DBObject) new BasicDBObject("$keys", 0L))); + assertThat(nestedObject).isEqualTo(new org.bson.Document("$keys", 0L)); } - /** - * @see DATAMONGO-821 - */ - @Test - public void queryMapperShouldNotTryToMapDBRefPropertyIfNestedInsideDBObjectWithinDBObject() { + @Test // DATAMONGO-821 + void queryMapperShouldNotTryToMapDBRefPropertyIfNestedInsideDocumentWithinDocument() { - DBObject queryObject = query(where("reference").is(new BasicDBObject("$nested", new BasicDBObject("$keys", 0L)))) - .getQueryObject(); + org.bson.Document queryObject = query( + where("reference").is(new org.bson.Document("$nested", new org.bson.Document("$keys", 0L)))).getQueryObject(); - DBObject mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRef.class)); - DBObject referenceObject = getAsDBObject(mappedObject, "reference"); - DBObject nestedObject = getAsDBObject(referenceObject, "$nested"); + org.bson.Document mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRef.class)); + org.bson.Document referenceObject = getAsDocument(mappedObject, "reference"); + org.bson.Document nestedObject = getAsDocument(referenceObject, "$nested"); - assertThat(nestedObject, is((DBObject) new BasicDBObject("$keys", 0L))); + assertThat(nestedObject).isEqualTo(new org.bson.Document("$keys", 0L)); } - /** - * @see DATAMONGO-821 - */ - @Test - public void queryMapperShouldMapDBRefPropertyIfNestedInDBObject() { + @Test // DATAMONGO-821 + void queryMapperShouldMapDBRefPropertyIfNestedInDocument() { Reference sample = new Reference(); sample.id = 321L; - DBObject queryObject = query(where("reference").is(new BasicDBObject("$in", Arrays.asList(sample)))) - .getQueryObject(); + org.bson.Document queryObject = query( + where("reference").is(new org.bson.Document("$in", Collections.singletonList(sample)))).getQueryObject(); - DBObject mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRef.class)); + org.bson.Document mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRef.class)); - DBObject referenceObject = getAsDBObject(mappedObject, "reference"); - BasicDBList inObject = getAsDBList(referenceObject, "$in"); + org.bson.Document referenceObject = getAsDocument(mappedObject, "reference"); + List<Object> inObject = getAsDBList(referenceObject, "$in"); - assertThat(inObject.get(0), is(instanceOf(com.mongodb.DBRef.class))); + assertThat(inObject.get(0)).isInstanceOf(com.mongodb.DBRef.class); } - /** - * @see DATAMONGO-773 - */ - @Test - public void queryMapperShouldBeAbleToProcessQueriesThatIncludeDbRefFields() { + @Test // DATAMONGO-773 + void queryMapperShouldBeAbleToProcessQueriesThatIncludeDbRefFields() { - BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(WithDBRef.class); + MongoPersistentEntity<?> persistentEntity = context.getRequiredPersistentEntity(WithDBRef.class); Query qry = 
query(where("someString").is("abc")); qry.fields().include("reference"); - DBObject mappedFields = mapper.getMappedObject(qry.getFieldsObject(), persistentEntity); - assertThat(mappedFields, is(notNullValue())); + org.bson.Document mappedFields = mapper.getMappedObject(qry.getFieldsObject(), persistentEntity); + assertThat(mappedFields).isNotNull(); } - /** - * @see DATAMONGO-893 - */ - @Test - public void classInformationShouldNotBePresentInDBObjectUsedInFinderMethods() { + @Test // DATAMONGO-893 + void classInformationShouldNotBePresentInDocumentUsedInFinderMethods() { EmbeddedClass embedded = new EmbeddedClass(); embedded.id = "1"; @@ -591,241 +702,238 @@ public void classInformationShouldNotBePresentInDBObjectUsedInFinderMethods() { embedded2.id = "2"; Query query = query(where("embedded").in(Arrays.asList(embedded, embedded2))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); - assertThat(dbo.toString(), equalTo("{ \"embedded\" : { \"$in\" : [ { \"_id\" : \"1\"} , { \"_id\" : \"2\"}]}}")); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); + assertThat(document).isEqualTo("{ \"embedded\" : { \"$in\" : [ { \"_id\" : \"1\"} , { \"_id\" : \"2\"}]}}"); } - /** - * @see DATAMONGO-647 - */ - @Test - public void customizedFieldNameShouldBeMappedCorrectlyWhenApplyingSort() { + @Test // DATAMONGO-1406 + void shouldMapQueryForNestedCustomizedPropertiesUsingConfiguredFieldNames() { + + EmbeddedClass embeddedClass = new EmbeddedClass(); + embeddedClass.customizedField = "hello"; - Query query = query(where("field").is("bar")).with(new Sort(Direction.DESC, "field")); - DBObject dbo = mapper.getMappedObject(query.getSortObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("foo", -1).get())); + Foo foo = new Foo(); + foo.listOfItems = Collections.singletonList(embeddedClass); + + Query query = new Query(Criteria.where("listOfItems") // + .elemMatch(new Criteria(). 
// + andOperator(Criteria.where("customizedField").is(embeddedClass.customizedField)))); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); + + assertThat(document).containsEntry("my_items.$elemMatch.$and", + Collections.singletonList(new org.bson.Document("fancy_custom_name", embeddedClass.customizedField))); } - /** - * @see DATAMONGO-973 - */ - @Test - public void getMappedFieldsAppendsTextScoreFieldProperlyCorrectlyWhenNotPresent() { + @Test // DATAMONGO-647 + void customizedFieldNameShouldBeMappedCorrectlyWhenApplyingSort() { + + Query query = query(where("field").is("bar")).with(Sort.by(Direction.DESC, "field")); + org.bson.Document document = mapper.getMappedObject(query.getSortObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(document).isEqualTo(new org.bson.Document().append("foo", -1)); + } + + @Test // DATAMONGO-973 + void getMappedFieldsAppendsTextScoreFieldProperlyCorrectlyWhenNotPresent() { Query query = new Query(); - DBObject dbo = mapper.getMappedFields(query.getFieldsObject(), + org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("score", new BasicDBObject("$meta", "textScore")).get())); + assertThat(document) + .isEqualTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore"))); } - /** - * @see DATAMONGO-973 - */ - @Test - public void getMappedFieldsReplacesTextScoreFieldProperlyCorrectlyWhenPresent() { + @Test // DATAMONGO-973 + void getMappedFieldsReplacesTextScoreFieldProperlyCorrectlyWhenPresent() { Query query = new Query(); query.fields().include("textScore"); - DBObject dbo = mapper.getMappedFields(query.getFieldsObject(), + org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("score", new BasicDBObject("$meta", "textScore")).get())); + assertThat(document) + .isEqualTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore"))); } - /** - * @see DATAMONGO-973 - */ - @Test - public void getMappedSortAppendsTextScoreProperlyWhenSortedByScore() { + @Test // DATAMONGO-973 + void getMappedSortAppendsTextScoreProperlyWhenSortedByScore() { - Query query = new Query().with(new Sort("textScore")); + Query query = new Query().with(Sort.by("textScore")); - DBObject dbo = mapper.getMappedSort(query.getSortObject(), + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("score", new BasicDBObject("$meta", "textScore")).get())); + assertThat(document) + .isEqualTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore"))); } - /** - * @see DATAMONGO-973 - */ - @Test - public void getMappedSortIgnoresTextScoreWhenNotSortedByScore() { + @Test // DATAMONGO-973 + void getMappedSortIgnoresTextScoreWhenNotSortedByScore() { - Query query = new Query().with(new Sort("id")); + Query query = new Query().with(Sort.by("id")); - DBObject dbo = mapper.getMappedSort(query.getSortObject(), + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("_id", 1).get())); + 
assertThat(document).isEqualTo(new org.bson.Document().append("_id", 1)); } - /** - * @see DATAMONGO-1070 - */ - @Test - public void mapsIdReferenceToDBRefCorrectly() { + @Test // DATAMONGO-1070, DATAMONGO-1798 + void mapsIdReferenceToDBRefCorrectly() { ObjectId id = new ObjectId(); - DBObject query = new BasicDBObject("reference.id", new com.mongodb.DBRef("reference", id.toString())); - DBObject result = mapper.getMappedObject(query, context.getPersistentEntity(WithDBRef.class)); + org.bson.Document query = new org.bson.Document("reference.id", new com.mongodb.DBRef("reference", id)); + org.bson.Document result = mapper.getMappedObject(query, context.getPersistentEntity(WithDBRef.class)); - assertThat(result.containsField("reference"), is(true)); + assertThat(result).containsKey("reference"); com.mongodb.DBRef reference = getTypedValue(result, "reference", com.mongodb.DBRef.class); - assertThat(reference.getId(), is(instanceOf(ObjectId.class))); + assertThat(reference.getId()).isInstanceOf(ObjectId.class); } - /** - * @see DATAMONGO-1050 - */ - @Test - public void shouldUseExplicitlySetFieldnameForIdPropertyCandidates() { + @Test // DATAMONGO-1050 + void shouldUseExplicitlySetFieldnameForIdPropertyCandidates() { Query query = query(where("nested.id").is("bar")); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("nested.id", "bar").get())); + assertThat(document).isEqualTo(new org.bson.Document().append("nested.id", "bar")); } - /** - * @see DATAMONGO-1050 - */ - @Test - public void shouldUseExplicitlySetFieldnameForIdPropertyCandidatesUsedInSortClause() { + @Test // DATAMONGO-1050 + void shouldUseExplicitlySetFieldnameForIdPropertyCandidatesUsedInSortClause() { - Query query = new Query().with(new Sort("nested.id")); + Query query = new Query().with(Sort.by("nested.id")); - DBObject dbo = mapper.getMappedSort(query.getSortObject(), + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("nested.id", 1).get())); + assertThat(document).isEqualTo(new org.bson.Document().append("nested.id", 1)); } - /** - * @see DATAMONGO-1135 - */ - @Test - public void nearShouldUseGeoJsonRepresentationOnUnmappedProperty() { + @Test // DATAMONGO-1135 + void nearShouldUseGeoJsonRepresentationOnUnmappedProperty() { Query query = query(where("foo").near(new GeoJsonPoint(100, 50))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(dbo, isBsonObject().containing("foo.$near.$geometry.type", "Point")); - assertThat(dbo, isBsonObject().containing("foo.$near.$geometry.coordinates.[0]", 100D)); - assertThat(dbo, isBsonObject().containing("foo.$near.$geometry.coordinates.[1]", 50D)); + assertThat(document).containsEntry("foo.$near.$geometry.type", "Point"); + assertThat(document).containsEntry("foo.$near.$geometry.coordinates.[0]", 100D); + assertThat(document).containsEntry("foo.$near.$geometry.coordinates.[1]", 50D); } - /** - * @see DATAMONGO-1135 - */ - @Test - public void 
nearShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { + @Test // DATAMONGO-1135 + void nearShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { Query query = query(where("geoJsonPoint").near(new GeoJsonPoint(100, 50))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(dbo, isBsonObject().containing("geoJsonPoint.$near.$geometry.type", "Point")); + assertThat(document).containsEntry("geoJsonPoint.$near.$geometry.type", "Point"); } - /** - * @see DATAMONGO-1135 - */ - @Test - public void nearSphereShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { + @Test // DATAMONGO-1135 + void nearSphereShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { Query query = query(where("geoJsonPoint").nearSphere(new GeoJsonPoint(100, 50))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(dbo, isBsonObject().containing("geoJsonPoint.$nearSphere.$geometry.type", "Point")); + assertThat(document).containsEntry("geoJsonPoint.$nearSphere.$geometry.type", "Point"); } - /** - * @see DATAMONGO-1135 - */ - @Test - public void shouldMapNameCorrectlyForGeoJsonType() { + @Test // DATAMONGO-1135 + void shouldMapNameCorrectlyForGeoJsonType() { Query query = query(where("namedGeoJsonPoint").nearSphere(new GeoJsonPoint(100, 50))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(dbo, - isBsonObject().containing("geoJsonPointWithNameViaFieldAnnotation.$nearSphere.$geometry.type", "Point")); + assertThat(document).containsEntry("geoJsonPointWithNameViaFieldAnnotation.$nearSphere.$geometry.type", "Point"); } - /** - * @see DATAMONGO-1135 - */ - @Test - public void withinShouldUseGeoJsonPolygonWhenMappingPolygonOn2DSphereIndex() { + @Test // DATAMONGO-1135 + void withinShouldUseGeoJsonPolygonWhenMappingPolygonOn2DSphereIndex() { Query query = query(where("geoJsonPoint") .within(new GeoJsonPolygon(new Point(0, 0), new Point(100, 100), new Point(100, 0), new Point(0, 0)))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(dbo, isBsonObject().containing("geoJsonPoint.$geoWithin.$geometry.type", "Polygon")); + assertThat(document).containsEntry("geoJsonPoint.$geoWithin.$geometry.type", "Polygon"); } - /** - * @see DATAMONGO-1134 - */ - @Test - public void intersectsShouldUseGeoJsonRepresentationCorrectly() { + @Test // DATAMONGO-1134 + void intersectsShouldUseGeoJsonRepresentationCorrectly() { Query query = query(where("geoJsonPoint") .intersects(new GeoJsonPolygon(new Point(0, 0), new Point(100, 100), new Point(100, 0), new Point(0, 0)))); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + 
context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(dbo, isBsonObject().containing("geoJsonPoint.$geoIntersects.$geometry.type", "Polygon")); - assertThat(dbo, isBsonObject().containing("geoJsonPoint.$geoIntersects.$geometry.coordinates")); + assertThat(document).containsEntry("geoJsonPoint.$geoIntersects.$geometry.type", "Polygon"); + assertThat(document).containsKey("geoJsonPoint.$geoIntersects.$geometry.coordinates"); } - /** - * <<<<<<< HEAD - * - * @see DATAMONGO-1269 - */ - @Test - public void mappingShouldRetainNumericMapKey() { + @Test // DATAMONGO-1269 + void mappingShouldRetainNumericMapKey() { Query query = query(where("map.1.stringProperty").is("ba'alzamon")); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(EntityWithComplexValueTypeMap.class)); - assertThat(dbo.containsField("map.1.stringProperty"), is(true)); + assertThat(document).containsKey("map.1.stringProperty"); } - /** - * @see DATAMONGO-1269 - */ - @Test - public void mappingShouldRetainNumericPositionInList() { + @Test // GH-3688 + void mappingShouldRetainNestedNumericMapKeys() { + + Query query = query(where("outerMap.1.map.2.stringProperty").is("ba'alzamon")); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map.2.stringProperty"); + } + + @Test // GH-3688 + void mappingShouldAllowSettingEntireNestedNumericKeyedMapValue() { + + Query query = query(where("outerMap.1.map").is(null)); // newEntityWithComplexValueTypeMap() + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map"); + } + + @Test // DATAMONGO-1269 + void mappingShouldRetainNumericPositionInList() { Query query = query(where("list.1.stringProperty").is("ba'alzamon")); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(EntityWithComplexValueTypeList.class)); - assertThat(dbo.containsField("list.1.stringProperty"), is(true)); + assertThat(document).containsKey("list.1.stringProperty"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectly() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectly() { Foo probe = new Foo(); probe.embedded = new EmbeddedClass(); @@ -833,140 +941,1061 @@ public void exampleShouldBeMappedCorrectly() { Query query = query(byExample(probe)); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); - assertThat(dbo, isBsonObject().containing("embedded\\._id", "conflux")); + assertThat(document).containsEntry("embedded\\._id", "conflux"); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { + @Test // DATAMONGO-1245 + void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { ClassWithGeoTypes probe = new ClassWithGeoTypes(); probe.legacyPoint = new Point(10D, 20D); Query query = query(byExample(probe)); - DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRef.class)); + 
org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDBRef.class)); - assertThat(dbo.get("legacyPoint.x"), Is.<Object> is(10D)); - assertThat(dbo.get("legacyPoint.y"), Is.<Object> is(20D)); + assertThat(document).containsEntry("legacyPoint.x", 10D); + assertThat(document).containsEntry("legacyPoint.y", 20D); } - @Document - public class Foo { - @Id private ObjectId id; - EmbeddedClass embedded; + @Test // GH-3544 + void exampleWithCombinedCriteriaShouldBeMappedCorrectly() { + + Foo probe = new Foo(); + probe.embedded = new EmbeddedClass(); + probe.embedded.id = "conflux"; + + Query query = query(byExample(probe).and("listOfItems").exists(true)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); + + assertThat(document).containsEntry("embedded\\._id", "conflux").containsEntry("my_items", + new org.bson.Document("$exists", true)); + } + + @Test // DATAMONGO-1988 + void mapsStringObjectIdRepresentationToObjectIdWhenReferencingIdProperty() { + + Query query = query(where("sample.foo").is(new ObjectId().toHexString())); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithEmbedded.class)); + + assertThat(document.get("sample._id")).isInstanceOf(ObjectId.class); } - public class EmbeddedClass { - public String id; + @Test // DATAMONGO-1988 + void matchesExactFieldNameToIdProperty() { + + Query query = query(where("sample.iid").is(new ObjectId().toHexString())); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithEmbedded.class)); + + assertThat(document.get("sample.iid")).isInstanceOf(String.class); } - class IdWrapper { - Object id; + @Test // DATAMONGO-1988 + void leavesNonObjectIdStringIdRepresentationUntouchedWhenReferencingIdProperty() { + + Query query = query(where("sample.foo").is("id-1")); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithEmbedded.class)); + + assertThat(document.get("sample._id")).isInstanceOf(String.class); } - class ClassWithEmbedded { - @Id String id; - Sample sample; + @Test // DATAMONGO-2168 + void getMappedObjectShouldNotMapTypeHint() { + + converter.setTypeMapper(new DefaultMongoTypeMapper("className")); + + org.bson.Document update = new org.bson.Document("className", "foo"); + org.bson.Document mappedObject = mapper.getMappedObject(update, context.getPersistentEntity(UserEntity.class)); + + assertThat(mappedObject).containsEntry("className", "foo"); } - class ClassWithDefaultId { + @Test // DATAMONGO-2168 + void getMappedObjectShouldIgnorePathsLeadingToJavaLangClassProperties/* like Class#getName() */() { - String id; - ClassWithDefaultId nested; + org.bson.Document update = new org.bson.Document("className", "foo"); + org.bson.Document mappedObject = mapper.getMappedObject(update, context.getPersistentEntity(UserEntity.class)); + + assertThat(mappedObject).containsEntry("className", "foo"); } - class Sample { + @Test // DATAMONGO-2193 + void shouldNotConvertHexStringToObjectIdForRenamedNestedIdField() { - @Id private String foo; + String idHex = new ObjectId().toHexString(); + Query query = new Query(where("nested.id").is(idHex)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); + + assertThat(document).isEqualTo(new org.bson.Document("nested.id", 
idHex)); } - class BigIntegerId { + @Test // DATAMONGO-2221 + void shouldNotConvertHexStringToObjectIdForRenamedDeeplyNestedIdField() { - @Id private BigInteger id; + String idHex = new ObjectId().toHexString(); + Query query = new Query(where("nested.deeplyNested.id").is(idHex)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); + + assertThat(document).isEqualTo(new org.bson.Document("nested.deeplyNested.id", idHex)); } - enum Enum { - INSTANCE; + @Test // DATAMONGO-2221 + void shouldNotConvertHexStringToObjectIdForUnresolvablePath() { + + String idHex = new ObjectId().toHexString(); + Query query = new Query(where("nested.unresolvablePath.id").is(idHex)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); + + assertThat(document).isEqualTo(new org.bson.Document("nested.unresolvablePath.id", idHex)); } - class UserEntity { - String id; - List publishers = new ArrayList(); + @Test // DATAMONGO-1849 + void shouldConvertPropertyWithExplicitTargetType() { + + String script = "if (a > b) a else b"; + Query query = new Query(where("script").is(script)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document).isEqualTo(new org.bson.Document("script", new Code(script))); } - class CustomizedField { + @Test // DATAMONGO-1849 + void shouldConvertCollectionPropertyWithExplicitTargetType() { - @Field("foo") CustomizedField field; + String script = "if (a > b) a else b"; + Query query = new Query(where("scripts").is(script)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document).isEqualTo(new org.bson.Document("scripts", new Code(script))); } - class WithDBRef { + @Test // GH-4649 + void shouldRetainRegexPattern() { - String someString; - @DBRef Reference reference; + Query query = new Query(where("text").regex("foo")); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document.get("text")).isInstanceOf(Pattern.class); + + query = new Query(where("text").regex(new BsonRegularExpression("foo"))); + document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + assertThat(document.get("text")).isInstanceOf(BsonRegularExpression.class); } - class WithDBRefList { + @Test // GH-4674 + void shouldRetainRegexPatternForIdProperty() { - String someString; - @DBRef List referenceList; + org.bson.Document javaRegex = mapper.getMappedObject(query(where("id").regex("^1234$")).getQueryObject(), + context.getPersistentEntity(WithStringId.class)); + + assertThat(javaRegex.get("_id")).isInstanceOf(Pattern.class); + + org.bson.Document bsonRegex = mapper.getMappedObject( + query(where("id").regex(new BsonRegularExpression("^1234$"))).getQueryObject(), + context.getPersistentEntity(WithStringId.class)); + + assertThat(bsonRegex.get("_id")).isInstanceOf(BsonRegularExpression.class); } - class Reference { + @Test // DATAMONGO-2339 + void findByIdUsesMappedIdFieldNameWithUnderscoreCorrectly() { - Long id; + org.bson.Document target = mapper.getMappedObject(new org.bson.Document("with_underscore", "id-1"), + 
context.getPersistentEntity(WithIdPropertyContainingUnderscore.class)); + + assertThat(target).isEqualTo(new org.bson.Document("_id", "id-1")); } - class WithDBRefWrapper { + @Test // DATAMONGO-2394 + void leavesDistanceUntouchedWhenUsingGeoJson() { - WithDBRef withDbRef; + Query query = query(where("geoJsonPoint").near(new GeoJsonPoint(27.987901, 86.9165379)).maxDistance(1000)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); + assertThat(document).containsEntry("geoJsonPoint.$near.$geometry.type", "Point"); + assertThat(document).containsEntry("geoJsonPoint.$near.$maxDistance", 1000.0D); } - class WithMapDBRef { + @Test // DATAMONGO-2440 + void convertsInWithNonIdFieldAndObjectIdTypeHintCorrectly() { - @DBRef Map mapWithDBRef; + String id = new ObjectId().toHexString(); + NonIdFieldWithObjectIdTargetType source = new NonIdFieldWithObjectIdTargetType(); + + source.stringAsOid = id; + + org.bson.Document target = mapper.getMappedObject(query(where("stringAsOid").in(id)).getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + assertThat(target).isEqualTo(org.bson.Document.parse("{\"stringAsOid\": {\"$in\": [{\"$oid\": \"" + id + "\"}]}}")); } - class WithTextScoreProperty { + @Test // DATAMONGO-2440 + void convertsInWithIdFieldAndObjectIdTypeHintCorrectly() { - @Id String id; - @TextScore @Field("score") Float textScore; + String id = new ObjectId().toHexString(); + NonIdFieldWithObjectIdTargetType source = new NonIdFieldWithObjectIdTargetType(); + + source.id = id; + + org.bson.Document target = mapper.getMappedObject(query(where("id").in(id)).getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + assertThat(target).isEqualTo(org.bson.Document.parse("{\"_id\": {\"$in\": [{\"$oid\": \"" + id + "\"}]}}")); } - static class RootForClassWithExplicitlyRenamedIdField { + @Test // DATAMONGO-2488 + void mapsNestedArrayPathCorrectlyForNonMatchingPath() { - @Id String id; - ClassWithExplicitlyRenamedField nested; + org.bson.Document target = mapper.getMappedObject( + query(where("array.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(Foo.class)); + + assertThat(target).isEqualTo(new org.bson.Document("array.$[some_item].nested.$[other_item]", "value")); } - static class ClassWithExplicitlyRenamedField { + @Test // DATAMONGO-2488 + void mapsNestedArrayPathCorrectlyForObjectTargetArray() { - @Field("id") String id; + org.bson.Document target = mapper.getMappedObject( + query(where("arrayObj.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(WithNestedArray.class)); + + assertThat(target).isEqualTo(new org.bson.Document("arrayObj.$[some_item].nested.$[other_item]", "value")); } - static class ClassWithGeoTypes { + @Test // DATAMONGO-2488 + void mapsNestedArrayPathCorrectlyForStringTargetArray() { - double[] justAnArray; - Point legacyPoint; - GeoJsonPoint geoJsonPoint; - @Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint; + org.bson.Document target = mapper.getMappedObject( + query(where("arrayString.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(WithNestedArray.class)); + + assertThat(target).isEqualTo(new org.bson.Document("arrayString.$[some_item].nested.$[other_item]", "value")); } - static class SimpeEntityWithoutId { + @Test // DATAMONGO-2488 + void 
mapsCustomFieldNamesForNestedArrayPathCorrectly() { - String stringProperty; - Integer integerProperty; + org.bson.Document target = mapper.getMappedObject( + query(where("arrayCustomName.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(WithNestedArray.class)); + + assertThat(target).isEqualTo(new org.bson.Document("arrayCustomName.$[some_item].nes-ted.$[other_item]", "value")); } - static class EntityWithComplexValueTypeMap { - Map map; + @Test // DATAMONGO-2502 + void shouldAllowDeeplyNestedPlaceholders() { + + org.bson.Document target = mapper.getMappedObject( + query(where("level0.$[some_item].arrayObj.$[other_item].nested").is("value")).getQueryObject(), + context.getPersistentEntity(WithDeepArrayNesting.class)); + + assertThat(target).isEqualTo(new org.bson.Document("level0.$[some_item].arrayObj.$[other_item].nested", "value")); } - static class EntityWithComplexValueTypeList { - List list; + @Test // DATAMONGO-2502 + void shouldAllowDeeplyNestedPlaceholdersWithCustomName() { + + org.bson.Document target = mapper.getMappedObject( + query(where("level0.$[some_item].arrayCustomName.$[other_item].nested").is("value")).getQueryObject(), + context.getPersistentEntity(WithDeepArrayNesting.class)); + + assertThat(target) + .isEqualTo(new org.bson.Document("level0.$[some_item].arrayCustomName.$[other_item].nes-ted", "value")); + } + + @Test // DATAMONGO-2517 + void shouldParseNestedKeywordWithArgumentMatchingTheSourceEntitiesConstructorCorrectly() { + + TextQuery source = new TextQuery("test"); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithSingleStringArgConstructor.class)); + assertThat(target).isEqualTo(org.bson.Document.parse("{\"$text\" : { \"$search\" : \"test\" }}")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnUnwrappedObjectCorrectly() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "test"; + + Query source = query(Criteria.where("unwrappedValue").is(unwrappableType)); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnUnwrappedCorrectly() { + + Query source = query(Criteria.where("unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnPrefixedUnwrappedCorrectly() { + + Query source = query(Criteria.where("unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("prefix-stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedUnwrappedObjectCorrectly() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "test"; + Query source = query(Criteria.where("withUnwrapped.unwrappedValue").is(unwrappableType)); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("withUnwrapped", 
new org.bson.Document("stringValue", "test"))); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedPrefixedUnwrappedObjectCorrectly() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "test"; + Query source = query(Criteria.where("withPrefixedUnwrapped.unwrappedValue").is(unwrappableType)); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target) + .isEqualTo(new org.bson.Document("withPrefixedUnwrapped", new org.bson.Document("prefix-stringValue", "test"))); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedUnwrappedCorrectly() { + + Query source = query(Criteria.where("withUnwrapped.unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("withUnwrapped.stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedPrefixedUnwrappedCorrectly() { + + Query source = query(Criteria.where("withPrefixedUnwrapped.unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("withPrefixedUnwrapped.prefix-stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void sortByUnwrappedIsEmpty() { + + Query query = new Query().with(Sort.by("unwrappedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo( + new org.bson.Document("stringValue", 1).append("listValue", 1).append("with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByUnwrappedValue() { + + // atFieldAnnotatedValue + Query query = new Query().with(Sort.by("unwrappedValue.stringValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringValue", 1)); + } + + @Test // DATAMONGO-1902 + void sortByUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByPrefixedUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("prefix-with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByNestedUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("withUnwrapped.unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("withUnwrapped.with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void 
sortByNestedPrefixedUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("withPrefixedUnwrapped.unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("withPrefixedUnwrapped.prefix-with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void projectOnUnwrappedUsesFields() { + + Query query = new Query(); + query.fields().include("unwrappedValue"); + + org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo( + new org.bson.Document("stringValue", 1).append("listValue", 1).append("with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void projectOnUnwrappedValue() { + + Query query = new Query(); + query.fields().include("unwrappedValue.stringValue"); + + org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringValue", 1)); + } + + @Test // GH-3601 + void resolvesFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithPropertyUsingUnderscoreInName.class)); + + assertThat(document) + .isEqualTo(new org.bson.Document("fieldname_with_underscores", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesMappedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("renamed_fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithPropertyUsingUnderscoreInName.class)); + + assertThat(document).isEqualTo(new org.bson.Document("renamed", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesSimpleNestedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("simple.fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document) + .isEqualTo(new org.bson.Document("simple.fieldname_with_underscores", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesSimpleNestedMappedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("simple.renamed_fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document).isEqualTo(new org.bson.Document("simple.renamed", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesFieldNameWithUnderscoreOnNestedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("double_underscore.fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document).isEqualTo( + new org.bson.Document("double_underscore.fieldname_with_underscores", new org.bson.Document("$exists", true))); + } + + 
@Test // GH-3601 + void resolvesFieldNameWithUnderscoreOnNestedMappedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("double_underscore.renamed_fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document) + .isEqualTo(new org.bson.Document("double_underscore.renamed", new org.bson.Document("$exists", true))); + } + + @Test // GH-3633 + void mapsNullValueForFieldWithCustomTargetType() { + + Query query = query(where("stringAsOid").isNull()); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", null)); + } + + @Test // GH-3633 + void mapsNullBsonTypeForFieldWithCustomTargetType() { + + Query query = query(where("stringAsOid").isNullValue()); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", new org.bson.Document("$type", 10))); + } + + @Test // GH-3635 + void $floorKeywordDoesNotMatch$or$norPattern() { + + Query query = new BasicQuery(" { $expr: { $gt: [ \"$spent\" , { $floor : \"$budget\" } ] } }"); + assertThatNoException() + .isThrownBy(() -> mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class))); + } + + @Test // GH-3659 + void allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() { + + Query query = query(where("address.street").is("1007 Mountain Drive")); + + MongoCustomConversions mongoCustomConversions = new MongoCustomConversions( + Collections.singletonList(new MyAddressToDocumentConverter())); + + this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(mongoCustomConversions.getSimpleTypeHolder()); + this.context.afterPropertiesSet(); + + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(mongoCustomConversions); + this.converter.afterPropertiesSet(); + + this.mapper = new QueryMapper(converter); + + assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class))) + .isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive")); + } + + @Test // GH-3790 + void shouldAcceptExprAsCriteriaDefinition() { + + EvaluationOperators.Expr expr = EvaluationOperators + .valueOf(ConditionalOperators.ifNull("customizedField").then(true)).expr(); + + Query query = query( + expr.toCriteriaDefinition(new TypeBasedAggregationOperationContext(EmbeddedClass.class, context, mapper))); + + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getRequiredPersistentEntity(EmbeddedClass.class)); + + assertThat(mappedQuery).isEqualTo("{ $expr : { $ifNull : [\"$fancy_custom_name\", true] } }"); + } + + @Test // GH-3668 + void mapStringIdFieldProjection() { + + org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), + context.getPersistentEntity(WithStringId.class)); + assertThat(mappedFields).containsEntry("_id", 1); + } + + @Test // GH-3783 + void retainsId$InWithStringArray() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + 
context.getPersistentEntity(WithExplicitStringId.class)); + assertThat(mappedQuery.get("_id")).isEqualTo(org.bson.Document.parse("{ $in: [\"5b8bedceb1e0bfc07b008828\"]}")); + } + + @Test // GH-3783 + void mapsId$InInToObjectIds() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(ClassWithDefaultId.class)); + assertThat(mappedQuery.get("_id")) + .isEqualTo(org.bson.Document.parse("{ $in: [ {$oid: \"5b8bedceb1e0bfc07b008828\" } ]}")); + } + + @Test // GH-3596 + void considersValueConverterWhenPresent() { + + org.bson.Document mappedObject = mapper.getMappedObject(new org.bson.Document("text", "value"), + context.getPersistentEntity(WithPropertyValueConverter.class)); + assertThat(mappedObject).isEqualTo(new org.bson.Document("text", "eulav")); + } + + @Test // GH-2750 + void mapsAggregationExpression() { + + Query query = query(whereExpr(ComparisonOperators.valueOf("field").greaterThan("budget"))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$foo', '$budget'] } }"); + } + + @Test // GH-2750 + void unwrapsAggregationExpressionExprObjectWrappedInExpressionCriteria() { + + Query query = query(whereExpr(Expr.valueOf(ComparisonOperators.valueOf("field").greaterThan("budget")))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$foo', '$budget'] } }"); + } + + @Test // GH-2750 + void mapsMongoExpressionToFieldsIfItsAnAggregationExpression() { + + Query query = query(expr(ComparisonOperators.valueOf("field").greaterThan("budget"))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$foo', '$budget'] } }"); + } + + @Test // GH-2750 + void usageOfMongoExpressionOnCriteriaDoesNotUnwrapAnExprAggregationExpression() { + + Query query = query(expr(Expr.valueOf(ComparisonOperators.valueOf("field").greaterThan("budget")))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $expr : { $gt : [ '$foo', '$budget'] } } }"); + } + + @Test // GH-4687 + void usageOfUntypedAggregationShouldRenderOperationsAsIs() { + + Query query = query(expr(Expr.valueOf(ComparisonOperators.valueOf("field").greaterThan("budget")))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(Object.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $expr : { $gt : [ '$field', '$budget'] } } }"); + } + + @Test // GH-2750 + void usesMongoExpressionDocumentAsIsIfItIsNotAnAggregationExpression() { + + Query query = query(expr(() -> org.bson.Document.parse("{ $gt : [ '$field', '$budget'] }"))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$field', '$budget'] } }"); + } + + @Test // GH-4080 + void convertsListOfValuesForPropertyThatHasValueConverterButIsNotCollectionLikeOneByOne() { + + org.bson.Document mappedObject = 
mapper.getMappedObject(query(where("text").in("spring", "data")).getQueryObject(), + context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedObject).isEqualTo("{ 'text' : { $in : ['gnirps', 'atad'] } }"); + } + + @Test // GH-4464 + void usesKeyNameWithDotsIfFieldNameTypeIsKey() { + + org.bson.Document mappedObject = mapper.getMappedObject(query(where("value").is("A")).getQueryObject(), + context.getPersistentEntity(WithPropertyHavingDotsInFieldName.class)); + assertThat(mappedObject).isEqualTo("{ 'field.name.with.dots' : 'A' }"); + } + + @Test // GH-4577 + void mappingShouldRetainMapKeyOrder() { + + TreeMap sourceMap = new TreeMap<>(Map.of("test1", "123", "test2", "456")); + + org.bson.Document target = mapper.getMappedObject(query(where("simpleMap").is(sourceMap)).getQueryObject(), + context.getPersistentEntity(WithSimpleMap.class)); + assertThat(target.get("simpleMap", Map.class)).containsExactlyEntriesOf(sourceMap); + } + + @Test // GH-4510 + void convertsNestedOperatorValueForPropertyThatHasValueConverter() { + + org.bson.Document mappedObject = mapper.getMappedObject( + query(where("text").gt("spring").lt("data")).getQueryObject(), + context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedObject).isEqualTo("{ 'text' : { $gt : 'gnirps', $lt : 'atad' } }"); + } + + @Test // GH-4510 + void convertsNestedOperatorValueForPropertyContainingListThatHasValueConverter() { + + org.bson.Document mappedObject = mapper.getMappedObject( + query(where("text").gt("spring").in("data")).getQueryObject(), + context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedObject).isEqualTo("{ 'text' : { $gt : 'gnirps', $in : [ 'atad' ] } }"); + } + + @Test // GH-4736 + void allOperatorShouldConvertIdCollection() { + + ObjectId oid = ObjectId.get(); + Criteria criteria = new Criteria().andOperator(where("name").isNull().and("id").all(List.of(oid.toString()))); + + org.bson.Document mappedObject = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(Customer.class)); + + assertThat(mappedObject).containsEntry("$and.[0]._id.$all", List.of(oid)); + } + + class WithSimpleMap { + Map simpleMap; + } + + class WithDeepArrayNesting { + + List level0; + } + + class WithNestedArray { + + List arrayObj; + List arrayString; + List arrayCustomName; + } + + class NestedArrayOfObj { + List nested; + } + + class NestedArrayOfObjCustomFieldName { + + @Field("nes-ted") List nested; + } + + class NestedArrayOfString { + List nested; + } + + class ArrayObj { + String foo; + } + + @Document + class Foo { + @Id private ObjectId id; + EmbeddedClass embedded; + + @Field("my_items") List listOfItems; + } + + class EmbeddedClass { + String id; + + @Field("fancy_custom_name") String customizedField; + } + + class IdWrapper { + Object id; + } + + class ClassWithEmbedded { + @Id String id; + Sample sample; + } + + class ClassWithDefaultId { + + String id; + ClassWithDefaultId nested; + } + + class Sample { + + @Id private String foo; + } + + class WithStringId { + + @MongoId String id; + String name; + } + + class WithExplicitStringId { + + @MongoId(FieldType.STRING) String id; + String name; + } + + class BigIntegerId { + + @Id private BigInteger id; + } + + enum Enum { + INSTANCE; + } + + class UserEntity { + String id; + List publishers = new ArrayList<>(); + } + + class CustomizedField { + + @Field("foo") CustomizedField field; + } + + class WithDBRef { + + String someString; + @DBRef Reference reference; + } + + class 
WithDBRefList { + + String someString; + @DBRef List referenceList; + } + + class Reference { + + Long id; + } + + class WithDBRefWrapper { + + WithDBRef withDbRef; + } + + class WithMapDBRef { + + @DBRef Map mapWithDBRef; + } + + static class WithDocumentReference { + + private ObjectId id; + + private String name; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private Customer customer; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private List customers; + + @DocumentReference private Sample sample; + + @DocumentReference private List samples; + } + + class WithTextScoreProperty { + + @Id String id; + @TextScore + @Field("score") Float textScore; + } + + static class RootForClassWithExplicitlyRenamedIdField { + + @Id String id; + ClassWithExplicitlyRenamedField nested; + } + + static class ClassWithExplicitlyRenamedField { + + @Field("id") String id; + DeeplyNestedClassWithExplicitlyRenamedField deeplyNested; + } + + static class DeeplyNestedClassWithExplicitlyRenamedField { + @Field("id") String id; + } + + static class ClassWithGeoTypes { + + double[] justAnArray; + Point legacyPoint; + GeoJsonPoint geoJsonPoint; + @Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint; + } + + static class SimpleEntityWithoutId { + + String stringProperty; + Integer integerProperty; + } + + static class EntityWithComplexValueTypeMap { + Map map; + } + + static class EntityWithIntKeyedMapOfMap { + Map outerMap; + } + + static class EntityWithComplexValueTypeList { + List list; + } + + static class WithExplicitTargetTypes { + + @Field(targetType = FieldType.SCRIPT) // + String script; + + @Field(targetType = FieldType.STRING) // + String text; + + @Field(targetType = FieldType.SCRIPT) // + List scripts; + } + + static class WithIdPropertyContainingUnderscore { + @Id String with_underscore; + } + + static class NonIdFieldWithObjectIdTargetType { + + String id; + @Field(targetType = FieldType.OBJECT_ID) String stringAsOid; + } + + @Document + static class WithSingleStringArgConstructor { + + String value; + + public WithSingleStringArgConstructor() {} + + public WithSingleStringArgConstructor(String value) { + this.value = value; + } + } + + static class WrapperAroundWithUnwrapped { + + String someValue; + WithUnwrapped withUnwrapped; + WithPrefixedUnwrapped withPrefixedUnwrapped; + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + } + + static class WithPrefixedUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") UnwrappableType unwrappedValue; + } + + static class UnwrappableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Transient // + String transientValue; + } + + static class WrapperAroundWithPropertyUsingUnderscoreInName { + + WithPropertyUsingUnderscoreInName simple; + WithPropertyUsingUnderscoreInName double_underscore; + } + + static class WithPropertyUsingUnderscoreInName { + + String fieldname_with_underscores; + + @Field("renamed") String renamed_fieldname_with_underscores; + } + + @Document + static class Customer { + + @Id private ObjectId id; + private String name; + private MyAddress address; + } + + static class MyAddress { + private String street; + } + + static class WithPropertyValueConverter { + + @ValueConverter(ReversingValueConverter.class) String text; + } + + @WritingConverter + public static class MyAddressToDocumentConverter implements Converter<MyAddress, org.bson.Document> { + + @Override + public 
org.bson.Document convert(MyAddress address) { + org.bson.Document doc = new org.bson.Document(); + doc.put("street", address.street); + return doc; + } + } + + static class WithPropertyHavingDotsInFieldName { + + @Field(name = "field.name.with.dots", nameType = Type.KEY) String value; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegateUnitTests.java new file mode 100644 index 0000000000..384cffaad4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegateUnitTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.Collections; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.SpELContext; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.expression.EvaluationContext; + +/** + * Unit tests for {@link ReferenceLookupDelegate}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class ReferenceLookupDelegateUnitTests { + + @Mock MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext; + @Mock SpELContext spELContext; + @Mock EvaluationContext evaluationContext; + @Mock MongoEntityReader entityReader; + + private ReferenceLookupDelegate lookupDelegate; + + @BeforeEach + void beforeEach() { + lookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext); + } + + @Test // GH-3842 + void shouldComputePlainStringTargetCollection() { + + DocumentReference documentReference = mock(DocumentReference.class); + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + + doReturn(entity).when(mappingContext).getRequiredPersistentEntity((Class) any()); + + when(property.isDocumentReference()).thenReturn(true); + when(property.getDocumentReference()).thenReturn(documentReference); + when(documentReference.collection()).thenReturn("collection1"); + + lookupDelegate.readReference(property, Collections.singletonList("one"), (referenceQuery, referenceCollection) -> { + + assertThat(referenceCollection.getCollection()).isEqualTo("collection1"); + return Collections.emptyList(); + }, entityReader); + } + + @Test // GH-4612 + void shouldResolveEmptyListOnEmptyTargetCollection() { + + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + ReferenceLookupDelegate.LookupFunction lookupFunction = mock(ReferenceLookupDelegate.LookupFunction.class); + + when(property.isCollectionLike()).thenReturn(true); + lookupDelegate.readReference(property, Collections.emptyList(), lookupFunction, entityReader); + verify(lookupFunction, never()).apply(any(), any()); + } + + @Test // GH-4612 + void shouldResolveEmptyMapOnEmptyTargetCollection() { + + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + ReferenceLookupDelegate.LookupFunction lookupFunction = mock(ReferenceLookupDelegate.LookupFunction.class); + + when(property.isMap()).thenReturn(true); + lookupDelegate.readReference(property, Collections.emptyMap(), lookupFunction, entityReader); + verify(lookupFunction, never()).apply(any(), any()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReflectiveDBRefResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReflectiveDBRefResolverUnitTests.java deleted file mode 100644 index 01b105540f..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReflectiveDBRefResolverUnitTests.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.convert; - -import static org.hamcrest.core.Is.*; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.mockito.Matchers.*; -import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; - -import com.mongodb.BasicDBObject; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBRef; - -/** - * Unit tests for {@link ReflectiveDBRefResolver}. - * - * @author Christoph Strobl - */ -@RunWith(MockitoJUnitRunner.class) -public class ReflectiveDBRefResolverUnitTests { - - @Mock MongoDbFactory dbFactoryMock; - @Mock DBRef dbRefMock; - @Mock DB dbMock; - @Mock DBCollection collectionMock; - - @Before - public void setUp() { - - when(dbRefMock.getCollectionName()).thenReturn("collection-1"); - when(dbRefMock.getId()).thenReturn("id-1"); - when(dbFactoryMock.getDb()).thenReturn(dbMock); - when(dbMock.getCollection(eq("collection-1"))).thenReturn(collectionMock); - when(collectionMock.findOne(eq("id-1"))).thenReturn(new BasicDBObject("_id", "id-1")); - } - - /** - * @see DATAMONGO-1193 - */ - @Test - public void fetchShouldNotLookUpDbWhenUsingDriverVersion2() { - - assumeThat(isMongo3Driver(), is(false)); - - ReflectiveDBRefResolver.fetch(dbFactoryMock, dbRefMock); - - verify(dbFactoryMock, never()).getDb(); - verify(dbFactoryMock, never()).getDb(anyString()); - } - - /** - * @see DATAMONGO-1193 - */ - @Test - public void fetchShouldUseDbToResolveDbRefWhenUsingDriverVersion3() { - - assumeThat(isMongo3Driver(), is(true)); - - assertThat(ReflectiveDBRefResolver.fetch(dbFactoryMock, dbRefMock), notNullValue()); - verify(dbFactoryMock, times(1)).getDb(); - } - - /** - * @see DATAMONGO-1193 - */ - @Test(expected = IllegalArgumentException.class) - public void fetchShouldThrowExceptionWhenDbFactoryIsNullUsingDriverVersion3() { - - assumeThat(isMongo3Driver(), is(true)); - - ReflectiveDBRefResolver.fetch(null, dbRefMock); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReversingValueConverter.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReversingValueConverter.java new file mode 100644 index 0000000000..eb3b1aba1a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReversingValueConverter.java @@ -0,0 +1,45 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +class ReversingValueConverter implements MongoValueConverter<String, String> { + + @Nullable + @Override + public String read(@Nullable String value, MongoConversionContext context) { + return reverse(value); + } + + @Nullable + @Override + public String write(@Nullable String value, MongoConversionContext context) { + return reverse(value); + } + + private String reverse(String source) { + + if (source == null) { + return null; + } + + return new StringBuilder(source).reverse().toString(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java index c7971154df..873a49232c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.convert.MongoConverters.TermToStringConverter; import org.springframework.data.mongodb.core.query.Term; import org.springframework.data.mongodb.core.query.Term.Type; @@ -29,18 +27,7 @@ */ public class TermToStringConverterUnitTests { - /** - * @DATAMONGO-973 - */ - @Test - public void shouldNotConvertNull() { - assertThat(TermToStringConverter.INSTANCE.convert(null), nullValue()); - } - - /** - * @DATAMONGO-973 - */ - @Test + @Test // DATAMONGO-973 public void shouldUseFormattedRepresentationForConversion() { Term term = spy(new Term("foo", Type.WORD)); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index feca9d76b9..d8e36c8f67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2015 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,9 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.hamcrest.collection.IsMapContaining.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.time.LocalDate; import java.util.Arrays; @@ -29,754 +26,760 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import org.hamcrest.Matcher; -import org.hamcrest.collection.IsIterableContainingInOrder; -import org.hamcrest.core.Is; -import org.hamcrest.core.IsEqual; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.ValueConverter; import org.springframework.data.convert.WritingConverter; -import org.springframework.data.mapping.model.MappingException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.DBObjectTestUtils; -import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.Allocation; -import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.AllocationToStringConverter; -import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.StringToAllocationConverter; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.query.Update.Position; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; import com.mongodb.DBRef; /** * Unit tests for {@link UpdateMapper}. 
- * + * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch + * @author Pavel Vodrazka + * @author David Julia + * @author Divya Srivastava */ -@RunWith(MockitoJUnitRunner.class) -public class UpdateMapperUnitTests { +@ExtendWith(MockitoExtension.class) +class UpdateMapperUnitTests { - @Mock MongoDbFactory factory; - MappingMongoConverter converter; - MongoMappingContext context; - UpdateMapper mapper; + private MappingMongoConverter converter; + private MongoMappingContext context; + private UpdateMapper mapper; - private Converter<NestedEntity, DBObject> writingConverterSpy; + private Converter<NestedEntity, Document> writingConverterSpy; - @Before - public void setUp() { + @BeforeEach + @SuppressWarnings("unchecked") + void setUp() { this.writingConverterSpy = Mockito.spy(new NestedEntityWriteConverter()); - CustomConversions conversions = new CustomConversions(Arrays.asList(writingConverterSpy)); + CustomConversions conversions = new MongoCustomConversions(Collections.singletonList(writingConverterSpy)); this.context = new MongoMappingContext(); this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); this.context.initialize(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); this.mapper = new UpdateMapper(converter); } - /** - * @see DATAMONGO-721 - */ - @Test - public void updateMapperRetainsTypeInformationForCollectionField() { + @Test // DATAMONGO-721 + void updateMapperRetainsTypeInformationForCollectionField() { Update update = new Update().push("list", new ConcreteChildClass("2", "BAR")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject list = getAsDBObject(push, "aliased"); + Document push = getAsDocument(mappedObject, "$push"); + Document list = getAsDocument(push, "aliased"); - assertThat(list.get("_class"), is((Object) ConcreteChildClass.class.getName())); + assertTypeHint(list, ConcreteChildClass.class); } - /** - * @see DATAMONGO-807 - */ - @Test - public void updateMapperShouldRetainTypeInformationForNestedEntities() { + @Test // DATAMONGO-807 + void updateMapperShouldRetainTypeInformationForNestedEntities() { Update update = Update.update("model", new ModelImpl(1)); UpdateMapper mapper = new UpdateMapper(converter); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ModelWrapper.class)); - DBObject set = getAsDBObject(mappedObject, "$set"); - DBObject modelDbObject = (DBObject) set.get("model"); - assertThat(modelDbObject.get("_class"), not(nullValue())); + Document set = getAsDocument(mappedObject, "$set"); + Document modelDocument = (Document) set.get("model"); + assertTypeHint(modelDocument, ModelImpl.class); } - /** - * @see DATAMONGO-807 - */ - @Test - public void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() { + @Test // DATAMONGO-807 + void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() { Update update = Update.update("model.value", 1); UpdateMapper mapper = new UpdateMapper(converter); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document
mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ModelWrapper.class)); - DBObject set = getAsDBObject(mappedObject, "$set"); - assertThat(set.get("_class"), nullValue()); + Document set = getAsDocument(mappedObject, "$set"); + assertThat(set.get("_class")).isNull(); } - /** - * @see DATAMONGO-807 - */ - @Test - public void updateMapperShouldNotPersistTypeInformationForNullValues() { + @Test // DATAMONGO-807 + void updateMapperShouldNotPersistTypeInformationForNullValues() { Update update = Update.update("model", null); UpdateMapper mapper = new UpdateMapper(converter); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ModelWrapper.class)); - DBObject set = getAsDBObject(mappedObject, "$set"); - assertThat(set.get("_class"), nullValue()); + Document set = getAsDocument(mappedObject, "$set"); + assertThat(set.get("_class")).isNull(); } - /** - * @see DATAMONGO-407 - */ - @Test - public void updateMapperShouldRetainTypeInformationForNestedCollectionElements() { + @Test // DATAMONGO-407 + void updateMapperShouldRetainTypeInformationForNestedCollectionElements() { Update update = Update.update("list.$", new ConcreteChildClass("42", "bubu")); UpdateMapper mapper = new UpdateMapper(converter); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject set = getAsDBObject(mappedObject, "$set"); - DBObject modelDbObject = getAsDBObject(set, "aliased.$"); - assertThat(modelDbObject.get("_class"), is((Object) ConcreteChildClass.class.getName())); + Document set = getAsDocument(mappedObject, "$set"); + Document modelDocument = getAsDocument(set, "aliased.$"); + assertTypeHint(modelDocument, ConcreteChildClass.class); } - /** - * @see DATAMONGO-407 - */ - @Test - public void updateMapperShouldSupportNestedCollectionElementUpdates() { + @Test // DATAMONGO-407 + void updateMapperShouldSupportNestedCollectionElementUpdates() { Update update = Update.update("list.$.value", "foo").set("list.$.otherValue", "bar"); UpdateMapper mapper = new UpdateMapper(converter); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject set = getAsDBObject(mappedObject, "$set"); - assertThat(set.get("aliased.$.value"), is((Object) "foo")); - assertThat(set.get("aliased.$.otherValue"), is((Object) "bar")); + Document set = getAsDocument(mappedObject, "$set"); + assertThat(set.get("aliased.$.value")).isEqualTo("foo"); + assertThat(set.get("aliased.$.otherValue")).isEqualTo("bar"); } - /** - * @see DATAMONGO-407 - */ - @Test - public void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() { + @Test // DATAMONGO-407 + void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() { Update update = Update.update("list.$.value", "foo").set("list.$.someObject", new ConcreteChildClass("42", "bubu")); UpdateMapper mapper = new UpdateMapper(converter); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject dbo = getAsDBObject(mappedObject, "$set"); - 
assertThat(dbo.get("aliased.$.value"), is((Object) "foo")); + Document document = getAsDocument(mappedObject, "$set"); + assertThat(document.get("aliased.$.value")).isEqualTo("foo"); - DBObject someObject = getAsDBObject(dbo, "aliased.$.someObject"); - assertThat(someObject, is(notNullValue())); - assertThat(someObject.get("_class"), is((Object) ConcreteChildClass.class.getName())); - assertThat(someObject.get("value"), is((Object) "bubu")); + Document someObject = getAsDocument(document, "aliased.$.someObject"); + assertThat(someObject).isNotNull().containsEntry("value", "bubu"); + assertTypeHint(someObject, ConcreteChildClass.class); } - /** - * @see DATAMONGO-812 - */ @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingSimpleTypes() { + @Test // DATAMONGO-812 + void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingSimpleTypes() { Update update = new Update().push("values").each("spring", "data", "mongodb"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Model.class)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Model.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject values = getAsDBObject(push, "values"); - BasicDBList each = getAsDBList(values, "$each"); + Document push = getAsDocument(mappedObject, "$push"); + Document values = getAsDocument(push, "values"); + List each = getAsDBList(values, "$each"); - assertThat(push.get("_class"), nullValue()); - assertThat(values.get("_class"), nullValue()); + assertThat(push.get("_class")).isNull(); + assertThat(values.get("_class")).isNull(); - assertThat(each.toMap(), (Matcher) allOf(hasValue("spring"), hasValue("data"), hasValue("mongodb"))); + assertThat(each).containsExactly("spring", "data", "mongodb"); } - /** - * @see DATAMONGO-812 - */ - @Test - public void updateMapperShouldConvertPushWhithoutAddingClassInformationWhenUsedWithEvery() { + @Test // DATAMONGO-812 + void updateMapperShouldConvertPushWhithoutAddingClassInformationWhenUsedWithEvery() { Update update = new Update().push("values").each("spring", "data", "mongodb"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Model.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject values = getAsDBObject(push, "values"); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Model.class)); + Document push = getAsDocument(mappedObject, "$push"); + Document values = getAsDocument(push, "values"); - assertThat(push.get("_class"), nullValue()); - assertThat(values.get("_class"), nullValue()); + assertThat(push.get("_class")).isNull(); + assertThat(values.get("_class")).isNull(); } - /** - * @see DATAMONGO-812 - */ @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingCustomTypes() { + @Test // DATAMONGO-812 + void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingCustomTypes() { Update update = new Update().push("models").each(new ListModel("spring", "data", "mongodb")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ModelWrapper.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject model = 
getAsDBObject(push, "models"); - BasicDBList each = getAsDBList(model, "$each"); - BasicDBList values = getAsDBList((DBObject) each.get(0), "values"); + Document push = getAsDocument(mappedObject, "$push"); + Document model = getAsDocument(push, "models"); + List each = getAsDBList(model, "$each"); + List values = getAsDBList((Document) each.get(0), "values"); - assertThat(values.toMap(), (Matcher) allOf(hasValue("spring"), hasValue("data"), hasValue("mongodb"))); + assertThat(values).containsExactly("spring", "data", "mongodb"); } - /** - * @see DATAMONGO-812 - */ - @Test - public void updateMapperShouldRetainClassInformationForPushCorrectlyWhenCalledWithEachUsingCustomTypes() { + @Test // DATAMONGO-812 + void updateMapperShouldRetainClassInformationForPushCorrectlyWhenCalledWithEachUsingCustomTypes() { Update update = new Update().push("models").each(new ListModel("spring", "data", "mongodb")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ModelWrapper.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject model = getAsDBObject(push, "models"); - BasicDBList each = getAsDBList(model, "$each"); + Document push = getAsDocument(mappedObject, "$push"); + Document model = getAsDocument(push, "models"); + List each = getAsDBList(model, "$each"); - assertThat(((DBObject) each.get(0)).get("_class").toString(), equalTo(ListModel.class.getName())); + assertTypeHint(each.get(0), ListModel.class); } - /** - * @see DATAMONGO-812 - */ - @Test - public void testUpdateShouldAllowMultiplePushEachForDifferentFields() { + @Test // DATAMONGO-812 + void testUpdateShouldAllowMultiplePushEachForDifferentFields() { Update update = new Update().push("category").each("spring", "data").push("type").each("mongodb"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - assertThat(getAsDBObject(push, "category").containsField("$each"), is(true)); - assertThat(getAsDBObject(push, "type").containsField("$each"), is(true)); + Document push = getAsDocument(mappedObject, "$push"); + assertThat(getAsDocument(push, "category")).containsKey("$each"); + assertThat(getAsDocument(push, "type")).containsKey("$each"); } - /** - * @see DATAMONGO-943 - */ - @Test - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositiveIndexParameter() { + @Test // DATAMONGO-943 + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositiveIndexParameter() { Update update = new Update().push("key").atPosition(2).each(Arrays.asList("Arya", "Arry", "Weasel")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); + + assertThat(key.containsKey("$position")).isTrue(); + assertThat(key.get("$position")).isEqualTo(2); + assertThat(getAsDocument(push, "key")).containsKey("$each"); + } + + @Test // DATAMONGO-943, DATAMONGO-2055 + void updatePushEachAtNegativePositionWorksCorrectly() { - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject key = 
getAsDBObject(push, "key"); + Update update = new Update().push("key").atPosition(-2).each(Arrays.asList("Arya", "Arry", "Weasel")); - assertThat(key.containsField("$position"), is(true)); - assertThat((Integer) key.get("$position"), is(2)); - assertThat(getAsDBObject(push, "key").containsField("$each"), is(true)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); + + assertThat(key.containsKey("$position")).isTrue(); + assertThat(key.get("$position")).isEqualTo(-2); } - /** - * @see DATAMONGO-943 - */ - @Test - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionFirst() { + @Test // DATAMONGO-943 + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionFirst() { Update update = new Update().push("key").atPosition(Position.FIRST).each(Arrays.asList("Arya", "Arry", "Weasel")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject key = getAsDBObject(push, "key"); + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); - assertThat(key.containsField("$position"), is(true)); - assertThat((Integer) key.get("$position"), is(0)); - assertThat(getAsDBObject(push, "key").containsField("$each"), is(true)); + assertThat(key.containsKey("$position")).isTrue(); + assertThat(key.get("$position")).isEqualTo(0); + assertThat(getAsDocument(push, "key")).containsKey("$each"); } - /** - * @see DATAMONGO-943 - */ - @Test - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionLast() { + @Test // DATAMONGO-943 + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionLast() { Update update = new Update().push("key").atPosition(Position.LAST).each(Arrays.asList("Arya", "Arry", "Weasel")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject key = getAsDBObject(push, "key"); + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); - assertThat(key.containsField("$position"), is(false)); - assertThat(getAsDBObject(push, "key").containsField("$each"), is(true)); + assertThat(key).doesNotContainKey("$position"); + assertThat(getAsDocument(push, "key")).containsKey("$each"); } - /** - * @see DATAMONGO-943 - */ - @Test - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionNull() { + @Test // DATAMONGO-943 + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionNull() { Update update = new Update().push("key").atPosition(null).each(Arrays.asList("Arya", "Arry", "Weasel")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); + + assertThat(key).doesNotContainKey("$position"); + assertThat(getAsDocument(push, 
"key")).containsKey("$each"); + } + + @Test // DATAMONGO-832 + void updatePushEachWithSliceShouldRenderCorrectly() { - DBObject push = getAsDBObject(mappedObject, "$push"); - DBObject key = getAsDBObject(push, "key"); + Update update = new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel")); - assertThat(key.containsField("$position"), is(false)); - assertThat(getAsDBObject(push, "key").containsField("$each"), is(true)); + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); + + assertThat(key).containsKey("$slice").containsEntry("$slice", 5); + assertThat(key).containsKey("$each"); + } + + @Test // DATAMONGO-832 + void updatePushEachWithSliceShouldRenderWhenUsingMultiplePushCorrectly() { + + Update update = new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel")).push("key-2") + .slice(-2).each("The Beggar King", "Viserys III Targaryen"); + + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); + + assertThat(key).containsKey("$slice").containsEntry("$slice", 5); + assertThat(key.containsKey("$each")).isTrue(); + + Document key2 = getAsDocument(push, "key-2"); + + assertThat(key2).containsKey("$slice").containsEntry("$slice", -2); + assertThat(key2).containsKey("$each"); } - /** - * @see DATAMONGO-410 - */ - @Test - public void testUpdateMapperShouldConsiderCustomWriteTarget() { + @Test // DATAMONGO-1141 + void updatePushEachWithValueSortShouldRenderCorrectly() { + + Update update = new Update().push("scores").sort(Direction.DESC).each(42, 23, 68); + + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "scores"); + + assertThat(key).containsKey("$sort"); + assertThat(key).containsEntry("$sort", -1); + assertThat(key).containsKey("$each"); + } + + @Test // DATAMONGO-1141 + void updatePushEachWithDocumentSortShouldRenderCorrectly() { + + Update update = new Update().push("list") + .sort(Sort.by(new Order(Direction.ASC, "value"), new Order(Direction.ASC, "field"))) + .each(Collections.emptyList()); + + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithList.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "list"); + + assertThat(key).containsKey("$sort"); + assertThat(key.get("$sort")).isEqualTo(new Document("renamed-value", 1).append("field", 1)); + assertThat(key).containsKey("$each"); + } + + @Test // DATAMONGO-1141 + void updatePushEachWithSortShouldRenderCorrectlyWhenUsingMultiplePush() { + + Update update = new Update().push("authors").sort(Direction.ASC).each("Harry").push("chapters") + .sort(Sort.by(Direction.ASC, "order")).each(Collections.emptyList()); + + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key1 = getAsDocument(push, "authors"); + + assertThat(key1).containsKey("$sort"); + assertThat(key1).containsEntry("$sort", 1); + assertThat(key1).containsKey("$each"); + + Document key2 = 
getAsDocument(push, "chapters"); + + assertThat(key2).containsKey("$sort"); + assertThat(key2.get("$sort")).isEqualTo(new Document("order", 1)); + assertThat(key2.containsKey("$each")).isTrue(); + } + + @Test // DATAMONGO-410 + void testUpdateMapperShouldConsiderCustomWriteTarget() { List someValues = Arrays.asList(new NestedEntity("spring"), new NestedEntity("data"), new NestedEntity("mongodb")); NestedEntity[] array = new NestedEntity[someValues.size()]; - Update update = new Update().pushAll("collectionOfNestedEntities", someValues.toArray(array)); + Update update = new Update().push("collectionOfNestedEntities").each(someValues.toArray(array)); mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DomainEntity.class)); verify(writingConverterSpy, times(3)).convert(Mockito.any(NestedEntity.class)); } - /** - * @see DATAMONGO-404 - */ - @Test - public void createsDbRefForEntityIdOnPulls() { + @Test // DATAMONGO-404 + void createsDbRefForEntityIdOnPulls() { Update update = new Update().pull("dbRefAnnotatedList.id", "2"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); - DBObject pullClause = getAsDBObject(mappedObject, "$pull"); - assertThat(pullClause.get("dbRefAnnotatedList"), is((Object) new DBRef("entity", "2"))); + Document pullClause = getAsDocument(mappedObject, "$pull"); + assertThat(pullClause.get("dbRefAnnotatedList")).isEqualTo(new DBRef("entity", "2")); } - /** - * @see DATAMONGO-404 - */ - @Test - public void createsDbRefForEntityOnPulls() { + @Test // DATAMONGO-404 + void createsDbRefForEntityOnPulls() { Entity entity = new Entity(); entity.id = "5"; Update update = new Update().pull("dbRefAnnotatedList", entity); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); - DBObject pullClause = getAsDBObject(mappedObject, "$pull"); - assertThat(pullClause.get("dbRefAnnotatedList"), is((Object) new DBRef("entity", entity.id))); + Document pullClause = getAsDocument(mappedObject, "$pull"); + assertThat(pullClause.get("dbRefAnnotatedList")).isEqualTo(new DBRef("entity", entity.id)); } - /** - * @see DATAMONGO-404 - */ - @Test(expected = MappingException.class) - public void rejectsInvalidFieldReferenceForDbRef() { + @Test // DATAMONGO-404 + void rejectsInvalidFieldReferenceForDbRef() { Update update = new Update().pull("dbRefAnnotatedList.name", "NAME"); - mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); + assertThatThrownBy(() -> mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(DocumentWithDBRefCollection.class))).isInstanceOf(MappingException.class); } - /** - * @see DATAMONGO-404 - */ - @Test - public void rendersNestedDbRefCorrectly() { + @Test // DATAMONGO-404 + void rendersNestedDbRefCorrectly() { Update update = new Update().pull("nested.dbRefAnnotatedList.id", "2"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Wrapper.class)); - DBObject pullClause = getAsDBObject(mappedObject, "$pull"); - assertThat(pullClause.containsField("mapped.dbRefAnnotatedList"), is(true)); + Document pullClause = 
getAsDocument(mappedObject, "$pull"); + assertThat(pullClause.containsKey("mapped.dbRefAnnotatedList")).isTrue(); } - /** - * @see DATAMONGO-468 - */ - @Test - public void rendersUpdateOfDbRefPropertyWithDomainObjectCorrectly() { + @Test // DATAMONGO-468 + void rendersUpdateOfDbRefPropertyWithDomainObjectCorrectly() { Entity entity = new Entity(); entity.id = "5"; Update update = new Update().set("dbRefProperty", entity); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); - DBObject setClause = getAsDBObject(mappedObject, "$set"); - assertThat(setClause.get("dbRefProperty"), is((Object) new DBRef("entity", entity.id))); + Document setClause = getAsDocument(mappedObject, "$set"); + assertThat(setClause.get("dbRefProperty")).isEqualTo(new DBRef("entity", entity.id)); } - /** - * @see DATAMONGO-862 - */ - @Test - public void rendersUpdateAndPreservesKeyForPathsNotPointingToProperty() { + @Test // DATAMONGO-862 + void rendersUpdateAndPreservesKeyForPathsNotPointingToProperty() { Update update = new Update().set("listOfInterface.$.value", "expected-value"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject setClause = getAsDBObject(mappedObject, "$set"); - assertThat(setClause.containsField("listOfInterface.$.value"), is(true)); + Document setClause = getAsDocument(mappedObject, "$set"); + assertThat(setClause.containsKey("listOfInterface.$.value")).isTrue(); } - /** - * @see DATAMONGO-863 - */ - @Test - public void doesNotConvertRawDbObjects() { + @Test // DATAMONGO-863 + void doesNotConvertRawDocuments() { Update update = new Update(); update.pull("options", - new BasicDBObject("_id", new BasicDBObject("$in", converter.convertToMongoType(Arrays.asList(1L, 2L))))); + new Document("_id", new Document("$in", converter.convertToMongoType(Arrays.asList(1L, 2L))))); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject setClause = getAsDBObject(mappedObject, "$pull"); - DBObject options = getAsDBObject(setClause, "options"); - DBObject idClause = getAsDBObject(options, "_id"); - BasicDBList inClause = getAsDBList(idClause, "$in"); + Document setClause = getAsDocument(mappedObject, "$pull"); + Document options = getAsDocument(setClause, "options"); + Document idClause = getAsDocument(options, "_id"); + List inClause = getAsDBList(idClause, "$in"); - assertThat(inClause, IsIterableContainingInOrder. 
contains(1L, 2L)); + assertThat(inClause).containsExactly(1L, 2L); } - /** - * @see DATAMONG0-471 - */ @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testUpdateShouldApply$addToSetCorrectlyWhenUsedWith$each() { + @Test // DATAMONG0-471 + void testUpdateShouldApply$addToSetCorrectlyWhenUsedWith$each() { Update update = new Update().addToSet("values").each("spring", "data", "mongodb"); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ListModel.class)); - DBObject addToSet = getAsDBObject(mappedObject, "$addToSet"); - DBObject values = getAsDBObject(addToSet, "values"); - BasicDBList each = getAsDBList(values, "$each"); + Document addToSet = getAsDocument(mappedObject, "$addToSet"); + Document values = getAsDocument(addToSet, "values"); + List each = getAsDBList(values, "$each"); - assertThat(each.toMap(), (Matcher) allOf(hasValue("spring"), hasValue("data"), hasValue("mongodb"))); + assertThat(each).containsExactly("spring", "data", "mongodb"); } - /** - * @see DATAMONG0-471 - */ - @Test - public void testUpdateShouldRetainClassTypeInformationWhenUsing$addToSetWith$eachForCustomTypes() { + @Test // DATAMONG0-471 + void testUpdateShouldRetainClassTypeInformationWhenUsing$addToSetWith$eachForCustomTypes() { Update update = new Update().addToSet("models").each(new ModelImpl(2014), new ModelImpl(1), new ModelImpl(28)); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ModelWrapper.class)); - DBObject addToSet = getAsDBObject(mappedObject, "$addToSet"); + Document addToSet = getAsDocument(mappedObject, "$addToSet"); - DBObject values = getAsDBObject(addToSet, "models"); - BasicDBList each = getAsDBList(values, "$each"); + Document values = getAsDocument(addToSet, "models"); + List each = getAsDBList(values, "$each"); for (Object updateValue : each) { - assertThat(((DBObject) updateValue).get("_class").toString(), - equalTo("org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests$ModelImpl")); + assertTypeHint((Document) updateValue, ModelImpl.class); } } - /** - * @see DATAMONGO-897 - */ - @Test - public void updateOnDbrefPropertyOfInterfaceTypeWithoutExplicitGetterForIdShouldBeMappedCorrectly() { + @Test // DATAMONGO-897 + void updateOnDbrefPropertyOfInterfaceTypeWithoutExplicitGetterForIdShouldBeMappedCorrectly() { Update update = new Update().set("referencedDocument", new InterfaceDocumentDefinitionImpl("1", "Foo")); - DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(), + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithReferenceToInterfaceImpl.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedObject, "$set"); + Document $set = DocumentTestUtils.getAsDocument(mappedObject, "$set"); Object model = $set.get("referencedDocument"); DBRef expectedDBRef = new DBRef("interfaceDocumentDefinitionImpl", "1"); - assertThat(model, allOf(instanceOf(DBRef.class), IsEqual. 
equalTo(expectedDBRef))); + assertThat(model).isInstanceOf(DBRef.class).isEqualTo(expectedDBRef); } - /** - * @see DATAMONGO-847 - */ - @Test - public void updateMapperConvertsNestedQueryCorrectly() { + @Test // DATAMONGO-847 + void updateMapperConvertsNestedQueryCorrectly() { Update update = new Update().pull("list", Query.query(Criteria.where("value").in("foo", "bar"))); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - DBObject $pull = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$pull"); - DBObject list = DBObjectTestUtils.getAsDBObject($pull, "aliased"); - DBObject value = DBObjectTestUtils.getAsDBObject(list, "value"); - BasicDBList $in = DBObjectTestUtils.getAsDBList(value, "$in"); + Document $pull = DocumentTestUtils.getAsDocument(mappedUpdate, "$pull"); + Document list = DocumentTestUtils.getAsDocument($pull, "aliased"); + Document value = DocumentTestUtils.getAsDocument(list, "value"); + List $in = DocumentTestUtils.getAsDBList(value, "$in"); - assertThat($in, IsIterableContainingInOrder. contains("foo", "bar")); + assertThat($in).containsExactly("foo", "bar"); } - /** - * @see DATAMONGO-847 - */ - @Test - public void updateMapperConvertsPullWithNestedQuerfyOnDBRefCorrectly() { + @Test // DATAMONGO-847 + void updateMapperConvertsPullWithNestedQuerfyOnDBRefCorrectly() { Update update = new Update().pull("dbRefAnnotatedList", Query.query(Criteria.where("id").is("1"))); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); - DBObject $pull = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$pull"); - DBObject list = DBObjectTestUtils.getAsDBObject($pull, "dbRefAnnotatedList"); + Document $pull = DocumentTestUtils.getAsDocument(mappedUpdate, "$pull"); + Document list = DocumentTestUtils.getAsDocument($pull, "dbRefAnnotatedList"); - assertThat(list, equalTo(new BasicDBObjectBuilder().add("_id", "1").get())); + assertThat(list).isEqualTo(new org.bson.Document().append("_id", "1")); } - /** - * @see DATAMONGO-1077 - */ - @Test - public void shouldNotRemovePositionalParameter() { + @Test // DATAMONGO-1077 + void shouldNotRemovePositionalParameter() { Update update = new Update(); update.unset("dbRefAnnotatedList.$"); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); - DBObject $unset = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$unset"); + Document $unset = DocumentTestUtils.getAsDocument(mappedUpdate, "$unset"); - assertThat($unset, equalTo(new BasicDBObjectBuilder().add("dbRefAnnotatedList.$", 1).get())); + assertThat($unset).isEqualTo(new org.bson.Document().append("dbRefAnnotatedList.$", 1)); } - /** - * @see DATAMONGO-1210 - */ - @Test - public void mappingEachOperatorShouldNotAddTypeInfoForNonInterfaceNonAbstractTypes() { + @Test // DATAMONGO-1210 + void mappingEachOperatorShouldNotAddTypeInfoForNonInterfaceNonAbstractTypes() { Update update = new Update().addToSet("nestedDocs").each(new NestedDocument("nested-1"), new NestedDocument("nested-2")); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), 
context.getPersistentEntity(DocumentWithNestedCollection.class)); - assertThat(mappedUpdate, isBsonObject().notContaining("$addToSet.nestedDocs.$each.[0]._class")); - assertThat(mappedUpdate, isBsonObject().notContaining("$addToSet.nestedDocs.$each.[1]._class")); + assertThat(mappedUpdate).doesNotContainKey("$addToSet.nestedDocs.$each.[0]._class") + .doesNotContainKey("$addToSet.nestedDocs.$each.[1]._class"); } - /** - * @see DATAMONGO-1210 - */ - @Test - public void mappingEachOperatorShouldAddTypeHintForInterfaceTypes() { + @Test // DATAMONGO-1210 + void mappingEachOperatorShouldAddTypeHintForInterfaceTypes() { Update update = new Update().addToSet("models").each(new ModelImpl(1), new ModelImpl(2)); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ListModelWrapper.class)); - assertThat(mappedUpdate, isBsonObject().containing("$addToSet.models.$each.[0]._class", ModelImpl.class.getName())); - assertThat(mappedUpdate, isBsonObject().containing("$addToSet.models.$each.[1]._class", ModelImpl.class.getName())); + assertThat(mappedUpdate).containsEntry("$addToSet.models.$each.[0]._class", ModelImpl.class.getName()); + assertThat(mappedUpdate).containsEntry("$addToSet.models.$each.[1]._class", ModelImpl.class.getName()); } - /** - * @see DATAMONGO-1210 - */ - @Test - public void mappingEachOperatorShouldAddTypeHintForAbstractTypes() { + @Test // DATAMONGO-1210 + void mappingEachOperatorShouldAddTypeHintForAbstractTypes() { Update update = new Update().addToSet("list").each(new ConcreteChildClass("foo", "one"), new ConcreteChildClass("bar", "two")); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ParentClass.class)); - assertThat(mappedUpdate, - isBsonObject().containing("$addToSet.aliased.$each.[0]._class", ConcreteChildClass.class.getName())); - assertThat(mappedUpdate, - isBsonObject().containing("$addToSet.aliased.$each.[1]._class", ConcreteChildClass.class.getName())); + assertThat(mappedUpdate).containsEntry("$addToSet.aliased.$each.[0]._class", ConcreteChildClass.class.getName()); + assertThat(mappedUpdate).containsEntry("$addToSet.aliased.$each.[1]._class", ConcreteChildClass.class.getName()); } - /** - * @see DATAMONGO-1210 - */ - @Test - public void mappingShouldOnlyRemoveTypeHintFromTopLevelTypeInCaseOfNestedDocument() { + @Test // DATAMONGO-1210 + void mappingShouldOnlyRemoveTypeHintFromTopLevelTypeInCaseOfNestedDocument() { WrapperAroundInterfaceType wait = new WrapperAroundInterfaceType(); wait.interfaceType = new ModelImpl(1); Update update = new Update().addToSet("listHoldingConcretyTypeWithInterfaceTypeAttribute").each(wait); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DomainTypeWithListOfConcreteTypesHavingSingleInterfaceTypeAttribute.class)); - assertThat(mappedUpdate, - isBsonObject().notContaining("$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0]._class")); - assertThat(mappedUpdate, - isBsonObject().containing( - "$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0].interfaceType._class", - ModelImpl.class.getName())); + assertThat(mappedUpdate) + .doesNotContainKey("$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0]._class"); + 
assertThat(mappedUpdate).containsEntry( + "$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0].interfaceType._class", + ModelImpl.class.getName()); } - /** - * @see DATAMONGO-1210 - */ - @Test - public void mappingShouldRetainTypeInformationOfNestedListWhenUpdatingConcreteyParentType() { + @Test // DATAMONGO-1210 + void mappingShouldRetainTypeInformationOfNestedListWhenUpdatingConcreteyParentType() { ListModelWrapper lmw = new ListModelWrapper(); - lmw.models = Collections.<Model> singletonList(new ModelImpl(1)); + lmw.models = Collections.singletonList(new ModelImpl(1)); Update update = new Update().set("concreteTypeWithListAttributeOfInterfaceType", lmw); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes.class)); - assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteTypeWithListAttributeOfInterfaceType._class")); - assertThat( - mappedUpdate, - isBsonObject().containing("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class", - ModelImpl.class.getName())); + assertThat(mappedUpdate).doesNotContainKey("$set.concreteTypeWithListAttributeOfInterfaceType._class"); + assertThat(mappedUpdate).containsEntry("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class", + ModelImpl.class.getName()); + } + + @Test // DATAMONGO-1809 + void pathShouldIdentifyPositionalParameterWithMoreThanOneDigit() { + + Document at2digitPosition = mapper.getMappedObject(new Update() + .addToSet("concreteInnerList.10.concreteTypeList", new SomeInterfaceImpl("szeth")).getUpdateObject(), + context.getPersistentEntity(Outer.class)); + + Document at3digitPosition = mapper.getMappedObject(new Update() + .addToSet("concreteInnerList.123.concreteTypeList", new SomeInterfaceImpl("lopen")).getUpdateObject(), + context.getPersistentEntity(Outer.class)); + + assertThat(at2digitPosition).isEqualTo(new Document("$addToSet", + new Document("concreteInnerList.10.concreteTypeList", new Document("value", "szeth")))); + assertThat(at3digitPosition).isEqualTo(new Document("$addToSet", + new Document("concreteInnerList.123.concreteTypeList", new Document("value", "lopen")))); } - /** - * @see DATAMONGO-1236 - */ - @Test - public void mappingShouldRetainTypeInformationForObjectValues() { + @Test // DATAMONGO-1236 + void mappingShouldRetainTypeInformationForObjectValues() { Update update = new Update().set("value", new NestedDocument("kaladin")); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithObject.class)); - assertThat(mappedUpdate, isBsonObject().containing("$set.value.name", "kaladin")); - assertThat(mappedUpdate, isBsonObject().containing("$set.value._class", NestedDocument.class.getName())); + assertThat(mappedUpdate).containsEntry("$set.value.name", "kaladin"); + assertThat(mappedUpdate).containsEntry("$set.value._class", NestedDocument.class.getName()); } - /** - * @see DATAMONGO-1236 - */ - @Test - public void mappingShouldNotRetainTypeInformationForConcreteValues() { + @Test // DATAMONGO-1236 + void mappingShouldNotRetainTypeInformationForConcreteValues() { Update update = new Update().set("concreteValue", new NestedDocument("shallan")); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = 
mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithObject.class)); - assertThat(mappedUpdate, isBsonObject().containing("$set.concreteValue.name", "shallan")); - assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteValue._class")); + assertThat(mappedUpdate).containsEntry("$set.concreteValue.name", "shallan"); + assertThat(mappedUpdate).doesNotContainKey("$set.concreteValue._class"); } - /** - * @see DATAMONGO-1236 - */ - @Test - public void mappingShouldRetainTypeInformationForObjectValuesWithAlias() { + @Test // DATAMONGO-1236 + void mappingShouldRetainTypeInformationForObjectValuesWithAlias() { Update update = new Update().set("value", new NestedDocument("adolin")); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithAliasedObject.class)); - assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value.name", "adolin")); - assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value._class", NestedDocument.class.getName())); + assertThat(mappedUpdate).containsEntry("$set.renamed-value.name", "adolin"); + assertThat(mappedUpdate).containsEntry("$set.renamed-value._class", NestedDocument.class.getName()); } - /** - * @see DATAMONGO-1236 - */ - @Test - public void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchItsDeclaration() { + @Test // DATAMONGO-1236 + void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchItsDeclaration() { - Map<Object, NestedDocument> map = Collections.<Object, NestedDocument> singletonMap("szeth", new NestedDocument("son-son-vallano")); + Map<Object, NestedDocument> map = Collections.singletonMap("szeth", new NestedDocument("son-son-vallano")); Update update = new Update().set("map", map); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithObjectMap.class)); - assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth.name", "son-son-vallano")); - assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth._class", NestedDocument.class.getName())); + assertThat(mappedUpdate).containsEntry("$set.map.szeth.name", "son-son-vallano"); + assertThat(mappedUpdate).containsEntry("$set.map.szeth._class", NestedDocument.class.getName()); } - /** - * @see DATAMONGO-1236 - */ - @Test - public void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDeclaration() { + @Test // DATAMONGO-1236 + void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDeclaration() { - Map<String, NestedDocument> map = Collections.<String, NestedDocument>
singletonMap("jasnah", new NestedDocument( - "kholin")); + Map<String, NestedDocument> map = Collections.singletonMap("jasnah", new NestedDocument("kholin")); Update update = new Update().set("concreteMap", map); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).containsEntry("$set.concreteMap.jasnah.name", "kholin"); + assertThat(mappedUpdate).doesNotContainKey("$set.concreteMap.jasnah._class"); + } + + @Test // GH-4567 + void updateShouldAllowNullValuesInMap() { + + Map<String, NestedDocument> map = Collections.singletonMap("jasnah", new NestedDocument("kholin")); + + Update update = new Update().set("concreteMap", Collections.singletonMap("jasnah", null)); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithObjectMap.class)); - assertThat(mappedUpdate, isBsonObject().containing("$set.concreteMap.jasnah.name", "kholin")); - assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteMap.jasnah._class")); + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("concreteMap", Collections.singletonMap("jasnah", null)))); } - /** - * @see DATAMONGO-1250 - */ - @Test + @Test // DATAMONGO-1250 @SuppressWarnings("unchecked") - public void mapsUpdateWithBothReadingAndWritingConverterRegistered() { + void mapsUpdateWithBothReadingAndWritingConverterRegistered() { - CustomConversions conversions = new CustomConversions( - Arrays.asList(AllocationToStringConverter.INSTANCE, StringToAllocationConverter.INSTANCE)); + CustomConversions conversions = new MongoCustomConversions(Arrays.asList( + ClassWithEnum.AllocationToStringConverter.INSTANCE, ClassWithEnum.StringToAllocationConverter.INSTANCE)); MongoMappingContext mappingContext = new MongoMappingContext(); mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); @@ -788,103 +791,575 @@ public void mapsUpdateWithBothReadingAndWritingConverterRegistered() { UpdateMapper mapper = new UpdateMapper(converter); - Update update = new Update().set("allocation", Allocation.AVAILABLE); - DBObject result = mapper.getMappedObject(update.getUpdateObject(), + Update update = new Update().set("allocation", ClassWithEnum.Allocation.AVAILABLE); + Document result = mapper.getMappedObject(update.getUpdateObject(), mappingContext.getPersistentEntity(ClassWithEnum.class)); - assertThat(result, isBsonObject().containing("$set.allocation", Allocation.AVAILABLE.code)); + assertThat(result).containsEntry("$set.allocation", ClassWithEnum.Allocation.AVAILABLE.code); } - /** - * @see DATAMONGO-1251 - */ - @Test - public void mapsNullValueCorrectlyForSimpleTypes() { + @Test // DATAMONGO-1251 + void mapsNullValueCorrectlyForSimpleTypes() { Update update = new Update().set("value", null); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ConcreteChildClass.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set"); - assertThat($set.containsField("value"), is(true)); - assertThat($set.get("value"), nullValue()); + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set).containsKey("value").containsEntry("value", null); } - /** - * @see DATAMONGO-1251 - */ - @Test - public void mapsNullValueCorrectlyForJava8Date() { + @Test // DATAMONGO-1251 + void 
mapsNullValueCorrectlyForJava8Date() { Update update = new Update().set("date", null); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ClassWithJava8Date.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set"); - assertThat($set.containsField("date"), is(true)); - assertThat($set.get("value"), nullValue()); + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set).containsKey("date").doesNotContainKey("value"); } - /** - * @see DATAMONGO-1251 - */ - @Test - public void mapsNullValueCorrectlyForCollectionTypes() { + @Test // DATAMONGO-1251 + void mapsNullValueCorrectlyForCollectionTypes() { Update update = new Update().set("values", null); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(ListModel.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set"); - assertThat($set.containsField("values"), is(true)); - assertThat($set.get("value"), nullValue()); + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set).containsKey("values").doesNotContainKey("value"); } - /** - * @see DATAMONGO-1251 - */ - @Test - public void mapsNullValueCorrectlyForPropertyOfNestedDocument() { + @Test // DATAMONGO-1251 + void mapsNullValueCorrectlyForPropertyOfNestedDocument() { Update update = new Update().set("concreteValue.name", null); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithObject.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set"); - assertThat($set.containsField("concreteValue.name"), is(true)); - assertThat($set.get("concreteValue.name"), nullValue()); + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set).containsKey("concreteValue.name"); + assertThat($set).containsEntry("concreteValue.name", null); } - /** - * @see DATAMONGO-1288 - */ - @Test - public void mapsAtomicIntegerToIntegerCorrectly() { + @Test // DATAMONGO-1288 + void mapsAtomicIntegerToIntegerCorrectly() { Update update = new Update().set("intValue", new AtomicInteger(10)); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(SimpleValueHolder.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set"); - assertThat($set.get("intValue"), Is. 
is(10)); + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set.get("intValue")).isEqualTo(10); } - /** - * @see DATAMONGO-1288 - */ - @Test - public void mapsAtomicIntegerToPrimitiveIntegerCorrectly() { + @Test // DATAMONGO-1288 + void mapsAtomicIntegerToPrimitiveIntegerCorrectly() { Update update = new Update().set("primIntValue", new AtomicInteger(10)); - DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(SimpleValueHolder.class)); + + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set.get("primIntValue")).isEqualTo(10); + } + + @Test // DATAMONGO-1404 + void mapsMinCorrectly() { + + Update update = new Update().min("minfield", 10); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(SimpleValueHolder.class)); - DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set"); - assertThat($set.get("primIntValue"), Is. is(10)); + assertThat(mappedUpdate).containsEntry("$min", new Document("minfield", 10)); + } + + @Test // DATAMONGO-1404 + void mapsMaxCorrectly() { + + Update update = new Update().max("maxfield", 999); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(SimpleValueHolder.class)); + + assertThat(mappedUpdate).containsEntry("$max", new Document("maxfield", 999)); + } + + @Test // DATAMONGO-1423, DATAMONGO-2155 + @SuppressWarnings("unchecked") + void mappingShouldConsiderCustomConvertersForEnumMapKeys() { + + CustomConversions conversions = new MongoCustomConversions(Arrays.asList( + ClassWithEnum.AllocationToStringConverter.INSTANCE, ClassWithEnum.StringToAllocationConverter.INSTANCE)); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(mock(DbRefResolver.class), mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + UpdateMapper mapper = new UpdateMapper(converter); + + Update update = new Update().set("enumAsMapKey", Collections.singletonMap(ClassWithEnum.Allocation.AVAILABLE, 100)); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + mappingContext.getPersistentEntity(ClassWithEnum.class)); + + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set.containsKey("enumAsMapKey")).isTrue(); + + Map enumAsMapKey = $set.get("enumAsMapKey", Map.class); + assertThat(enumAsMapKey.get("V")).isEqualTo(100); + } + + @Test // DATAMONGO-1176 + void mappingShouldPrepareUpdateObjectForMixedOperatorsAndFields() { + + Document document = new Document("key", "value").append("$set", new Document("a", "b").append("x", "y")); + + Document mappedObject = mapper.getMappedObject(document, context.getPersistentEntity(SimpleValueHolder.class)); + + assertThat(mappedObject.get("$set")).isEqualTo(new Document("a", "b").append("x", "y").append("key", "value")); + assertThat(mappedObject).hasSize(1); + } + + @Test // DATAMONGO-1176 + void mappingShouldReturnReplaceObject() { + + Document document = new Document("key", "value").append("a", "b").append("x", "y"); + + Document mappedObject = mapper.getMappedObject(document, context.getPersistentEntity(SimpleValueHolder.class)); + + 
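/*
 * DATAMONGO-1176 in short: getMappedObject(...) inspects the top-level keys of the given
 * document. Only "$"-prefixed keys -> an update with operators; no "$"-prefixed keys at
 * all -> a full replacement document (as asserted below); a mixture -> the plain keys are
 * folded into the existing "$set", as the previous test verified. A sketch:
 *
 * Document replacement = new Document("key", "value");                          // replaces the document
 * Document operatorUpdate = new Document("$set", new Document("key", "value")); // partial update
 */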
assertThat(mappedObject).containsEntry("key", "value"); + assertThat(mappedObject).containsEntry("a", "b"); + assertThat(mappedObject).containsEntry("x", "y"); + assertThat(mappedObject).hasSize(3); + } + + @Test // DATAMONGO-1176 + void mappingShouldReturnUpdateObject() { + + Document document = new Document("$push", new Document("x", "y")).append("$set", new Document("a", "b")); + + Document mappedObject = mapper.getMappedObject(document, context.getPersistentEntity(SimpleValueHolder.class)); + + assertThat(mappedObject).containsEntry("$push", new Document("x", "y")); + assertThat(mappedObject).containsEntry("$set", new Document("a", "b")); + assertThat(mappedObject).hasSize(2); + } + + @Test // DATAMONGO-1486, DATAMONGO-2155 + void mappingShouldConvertMapKeysToString() { + + Update update = new Update().set("map", Collections.singletonMap(25, "#StarTrek50")); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set.containsKey("map")).isTrue(); + + Map mapToSet = $set.get("map", Map.class); + for (Object key : mapToSet.keySet()) { + assertThat(key).isInstanceOf(String.class); + } + } + + @Test // DATAMONGO-1772 + void mappingShouldAddTypeKeyInListOfInterfaceTypeContainedInConcreteObjectCorrectly() { + + ConcreteInner inner = new ConcreteInner(); + inner.interfaceTypeList = Collections.singletonList(new SomeInterfaceImpl()); + List list = Collections.singletonList(inner); + + Document mappedUpdate = mapper.getMappedObject(new Update().set("concreteInnerList", list).getUpdateObject(), + context.getPersistentEntity(Outer.class)); + + assertThat(mappedUpdate).containsKey("$set.concreteInnerList.[0].interfaceTypeList.[0]._class") + .doesNotContainKey("$set.concreteInnerList.[0]._class"); + } + + @Test // DATAMONGO-1772 + void mappingShouldAddTypeKeyInListOfAbstractTypeContainedInConcreteObjectCorrectly() { + + ConcreteInner inner = new ConcreteInner(); + inner.abstractTypeList = Collections.singletonList(new SomeInterfaceImpl()); + List list = Collections.singletonList(inner); + + Document mappedUpdate = mapper.getMappedObject(new Update().set("concreteInnerList", list).getUpdateObject(), + context.getPersistentEntity(Outer.class)); + + assertThat(mappedUpdate).containsKey("$set.concreteInnerList.[0].abstractTypeList.[0]._class") + .doesNotContainKey("$set.concreteInnerList.[0]._class"); + } + + @Test // DATAMONGO-2155 + void shouldPreserveFieldNamesOfMapProperties() { + + Update update = Update + .fromDocument(new Document("concreteMap", new Document("Name", new Document("name", "fooo")))); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("concreteMap", new Document("Name", new Document("name", "fooo")))); + } + + @Test // DATAMONGO-2155 + void shouldPreserveExplicitFieldNamesInsideMapProperties() { + + Update update = Update + .fromDocument(new Document("map", new Document("Value", new Document("renamed-value", "fooo")))); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithMapOfAliased.class)); + + assertThat(mappedUpdate) + .isEqualTo(new Document("map", new Document("Value", new Document("renamed-value", "fooo")))); + } + + @Test // DATAMONGO-2155 + void shouldMapAliasedFieldNamesInMapsCorrectly() { + + Update update = 
Update + .fromDocument(new Document("map", Collections.singletonMap("Value", new Document("value", "fooo")))); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithMapOfAliased.class)); + + assertThat(mappedUpdate) + .isEqualTo(new Document("map", new Document("Value", new Document("renamed-value", "fooo")))); + } + + @Test // DATAMONGO-2174 + void mappingUpdateDocumentWithExplicitFieldNameShouldBePossible() { + + Document mappedUpdate = mapper.getMappedObject(new Document("AValue", "a value"), + context.getPersistentEntity(TypeWithFieldNameThatCannotBeDecapitalized.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("AValue", "a value")); + } + + @Test // DATAMONGO-2054 + void mappingShouldAllowPositionAllParameter() { + + Update update = new Update().inc("grades.$[]", 10); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithListOfIntegers.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$inc", new Document("grades.$[]", 10))); + } + + @Test // DATAMONGO-2054 + void mappingShouldAllowPositionAllParameterWhenPropertyHasExplicitFieldName() { + + Update update = new Update().inc("list.$[]", 10); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$inc", new Document("aliased.$[]", 10))); + } + + @Test // DATAMONGO-2215 + void mappingShouldAllowPositionParameterWithIdentifier() { + + Update update = new Update().set("grades.$[element]", 10) // + .filterArray(Criteria.where("element").gte(100)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithListOfIntegers.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("grades.$[element]", 10))); + } + + @Test // DATAMONGO-2215 + void mappingShouldAllowPositionParameterWithIdentifierWhenFieldHasExplicitFieldName() { + + Update update = new Update().set("list.$[element]", 10) // + .filterArray(Criteria.where("element").gte(100)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("aliased.$[element]", 10))); + } + + @Test // DATAMONGO-2215 + void mappingShouldAllowNestedPositionParameterWithIdentifierWhenFieldHasExplicitFieldName() { + + Update update = new Update().set("list.$[element].value", 10) // + .filterArray(Criteria.where("element").gte(100)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("aliased.$[element].value", 10))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderValueOfUnwrappedType() { + + Update update = new Update().set("unwrappedValue.stringValue", "updated"); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("stringValue", "updated"))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderUnwrappedType() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "updated"; + unwrappableType.listValue = Arrays.asList("val-1", "val-2"); + Update update = new 
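/*
 * Context for the $[] / $[element] assertions above: UpdateMapper rewrites only the
 * property path (applying @Field aliases such as "list" -> "aliased") and passes the
 * positional token through untouched; the criteria registered via filterArray(...) are
 * not part of the mapped update document but are handed to the driver as arrayFilters
 * when the update executes. A sketch, assuming a configured MongoTemplate "template":
 *
 * Update update = new Update().set("grades.$[element]", 10)
 *     .filterArray(Criteria.where("element").gte(100));
 * template.updateMulti(new Query(), update, EntityWithListOfIntegers.class);
 * // roughly: updateMany({}, { $set: { "grades.$[element]": 10 } },
 * //                      { arrayFilters: [ { element: { $gte: 100 } } ] })
 */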
Update().set("unwrappedValue", unwrappableType); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", + new Document("stringValue", "updated").append("listValue", Arrays.asList("val-1", "val-2")))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderValueOfPrefixedUnwrappedType() { + + Update update = new Update().set("unwrappedValue.stringValue", "updated"); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("prefix-stringValue", "updated"))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderPrefixedUnwrappedType() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "updated"; + unwrappableType.listValue = Arrays.asList("val-1", "val-2"); + + Update update = new Update().set("unwrappedValue", unwrappableType); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", + new Document("prefix-stringValue", "updated").append("prefix-listValue", Arrays.asList("val-1", "val-2")))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderNestedPrefixedUnwrappedType() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "updated"; + unwrappableType.listValue = Arrays.asList("val-1", "val-2"); + + Update update = new Update().set("withPrefixedUnwrapped.unwrappedValue", unwrappableType); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("withPrefixedUnwrapped", + new Document("prefix-stringValue", "updated").append("prefix-listValue", Arrays.asList("val-1", "val-2"))))); + } + + @Test // GH-3552 + void numericKeyForMap() { + + Update update = new Update().set("map.601218778970110001827396", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396\": \"testing\"}}"); + } + + @Test // GH-3552 + void numericKeyInMapOfNestedPath() { + + Update update = new Update().set("map.601218778970110001827396.value", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}"); + } + + @Test // GH-3688 + void multipleNumericKeysInNestedPath() { + + Update update = new Update().set("intKeyedMap.12345.map.0", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.12345.map.0\": \"testing\"}}"); + } + + @Test // GH-3566 + void mapsObjectClassPropertyFieldInMapValueTypeAsKey() { + + Update update = new Update().set("map.class", "value"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + 
assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.class\": \"value\"}}"); + } + + @Test // GH-3775 + void mapNestedStringFieldCorrectly() { + + Update update = new Update().set("levelOne.a.b.d", "e"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.a.b.d", "e"))); + } + + @ParameterizedTest // GH-3775, GH-4426 + @ValueSource(strings = {"levelOne.0.1.3", "levelOne.0.1.32", "levelOne2.0.1.32", "levelOne2.0.1.320"}) + void mapNestedIntegerFieldCorrectly(String path) { + + Update update = new Update().set(path, "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4"))); + } + + @ParameterizedTest // GH-3775, GH-4426 + @ValueSource(strings = {"levelOne.0.1.c", "levelOne.0.1.c.32", "levelOne2.0.1.32.c", "levelOne2.0.1.c.320"}) + void mapNestedMixedStringIntegerFieldCorrectly(String path) { + + Update update = new Update().set(path, "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4"))); + } + + @Test // GH-3775 + void mapNestedMixedStringIntegerWithStartNumberFieldCorrectly() { + + Update update = new Update().set("levelOne.0a.1b.3c", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0a.1b.3c", "4"))); + } + + @Test // GH-3688 + void multipleKeysStartingWithANumberInNestedPath() { + + Update update = new Update().set("intKeyedMap.1a.map.0b", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.1a.map.0b\": \"testing\"}}"); + } + + @Test // GH-3853 + void updateWithDocuRefOnId() { + + Sample sample = new Sample(); + sample.foo = "s1"; + + Update update = new Update().set("sample", sample); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("sample", "s1"))); + } + + @Test // GH-3853 + void updateListWithDocuRefOnId() { + + Sample sample = new Sample(); + sample.foo = "s1"; + + Update update = new Update().set("samples", Arrays.asList(sample)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate) + .isEqualTo(new org.bson.Document("$set", new org.bson.Document("samples", Arrays.asList("s1")))); + } + + @Test // GH-3853 + void updateWithDocuRefOnProperty() { + + Customer customer = new Customer(); + customer.id = new ObjectId(); + customer.name = "c-name"; + + Update update = new Update().set("customer", customer); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate).isEqualTo(new 
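/*
 * The @DocumentReference mapping asserted around here follows the declared lookup: with
 * lookup = "{ 'name' : ?#{#target} }" the stored pointer is the referenced document's
 * "name" ("c-name"), while the plain @DocumentReference on Sample stores the id value
 * ("s1"). Declaration sketch, matching the WithDocumentReference fixture below:
 *
 * @DocumentReference(lookup = "{ 'name' : ?#{#target} }")
 * private Customer customer; // update writes { "customer" : "c-name" }
 *
 * @DocumentReference
 * private Sample sample;     // update writes { "sample" : "s1" }
 */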
org.bson.Document("$set", new org.bson.Document("customer", "c-name"))); + } + + @Test // GH-3853 + void updateListWithDocuRefOnProperty() { + + Customer customer = new Customer(); + customer.id = new ObjectId(); + customer.name = "c-name"; + + Update update = new Update().set("customers", Arrays.asList(customer)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate) + .isEqualTo(new org.bson.Document("$set", new org.bson.Document("customers", Arrays.asList("c-name")))); + } + + @Test // GH-3921 + void mapNumericKeyInPathHavingComplexMapValyeTypes() { + + Update update = new Update().set("testInnerData.testMap.1.intValue", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(TestData.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.testMap.1.intValue': '4' }}"); + } + + @Test // GH-3921 + void mapNumericKeyInPathNotMatchingExistingProperties() { + + Update update = new Update().set("testInnerData.imaginaryMap.1.nonExistingProperty", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(TestData.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.imaginaryMap.1.nonExistingProperty': '4' }}"); + } + + @Test // GH-3921 + void mapNumericKeyInPathPartiallyMatchingExistingProperties() { + + Update update = new Update().set("testInnerData.testMap.1.nonExistingProperty.2.someValue", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(TestData.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.testMap.1.nonExistingProperty.2.someValue': '4' }}"); + } + + @Test // GH-3596 + void updateConsidersValueConverterWhenPresent() { + + Update update = new Update().set("text", "value"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set : { 'text' : 'eulav' } }"); } static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes { @@ -900,7 +1375,7 @@ static class WrapperAroundInterfaceType { } @org.springframework.data.mongodb.core.mapping.Document(collection = "DocumentWithReferenceToInterface") - static interface DocumentWithReferenceToInterface { + interface DocumentWithReferenceToInterface { String getId(); @@ -908,7 +1383,7 @@ static interface DocumentWithReferenceToInterface { } - static interface InterfaceDocumentDefinitionWithoutId { + interface InterfaceDocumentDefinitionWithoutId { String getValue(); } @@ -918,7 +1393,7 @@ static class InterfaceDocumentDefinitionImpl implements InterfaceDocumentDefinit @Id String id; String value; - public InterfaceDocumentDefinitionImpl(String id, String value) { + InterfaceDocumentDefinitionImpl(String id, String value) { this.id = id; this.value = value; @@ -957,12 +1432,12 @@ public InterfaceDocumentDefinitionWithoutId getReferencedDocument() { } - static interface Model {} + interface Model {} static class ModelImpl implements Model { public int value; - public ModelImpl(int value) { + ModelImpl(int value) { this.value = value; } @@ -987,7 +1462,7 @@ static class ListModel { List values; - public ListModel(String... values) { + ListModel(String... 
values) { this.values = Arrays.asList(values); } } @@ -1016,7 +1491,7 @@ static abstract class AbstractChildClass { String otherValue; AbstractChildClass someObject; - public AbstractChildClass(String id, String value) { + AbstractChildClass(String id, String value) { this.id = id; this.value = value; this.otherValue = "other_" + value; @@ -1025,7 +1500,7 @@ public AbstractChildClass(String id, String value) { static class ConcreteChildClass extends AbstractChildClass { - public ConcreteChildClass(String id, String value) { + ConcreteChildClass(String id, String value) { super(id, value); } } @@ -1041,7 +1516,7 @@ public List getCollectionOfNestedEntities() { static class NestedEntity { String name; - public NestedEntity(String name) { + NestedEntity(String name) { super(); this.name = name; } @@ -1049,11 +1524,11 @@ public NestedEntity(String name) { } @WritingConverter - static class NestedEntityWriteConverter implements Converter { + static class NestedEntityWriteConverter implements Converter { @Override - public DBObject convert(NestedEntity source) { - return new BasicDBObject(); + public Document convert(NestedEntity source) { + return new Document(); } } @@ -1087,7 +1562,7 @@ static class NestedDocument { String name; - public NestedDocument(String name) { + NestedDocument(String name) { super(); this.name = name; } @@ -1099,9 +1574,22 @@ static class EntityWithObject { NestedDocument concreteValue; } + static class EntityWithList { + List list; + } + + static class EntityWithListOfIntegers { + List grades; + } + static class EntityWithAliasedObject { @Field("renamed-value") Object value; + Object field; + } + + static class EntityWithMapOfAliased { + Map map; } static class EntityWithObjectMap { @@ -1110,17 +1598,22 @@ static class EntityWithObjectMap { Map concreteMap; } + static class EntityWithIntKeyedMap { + Map intKeyedMap; + } + static class ClassWithEnum { Allocation allocation; + Map enumAsMapKey; - static enum Allocation { + enum Allocation { AVAILABLE("V"), ALLOCATED("A"); String code; - private Allocation(String code) { + Allocation(String code) { this.code = code; } @@ -1136,7 +1629,7 @@ public static Allocation of(String code) { } } - static enum AllocationToStringConverter implements Converter { + enum AllocationToStringConverter implements Converter { INSTANCE; @@ -1146,7 +1639,7 @@ public String convert(Allocation source) { } } - static enum StringToAllocationConverter implements Converter { + enum StringToAllocationConverter implements Converter { INSTANCE; @@ -1167,4 +1660,174 @@ static class SimpleValueHolder { Integer intValue; int primIntValue; } + + static class Outer { + List concreteInnerList; + } + + static class ConcreteInner { + List interfaceTypeList; + List abstractTypeList; + List concreteTypeList; + } + + interface SomeInterfaceType { + + } + + static abstract class SomeAbstractType { + + } + + static class SomeInterfaceImpl extends SomeAbstractType implements SomeInterfaceType { + + String value; + + public SomeInterfaceImpl() {} + + public SomeInterfaceImpl(String value) { + this.value = value; + } + } + + static class TypeWithFieldNameThatCannotBeDecapitalized { + + @Id protected String id; + + @Field("AValue") private Long aValue = 0L; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Long getaValue() { + return aValue; + } + + public void setaValue(Long aValue) { + this.aValue = aValue; + } + } + + static class WrapperAroundWithUnwrapped { + + String someValue; + WithUnwrapped 
withUnwrapped; + WithPrefixedUnwrapped withPrefixedUnwrapped; + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + } + + static class WithPrefixedUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") UnwrappableType unwrappedValue; + } + + static class UnwrappableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Transient // + String transientValue; + } + + static class EntityWithNestedMap { + Map>> levelOne; + Map>> levelOne2; + } + + static class Customer { + + @Id private ObjectId id; + private String name; + } + + static class Sample { + + @Id private String foo; + } + + static class WithDocumentReference { + + private ObjectId id; + + private String name; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private Customer customer; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private List customers; + + @DocumentReference private Sample sample; + + @DocumentReference private List samples; + } + + private static class TestData { + + @Id private String id; + private TestInnerData testInnerData; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public TestInnerData getTestInnerData() { + return testInnerData; + } + + public void setTestInnerData(TestInnerData testInnerData) { + this.testInnerData = testInnerData; + } + } + + private static class TestInnerData { + + private Map testMap; + + public Map getTestMap() { + return testMap; + } + + public void setTestMap(Map testMap) { + this.testMap = testMap; + } + } + + private static class TestValue { + + private int intValue; + + public int getIntValue() { + return intValue; + } + + public void setIntValue(int intValue) { + this.intValue = intValue; + } + } + + static class WithPropertyValueConverter { + + @ValueConverter(ReversingValueConverter.class) + String text; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/AbstractEncryptionTestBase.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/AbstractEncryptionTestBase.java new file mode 100644 index 0000000000..083221053d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/AbstractEncryptionTestBase.java @@ -0,0 +1,756 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.security.SecureRandom; +import java.time.LocalDate; +import java.time.Month; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.assertj.core.api.Assertions; +import org.bson.BsonBinary; +import org.bson.Document; +import org.bson.types.Binary; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.dao.PermissionDeniedDataAccessException; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.util.Lazy; + +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoNamespace; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.Indexes; +import com.mongodb.client.model.vault.DataKeyOptions; +import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.client.vault.ClientEncryptions; + +/** + * @author Christoph Strobl + * @author Julia Lee + */ +public abstract class AbstractEncryptionTestBase { + + @Autowired MongoTemplate template; + + @Test // GH-4284 + void encryptAndDecryptSimpleValue() { + + Person source = new Person(); + source.id = "id-1"; + source.ssn = "mySecretSSN"; + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4432 + void encryptAndDecryptJavaTime() { + + Person source = new Person(); + source.id = "id-1"; + source.today = LocalDate.of(1979, Month.SEPTEMBER, 18); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("today")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptComplexValue() { + + Person source = new Person(); + source.id = "id-1"; + source.address = new Address(); + 
source.address.city = "NYC"; + source.address.street = "4th Ave."; + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptValueWithinComplexOne() { + + Person source = new Person(); + source.id = "id-1"; + source.encryptedZip = new AddressWithEncryptedZip(); + source.encryptedZip.city = "Boston"; + source.encryptedZip.street = "central square"; + source.encryptedZip.zip = "1234567890"; + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> { + assertThat(it.get("encryptedZip")).isInstanceOf(Document.class); + assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class); + }) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptListOfSimpleValue() { + + Person source = new Person(); + source.id = "id-1"; + source.listOfString = Arrays.asList("spring", "data", "mongodb"); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("listOfString")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptListOfComplexValue() { + + Person source = new Person(); + source.id = "id-1"; + + Address address = new Address(); + address.city = "SFO"; + address.street = "---"; + + source.listOfComplex = Collections.singletonList(address); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("listOfComplex")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptMapOfSimpleValues() { + + Person source = new Person(); + source.id = "id-1"; + source.mapOfString = Map.of("k1", "v1", "k2", "v2"); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("mapOfString")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptMapOfComplexValues() { + + Person source = new Person(); + source.id = "id-1"; + + Address address1 = new Address(); + address1.city = "SFO"; + address1.street = "---"; + + Address address2 = new Address(); + address2.city = "NYC"; + address2.street = "---"; + + source.mapOfComplex = Map.of("a1", address1, "a2", address2); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("mapOfComplex")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void canQueryDeterministicallyEncrypted() { + + Person source = new Person(); + source.id = "id-1"; + source.ssn = "mySecretSSN"; + + template.save(source); + + Person loaded = template.query(Person.class).matching(where("ssn").is(source.ssn)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4284 + void cannotQueryRandomlyEncrypted() { + + Person source = new Person(); + source.id = "id-1"; + source.wallet = "secret-wallet-id"; + + template.save(source); + + Person loaded = 
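/*
 * The queryability contrast in the two tests around here is inherent to client-side
 * field level encryption: the Deterministic algorithm encrypts equal plaintexts to equal
 * ciphertexts, so the equality filter on "ssn" can be rewritten and matched server-side,
 * while the Random algorithm produces a fresh ciphertext on every write, so the filter
 * on "wallet" can never match and firstValue() yields null. Fixture sketch:
 *
 * @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic)
 * String ssn;    // equality-queryable
 *
 * @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random)
 * String wallet; // not queryable
 */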
template.query(Person.class).matching(where("wallet").is(source.wallet)).firstValue(); + assertThat(loaded).isNull(); + } + + @Test // GH-4284 + void updateSimpleTypeEncryptedFieldWithNewValue() { + + Person source = new Person(); + source.id = "id-1"; + + template.save(source); + + template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("ssn", "secret-value")) + .first(); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) // + .loadedMatches(it -> assertThat(it.getSsn()).isEqualTo("secret-value")); + } + + @Test // GH-4284 + void updateComplexTypeEncryptedFieldWithNewValue() { + + Person source = new Person(); + source.id = "id-1"; + + template.save(source); + + Address address = new Address(); + address.city = "SFO"; + address.street = "---"; + + template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("address", address)).first(); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) // + .loadedMatches(it -> assertThat(it.getAddress()).isEqualTo(address)); + } + + @Test // GH-4284 + void updateEncryptedFieldInNestedElementWithNewValue() { + + Person source = new Person(); + source.id = "id-1"; + source.encryptedZip = new AddressWithEncryptedZip(); + source.encryptedZip.city = "Boston"; + source.encryptedZip.street = "central square"; + + template.save(source); + + template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("encryptedZip.zip", "179")) + .first(); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> { + assertThat(it.get("encryptedZip")).isInstanceOf(Document.class); + assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class); + }) // + .loadedMatches(it -> assertThat(it.getEncryptedZip().getZip()).isEqualTo("179")); + } + + @Test + void aggregationWithMatch() { + + Person person = new Person(); + person.id = "id-1"; + person.name = "p1-name"; + person.ssn = "mySecretSSN"; + + template.save(person); + + AggregationResults aggregationResults = template.aggregateAndReturn(Person.class) + .by(newAggregation(Person.class, Aggregation.match(where("ssn").is(person.ssn)))).all(); + assertThat(aggregationResults.getMappedResults()).containsExactly(person); + } + + @Test + void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException { + + BsonBinary user1key = mongoClientEncryption.getClientEncryption().createDataKey("local", + new DataKeyOptions().keyAltNames(Collections.singletonList("user-1"))); + + BsonBinary user2key = mongoClientEncryption.getClientEncryption().createDataKey("local", + new DataKeyOptions().keyAltNames(Collections.singletonList("user-2"))); + + Person p1 = new Person(); + p1.id = "id-1"; + p1.name = "user-1"; + p1.ssn = "ssn"; + p1.viaAltKeyNameField = "value-1"; + + Person p2 = new Person(); + p2.id = "id-2"; + p2.name = "user-2"; + p2.viaAltKeyNameField = "value-1"; + + Person p3 = new Person(); + p3.id = "id-3"; + p3.name = "user-1"; + p3.viaAltKeyNameField = "value-1"; + + template.save(p1); + template.save(p2); + template.save(p3); + + template.execute(Person.class, collection -> { + 
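/*
 * Key resolution in this test: @ExplicitEncrypted(keyAltName = "/name") does not name a
 * key literally — the leading "/" dereferences the target document's own "name" property,
 * so p1/p3 encrypt with the "user-1" data key and p2 with the "user-2" key created above.
 * Creation sketch, matching the API used in this test:
 *
 * BsonBinary keyId = mongoClientEncryption.getClientEncryption().createDataKey(
 *     "local", new DataKeyOptions().keyAltNames(Collections.singletonList("user-1")));
 */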
collection.find(new Document()); + return null; + }); + + // remove the key and invalidate encrypted data + mongoClientEncryption.getClientEncryption().deleteKey(user2key); + + // clear the 60 second key cache within the mongo client + mongoClientEncryption.destroy(); + + assertThat(template.query(Person.class).matching(where("id").is(p1.id)).firstValue()).isEqualTo(p1); + + assertThatExceptionOfType(PermissionDeniedDataAccessException.class) + .isThrownBy(() -> template.query(Person.class).matching(where("id").is(p2.id)).firstValue()); + } + + SaveAndLoadAssert verifyThat(T source) { + return new SaveAndLoadAssert<>(source); + } + + class SaveAndLoadAssert { + + T source; + Function idProvider; + + SaveAndLoadAssert(T source) { + this.source = source; + } + + SaveAndLoadAssert identifiedBy(Function idProvider) { + this.idProvider = idProvider; + return this; + } + + SaveAndLoadAssert wasSavedAs(Document expected) { + return wasSavedMatching(it -> Assertions.assertThat(it).isEqualTo(expected)); + } + + SaveAndLoadAssert wasSavedMatching(Consumer saved) { + AbstractEncryptionTestBase.this.assertSaved(source, idProvider, saved); + return this; + } + + SaveAndLoadAssert loadedMatches(Consumer expected) { + AbstractEncryptionTestBase.this.assertLoaded(source, idProvider, expected); + return this; + } + + SaveAndLoadAssert loadedIsEqualToSource() { + return loadedIsEqualTo(source); + } + + SaveAndLoadAssert loadedIsEqualTo(T expected) { + return loadedMatches(it -> Assertions.assertThat(it).isEqualTo(expected)); + } + + } + + void assertSaved(T source, Function idProvider, Consumer dbValue) { + + Document savedDocument = template.execute(Person.class, collection -> { + + MongoNamespace namespace = collection.getNamespace(); + + try (MongoClient rawClient = MongoClients.create()) { + return rawClient.getDatabase(namespace.getDatabaseName()).getCollection(namespace.getCollectionName()) + .find(new Document("_id", idProvider.apply(source))).first(); + } + }); + dbValue.accept(savedDocument); + } + + void assertLoaded(T source, Function idProvider, Consumer loadedValue) { + + T loaded = template.query((Class) source.getClass()).matching(where("id").is(idProvider.apply(source))) + .firstValue(); + + loadedValue.accept(loaded); + } + + protected static class EncryptionConfig extends AbstractMongoClientConfiguration { + + @Autowired ApplicationContext applicationContext; + + @Override + protected String getDatabaseName() { + return "fle-test"; + } + + @Bean + public MongoClient mongoClient() { + return super.mongoClient(); + } + + @Override + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + + converterConfigurationAdapter + .registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext)) + .useNativeDriverJavaTimeCodecs(); + } + + @Bean + MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption) { + + Lazy dataKey = Lazy.of(() -> mongoClientEncryption.getClientEncryption().createDataKey("local", + new DataKeyOptions().keyAltNames(Collections.singletonList("mySuperSecretKey")))); + + return new MongoEncryptionConverter(mongoClientEncryption, + EncryptionKeyResolver.annotated((ctx) -> EncryptionKey.keyId(dataKey.get()))); + } + + @Bean + CachingMongoClientEncryption clientEncryption(ClientEncryptionSettings encryptionSettings) { + return new CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings)); + } + + @Bean + ClientEncryptionSettings 
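/*
 * How key resolution is layered by the converter bean above, judging from the resolver
 * unit tests later in this patch: EncryptionKeyResolver.annotated(...) first honors
 * keyAltName/keyId values from @ExplicitEncrypted on the property, then @Encrypted(keyId)
 * on the owning type, and only then consults the given fallback — here the lazily created
 * "mySuperSecretKey" data key. Minimal fallback sketch:
 *
 * EncryptionKeyResolver resolver = EncryptionKeyResolver
 *     .annotated(ctx -> EncryptionKey.keyAltName("mySuperSecretKey"));
 */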
encryptionSettings(MongoClient mongoClient) { + + MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault"); + MongoCollection keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName()) + .getCollection(keyVaultNamespace.getCollectionName()); + keyVaultCollection.drop(); + // Ensure that two data keys cannot share the same keyAltName. + keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"), + new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames"))); + + MongoCollection collection = mongoClient.getDatabase(getDatabaseName()).getCollection("test"); + collection.drop(); // Clear old data + + byte[] localMasterKey = new byte[96]; + new SecureRandom().nextBytes(localMasterKey); + Map> kmsProviders = Map.of("local", Map.of("key", localMasterKey)); + + // Create the ClientEncryption instance + return ClientEncryptionSettings.builder() // + .keyVaultMongoClientSettings( + MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) // + .keyVaultNamespace(keyVaultNamespace.getFullName()) // + .kmsProviders(kmsProviders) // + .build(); + } + } + + static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean { + + static final AtomicReference cache = new AtomicReference<>(); + + CachingMongoClientEncryption(Supplier source) { + super(() -> { + + if (cache.get() != null) { + return cache.get(); + } + + ClientEncryption clientEncryption = source.get(); + cache.set(clientEncryption); + + return clientEncryption; + }); + } + + @Override + public void destroy() { + + ClientEncryption clientEncryption = cache.get(); + if (clientEncryption != null) { + clientEncryption.close(); + cache.set(null); + } + } + } + + @org.springframework.data.mongodb.core.mapping.Document("test") + static class Person { + + String id; + String name; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) // + String ssn; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "mySuperSecretKey") // + String wallet; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // full document must be random + Address address; + + AddressWithEncryptedZip encryptedZip; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random + List listOfString; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random + List
<Address> listOfComplex; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/name") // + String viaAltKeyNameField; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + Map<String, String> mapOfString; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + Map<String, Address> mapOfComplex; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + LocalDate today; + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public String getSsn() { + return this.ssn; + } + + public String getWallet() { + return this.wallet; + } + + public Address getAddress() { + return this.address; + } + + public AddressWithEncryptedZip getEncryptedZip() { + return this.encryptedZip; + } + + public List<String> getListOfString() { + return this.listOfString; + } + + public List<Address> getListOfComplex() { + return this.listOfComplex; + } + + public String getViaAltKeyNameField() { + return this.viaAltKeyNameField; + } + + public Map<String, String> getMapOfString() { + return this.mapOfString; + } + + public Map<String, Address> getMapOfComplex() { + return this.mapOfComplex; + } + + public LocalDate getToday() { + return today; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setSsn(String ssn) { + this.ssn = ssn; + } + + public void setWallet(String wallet) { + this.wallet = wallet; + } + + public void setAddress(Address address) { + this.address = address; + } + + public void setEncryptedZip(AddressWithEncryptedZip encryptedZip) { + this.encryptedZip = encryptedZip; + } + + public void setListOfString(List<String> listOfString) { + this.listOfString = listOfString; + } + + public void setListOfComplex(List<Address>
                    listOfComplex) { + this.listOfComplex = listOfComplex; + } + + public void setViaAltKeyNameField(String viaAltKeyNameField) { + this.viaAltKeyNameField = viaAltKeyNameField; + } + + public void setMapOfString(Map mapOfString) { + this.mapOfString = mapOfString; + } + + public void setMapOfComplex(Map mapOfComplex) { + this.mapOfComplex = mapOfComplex; + } + + public void setToday(LocalDate today) { + this.today = today; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(name, person.name) && Objects.equals(ssn, person.ssn) + && Objects.equals(wallet, person.wallet) && Objects.equals(address, person.address) + && Objects.equals(encryptedZip, person.encryptedZip) && Objects.equals(listOfString, person.listOfString) + && Objects.equals(listOfComplex, person.listOfComplex) + && Objects.equals(viaAltKeyNameField, person.viaAltKeyNameField) + && Objects.equals(mapOfString, person.mapOfString) && Objects.equals(mapOfComplex, person.mapOfComplex) + && Objects.equals(today, person.today); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, ssn, wallet, address, encryptedZip, listOfString, listOfComplex, viaAltKeyNameField, + mapOfString, mapOfComplex, today); + } + + public String toString() { + return "EncryptionTests.Person(id=" + this.getId() + ", name=" + this.getName() + ", ssn=" + this.getSsn() + + ", wallet=" + this.getWallet() + ", address=" + this.getAddress() + ", encryptedZip=" + + this.getEncryptedZip() + ", listOfString=" + this.getListOfString() + ", listOfComplex=" + + this.getListOfComplex() + ", viaAltKeyNameField=" + this.getViaAltKeyNameField() + ", mapOfString=" + + this.getMapOfString() + ", mapOfComplex=" + this.getMapOfComplex() + ", today=" + this.getToday() + ")"; + } + } + + static class Address { + String city; + String street; + + public Address() {} + + public String getCity() { + return this.city; + } + + public String getStreet() { + return this.street; + } + + public void setCity(String city) { + this.city = city; + } + + public void setStreet(String street) { + this.street = street; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(city, address.city) && Objects.equals(street, address.street); + } + + @Override + public int hashCode() { + return Objects.hash(city, street); + } + + public String toString() { + return "EncryptionTests.Address(city=" + this.getCity() + ", street=" + this.getStreet() + ")"; + } + } + + static class AddressWithEncryptedZip extends Address { + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) String zip; + + @Override + public String toString() { + return "AddressWithEncryptedZip{" + "zip='" + zip + '\'' + ", city='" + getCity() + '\'' + ", street='" + + getStreet() + '\'' + '}'; + } + + public String getZip() { + return this.zip; + } + + public void setZip(String zip) { + this.zip = zip; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/BypassAutoEncryptionTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/BypassAutoEncryptionTest.java new file mode 100644 index 0000000000..3aab3a7485 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/BypassAutoEncryptionTest.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.encryption; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Encryption tests for client having {@link AutoEncryptionSettings#isBypassAutoEncryption()}. + * + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = BypassAutoEncryptionTest.Config.class) +public class BypassAutoEncryptionTest extends AbstractEncryptionTestBase { + + @Disabled + @Override + void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException { + super.altKeyDetection(mongoClientEncryption); + } + + @Configuration + static class Config extends EncryptionConfig { + + @Override + protected void configureClientSettings(Builder builder) { + + MongoClient mongoClient = MongoClients.create(); + ClientEncryptionSettings clientEncryptionSettings = encryptionSettings(mongoClient); + mongoClient.close(); + + builder.autoEncryptionSettings(AutoEncryptionSettings.builder() // + .kmsProviders(clientEncryptionSettings.getKmsProviders()) // + .keyVaultNamespace(clientEncryptionSettings.getKeyVaultNamespace()) // + .bypassAutoEncryption(true).build()); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolverUnitTests.java new file mode 100644 index 0000000000..eeb4df2275 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolverUnitTests.java @@ -0,0 +1,248 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*; + +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.function.Function; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; + +/** + * Unit tests for {@link EncryptionKeyResolver}. + * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class EncryptionKeyResolverUnitTests { + + @Mock // + EncryptionKeyResolver fallbackKeyResolver; + + MongoTestMappingContext mappingContext = MongoTestMappingContext.newTestContext().init(); + + EncryptionKey defaultEncryptionKey = EncryptionKey + .keyId(new BsonBinary("super-secret".getBytes(StandardCharsets.UTF_8))); + + @BeforeEach + void beforeEach() { + when(fallbackKeyResolver.getKey(any())).thenReturn(defaultEncryptionKey); + } + + @Test // GH-4284 + void usesDefaultKeyIfNoAnnotationPresent() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getNotAnnotated); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isSameAs(defaultEncryptionKey); + } + + @Test // GH-4284 + void usesDefaultKeyIfAnnotatedValueIsEmpty() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getAlgorithm); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isSameAs(defaultEncryptionKey); + } + + @Test // GH-4284 + void usesDefaultAltKeyNameIfPresent() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getAlgorithmAndAltKeyName); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyAltName("sec-key-name")); + } + + @Test // GH-4284 + void readsAltKeyNameFromContextIfReferencingPropertyValue() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getAlgorithmAndAltKeyNameFromPropertyValue); + when(ctx.lookupValue(eq("notAnnotated"))).thenReturn("born-to-be-wild"); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyAltName("born-to-be-wild")); + } + + @Test // GH-4284 + void readsKeyIdFromEncryptedAnnotationIfNoBetterCandidateAvailable() { + + EncryptionContext ctx = prepareEncryptionContext( + 
AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType.class, + AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType::getKeyIdFromDomainType); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyId( + new BsonBinary(BsonBinarySubType.UUID_STANDARD, Base64.getDecoder().decode("xKVup8B1Q+CkHaVRx+qa+g==")))); + } + + @Test // GH-4284 + void ignoresKeyIdFromEncryptedAnnotationWhenBetterCandidateAvailable() { + + EncryptionContext ctx = prepareEncryptionContext(KeyIdFromSpel.class, KeyIdFromSpel::getKeyIdFromDomainType); + + StandardEvaluationContext evaluationContext = new StandardEvaluationContext(); + evaluationContext.setVariable("myKeyId", "xKVup8B1Q+CkHaVRx+qa+g=="); + + when(ctx.getEvaluationContext(any())).thenReturn(evaluationContext); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyId( + new BsonBinary(BsonBinarySubType.UUID_STANDARD, Base64.getDecoder().decode("xKVup8B1Q+CkHaVRx+qa+g==")))); + } + + private EncryptionContext prepareEncryptionContext(Class type, Function property) { + + EncryptionContext encryptionContext = mock(EncryptionContext.class); + when(encryptionContext.getProperty()).thenReturn(mappingContext.getPersistentPropertyFor(type, property)); + return encryptionContext; + } + + class AnnotatedWithExplicitlyEncrypted { + + String notAnnotated; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + String algorithm; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "sec-key-name") // + String algorithmAndAltKeyName; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/notAnnotated") // + String algorithmAndAltKeyNameFromPropertyValue; + + public String getNotAnnotated() { + return this.notAnnotated; + } + + public String getAlgorithm() { + return this.algorithm; + } + + public String getAlgorithmAndAltKeyName() { + return this.algorithmAndAltKeyName; + } + + public String getAlgorithmAndAltKeyNameFromPropertyValue() { + return this.algorithmAndAltKeyNameFromPropertyValue; + } + + public void setNotAnnotated(String notAnnotated) { + this.notAnnotated = notAnnotated; + } + + public void setAlgorithm(String algorithm) { + this.algorithm = algorithm; + } + + public void setAlgorithmAndAltKeyName(String algorithmAndAltKeyName) { + this.algorithmAndAltKeyName = algorithmAndAltKeyName; + } + + public void setAlgorithmAndAltKeyNameFromPropertyValue(String algorithmAndAltKeyNameFromPropertyValue) { + this.algorithmAndAltKeyNameFromPropertyValue = algorithmAndAltKeyNameFromPropertyValue; + } + + public String toString() { + return "EncryptionKeyResolverUnitTests.AnnotatedWithExplicitlyEncrypted(notAnnotated=" + this.getNotAnnotated() + + ", algorithm=" + this.getAlgorithm() + ", algorithmAndAltKeyName=" + this.getAlgorithmAndAltKeyName() + + ", algorithmAndAltKeyNameFromPropertyValue=" + this.getAlgorithmAndAltKeyNameFromPropertyValue() + ")"; + } + } + + @Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==") + class AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType { + + @ExplicitEncrypted // + String keyIdFromDomainType; + + @ExplicitEncrypted(keyAltName = "sec-key-name") // + String altKeyNameFromPropertyIgnoringKeyIdFromDomainType; + + public AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType() {} + + public 
String getKeyIdFromDomainType() { + return this.keyIdFromDomainType; + } + + public String getAltKeyNameFromPropertyIgnoringKeyIdFromDomainType() { + return this.altKeyNameFromPropertyIgnoringKeyIdFromDomainType; + } + + public void setKeyIdFromDomainType(String keyIdFromDomainType) { + this.keyIdFromDomainType = keyIdFromDomainType; + } + + public void setAltKeyNameFromPropertyIgnoringKeyIdFromDomainType( + String altKeyNameFromPropertyIgnoringKeyIdFromDomainType) { + this.altKeyNameFromPropertyIgnoringKeyIdFromDomainType = altKeyNameFromPropertyIgnoringKeyIdFromDomainType; + } + + public String toString() { + return "EncryptionKeyResolverUnitTests.AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType(keyIdFromDomainType=" + + this.getKeyIdFromDomainType() + ", altKeyNameFromPropertyIgnoringKeyIdFromDomainType=" + + this.getAltKeyNameFromPropertyIgnoringKeyIdFromDomainType() + ")"; + } + } + + @Encrypted(keyId = "#{#myKeyId}") + class KeyIdFromSpel { + + @ExplicitEncrypted // + String keyIdFromDomainType; + + public String getKeyIdFromDomainType() { + return this.keyIdFromDomainType; + } + + public void setKeyIdFromDomainType(String keyIdFromDomainType) { + this.keyIdFromDomainType = keyIdFromDomainType; + } + + public String toString() { + return "EncryptionKeyResolverUnitTests.KeyIdFromSpel(keyIdFromDomainType=" + this.getKeyIdFromDomainType() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyUnitTests.java new file mode 100644 index 0000000000..40b0753b80 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyUnitTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; + +import java.util.UUID; + +import org.bson.BsonBinary; +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link EncryptionKey}. 
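+ * <p>
+ * The expectation pinned down below is that {@code toString()} never reveals the full key material: only a short
+ * prefix survives, e.g. {@code EncryptionKey.keyAltName("super-secret-key")} is rendered with something like
+ * {@code "sup***"} in place of the full name.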
+ * + * @author Christoph Strobl + */ +class EncryptionKeyUnitTests { + + @Test // GH-4284 + void keyIdToStringDoesNotRevealEntireKey() { + + UUID uuid = UUID.randomUUID(); + + assertThat(EncryptionKey.keyId(new BsonBinary(uuid, UuidRepresentation.STANDARD)).toString()) + .contains(uuid.toString().substring(0, 6) + "***"); + } + + @Test // GH-4284 + void altKeyNameToStringDoesNotRevealEntireKey() { + + assertThat(EncryptionKey.keyAltName("s").toString()).contains("***"); + assertThat(EncryptionKey.keyAltName("su").toString()).contains("***"); + assertThat(EncryptionKey.keyAltName("sup").toString()).contains("***"); + assertThat(EncryptionKey.keyAltName("super-secret-key").toString()).contains("sup***"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionTests.java new file mode 100644 index 0000000000..3e840ed858 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionTests.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +/** + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = AbstractEncryptionTestBase.EncryptionConfig.class) +public class EncryptionTests extends AbstractEncryptionTestBase { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryptionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryptionUnitTests.java new file mode 100644 index 0000000000..825645d86c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryptionUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
+package org.springframework.data.mongodb.core.encryption;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.mockito.Mockito.*;
+import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*;
+
+import java.util.function.Supplier;
+
+import org.bson.BsonBinary;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import com.mongodb.client.model.vault.EncryptOptions;
+import com.mongodb.client.vault.ClientEncryption;
+
+/**
+ * Unit tests for {@link MongoClientEncryption}.
+ *
+ * @author Christoph Strobl
+ */
+@ExtendWith(MockitoExtension.class)
+class MongoClientEncryptionUnitTests {
+
+	@Mock //
+	ClientEncryption clientEncryption;
+
+	@Test // GH-4284
+	void delegatesDecrypt() {
+
+		MongoClientEncryption mce = MongoClientEncryption.just(clientEncryption);
+		mce.decrypt(new BsonBinary(new byte[0]));
+
+		verify(clientEncryption).decrypt(Mockito.any());
+	}
+
+	@Test // GH-4284
+	void delegatesEncrypt() {
+
+		MongoClientEncryption mce = MongoClientEncryption.just(clientEncryption);
+		mce.encrypt(new BsonBinary(new byte[0]),
+				new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Random, EncryptionKey.keyAltName("sec-key-name")));
+
+		ArgumentCaptor<EncryptOptions> options = ArgumentCaptor.forClass(EncryptOptions.class);
+		verify(clientEncryption).encrypt(any(), options.capture());
+		assertThat(options.getValue().getAlgorithm()).isEqualTo(AEAD_AES_256_CBC_HMAC_SHA_512_Random);
+		assertThat(options.getValue().getKeyAltName()).isEqualTo("sec-key-name");
+	}
+
+	@Test // GH-4284
+	void refreshObtainsNextInstanceFromSupplier() {
+
+		ClientEncryption next = mock(ClientEncryption.class);
+
+		MongoClientEncryption mce = new MongoClientEncryption(new Supplier<>() {
+
+			int counter = 0;
+
+			@Override
+			public ClientEncryption get() {
+				return counter++ % 2 == 0 ? clientEncryption : next;
+			}
+		});
+
+		assertThat(mce.getClientEncryption()).isSameAs(clientEncryption);
+		assertThat(mce.getClientEncryption()).isSameAs(next);
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoEncryptionConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoEncryptionConverterUnitTests.java
new file mode 100644
index 0000000000..4e76346e56
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoEncryptionConverterUnitTests.java
@@ -0,0 +1,373 @@
+/*
+ * Copyright 2023-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.encryption;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.*;
+import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*;
+
+import java.util.List;
+import java.util.Map;
+
+import org.bson.BsonArray;
+import org.bson.BsonBinary;
+import org.bson.BsonString;
+import org.bson.BsonValue;
+import org.bson.Document;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.mockito.junit.jupiter.MockitoSettings;
+import org.mockito.quality.Strictness;
+import org.springframework.data.mongodb.core.convert.MongoConversionContext;
+import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter;
+import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+import org.springframework.data.mongodb.test.util.MongoTestMappingContext;
+
+/**
+ * Unit tests for {@link MongoEncryptionConverter}.
+ *
+ * @author Christoph Strobl
+ */
+@ExtendWith(MockitoExtension.class)
+@MockitoSettings(strictness = Strictness.LENIENT)
+class MongoEncryptionConverterUnitTests {
+
+	@Mock //
+	Encryption<BsonValue, BsonBinary> encryption;
+
+	@Mock //
+	EncryptionKeyResolver fallbackKeyResolver;
+
+	@Mock //
+	MongoConversionContext conversionContext;
+
+	MongoTestMappingContext mappingContext = MongoTestMappingContext.newTestContext();
+	EncryptionKeyResolver keyResolver;
+	MongoEncryptionConverter converter;
+
+	@Captor ArgumentCaptor<EncryptionOptions> encryptionOptions;
+
+	@Captor ArgumentCaptor<BsonValue> valueToBeEncrypted;
+
+	@BeforeEach
+	void beforeEach() {
+
+		when(fallbackKeyResolver.getKey(any())).thenReturn(EncryptionKey.keyAltName("default"));
+		when(encryption.encrypt(valueToBeEncrypted.capture(), encryptionOptions.capture()))
+				.thenReturn(new BsonBinary(new byte[0]));
+		keyResolver = EncryptionKeyResolver.annotated(fallbackKeyResolver);
+		converter = new MongoEncryptionConverter(encryption, keyResolver);
+	}
+
+	@Test // GH-4284
+	void delegatesConversionOfSimpleValueWithDefaultEncryptionKeyFromKeyResolver() {
+
+		when(conversionContext.getProperty())
+				.thenReturn(mappingContext.getPersistentPropertyFor(Type.class, Type::getStringValueWithAlgorithmOnly));
+
+		converter.write("foo", conversionContext);
+
+		assertThat(valueToBeEncrypted.getValue()).isEqualTo(new BsonString("foo"));
+		assertThat(encryptionOptions.getValue()).isEqualTo(
+				new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, EncryptionKey.keyAltName("default")));
+	}
+
+	@Test // GH-4284
+	void favorsAltKeyNameIfPresent() {
+
+		when(conversionContext.getProperty()).thenReturn(
+				mappingContext.getPersistentPropertyFor(Type.class, Type::getStringValueWithAlgorithmAndAltKeyName));
+
+		converter.write("foo", conversionContext);
+
+		assertThat(encryptionOptions.getValue()).isEqualTo(
+				new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Random, EncryptionKey.keyAltName("sec-key-name")));
+	}
+
+	@Test // GH-4284
+	void readsAltKeyNameFromProperty() {
+
+		when(conversionContext.getProperty()).thenReturn(mappingContext.getPersistentPropertyFor(Type.class,
+				Type::getStringValueWithAlgorithmAndAltKeyNameFromPropertyValue));
+
+		ArgumentCaptor<String> path = ArgumentCaptor.forClass(String.class);
+		when(conversionContext.getValue(path.capture())).thenReturn("(ツ)");
+
+		converter.write("foo", conversionContext);
+		assertThat(path.getValue()).isEqualTo("notAnnotated");
+
+		assertThat(encryptionOptions.getValue())
+				.isEqualTo(new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Random, EncryptionKey.keyAltName("(ツ)")));
+	}
+
+	@Test // GH-4284
+	void delegatesConversionOfEntityTypes() {
+
+		Document convertedValue = new Document("unencryptedValue", "nested-unencrypted");
+		MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class,
+				Type::getNestedFullyEncrypted);
+		when(conversionContext.getProperty()).thenReturn(property);
+		doReturn(convertedValue).when(conversionContext).write(any(), eq(property.getTypeInformation()));
+
+		ArgumentCaptor<String> path = ArgumentCaptor.forClass(String.class);
+		when(conversionContext.getValue(path.capture())).thenReturn("(ツ)");
+
+		JustATypeWithAnUnencryptedField source = new JustATypeWithAnUnencryptedField();
+		source.unencryptedValue = "nested-unencrypted";
+
+		converter.write(source, conversionContext);
+
+		assertThat(valueToBeEncrypted.getValue()).isEqualTo(convertedValue.toBsonDocument());
+	}
+
+	@Test // GH-4284
+	void listsOfSimpleTypesAreConvertedEntirely() {
+
+		MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getListOfString);
+		when(conversionContext.getProperty()).thenReturn(property);
+
+		converter.write(List.of("one", "two"), conversionContext);
+
+		assertThat(valueToBeEncrypted.getValue())
+				.isEqualTo(new BsonArray(List.of(new BsonString("one"), new BsonString("two"))));
+	}
+
+	@Test // GH-4284
+	void listsOfComplexTypesAreConvertedEntirely() {
+
+		Document convertedValue1 = new Document("unencryptedValue", "nested-unencrypted-1");
+		Document convertedValue2 = new Document("unencryptedValue", "nested-unencrypted-2");
+
+		MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getListOfComplex);
+		when(conversionContext.getProperty()).thenReturn(property);
+		doReturn(convertedValue1, convertedValue2).when(conversionContext).write(any(), eq(property.getTypeInformation()));
+
+		JustATypeWithAnUnencryptedField source1 = new JustATypeWithAnUnencryptedField();
+		source1.unencryptedValue = "nested-unencrypted-1";
+
+		JustATypeWithAnUnencryptedField source2 = new JustATypeWithAnUnencryptedField();
+		source2.unencryptedValue = "nested-unencrypted-2";
+
+		converter.write(List.of(source1, source2), conversionContext);
+
+		assertThat(valueToBeEncrypted.getValue())
+				.isEqualTo(new BsonArray(List.of(convertedValue1.toBsonDocument(), convertedValue2.toBsonDocument())));
+	}
+
+	@Test // GH-4284
+	void simpleMapsAreConvertedEntirely() {
+
+		MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getMapOfString);
+		when(conversionContext.getProperty()).thenReturn(property);
+		doReturn(new Document("k1", "v1").append("k2", "v2")).when(conversionContext).write(any(),
+				eq(property.getTypeInformation()));
+
+		converter.write(Map.of("k1", "v1", "k2", "v2"), conversionContext);
+
+		assertThat(valueToBeEncrypted.getValue())
+				.isEqualTo(new Document("k1", new BsonString("v1")).append("k2", new BsonString("v2")).toBsonDocument());
+	}
+
+	@Test // GH-4284
+	void complexMapsAreConvertedEntirely() {
+
+		Document convertedValue1 = new Document("unencryptedValue", "nested-unencrypted-1");
+		Document convertedValue2 = new Document("unencryptedValue", "nested-unencrypted-2");
+
+		MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getMapOfComplex);
+		when(conversionContext.getProperty()).thenReturn(property);
+		doReturn(new Document("k1", convertedValue1).append("k2", convertedValue2)).when(conversionContext).write(any(),
+				eq(property.getTypeInformation()));
+
+		JustATypeWithAnUnencryptedField source1 = new JustATypeWithAnUnencryptedField();
+		source1.unencryptedValue = "nested-unencrypted-1";
+
+		JustATypeWithAnUnencryptedField source2 = new JustATypeWithAnUnencryptedField();
+		source2.unencryptedValue = "nested-unencrypted-2";
+
+		converter.write(Map.of("k1", source1, "k2", source2), conversionContext);
+
+		assertThat(valueToBeEncrypted.getValue()).isEqualTo(new Document("k1", convertedValue1.toBsonDocument())
+				.append("k2", convertedValue2.toBsonDocument()).toBsonDocument());
+	}
+
+	static class Type {
+
+		String notAnnotated;
+
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) //
+		String stringValueWithAlgorithmOnly;
+
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "sec-key-name") //
+		String stringValueWithAlgorithmAndAltKeyName;
+
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/notAnnotated") //
+		String stringValueWithAlgorithmAndAltKeyNameFromPropertyValue;
+
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // full document must be random
+		JustATypeWithAnUnencryptedField nestedFullyEncrypted;
+
+		NestedWithEncryptedField nestedWithEncryptedField;
+
+		// Client-Side Field Level Encryption does not support encrypting individual array elements
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) //
+		List<String> listOfString;
+
+		// Client-Side Field Level Encryption does not support encrypting individual array elements
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random
+		List<JustATypeWithAnUnencryptedField> listOfComplex;
+
+		// just as with a domain type, the entire map is encrypted as a whole
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) //
+		Map<String, String> mapOfString;
+
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) //
+		Map<String, JustATypeWithAnUnencryptedField> mapOfComplex;
+
+		RecordWithEncryptedValue recordWithEncryptedValue;
+
+		List<RecordWithEncryptedValue> listOfRecordWithEncryptedValue;
+
+		public String getNotAnnotated() {
+			return this.notAnnotated;
+		}
+
+		public String getStringValueWithAlgorithmOnly() {
+			return this.stringValueWithAlgorithmOnly;
+		}
+
+		public String getStringValueWithAlgorithmAndAltKeyName() {
+			return this.stringValueWithAlgorithmAndAltKeyName;
+		}
+
+		public String getStringValueWithAlgorithmAndAltKeyNameFromPropertyValue() {
+			return this.stringValueWithAlgorithmAndAltKeyNameFromPropertyValue;
+		}
+
+		public JustATypeWithAnUnencryptedField getNestedFullyEncrypted() {
+			return this.nestedFullyEncrypted;
+		}
+
+		public NestedWithEncryptedField getNestedWithEncryptedField() {
+			return this.nestedWithEncryptedField;
+		}
+
+		public List<String> getListOfString() {
+			return this.listOfString;
+		}
+
+		public List<JustATypeWithAnUnencryptedField> getListOfComplex() {
+			return this.listOfComplex;
+		}
+
+		public Map<String, String> getMapOfString() {
+			return this.mapOfString;
+		}
+
+		public Map<String, JustATypeWithAnUnencryptedField> getMapOfComplex() {
+			return this.mapOfComplex;
+		}
+
+		public RecordWithEncryptedValue getRecordWithEncryptedValue() {
+			return this.recordWithEncryptedValue;
+		}
+
+		public List<RecordWithEncryptedValue> getListOfRecordWithEncryptedValue() {
+			return this.listOfRecordWithEncryptedValue;
+		}
+
+		public void setNotAnnotated(String notAnnotated) {
+			this.notAnnotated = notAnnotated;
+		}
+
+		public void setStringValueWithAlgorithmOnly(String stringValueWithAlgorithmOnly) {
+			this.stringValueWithAlgorithmOnly = stringValueWithAlgorithmOnly;
+		}
+
+		public void setStringValueWithAlgorithmAndAltKeyName(String stringValueWithAlgorithmAndAltKeyName) {
+			this.stringValueWithAlgorithmAndAltKeyName = stringValueWithAlgorithmAndAltKeyName;
+		}
+
+		public void setStringValueWithAlgorithmAndAltKeyNameFromPropertyValue(
+				String stringValueWithAlgorithmAndAltKeyNameFromPropertyValue) {
+			this.stringValueWithAlgorithmAndAltKeyNameFromPropertyValue = stringValueWithAlgorithmAndAltKeyNameFromPropertyValue;
+		}
+
+		public void setNestedFullyEncrypted(JustATypeWithAnUnencryptedField nestedFullyEncrypted) {
+			this.nestedFullyEncrypted = nestedFullyEncrypted;
+		}
+
+		public void setNestedWithEncryptedField(NestedWithEncryptedField nestedWithEncryptedField) {
+			this.nestedWithEncryptedField = nestedWithEncryptedField;
+		}
+
+		public void setListOfString(List<String> listOfString) {
+			this.listOfString = listOfString;
+		}
+
+		public void setListOfComplex(List<JustATypeWithAnUnencryptedField> listOfComplex) {
+			this.listOfComplex = listOfComplex;
+		}
+
+		public void setMapOfString(Map<String, String> mapOfString) {
+			this.mapOfString = mapOfString;
+		}
+
+		public void setMapOfComplex(Map<String, JustATypeWithAnUnencryptedField> mapOfComplex) {
+			this.mapOfComplex = mapOfComplex;
+		}
+
+		public void setRecordWithEncryptedValue(RecordWithEncryptedValue recordWithEncryptedValue) {
+			this.recordWithEncryptedValue = recordWithEncryptedValue;
+		}
+
+		public void setListOfRecordWithEncryptedValue(List<RecordWithEncryptedValue> listOfRecordWithEncryptedValue) {
+			this.listOfRecordWithEncryptedValue = listOfRecordWithEncryptedValue;
+		}
+
+		public String toString() {
+			return "MongoEncryptionConverterUnitTests.Type(notAnnotated=" + this.getNotAnnotated()
+					+ ", stringValueWithAlgorithmOnly=" + this.getStringValueWithAlgorithmOnly()
+					+ ", stringValueWithAlgorithmAndAltKeyName=" + this.getStringValueWithAlgorithmAndAltKeyName()
+					+ ", stringValueWithAlgorithmAndAltKeyNameFromPropertyValue="
+					+ this.getStringValueWithAlgorithmAndAltKeyNameFromPropertyValue() + ", nestedFullyEncrypted="
+					+ this.getNestedFullyEncrypted() + ", nestedWithEncryptedField=" + this.getNestedWithEncryptedField()
+					+ ", listOfString=" + this.getListOfString() + ", listOfComplex=" + this.getListOfComplex() + ", mapOfString="
+					+ this.getMapOfString() + ", mapOfComplex=" + this.getMapOfComplex() + ", recordWithEncryptedValue="
+					+ this.getRecordWithEncryptedValue() + ", listOfRecordWithEncryptedValue="
+					+ this.getListOfRecordWithEncryptedValue() + ")";
+		}
+	}
+
+	static class JustATypeWithAnUnencryptedField {
+
+		String unencryptedValue;
+	}
+
+	static class NestedWithEncryptedField extends JustATypeWithAnUnencryptedField {
+
+		@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) //
+		String encryptedValue;
+	}
+
+	record RecordWithEncryptedValue(@ExplicitEncrypted String value) {
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java
new file mode 100644
index 0000000000..dd9e459e78
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.QueryCharacteristics.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.List; +import java.util.UUID; +import java.util.stream.Stream; + +import org.bson.BsonBinary; +import org.bson.Document; +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for creating collections with encrypted fields. 
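+ * <p>
+ * Both option variants exercised below are expected to result in the same {@code encryptedFields} document on the
+ * server. A minimal sketch of the builder-based variant (collection name and key generation as used in this test):
+ *
+ * <pre class="code">
+ * BsonBinary key1 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD);
+ * CollectionOptions options = CollectionOptions.encryptedCollection(it -> it
+ * 		.queryable(encrypted(int32("encryptedInt")).keys(key1), range().min(5).max(100).contention(1)));
+ * template.createCollection("enc-collection", options);
+ * </pre>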
+ *
+ * @author Christoph Strobl
+ */
+@ExtendWith({ MongoClientExtension.class, SpringExtension.class })
+@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0")
+@ContextConfiguration
+public class MongoQueryableEncryptionCollectionCreationTests {
+
+	public static final String COLLECTION_NAME = "enc-collection";
+	static @Client MongoClient mongoClient;
+
+	@Configuration
+	static class Config extends AbstractMongoClientConfiguration {
+
+		@Override
+		public MongoClient mongoClient() {
+			return mongoClient;
+		}
+
+		@Override
+		protected String getDatabaseName() {
+			return "encryption-schema-tests";
+		}
+
+	}
+
+	@Autowired MongoTemplate template;
+
+	@BeforeEach
+	void beforeEach() {
+		template.dropCollection(COLLECTION_NAME);
+	}
+
+	@ParameterizedTest // GH-4185
+	@MethodSource("collectionOptions")
+	public void createsCollectionWithEncryptedFieldsCorrectly(CollectionOptions collectionOptions) {
+
+		template.createCollection(COLLECTION_NAME, collectionOptions);
+
+		Document encryptedFields = readEncryptedFieldsFromDatabase(COLLECTION_NAME);
+		assertThat(encryptedFields).containsKey("fields");
+
+		List<Document> fields = encryptedFields.get("fields", List.of());
+		assertThat(fields.get(0)).containsEntry("path", "encryptedInt") //
+				.containsEntry("bsonType", "int") //
+				.containsEntry("queries", List
+						.of(Document.parse("{'queryType': 'range', 'contention': { '$numberLong' : '1' }, 'min': 5, 'max': 100}")));
+
+		assertThat(fields.get(1)).containsEntry("path", "nested.encryptedLong") //
+				.containsEntry("bsonType", "long") //
+				.containsEntry("queries", List.of(Document.parse(
+						"{'queryType': 'range', 'contention': { '$numberLong' : '0' }, 'min': { '$numberLong' : '-1' }, 'max': { '$numberLong' : '1' }}")));
+	}
+
+	private static Stream<Arguments> collectionOptions() {
+
+		BsonBinary key1 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD);
+		BsonBinary key2 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD);
+
+		CollectionOptions manualOptions = CollectionOptions.encryptedCollection(options -> options //
+				.queryable(encrypted(int32("encryptedInt")).keys(key1), range().min(5).max(100).contention(1)) //
+				.queryable(encrypted(JsonSchemaProperty.int64("nested.encryptedLong")).keys(key2),
+						range().min(-1L).max(1L).contention(0)));
+
+		CollectionOptions schemaOptions = CollectionOptions.encryptedCollection(MongoJsonSchema.builder()
+				.property(
+						queryable(encrypted(int32("encryptedInt")).keyId(key1), List.of(range().min(5).max(100).contention(1))))
+				.property(queryable(encrypted(int64("nested.encryptedLong")).keyId(key2),
+						List.of(range().min(-1L).max(1L).contention(0))))
+				.build());
+
+		return Stream.of(Arguments.of(manualOptions), Arguments.of(schemaOptions));
+	}
+
+	Document readEncryptedFieldsFromDatabase(String collectionName) {
+
+		Document collectionInfo = template
+				.executeCommand(new Document("listCollections", 1).append("filter", new Document("name", collectionName)));
+
+		if (collectionInfo.containsKey("cursor")) {
+			collectionInfo = (Document) collectionInfo.get("cursor", Document.class).get("firstBatch", List.class).iterator()
+					.next();
+		}
+
+		if (!collectionInfo.containsKey("options")) {
+			return new Document();
+		}
+
+		return collectionInfo.get("options", Document.class).get("encryptedFields", Document.class);
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java
new file
mode 100644 index 0000000000..e4e760cc91 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java @@ -0,0 +1,573 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.security.SecureRandom; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +import org.assertj.core.api.Assumptions; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonInt32; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.MongoJsonSchemaCreator; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.mapping.RangeEncrypted; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.data.util.Lazy; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import 
org.springframework.util.StringUtils;
+
+import com.mongodb.AutoEncryptionSettings;
+import com.mongodb.ClientEncryptionSettings;
+import com.mongodb.ConnectionString;
+import com.mongodb.MongoClientSettings;
+import com.mongodb.MongoNamespace;
+import com.mongodb.client.MongoClient;
+import com.mongodb.client.MongoClients;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.MongoDatabase;
+import com.mongodb.client.model.CreateCollectionOptions;
+import com.mongodb.client.model.CreateEncryptedCollectionParams;
+import com.mongodb.client.model.Filters;
+import com.mongodb.client.model.IndexOptions;
+import com.mongodb.client.model.Indexes;
+import com.mongodb.client.model.vault.EncryptOptions;
+import com.mongodb.client.model.vault.RangeOptions;
+import com.mongodb.client.result.UpdateResult;
+import com.mongodb.client.vault.ClientEncryption;
+import com.mongodb.client.vault.ClientEncryptions;
+
+/**
+ * Integration tests for Queryable Encryption with range and equality encrypted fields.
+ *
+ * @author Ross Lawley
+ * @author Christoph Strobl
+ */
+@ExtendWith({ MongoClientExtension.class, SpringExtension.class })
+@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0")
+@EnableIfReplicaSetAvailable
+@ContextConfiguration(classes = RangeEncryptionTests.EncryptionConfig.class)
+class RangeEncryptionTests {
+
+	@Autowired MongoTemplate template;
+	@Autowired MongoClientEncryption clientEncryption;
+	@Autowired EncryptionKeyHolder keyHolder;
+
+	@BeforeEach
+	void clientVersionCheck() {
+		Assumptions.assumeThat(MongoClientVersion.isVersion5orNewer()).isTrue();
+	}
+
+	@AfterEach
+	void tearDown() {
+		template.getDb().getCollection("test").deleteMany(new BsonDocument());
+	}
+
+	@Test // GH-4185
+	void manuallyEncryptedValuesCanBeSavedAndRetrievedCorrectly() {
+
+		EncryptOptions encryptOptions = new EncryptOptions("Range").contentionFactor(1L)
+				.keyId(keyHolder.getEncryptionKey("encryptedInt"))
+				.rangeOptions(new RangeOptions().min(new BsonInt32(0)).max(new BsonInt32(200)).sparsity(1L));
+
+		EncryptOptions encryptExpressionOptions = new EncryptOptions("Range").contentionFactor(1L)
+				.rangeOptions(new RangeOptions().min(new BsonInt32(0)).max(new BsonInt32(200)))
+				.keyId(keyHolder.getEncryptionKey("encryptedInt")).queryType("range");
+
+		EncryptOptions equalityEncOptions = new EncryptOptions("Indexed").contentionFactor(0L)
+				.keyId(keyHolder.getEncryptionKey("age"));
+
+		EncryptOptions equalityEncOptionsString = new EncryptOptions("Indexed").contentionFactor(0L)
+				.keyId(keyHolder.getEncryptionKey("name"));
+
+		Document source = new Document("_id", "id-1");
+
+		source.put("name",
+				clientEncryption.getClientEncryption().encrypt(new BsonString("It's a Me, Mario!"), equalityEncOptionsString));
+		source.put("age", clientEncryption.getClientEncryption().encrypt(new BsonInt32(101), equalityEncOptions));
+		source.put("encryptedInt", clientEncryption.getClientEncryption().encrypt(new BsonInt32(101), encryptOptions));
+		source.put("_class", Person.class.getName());
+
+		template.execute(Person.class, col -> col.insertOne(source));
+
+		Document result = template.execute(Person.class, col -> {
+
+			BsonDocument filterSource = new BsonDocument("encryptedInt", new BsonDocument("$gte", new BsonInt32(100)));
+			BsonDocument filter = clientEncryption.getClientEncryption()
+					.encryptExpression(new Document("$and", List.of(filterSource)), encryptExpressionOptions);
+
+			return col.find(filter).first();
+		});
+
+		assertThat(result).containsEntry("encryptedInt", 101);
+	}
+
+	@Test // GH-4185
+	void canLesserThanEqualMatchRangeEncryptedField() {
+
+		Person source =
createPerson(); + template.insert(source); + + Person loaded = template.query(Person.class).matching(where("encryptedInt").lte(source.encryptedInt)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryMixOfEqualityEncryptedAndUnencrypted() { + + Person source = template.insert(createPerson()); + + Person loaded = template.query(Person.class) + .matching(where("name").is(source.name).and("unencryptedValue").is(source.unencryptedValue)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryMixOfRangeEncryptedAndUnencrypted() { + + Person source = template.insert(createPerson()); + + Person loaded = template.query(Person.class) + .matching(where("encryptedInt").lte(source.encryptedInt).and("unencryptedValue").is(source.unencryptedValue)) + .firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryEqualityEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Person loaded = template.query(Person.class).matching(where("age").is(source.age)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canExcludeSafeContentFromResult() { + + Person source = createPerson(); + template.insert(source); + + Query q = Query.query(where("encryptedLong").lte(1001L).gte(1001L)); + q.fields().exclude("__safeContent__"); + + Person loaded = template.query(Person.class).matching(q).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canRangeMatchRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Query q = Query.query(where("encryptedLong").lte(1001L).gte(1001L)); + Person loaded = template.query(Person.class).matching(q).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canReplaceEntityWithRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + source.encryptedInt = 123; + source.encryptedLong = 9999L; + template.save(source); + + Person loaded = template.query(Person.class).matching(where("id").is(source.id)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canUpdateRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + UpdateResult updateResult = template.update(Person.class).matching(where("id").is(source.id)) + .apply(Update.update("encryptedLong", 5000L)).first(); + assertThat(updateResult.getModifiedCount()).isOne(); + + Person loaded = template.query(Person.class).matching(where("id").is(source.id)).firstValue(); + assertThat(loaded.encryptedLong).isEqualTo(5000L); + } + + @Test // GH-4185 + void errorsWhenUsingNonRangeOperatorEqOnRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + assertThatThrownBy( + () -> template.query(Person.class).matching(where("encryptedInt").is(source.encryptedInt)).firstValue()) + .isInstanceOf(AssertionError.class) + .hasMessageStartingWith("Not a valid range query. 
Querying a range encrypted field but "
+						+ "the query operator '$eq' for field path 'encryptedInt' is not a range query.");
+	}
+
+	@Test // GH-4185
+	void errorsWhenUsingNonRangeOperatorInOnRangeEncryptedField() {
+
+		Person source = createPerson();
+		template.insert(source);
+
+		assertThatThrownBy(
+				() -> template.query(Person.class).matching(where("encryptedLong").in(1001L, 9999L)).firstValue())
+				.isInstanceOf(AssertionError.class)
+				.hasMessageStartingWith("Not a valid range query. Querying a range encrypted field but "
+						+ "the query operator '$in' for field path 'encryptedLong' is not a range query.");
+	}
+
+	private Person createPerson() {
+
+		Person source = new Person();
+		source.id = "id-1";
+		source.unencryptedValue = "y2k";
+		source.name = "it's a me mario!";
+		source.age = 42;
+		source.encryptedInt = 101;
+		source.encryptedLong = 1001L;
+		source.nested = new NestedWithQEFields();
+		source.nested.value = "Luigi time!";
+		return source;
+	}
+
+	protected static class EncryptionConfig extends AbstractMongoClientConfiguration {
+
+		private static final String LOCAL_KMS_PROVIDER = "local";
+
+		private static final Lazy<Map<String, Map<String, Object>>> LAZY_KMS_PROVIDERS = Lazy.of(() -> {
+			byte[] localMasterKey = new byte[96];
+			new SecureRandom().nextBytes(localMasterKey);
+			return Map.of(LOCAL_KMS_PROVIDER, Map.of("key", localMasterKey));
+		});
+
+		@Autowired ApplicationContext applicationContext;
+
+		@Override
+		protected String getDatabaseName() {
+			return "qe-test";
+		}
+
+		@Bean
+		public MongoClient mongoClient() {
+			return super.mongoClient();
+		}
+
+		@Override
+		protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
+			converterConfigurationAdapter
+					.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext))
+					.useNativeDriverJavaTimeCodecs();
+		}
+
+		@Bean
+		EncryptionKeyHolder keyHolder(MongoClientEncryption mongoClientEncryption) {
+
+			Lazy<Map<String, BsonBinary>> lazyDataKeyMap = Lazy.of(() -> {
+				try (MongoClient client = mongoClient()) {
+
+					MongoDatabase database = client.getDatabase(getDatabaseName());
+					database.getCollection("test").drop();
+
+					ClientEncryption clientEncryption = mongoClientEncryption.getClientEncryption();
+
+					MongoJsonSchema personSchema = MongoJsonSchemaCreator.create(new MongoMappingContext()) // init schema creator
+							.filter(MongoJsonSchemaCreator.encryptedOnly()) //
+							.createSchemaFor(Person.class); //
+
+					Document encryptedFields = CollectionOptions.encryptedCollection(personSchema) //
+							.getEncryptedFieldsOptions() //
+							.map(EncryptedFieldsOptions::toDocument) //
+							.orElseThrow();
+
+					CreateCollectionOptions createCollectionOptions = new CreateCollectionOptions()
+							.encryptedFields(encryptedFields);
+
+					BsonDocument local = clientEncryption.createEncryptedCollection(database, "test", createCollectionOptions,
+							new CreateEncryptedCollectionParams(LOCAL_KMS_PROVIDER));
+
+					Map<String, BsonBinary> keyMap = new LinkedHashMap<>();
+					for (Object o : local.getArray("fields")) {
+						if (o instanceof BsonDocument db) {
+							String path = db.getString("path").getValue();
+							BsonBinary binary = db.getBinary("keyId");
+							for (String part : path.split("\\.")) {
+								keyMap.put(part, binary);
+							}
+						}
+					}
+					return keyMap;
+				}
+			});
+
+			return new EncryptionKeyHolder(lazyDataKeyMap);
+		}
+
+		@Bean
+		MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption,
+				EncryptionKeyHolder keyHolder) {
+			return new MongoEncryptionConverter(mongoClientEncryption, EncryptionKeyResolver.annotated((ctx) -> {
+
+				String path = ctx.getProperty().getFieldName();
+
+				if (ctx.getProperty().getMongoField().getName().isPath()) {
+					path = StringUtils.arrayToDelimitedString(ctx.getProperty().getMongoField().getName().parts(), ".");
+				}
+				if (ctx.getOperatorContext() != null) {
+					path = ctx.getOperatorContext().path();
+				}
+				return EncryptionKey.keyId(keyHolder.getEncryptionKey(path));
+			}));
+		}
+
+		@Bean
+		CachingMongoClientEncryption clientEncryption(ClientEncryptionSettings encryptionSettings) {
+			return new CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings));
+		}
+
+		@Override
+		protected void configureClientSettings(MongoClientSettings.Builder builder) {
+			try (MongoClient client = MongoClients.create()) {
+				ClientEncryptionSettings clientEncryptionSettings = encryptionSettings(client);
+
+				builder.autoEncryptionSettings(AutoEncryptionSettings.builder() //
+						.kmsProviders(clientEncryptionSettings.getKmsProviders()) //
+						.keyVaultNamespace(clientEncryptionSettings.getKeyVaultNamespace()) //
+						.bypassQueryAnalysis(true).build());
+			}
+		}
+
+		@Bean
+		ClientEncryptionSettings encryptionSettings(MongoClient mongoClient) {
+
+			MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault");
+			MongoCollection<Document> keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName())
+					.getCollection(keyVaultNamespace.getCollectionName());
+			keyVaultCollection.drop();
+			// Ensure that two data keys cannot share the same keyAltName.
+			keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"),
+					new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames")));
+
+			mongoClient.getDatabase(getDatabaseName()).getCollection("test").drop(); // Clear old data
+
+			// Create the ClientEncryption instance
+			return ClientEncryptionSettings.builder() //
+					.keyVaultMongoClientSettings(
+							MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) //
+					.keyVaultNamespace(keyVaultNamespace.getFullName()) //
+					.kmsProviders(LAZY_KMS_PROVIDERS.get()) //
+					.build();
+		}
+	}
+
+	static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean {
+
+		static final AtomicReference<ClientEncryption> cache = new AtomicReference<>();
+
+		CachingMongoClientEncryption(Supplier<ClientEncryption> source) {
+			super(() -> {
+				ClientEncryption clientEncryption = cache.get();
+				if (clientEncryption == null) {
+					clientEncryption = source.get();
+					cache.set(clientEncryption);
+				}
+
+				return clientEncryption;
+			});
+		}
+
+		@Override
+		public void destroy() {
+			ClientEncryption clientEncryption = cache.get();
+			if (clientEncryption != null) {
+				clientEncryption.close();
+				cache.set(null);
+			}
+		}
+	}
+
+	static class EncryptionKeyHolder {
+
+		Supplier<Map<String, BsonBinary>> lazyDataKeyMap;
+
+		public EncryptionKeyHolder(Supplier<Map<String, BsonBinary>> lazyDataKeyMap) {
+			this.lazyDataKeyMap = Lazy.of(lazyDataKeyMap);
+		}
+
+		BsonBinary getEncryptionKey(String path) {
+			return lazyDataKeyMap.get().get(path);
+		}
+	}
+
+	@org.springframework.data.mongodb.core.mapping.Document("test")
+	static class Person {
+
+		String id;
+
+		String unencryptedValue;
+
+		@ValueConverter(MongoEncryptionConverter.class)
+		@Encrypted(algorithm = "Indexed") //
+		@Queryable(queryType = "equality", contentionFactor = 0) //
+		String name;
+
+		@ValueConverter(MongoEncryptionConverter.class)
+		@Encrypted(algorithm = "Indexed") //
+		@Queryable(queryType = "equality", contentionFactor = 0) //
+		Integer age;
+
+		@ValueConverter(MongoEncryptionConverter.class)
+		@RangeEncrypted(contentionFactor = 0L,
+				rangeOptions
= "{\"min\": 0, \"max\": 200, \"trimFactor\": 1, \"sparsity\": 1}") // + Integer encryptedInt; + + @ValueConverter(MongoEncryptionConverter.class) + @RangeEncrypted(contentionFactor = 0L, + rangeOptions = "{\"min\": {\"$numberLong\": \"1000\"}, \"max\": {\"$numberLong\": \"9999\"}, \"trimFactor\": 1, \"sparsity\": 1}") // + Long encryptedLong; + + NestedWithQEFields nested; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getEncryptedInt() { + return this.encryptedInt; + } + + public void setEncryptedInt(Integer encryptedInt) { + this.encryptedInt = encryptedInt; + } + + public Long getEncryptedLong() { + return this.encryptedLong; + } + + public void setEncryptedLong(Long encryptedLong) { + this.encryptedLong = encryptedLong; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(unencryptedValue, person.unencryptedValue) + && Objects.equals(name, person.name) && Objects.equals(age, person.age) + && Objects.equals(encryptedInt, person.encryptedInt) && Objects.equals(encryptedLong, person.encryptedLong); + } + + @Override + public int hashCode() { + return Objects.hash(id, unencryptedValue, name, age, encryptedInt, encryptedLong); + } + + @Override + public String toString() { + return "Person{" + "id='" + id + '\'' + ", unencryptedValue='" + unencryptedValue + '\'' + ", name='" + name + + '\'' + ", age=" + age + ", encryptedInt=" + encryptedInt + ", encryptedLong=" + encryptedLong + '}'; + } + } + + static class NestedWithQEFields { + + @ValueConverter(MongoEncryptionConverter.class) + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + String value; + + @Override + public String toString() { + return "NestedWithQEFields{" + "value='" + value + '\'' + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NestedWithQEFields that = (NestedWithQEFields) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java index 9b756bf01b..ed5ab78cdc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,14 +15,15 @@
 */
 package org.springframework.data.mongodb.core.geo;
 
-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;
 import static org.springframework.data.mongodb.core.query.Criteria.*;
 import static org.springframework.data.mongodb.core.query.Query.*;
 
+import java.util.Collections;
+import java.util.Date;
 import java.util.List;
+import java.util.Set;
 
-import org.joda.time.LocalDate;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -32,32 +33,33 @@
 import org.springframework.data.geo.Box;
 import org.springframework.data.geo.Circle;
 import org.springframework.data.geo.GeoResults;
-import org.springframework.data.geo.Metric;
 import org.springframework.data.geo.Metrics;
 import org.springframework.data.geo.Point;
 import org.springframework.data.geo.Polygon;
-import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
+import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
 import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.data.mongodb.core.TestEntities;
 import org.springframework.data.mongodb.core.Venue;
 import org.springframework.data.mongodb.core.query.NearQuery;
 import org.springframework.data.mongodb.core.query.Query;
+import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration;
+import org.springframework.data.mongodb.test.util.MongoTestUtils;
 import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.junit4.SpringRunner;
 
-import com.mongodb.Mongo;
-import com.mongodb.MongoClient;
 import com.mongodb.WriteConcern;
+import com.mongodb.client.MongoClient;
 
 /**
  * @author Christoph Strobl
  * @author Oliver Gierke
  */
-@RunWith(SpringJUnit4ClassRunner.class)
+@RunWith(SpringRunner.class)
 @ContextConfiguration
 public abstract class AbstractGeoSpatialTests {
 
 	@Configuration
-	static class TestConfig extends AbstractMongoConfiguration {
+	static class TestConfig extends MongoClientClosingTestConfiguration {
 
 		@Override
 		protected String getDatabaseName() {
@@ -65,8 +67,13 @@ protected String getDatabaseName() {
 		}
 
 		@Override
-		public Mongo mongo() throws Exception {
-			return new MongoClient();
+		public MongoClient mongoClient() {
+			return MongoTestUtils.client();
+		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+			return Collections.emptySet();
 		}
 	}
 
@@ -75,7 +82,7 @@ public Mongo mongo() throws Exception {
 	@Before
 	public void setUp() {
 
-		template.setWriteConcern(WriteConcern.FSYNC_SAFE);
+		template.setWriteConcern(WriteConcern.JOURNALED);
 
 		createIndex();
 		addVenues();
@@ -103,54 +110,51 @@ protected void removeVenues() {
 	}
 
 	protected void addVenues() {
-
-		template.insert(new Venue("Penn Station", -73.99408, 40.75057));
-		template.insert(new Venue("10gen Office", -73.99171, 40.738868));
-		template.insert(new Venue("Flatiron Building", -73.988135, 40.741404));
-		template.insert(new Venue("Players Club", -73.997812, 40.739128));
-		template.insert(new Venue("City Bakery ", -73.992491, 40.738673));
-		template.insert(new Venue("Splash Bar", -73.992491, 40.738673));
-		template.insert(new Venue("Momofuku Milk Bar", -73.985839, 40.731698));
-		template.insert(new Venue("Shake Shack", -73.98820, 40.74164));
-		template.insert(new Venue("Penn Station", -73.99408, 40.75057));
-		template.insert(new Venue("Empire State Building", -73.98602, 40.74894));
-		template.insert(new Venue("Ulaanbaatar, Mongolia", 106.9154, 47.9245));
-		template.insert(new Venue("Maplewood, NJ", -74.2713, 40.73137));
+		template.bulkOps(BulkMode.UNORDERED, Venue.class).insert(TestEntities.geolocation().newYork()).execute();
 	}
 
 	@Test
 	public void geoNear() {
 
-		NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150);
+		NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).maxDistance(150);
 
 		GeoResults<Venue> result = template.geoNear(geoNear, Venue.class);
 
-		assertThat(result.getContent().size(), is(not(0)));
-		assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
+		assertThat(result.getContent()).isNotEmpty();
+		assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS);
 	}
 
 	@Test
 	public void withinCenter() {
 
 		Circle circle = new Circle(-73.99171, 40.738868, 0.01);
-		List<Venue> venues = template.find(query(where("location").within(circle)), Venue.class);
-		assertThat(venues.size(), is(7));
+		Query query = query(where("location").within(circle));
+		List<Venue> venues = template.find(query, Venue.class);
+
+		assertThat(venues).hasSize(7);
+		assertThat(template.count(query, Venue.class)).isEqualTo(7);
 	}
 
 	@Test
 	public void withinCenterSphere() {
 
 		Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784);
-		List<Venue> venues = template.find(query(where("location").withinSphere(circle)), Venue.class);
-		assertThat(venues.size(), is(11));
+		Query query = query(where("location").withinSphere(circle));
+
+		List<Venue> venues = template.find(query, Venue.class);
+		assertThat(venues).hasSize(11);
+		assertThat(template.count(query, Venue.class)).isEqualTo(11);
	}
 
 	@Test
 	public void withinBox() {
 
 		Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404));
-		List<Venue> venues = template.find(query(where("location").within(box)), Venue.class);
-		assertThat(venues.size(), is(4));
+		Query query = query(where("location").within(box));
+
+		List<Venue> venues = template.find(query, Venue.class);
+		assertThat(venues).hasSize(4);
+		assertThat(template.count(query, Venue.class)).isEqualTo(4);
 	}
 
 	@Test
@@ -163,25 +167,27 @@ public void withinPolygon() {
 
 		Polygon polygon = new Polygon(first, second, third, fourth);
 
-		List<Venue> venues = template.find(query(where("location").within(polygon)), Venue.class);
-		assertThat(venues.size(), is(4));
+		Query query = query(where("location").within(polygon));
+		List<Venue> venues = template.find(query, Venue.class);
+		assertThat(venues).hasSize(4);
+		assertThat(template.count(query, Venue.class)).isEqualTo(4);
 	}
 
 	@Test
 	public void nearSphere() {
 
+		Point point = new Point(-73.99171, 40.738868);
 		Query query = query(where("location").nearSphere(point).maxDistance(0.003712240453784));
+
 		List<Venue> venues = template.find(query, Venue.class);
 
-		assertThat(venues.size(), is(11));
+		assertThat(venues).hasSize(11);
+		assertThat(template.count(query, Venue.class)).isEqualTo(11);
 	}
 
-	/**
-	 * @see DATAMONGO-1360
-	 */
-	@Test
+	@Test // DATAMONGO-1360
 	public void mapsQueryContainedInNearQuery() {
 
-		Query query = query(where("openingDate").lt(LocalDate.now()));
-		template.geoNear(NearQuery.near(1.5, 1.7).query(query), Venue.class);
+		Query query = query(where("openingDate").lt(new Date()));
+		template.geoNear(NearQuery.near(1.5,
1.7).spherical(true).query(query), Venue.class); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java index 97b282317d..e65101177c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,14 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.io.IOException; import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.data.geo.Point; import com.fasterxml.jackson.core.JsonParseException; @@ -36,86 +36,64 @@ public class GeoJsonModuleUnitTests { ObjectMapper mapper; - @Before + @BeforeEach public void setUp() { mapper = new ObjectMapper(); mapper.registerModule(new GeoJsonModule()); } - /** - * @see DATAMONGO-1181 - */ - @Test + @Test // DATAMONGO-1181 public void shouldDeserializeJsonPointCorrectly() throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"Point\", \"coordinates\": [10.0, 20.0] }"; - assertThat(mapper.readValue(json, GeoJsonPoint.class), is(new GeoJsonPoint(10D, 20D))); + assertThat(mapper.readValue(json, GeoJsonPoint.class)).isEqualTo(new GeoJsonPoint(10D, 20D)); } - /** - * @see DATAMONGO-1181 - */ - @Test - public void shouldDeserializeGeoJsonLineStringCorrectly() throws JsonParseException, JsonMappingException, - IOException { + @Test // DATAMONGO-1181 + public void shouldDeserializeGeoJsonLineStringCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"LineString\", \"coordinates\": [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]}"; - assertThat(mapper.readValue(json, GeoJsonLineString.class), - is(new GeoJsonLineString(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))))); + assertThat(mapper.readValue(json, GeoJsonLineString.class)) + .isEqualTo(new GeoJsonLineString(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60)))); } - /** - * @see DATAMONGO-1181 - */ - @Test - public void shouldDeserializeGeoJsonMultiPointCorrectly() throws JsonParseException, JsonMappingException, - IOException { + @Test // DATAMONGO-1181 + public void shouldDeserializeGeoJsonMultiPointCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"MultiPoint\", \"coordinates\": [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]}"; - assertThat(mapper.readValue(json, GeoJsonLineString.class), - is(new GeoJsonMultiPoint(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))))); 
+ assertThat(mapper.readValue(json, GeoJsonLineString.class)) + .isEqualTo(new GeoJsonMultiPoint(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60)))); } - /** - * @see DATAMONGO-1181 - */ - @Test + @Test // DATAMONGO-1181 @SuppressWarnings("unchecked") - public void shouldDeserializeGeoJsonMultiLineStringCorrectly() throws JsonParseException, JsonMappingException, - IOException { + public void shouldDeserializeGeoJsonMultiLineStringCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"MultiLineString\", \"coordinates\": [ [ [10.0, 20.0], [30.0, 40.0] ], [ [50.0, 60.0] , [70.0, 80.0] ] ]}"; - assertThat( - mapper.readValue(json, GeoJsonMultiLineString.class), - is(new GeoJsonMultiLineString(Arrays.asList(new Point(10, 20), new Point(30, 40)), Arrays.asList(new Point(50, - 60), new Point(70, 80))))); + assertThat(mapper.readValue(json, GeoJsonMultiLineString.class)).isEqualTo(new GeoJsonMultiLineString( + Arrays.asList(new Point(10, 20), new Point(30, 40)), Arrays.asList(new Point(50, 60), new Point(70, 80)))); } - /** - * @see DATAMONGO-1181 - */ - @Test + @Test // DATAMONGO-1181 public void shouldDeserializeGeoJsonPolygonCorrectly() throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"Polygon\", \"coordinates\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] ]}"; - assertThat( - mapper.readValue(json, GeoJsonPolygon.class), - is(new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), - new Point(100, 0))))); + assertThat(mapper.readValue(json, GeoJsonPolygon.class)).isEqualTo(new GeoJsonPolygon( + Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), new Point(100, 0)))); } - /** - * @see DATAMONGO-1181 - */ - @Test - public void shouldDeserializeGeoJsonMultiPolygonCorrectly() throws JsonParseException, JsonMappingException, - IOException { + @Test // DATAMONGO-1181 + public void shouldDeserializeGeoJsonMultiPolygonCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"Polygon\", \"coordinates\": [" + "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]]," @@ -123,15 +101,13 @@ public void shouldDeserializeGeoJsonMultiPolygonCorrectly() throws JsonParseExce + "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"// + "]}"; - assertThat( - mapper.readValue(json, GeoJsonMultiPolygon.class), - is(new GeoJsonMultiPolygon(Arrays.asList( - new GeoJsonPolygon(Arrays.asList(new Point(102, 2), new Point(103, 2), new Point(103, 3), - new Point(102, 3), new Point(102, 2))), - new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), - new Point(100, 1), new Point(100, 0))), - new GeoJsonPolygon(Arrays.asList(new Point(100.2, 0.2), new Point(100.8, 0.2), new Point(100.8, 0.8), - new Point(100.2, 0.8), new Point(100.2, 0.2))))))); + assertThat(mapper.readValue(json, GeoJsonMultiPolygon.class)).isEqualTo(new GeoJsonMultiPolygon(Arrays.asList( + new GeoJsonPolygon(Arrays.asList(new Point(102, 2), new Point(103, 2), new Point(103, 3), new Point(102, 3), + new Point(102, 2))), + new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), + new Point(100, 0))), + new GeoJsonPolygon(Arrays.asList(new Point(100.2, 0.2), new Point(100.8, 0.2), new Point(100.8, 0.8), + new Point(100.2, 0.8), new Point(100.2, 0.2)))))); 
} } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersUnitTests.java new file mode 100644 index 0000000000..43ea3945f0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.geo; + +import static org.assertj.core.api.Assertions.*; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.geo.Point; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * Unit tests for {@link GeoJsonSerializersModule}. + * + * @author Bjorn Harvold + * @author Christoph Strobl + */ +class GeoJsonSerializersUnitTests { + + private ObjectMapper mapper; + + @BeforeEach + void beforeEach() { + + mapper = new ObjectMapper(); + mapper.registerModule(new GeoJsonSerializersModule()); + } + + @Test // GH-3517 + void shouldSerializeJsonPointCorrectly() throws IOException { + + GeoJsonPoint geoJsonPoint = new GeoJsonPoint(10D, 20D); + + assertThat(mapper.writeValueAsString(geoJsonPoint)).isEqualTo("{\"type\":\"Point\",\"coordinates\":[10.0,20.0]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonLineStringCorrectly() throws IOException { + + GeoJsonLineString lineString = new GeoJsonLineString( + Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))); + + assertThat(mapper.writeValueAsString(lineString)) + .isEqualTo("{\"type\":\"LineString\",\"coordinates\":[[10.0,20.0],[30.0,40.0],[50.0,60.0]]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonMultiPointCorrectly() throws IOException { + + GeoJsonMultiPoint multiPoint = new GeoJsonMultiPoint( + Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))); + + assertThat(mapper.writeValueAsString(multiPoint)) + .isEqualTo("{\"type\":\"MultiPoint\",\"coordinates\":[[10.0,20.0],[30.0,40.0],[50.0,60.0]]}"); + } + + @Test // GH-3517 + void shouldSerializeJsonMultiLineStringCorrectly() throws IOException { + + GeoJsonMultiLineString multiLineString = new GeoJsonMultiLineString( + Arrays.asList(new Point(10, 20), new Point(30, 40)), Arrays.asList(new Point(50, 60), new Point(70, 80))); + + assertThat(mapper.writeValueAsString(multiLineString)).isEqualTo( + "{\"type\":\"MultiLineString\",\"coordinates\":[[[10.0,20.0],[30.0,40.0]],[[50.0,60.0],[70.0,80.0]]]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonPolygonCorrectly() throws IOException { + + List points = Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), + new Point(100, 0)); + GeoJsonPolygon polygon = new GeoJsonPolygon(points); + + 
assertThat(mapper.writeValueAsString(polygon)).isEqualTo( + "{\"type\":\"Polygon\",\"coordinates\":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonMultiPolygonCorrectly() throws IOException { + + String json = "{\"type\":\"MultiPolygon\",\"coordinates\":[" + "[" + "[" + + "[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]" + "]" + "]," + "[" + "[" + + "[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]" + "]" + "]," + "[" + "[" + + "[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]" + "]" + "]" + "]" + "}"; + + GeoJsonMultiPolygon multiPolygon = new GeoJsonMultiPolygon(Arrays.asList( + new GeoJsonPolygon(Arrays.asList(new Point(102, 2), new Point(103, 2), new Point(103, 3), new Point(102, 3), + new Point(102, 2))), + new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), + new Point(100, 0))), + new GeoJsonPolygon(Arrays.asList(new Point(100.2, 0.2), new Point(100.8, 0.2), new Point(100.8, 0.8), + new Point(100.2, 0.8), new Point(100.2, 0.2))))); + + assertThat(mapper.writeValueAsString(multiPolygon)).isEqualTo(json); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java index 01d9611675..b81b51abd5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,31 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.assertj.core.data.Percentage; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; +import org.springframework.dao.DataAccessException; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.geo.GeoResults; -import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; @@ -43,22 +47,29 @@ import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.BasicDbListBuilder; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.WriteConcern; +import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; /** * @author Christoph Strobl + * @author Mark Paluch + * @author Ivan Volzhev */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class GeoJsonTests { + static @Client MongoClient mongoClient; + @Configuration - static class TestConfig extends AbstractMongoConfiguration { + static class TestConfig extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -66,49 +77,142 @@ protected String getDatabaseName() { } @Override - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected Set<Class<?>>
getInitialEntitySet() throws ClassNotFoundException { + return Set.of(Venue2DSphere.class, VenueWithDistanceField.class, OpenGeoJson.class, + DocumentWithPropertyUsingGeoJsonType.class); + } } @Autowired MongoTemplate template; - @Before + @BeforeEach public void setUp() { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + // template.setWriteConcern(WriteConcern.JOURNALED); + + // createIndex(); + // addVenues(); + } + + private void createIndexAndAddVenues() { createIndex(); addVenues(); } - @After + @AfterEach public void tearDown() { dropIndex(); removeCollections(); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135, DATAMONGO-2264 public void geoNear() { - NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).num(10).maxDistance(150); + createIndexAndAddVenues(); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); + + GeoResults<Venue2DSphere> result = template.geoNear(geoNear, Venue2DSphere.class); + + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getAverageDistance().getValue()).isCloseTo(117.84629457941556, Percentage.withPercentage(0.001)); + } + + @Test // DATAMONGO-2264 + public void geoNearShouldNotOverridePropertyWithDefaultNameForCalculatedDistance/* namely "dis" */() { + + createIndexAndAddVenues(); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); + + GeoResults<VenueWithDistanceField> result = template.geoNear(geoNear, VenueWithDistanceField.class); + + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getAverageDistance().getValue()).isCloseTo(117.84629457941556, Percentage.withPercentage(0.001)); + result.getContent().forEach(it -> { + + assertThat(it.getDistance().getValue()).isNotZero(); + assertThat(it.getContent().getDis()).isNull(); + }); + } + + @Test // DATAMONGO-2264 + public void geoNearShouldAllowToReadBackCalculatedDistanceIntoTargetTypeProperty/* namely "dis" */() { + + createIndexAndAddVenues(); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); + + GeoResults<VenueWithDistanceField> result = template.geoNear(geoNear, Venue2DSphere.class, + template.getCollectionName(Venue2DSphere.class), VenueWithDistanceField.class); + + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getAverageDistance().getValue()).isCloseTo(117.84629457941556, Percentage.withPercentage(0.001)); + result.getContent().forEach(it -> { + + assertThat(it.getDistance().getValue()).isNotZero(); + assertThat(it.getContent().getDis()).isEqualTo(it.getDistance().getValue()); + }); + } + + @Test // DATAMONGO-1148 + public void geoNearShouldReturnDistanceCorrectlyUsingGeoJson/*which is using the meters*/() { + + createIndexAndAddVenues(); + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868), Metrics.KILOMETERS).limit(10) + .maxDistance(0.4); + + GeoResults<Venue2DSphere> result = template.geoNear(geoNear, Venue2DSphere.class); + + assertThat(result.getContent()).hasSize(3); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getContent().get(0).getDistance().getValue()).isCloseTo(0.0, offset(0.000001)); +
assertThat(result.getContent().get(1).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + assertThat(result.getContent().get(2).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + } + + @Test // DATAMONGO-1348 + public void geoNearShouldReturnDistanceCorrectly/*which is using the meters*/() { + + createIndexAndAddVenues(); + NearQuery geoNear = NearQuery.near(new Point(-73.99171, 40.738868), Metrics.KILOMETERS).limit(10).maxDistance(0.4); + + GeoResults<Venue2DSphere> result = template.geoNear(geoNear, Venue2DSphere.class); + + assertThat(result.getContent()).hasSize(3); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getContent().get(0).getDistance().getValue()).isCloseTo(0.0, offset(0.000001)); + assertThat(result.getContent().get(1).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + assertThat(result.getContent().get(2).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + } + + @Test // DATAMONGO-1135 + public void geoNearWithMiles() { + + createIndexAndAddVenues(); + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.MILES).limit(10).maxDistance(93.2057); GeoResults<Venue2DSphere> result = template.geoNear(geoNear, Venue2DSphere.class); - assertThat(result.getContent().size(), is(not(0))); - assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.MILES); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135 public void withinPolygon() { + createIndexAndAddVenues(); + Point first = new Point(-73.99756, 40.73083); Point second = new Point(-73.99756, 40.741404); Point third = new Point(-73.988135, 40.741404); @@ -117,41 +221,36 @@ public void withinPolygon() { GeoJsonPolygon polygon = new GeoJsonPolygon(first, second, third, fourth, first); List<Venue2DSphere> venues = template.find(query(where("location").within(polygon)), Venue2DSphere.class); - assertThat(venues.size(), is(4)); + assertThat(venues).hasSize(4); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135 public void nearPoint() { + createIndexAndAddVenues(); + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); Query query = query(where("location").near(point).maxDistance(0.01)); List<Venue2DSphere> venues = template.find(query, Venue2DSphere.class); - assertThat(venues.size(), is(1)); + assertThat(venues).hasSize(1); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135 public void nearSphere() { + createIndexAndAddVenues(); + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); Query query = query(where("location").nearSphere(point).maxDistance(0.003712240453784)); List<Venue2DSphere> venues = template.find(query, Venue2DSphere.class); - assertThat(venues.size(), is(1)); + assertThat(venues).hasSize(1); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonPointTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonPoint"; @@ -162,14 +261,11 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonPoint, equalTo(obj.geoJsonPoint)); +
assertThat(result.geoJsonPoint).isEqualTo(obj.geoJsonPoint); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonPolygonTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonPolygonTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonPolygon"; @@ -181,14 +277,11 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonPolygonTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonPolygon, equalTo(obj.geoJsonPolygon)); + assertThat(result.geoJsonPolygon).isEqualTo(obj.geoJsonPolygon); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonLineStringTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonLineStringTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonLineString"; @@ -199,33 +292,28 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonLineStringTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonLineString, equalTo(obj.geoJsonLineString)); + assertThat(result.geoJsonLineString).isEqualTo(obj.geoJsonLineString); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiLineStringTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiLineStringTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonMultiLineString"; - obj.geoJsonMultiLineString = new GeoJsonMultiLineString(Arrays.asList(new GeoJsonLineString(new Point(0, 0), - new Point(0, 1), new Point(1, 1)), new GeoJsonLineString(new Point(199, 0), new Point(2, 3)))); + obj.geoJsonMultiLineString = new GeoJsonMultiLineString( + Arrays.asList(new GeoJsonLineString(new Point(0, 0), new Point(0, 1), new Point(1, 1)), + new GeoJsonLineString(new Point(199, 0), new Point(2, 3)))); template.save(obj); DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonMultiLineString, equalTo(obj.geoJsonMultiLineString)); + assertThat(result.geoJsonMultiLineString).isEqualTo(obj.geoJsonMultiLineString); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonMultiPoint"; @@ -236,107 +324,196 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonMultiPoint, equalTo(obj.geoJsonMultiPoint)); + assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); + } + + @Test // DATAMONGO-3776 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeWithOnePointCorrectly() { + + DocumentWithPropertyUsingGeoJsonType obj = new 
DocumentWithPropertyUsingGeoJsonType(); + obj.id = "geoJsonMultiPoint"; + obj.geoJsonMultiPoint = new GeoJsonMultiPoint(new Point(0, 0)); + + template.save(obj); + + DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), + DocumentWithPropertyUsingGeoJsonType.class); + + assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonMultiPolygon"; - obj.geoJsonMultiPolygon = new GeoJsonMultiPolygon(Arrays.asList(new GeoJsonPolygon(new Point(0, 0), - new Point(0, 1), new Point(1, 1), new Point(0, 0)))); + obj.geoJsonMultiPolygon = new GeoJsonMultiPolygon( + Arrays.asList(new GeoJsonPolygon(new Point(0, 0), new Point(0, 1), new Point(1, 1), new Point(0, 0)))); template.save(obj); DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonMultiPolygon, equalTo(obj.geoJsonMultiPolygon)); + assertThat(result.geoJsonMultiPolygon).isEqualTo(obj.geoJsonMultiPolygon); } - /** - * @see DATAMONGO-1137 - */ - @Test - public void shouleSaveAndRetrieveDocumentWithGeoJsonGeometryCollectionTypeCorrectly() { + @Test // DATAMONGO-1137 + public void shouldSaveAndRetrieveDocumentWithGeoJsonGeometryCollectionTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonGeometryCollection"; - obj.geoJsonGeometryCollection = new GeoJsonGeometryCollection(Arrays.<GeoJson<?>> asList( - new GeoJsonPoint(100, 200), new GeoJsonPolygon(new Point(0, 0), new Point(0, 1), new Point(1, 1), new Point(1, - 0), new Point(0, 0)))); + obj.geoJsonGeometryCollection = new GeoJsonGeometryCollection(Arrays.<GeoJson<?>> asList(new GeoJsonPoint(100, 200), + new GeoJsonPolygon(new Point(0, 0), new Point(0, 1), new Point(1, 1), new Point(1, 0), new Point(0, 0)))); template.save(obj); DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonGeometryCollection, equalTo(obj.geoJsonGeometryCollection)); + assertThat(result.geoJsonGeometryCollection).isEqualTo(obj.geoJsonGeometryCollection); } - /** - * @see DATAMONGO-1110 - */ - @Test + @Test // DATAMONGO-1110 public void nearWithMinDistance() { + createIndexAndAddVenues(); + Point point = new GeoJsonPoint(-73.99171, 40.738868); List<Venue2DSphere> venues = template.find(query(where("location").near(point).minDistance(0.01)), Venue2DSphere.class); - assertThat(venues.size(), is(11)); + assertThat(venues).hasSize(11); } - /** - * @see DATAMONGO-1110 - */ - @Test + @Test // DATAMONGO-1110 public void nearSphereWithMinDistance() { + createIndexAndAddVenues(); + Point point = new GeoJsonPoint(-73.99171, 40.738868); List<Venue2DSphere> venues = template.find(query(where("location").nearSphere(point).minDistance(0.01)), Venue2DSphere.class); - assertThat(venues.size(), is(11)); + assertThat(venues).hasSize(11); } - /** - * @see DATAMONGO-1135 - */ - @Test + @Test // DATAMONGO-1135 public void nearWithMinAndMaxDistance() { + createIndexAndAddVenues(); + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); Query query =
query(where("location").near(point).minDistance(0.01).maxDistance(100)); List<Venue2DSphere> venues = template.find(query, Venue2DSphere.class); - assertThat(venues.size(), is(2)); + assertThat(venues).hasSize(2); + } + + @Test // DATAMONGO-1453 + public void shouldConvertPointRepresentationCorrectlyWhenSourceCoordinatesUsesInteger() { + + this.template.execute(template.getCollectionName(DocumentWithPropertyUsingGeoJsonType.class), + new CollectionCallback<Object>() { + + @Override + public Object doInCollection(MongoCollection<org.bson.Document> collection) + throws MongoException, DataAccessException { + + org.bson.Document pointRepresentation = new org.bson.Document(); + pointRepresentation.put("type", "Point"); + pointRepresentation.put("coordinates", new BasicDbListBuilder().add(0).add(0).get()); + + org.bson.Document document = new org.bson.Document(); + document.append("_id", "datamongo-1453"); + document.append("geoJsonPoint", pointRepresentation); + + collection.insertOne(document); + + return document; + } + }); + + assertThat(template.findOne(query(where("id").is("datamongo-1453")), + DocumentWithPropertyUsingGeoJsonType.class).geoJsonPoint).isEqualTo(new GeoJsonPoint(0D, 0D)); + } + + @Test // DATAMONGO-1453 + public void shouldConvertLineStringRepresentationCorrectlyWhenSourceCoordinatesUsesInteger() { + + this.template.execute(template.getCollectionName(DocumentWithPropertyUsingGeoJsonType.class), + new CollectionCallback<Object>() { + + @Override + public Object doInCollection(MongoCollection<org.bson.Document> collection) + throws MongoException, DataAccessException { + + org.bson.Document lineStringRepresentation = new org.bson.Document(); + lineStringRepresentation.put("type", "LineString"); + lineStringRepresentation.put("coordinates", + new BasicDbListBuilder().add(new BasicDbListBuilder().add(0).add(0).get()) + .add(new BasicDbListBuilder().add(1).add(1).get()).get()); + + org.bson.Document document = new org.bson.Document(); + document.append("_id", "datamongo-1453"); + document.append("geoJsonLineString", lineStringRepresentation); + + collection.insertOne(document); + + return document; + } + }); + + assertThat(template.findOne(query(where("id").is("datamongo-1453")), + DocumentWithPropertyUsingGeoJsonType.class).geoJsonLineString) + .isEqualTo(new GeoJsonLineString(new Point(0D, 0D), new Point(1, 1))); + } + + @Test // DATAMONGO-1466 + public void readGeoJsonBasedOnEmbeddedTypeInformation() { + + Point first = new Point(-73.99756, 40.73083); + Point second = new Point(-73.99756, 40.741404); + Point third = new Point(-73.988135, 40.741404); + Point fourth = new Point(-73.988135, 40.73083); + + GeoJsonPolygon polygon = new GeoJsonPolygon(first, second, third, fourth, first); + + ConcreteGeoJson source = new ConcreteGeoJson(); + source.shape = polygon; + source.id = "id-1"; + + template.save(source); + + OpenGeoJson target = template.findOne(query(where("id").is(source.id)), OpenGeoJson.class); + + assertThat(target.shape).isEqualTo(source.shape); } private void addVenues() { - template.insert(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); - template.insert(new Venue2DSphere("10gen Office", -73.99171, 40.738868)); - template.insert(new Venue2DSphere("Flatiron Building", -73.988135, 40.741404)); - template.insert(new Venue2DSphere("Players Club", -73.997812, 40.739128)); - template.insert(new Venue2DSphere("City Bakery ", -73.992491, 40.738673)); - template.insert(new Venue2DSphere("Splash Bar", -73.992491, 40.738673)); - template.insert(new Venue2DSphere("Momofuku Milk Bar", -73.985839, 40.731698)); - template.insert(new
Venue2DSphere("Shake Shack", -73.98820, 40.74164)); - template.insert(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); - template.insert(new Venue2DSphere("Empire State Building", -73.98602, 40.74894)); - template.insert(new Venue2DSphere("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); - template.insert(new Venue2DSphere("Maplewood, NJ", -74.2713, 40.73137)); + List venues = new ArrayList<>(); + + venues.add(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); + venues.add(new Venue2DSphere("10gen Office", -73.99171, 40.738868)); + venues.add(new Venue2DSphere("Flatiron Building", -73.988135, 40.741404)); + venues.add(new Venue2DSphere("Players Club", -73.997812, 40.739128)); + venues.add(new Venue2DSphere("City Bakery ", -73.992491, 40.738673)); + venues.add(new Venue2DSphere("Splash Bar", -73.992491, 40.738673)); + venues.add(new Venue2DSphere("Momofuku Milk Bar", -73.985839, 40.731698)); + venues.add(new Venue2DSphere("Shake Shack", -73.98820, 40.74164)); + venues.add(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); + venues.add(new Venue2DSphere("Empire State Building", -73.98602, 40.74894)); + venues.add(new Venue2DSphere("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); + venues.add(new Venue2DSphere("Maplewood, NJ", -74.2713, 40.73137)); + + template.bulkOps(BulkMode.UNORDERED, Venue2DSphere.class).insert(venues).execute(); } protected void createIndex() { dropIndex(); - template.indexOps(Venue2DSphere.class).ensureIndex( - new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); + template.indexOps(Venue2DSphere.class) + .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); } protected void dropIndex() { @@ -352,7 +529,7 @@ protected void removeCollections() { template.dropCollection(DocumentWithPropertyUsingGeoJsonType.class); } - @Document(collection = "venue2dsphere") + @org.springframework.data.mongodb.core.mapping.Document(collection = "venue2dsphere") static class Venue2DSphere { @Id private String id; @@ -384,6 +561,23 @@ public String toString() { } } + static class VenueWithDistanceField extends Venue2DSphere { + + private Double dis; // geoNear command default distance field name + + public VenueWithDistanceField(String name, double[] location) { + super(name, location); + } + + public Double getDis() { + return dis; + } + + public void setDis(Double dis) { + this.dis = dis; + } + } + static class DocumentWithPropertyUsingGeoJsonType { String id; @@ -396,4 +590,58 @@ static class DocumentWithPropertyUsingGeoJsonType { GeoJsonGeometryCollection geoJsonGeometryCollection; } + @Document("geo-json-shapes") + static class ConcreteGeoJson { + + String id; + GeoJsonPolygon shape; + + public String getId() { + return this.id; + } + + public GeoJsonPolygon getShape() { + return this.shape; + } + + public void setId(String id) { + this.id = id; + } + + public void setShape(GeoJsonPolygon shape) { + this.shape = shape; + } + + public String toString() { + return "GeoJsonTests.ConcreteGeoJson(id=" + this.getId() + ", shape=" + this.getShape() + ")"; + } + } + + @Document("geo-json-shapes") + static class OpenGeoJson { + + String id; + GeoJson shape; + + public String getId() { + return this.id; + } + + public GeoJson getShape() { + return this.shape; + } + + public void setId(String id) { + this.id = id; + } + + public void setShape(GeoJson shape) { + this.shape = shape; + } + + public String toString() { + return "GeoJsonTests.OpenGeoJson(id=" + this.getId() + ", shape=" + this.getShape() + ")"; + } + } + } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java index a51bd46b97..3a9140d34c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java @@ -1,98 +1,101 @@ -/* - * Copyright 2010-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.data.mongodb.core.geo; - -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.core.query.Query.*; - -import java.util.List; - -import org.junit.Test; -import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.geo.GeoResults; -import org.springframework.data.geo.Metric; -import org.springframework.data.geo.Metrics; -import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.IndexOperations; -import org.springframework.data.mongodb.core.Venue; -import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; -import org.springframework.data.mongodb.core.index.GeospatialIndex; -import org.springframework.data.mongodb.core.index.IndexField; -import org.springframework.data.mongodb.core.index.IndexInfo; -import org.springframework.data.mongodb.core.query.NearQuery; - -/** - * @author Christoph Strobl - */ -public class GeoSpatial2DSphereTests extends AbstractGeoSpatialTests { - - /** - * @see DATAMONGO-360 - */ - @Test - public void indexInfoIsCorrect() { - - IndexOperations operations = template.indexOps(Venue.class); - List indexInfo = operations.getIndexInfo(); - - assertThat(indexInfo.size(), is(2)); - - List fields = indexInfo.get(0).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC))); - - fields = indexInfo.get(1).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.geo("location"))); - } - - /** - * @see DATAMONGO-1110 - */ - @Test - public void geoNearWithMinDistance() { - - NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).minDistance(1); - - GeoResults result = template.geoNear(geoNear, Venue.class); - - assertThat(result.getContent().size(), is(not(0))); - assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); - } - - /** - * @see DATAMONGO-1110 - */ - @Test - public void nearSphereWithMinDistance() { - Point point = new Point(-73.99171, 40.738868); - List venues = template.find(query(where("location").nearSphere(point).minDistance(0.01)), Venue.class); - assertThat(venues.size(), is(1)); - } - - @Override - protected void createIndex() { - 
template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); - } - - @Override - protected void dropIndex() { - template.indexOps(Venue.class).dropIndex("location_2dsphere"); - } -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.geo; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.List; + +import org.junit.Test; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.geo.GeoResults; +import org.springframework.data.geo.Metric; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.Venue; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.index.IndexField; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * @author Christoph Strobl + */ +public class GeoSpatial2DSphereTests extends AbstractGeoSpatialTests { + + @Test // DATAMONGO-360 + public void indexInfoIsCorrect() { + + IndexOperations operations = template.indexOps(Venue.class); + List indexInfo = operations.getIndexInfo(); + + assertThat(indexInfo.size()).isEqualTo(2); + + List fields = indexInfo.get(0).getIndexFields(); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.create("_id", Direction.ASC)); + + fields = indexInfo.get(1).getIndexFields(); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.geo("location")); + } + + @Test // DATAMONGO-1110 + public void geoNearWithMinDistance() { + + NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).minDistance(1); + + GeoResults result = template.geoNear(geoNear, Venue.class); + + assertThat(result.getContent().size()).isNotEqualTo(0); + assertThat(result.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); + } + + @Test // DATAMONGO-1110 + public void nearSphereWithMinDistance() { + + Point point = new Point(-73.99171, 40.738868); + Query query = query(where("location").nearSphere(point).minDistance(0.01)); + + List venues = template.find(query, Venue.class); + assertThat(venues.size()).isEqualTo(1); + } + + @Test + public void countNearSphereWithMinDistance() { + + Point point = new Point(-73.99171, 40.738868); + Query query = query(where("location").nearSphere(point).minDistance(0.01)); + + assertThat(template.count(query, Venue.class)).isEqualTo(1); + } + + @Override + protected void 
createIndex() { + template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); + } + + @Override + protected void dropIndex() { + template.indexOps(Venue.class).dropIndex("location_2dsphere"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java index e9248f63b0..15c3a89dab 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java @@ -1,82 +1,84 @@ -/* - * Copyright 2010-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.data.mongodb.core.geo; - -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.core.query.Query.*; - -import java.util.List; - -import org.junit.Test; -import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.IndexOperations; -import org.springframework.data.mongodb.core.Venue; -import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; -import org.springframework.data.mongodb.core.index.GeospatialIndex; -import org.springframework.data.mongodb.core.index.IndexField; -import org.springframework.data.mongodb.core.index.IndexInfo; - -/** - * Modified from https://github.com/deftlabs/mongo-java-geospatial-example - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -public class GeoSpatial2DTests extends AbstractGeoSpatialTests { - - @Test - public void nearPoint() { - Point point = new Point(-73.99171, 40.738868); - List venues = template.find(query(where("location").near(point).maxDistance(0.01)), Venue.class); - assertThat(venues.size(), is(7)); - } - - /** - * @see DATAMONGO-360 - */ - @Test - public void indexInfoIsCorrect() { - - IndexOperations operations = template.indexOps(Venue.class); - List indexInfo = operations.getIndexInfo(); - - assertThat(indexInfo.size(), is(2)); - - List fields = indexInfo.get(0).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC))); - - fields = indexInfo.get(1).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.geo("location"))); - } - - @Override - protected void createIndex() { - template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D)); - } - - @Override - protected void dropIndex() { - template.indexOps(Venue.class).dropIndex("location_2d"); - } -} +/* + * Copyright 2010-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.geo; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.List; + +import org.junit.Test; + +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.Venue; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.index.IndexField; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Modified from https://github.com/deftlabs/mongo-java-geospatial-example + * + * @author Mark Pollack + * @author Oliver Gierke + * @author Thomas Darimont + * @author Christoph Strobl + */ +public class GeoSpatial2DTests extends AbstractGeoSpatialTests { + + @Test + public void nearPoint() { + + Point point = new Point(-73.99171, 40.738868); + Query query = query(where("location").near(point).maxDistance(0.01)); + + List venues = template.find(query, Venue.class); + assertThat(venues.size()).isEqualTo(7); + assertThat(template.count(query, Venue.class)).isEqualTo(7); + } + + @Test // DATAMONGO-360 + public void indexInfoIsCorrect() { + + IndexOperations operations = template.indexOps(Venue.class); + List indexInfo = operations.getIndexInfo(); + + assertThat(indexInfo.size()).isEqualTo(2); + + List fields = indexInfo.get(0).getIndexFields(); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.create("_id", Direction.ASC)); + + fields = indexInfo.get(1).getIndexFields(); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.geo("location")); + } + + @Override + protected void createIndex() { + template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D)); + } + + @Override + protected void dropIndex() { + template.indexOps(Venue.class).dropIndex("location_2d"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java index 1c2f0a6832..37ae08dff4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,38 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.assertj.core.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.config.AbstractIntegrationTests; import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.IndexOperations; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.WriteResultChecking; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.util.MongoClientVersion; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.MongoException; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoCollection; /** * Integration tests for geo-spatial indexing. 
- * + * * @author Laurent Canet * @author Oliver Gierke * @author Thomas Darimont @@ -53,59 +56,50 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests { @Autowired private MongoTemplate template; - @Before + @BeforeEach public void setUp() { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + template.setWriteConcern(WriteConcern.JOURNALED); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); } - /** - * @see DATAMONGO-778 - */ - @Test + @Test // DATAMONGO-778 public void test2dIndex() { try { template.save(new GeoSpatialEntity2D(45.2, 4.6)); - assertThat(hasIndexOfType(GeoSpatialEntity2D.class, "2d"), is(true)); + assertThat(hasIndexOfType(GeoSpatialEntity2D.class, "2d")).isTrue(); } finally { template.dropCollection(GeoSpatialEntity2D.class); } } - /** - * @see DATAMONGO-778 - */ - @Test + @Test // DATAMONGO-778 public void test2dSphereIndex() { try { template.save(new GeoSpatialEntity2DSphere(45.2, 4.6)); - assertThat(hasIndexOfType(GeoSpatialEntity2DSphere.class, "2dsphere"), is(true)); + assertThat(hasIndexOfType(GeoSpatialEntity2DSphere.class, "2dsphere")).isTrue(); } finally { template.dropCollection(GeoSpatialEntity2DSphere.class); } } - /** - * @see DATAMONGO-778 - */ - @Test + @Test // DATAMONGO-778 + @EnableIfMongoServerVersion(isLessThan = "5.0") public void testHaystackIndex() { + Assumptions.assumeThat(MongoClientVersion.isVersion5orNewer()).isFalse(); + try { template.save(new GeoSpatialEntityHaystack(45.2, 4.6, "Paris")); - assertThat(hasIndexOfType(GeoSpatialEntityHaystack.class, "geoHaystack"), is(true)); + assertThat(hasIndexOfType(GeoSpatialEntityHaystack.class, "geoHaystack")).isTrue(); } finally { template.dropCollection(GeoSpatialEntityHaystack.class); } } - /** - * @see DATAMONGO-827 - */ - @Test + @Test // DATAMONGO-827 public void useGeneratedNameShouldGenerateAnIndexName() { try { @@ -116,9 +110,9 @@ public void useGeneratedNameShouldGenerateAnIndexName() { IndexOperations indexOps = template.indexOps(GeoSpatialEntity2dWithGeneratedIndex.class); List indexInfo = indexOps.getIndexInfo(); - assertThat(indexInfo, hasSize(2)); - assertThat(indexInfo.get(1), is(notNullValue())); - assertThat(indexInfo.get(1).getName(), is("location_2d")); + assertThat(indexInfo).hasSize(2); + assertThat(indexInfo.get(1)).isNotNull(); + assertThat(indexInfo.get(1).getName()).isEqualTo("location_2d"); } finally { template.dropCollection(GeoSpatialEntity2D.class); @@ -127,9 +121,9 @@ public void useGeneratedNameShouldGenerateAnIndexName() { /** * Returns whether an index with the given name exists for the given entity type. 
- * - * @param indexName + * * @param entityType + * @param type * @return */ private boolean hasIndexOfType(Class<?> entityType, final String type) { @@ -137,12 +131,16 @@ private boolean hasIndexOfType(Class<?> entityType, final String type) { return template.execute(entityType, new CollectionCallback<Boolean>() { @SuppressWarnings("unchecked") - public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException { + public Boolean doInCollection(MongoCollection<org.bson.Document> collection) + throws MongoException, DataAccessException { + + List<org.bson.Document> indexes = new ArrayList<org.bson.Document>(); + collection.listIndexes(org.bson.Document.class).into(indexes); - for (DBObject indexInfo : collection.getIndexInfo()) { + for (org.bson.Document indexInfo : indexes) { - DBObject keys = (DBObject) indexInfo.get("key"); - Map<String, Object> keysMap = keys.toMap(); + org.bson.Document keys = (org.bson.Document) indexInfo.get("key"); + Map<String, Object> keysMap = keys; for (String key : keysMap.keySet()) { Object indexType = keys.get(key); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java index a92cf8969a..6f505289e6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,18 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.query.Order; /** * Unit tests for {@link IndexField}.
- * + * * @author Oliver Gierke + * @author Christoph Strobl */ -@SuppressWarnings("deprecation") public class IndexFieldUnitTests { @Test @@ -35,10 +34,9 @@ public void createsPlainIndexFieldCorrectly() { IndexField field = IndexField.create("foo", Direction.ASC); - assertThat(field.getKey(), is("foo")); - assertThat(field.getDirection(), is(Direction.ASC)); - assertThat(field.getOrder(), is(Order.ASCENDING)); - assertThat(field.isGeo(), is(false)); + assertThat(field.getKey()).isEqualTo("foo"); + assertThat(field.getDirection()).isEqualTo(Direction.ASC); + assertThat(field.isGeo()).isFalse(); } @Test @@ -46,9 +44,9 @@ public void createsGeoIndexFieldCorrectly() { IndexField field = IndexField.geo("foo"); - assertThat(field.getKey(), is("foo")); - assertThat(field.getDirection(), is(nullValue())); - assertThat(field.isGeo(), is(true)); + assertThat(field.getKey()).isEqualTo("foo"); + assertThat(field.getDirection()).isNull(); + assertThat(field.isGeo()).isTrue(); } @Test @@ -57,8 +55,8 @@ public void correctEqualsForPlainFields() { IndexField first = IndexField.create("foo", Direction.ASC); IndexField second = IndexField.create("foo", Direction.ASC); - assertThat(first, is(second)); - assertThat(second, is(first)); + assertThat(first).isEqualTo(second); + assertThat(second).isEqualTo(first); } @Test @@ -67,7 +65,22 @@ public void correctEqualsForGeoFields() { IndexField first = IndexField.geo("bar"); IndexField second = IndexField.geo("bar"); - assertThat(first, is(second)); - assertThat(second, is(first)); + assertThat(first).isEqualTo(second); + assertThat(second).isEqualTo(first); + } + + @Test // DATAMONGO-1183 + public void correctTypeForHashedFields() { + assertThat(IndexField.hashed("key").isHashed()).isTrue(); + } + + @Test // DATAMONGO-1183 + public void correctEqualsForHashedFields() { + + IndexField first = IndexField.hashed("bar"); + IndexField second = IndexField.hashed("bar"); + + assertThat(first).isEqualTo(second); + assertThat(second).isEqualTo(first); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java index cf4bdb8286..12fc967c83 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,108 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.time.Duration; import java.util.Arrays; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Sort.Direction; /** * Unit tests for {@link IndexInfo}. 
- * + * * @author Oliver Gierke + * @author Christoph Strobl + * @author Stefan Tirea */ -public class IndexInfoUnitTests { +class IndexInfoUnitTests { + + static final String ID_INDEX = "{ \"v\" : 2, \"key\" : { \"_id\" : 1 }, \"name\" : \"_id_\", \"ns\" : \"db.collection\" }"; + static final String INDEX_WITH_PARTIAL_FILTER = "{ \"v\" : 2, \"key\" : { \"k3y\" : 1 }, \"name\" : \"partial-filter-index\", \"ns\" : \"db.collection\", \"partialFilterExpression\" : { \"quantity\" : { \"$gte\" : 10 } } }"; + static final String INDEX_WITH_EXPIRATION_TIME = "{ \"v\" : 2, \"key\" : { \"lastModifiedDate\" : 1 }, \"name\" : \"expire-after-last-modified\", \"ns\" : \"db.collection\", \"expireAfterSeconds\" : 3600 }"; + static final String HASHED_INDEX = "{ \"v\" : 2, \"key\" : { \"score\" : \"hashed\" }, \"name\" : \"score_hashed\", \"ns\" : \"db.collection\" }"; + static final String WILDCARD_INDEX = "{ \"v\" : 2, \"key\" : { \"$**\" : 1 }, \"name\" : \"$**_1\", \"wildcardProjection\" : { \"fieldA\" : 0, \"fieldB.fieldC\" : 0 } }"; + static final String INDEX_WITH_COLLATION = "{ \"v\" : 2, \"key\" : { \"_id\" : 1 }, \"name\" : \"projectName\", \"collation\": { \"locale\": \"en_US\", \"strength\": 2 } }"; + static final String HIDDEN_INDEX = """ + { + "v" : 2, + "key" : { + "borough" : 1 + }, + "name" : "borough_1", + "hidden" : true + } + """; @Test - public void isIndexForFieldsCorrectly() { + void isIndexForFieldsCorrectly() { IndexField fooField = IndexField.create("foo", Direction.ASC); IndexField barField = IndexField.create("bar", Direction.DESC); - IndexInfo info = new IndexInfo(Arrays.asList(fooField, barField), "myIndex", false, false, false); - assertThat(info.isIndexForFields(Arrays.asList("foo", "bar")), is(true)); + IndexInfo info = new IndexInfo(Arrays.asList(fooField, barField), "myIndex", false, false, ""); + assertThat(info.isIndexForFields(Arrays.asList("foo", "bar"))).isTrue(); + } + + @Test // DATAMONGO-2170 + void partialFilterExpressionShouldBeNullIfNotSetInSource() { + assertThat(getIndexInfo(ID_INDEX).getPartialFilterExpression()).isNull(); + } + + @Test // DATAMONGO-2170 + void partialFilterExpressionShouldMatchSource() { + + assertThat(Document.parse(getIndexInfo(INDEX_WITH_PARTIAL_FILTER).getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"quantity\" : { \"$gte\" : 10 } }")); + } + + @Test // DATAMONGO-2081 + void expireAfterIsParsedCorrectly() { + assertThat(getIndexInfo(INDEX_WITH_EXPIRATION_TIME).getExpireAfter()).contains(Duration.ofHours(1)); + } + + @Test // DATAMONGO-2081 + void expireAfterIsEmptyIfNotSet() { + assertThat(getIndexInfo(ID_INDEX).getExpireAfter()).isEmpty(); + } + + @Test // DATAMONGO-1183 + void readsHashedIndexCorrectly() { + assertThat(getIndexInfo(HASHED_INDEX).getIndexFields()).containsExactly(IndexField.hashed("score")); + } + + @Test // DATAMONGO-1183 + void hashedIndexIsMarkedAsSuch() { + assertThat(getIndexInfo(HASHED_INDEX).isHashed()).isTrue(); + } + + @Test // GH-3225 + void identifiesWildcardIndexCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).isWildcard()).isTrue(); + } + + @Test // GH-3225 + void readsWildcardIndexProjectionCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).getWildcardProjection()) + .contains(new Document("fieldA", 0).append("fieldB.fieldC", 0)); + } + + @Test // GH-3002 + void collationParsedCorrectly() { + assertThat(getIndexInfo(INDEX_WITH_COLLATION).getCollation()) + .contains(Document.parse("{ \"locale\": \"en_US\", \"strength\": 2 }")); + } + + @Test // GH-4348 + void 
hiddenInfoSetCorrectly() { + + assertThat(getIndexInfo(ID_INDEX).isHidden()).isFalse(); + assertThat(getIndexInfo(HIDDEN_INDEX).isHidden()).isTrue(); + } + + private static IndexInfo getIndexInfo(String documentJson) { + return IndexInfo.indexInfoOf(Document.parse(documentJson)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexOptionsUnitTests.java new file mode 100644 index 0000000000..3a193f0133 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexOptionsUnitTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.time.Duration; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.index.IndexOptions.Unique; + +/** + * @author Christoph Strobl + */ +class IndexOptionsUnitTests { + + @Test // GH-4851 + void noneIsEmpty() { + + IndexOptions options = IndexOptions.none(); + + assertThat(options.getExpire()).isNull(); + assertThat(options.getUnique()).isNull(); + assertThat(options.isHidden()).isNull(); + assertThat(options.toDocument()).isEqualTo(new Document()); + } + + @Test // GH-4851 + void uniqueSetsFlag() { + + IndexOptions options = IndexOptions.unique(); + + assertThat(options.getUnique()).isEqualTo(Unique.YES); + assertThat(options.toDocument()).containsEntry("unique", true); + + options.setUnique(Unique.NO); + assertThat(options.toDocument()).containsEntry("unique", false); + + options.setUnique(Unique.PREPARE); + assertThat(options.toDocument()).containsEntry("prepareUnique", true); + } + + @Test // GH-4851 + void hiddenSetsFlag() { + + IndexOptions options = IndexOptions.hidden(); + + assertThat(options.isHidden()).isTrue(); + assertThat(options.toDocument()).containsEntry("hidden", true); + } + + @Test // GH-4851 + void expireAfterSetsExpiration() { + + Duration duration = Duration.ofMinutes(2); + IndexOptions options = IndexOptions.expireAfter(duration); + + assertThat(options.getExpire()).isEqualTo(duration); + assertThat(options.toDocument()).containsEntry("expireAfterSeconds", duration.toSeconds()); + } + + @Test // GH-4851 + void expireAfterForZeroAndNegativeDuration() { + + assertThat(IndexOptions.expireAfter(Duration.ZERO).toDocument()).containsEntry("expireAfterSeconds", 0L); + assertThat(IndexOptions.expireAfter(Duration.ofSeconds(-1)).toDocument()).isEmpty(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java index 2b748fc8d9..caa40e96c0 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,75 +15,175 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Set; -import org.junit.After; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.CollectionCallback; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoCollectionUtils; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; /** * Integration tests for index handling. 
- * + * * @author Oliver Gierke * @author Christoph Strobl * @author Jordi Llach + * @author Mark Paluch + * @author Ben Foster -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@ContextConfiguration public class IndexingIntegrationTests { + static @Client MongoClient mongoClient; + @Autowired MongoOperations operations; - @Autowired MongoDbFactory mongoDbFactory; + @Autowired MongoDatabaseFactory mongoDbFactory; @Autowired ConfigurableApplicationContext context; - @After + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Bean + TimeoutResolver myTimeoutResolver() { + return new TimeoutResolver("11s"); + } + + @Override + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Override + protected boolean autoIndexCreation() { + return true; + } + } + + @AfterEach public void tearDown() { operations.dropCollection(IndexedPerson.class); + operations.dropCollection(TimeSeriesWithSpelIndexTimeout.class); } - /** - * @see DATAMONGO-237 - */ - @Test + @Test // DATAMONGO-237 @DirtiesContext public void createsIndexWithFieldName() { operations.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class); - assertThat(hasIndex("_firstname", IndexedPerson.class), is(true)); + assertThat(hasIndex("_firstname", IndexedPerson.class)).isTrue(); } - /** - * @see DATAMONGO-1163 - */ - @Test + @Test // DATAMONGO-2188 + @DirtiesContext + public void shouldNotCreateIndexOnIndexingDisabled() { + + MongoMappingContext context = new MongoMappingContext(); + context.setAutoIndexCreation(false); + + MongoTemplate template = new MongoTemplate(mongoDbFactory, + new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context)); + + template.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class); + + assertThat(hasIndex("_firstname", MongoCollectionUtils.getPreferredCollectionName(IndexedPerson.class))).isFalse(); + } + + @Test // DATAMONGO-1163 @DirtiesContext public void createsIndexFromMetaAnnotation() { operations.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class); - assertThat(hasIndex("_lastname", IndexedPerson.class), is(true)); + assertThat(hasIndex("_lastname", IndexedPerson.class)).isTrue(); + } + + @Test // DATAMONGO-2112 + @DirtiesContext + public void evaluatesTimeoutSpelExpressionWithBeanReference() { + + operations.getConverter().getMappingContext().getPersistentEntity(WithSpelIndexTimeout.class); + + Optional<org.bson.Document> indexInfo = operations.execute("withSpelIndexTimeout", collection -> { + + return collection.listIndexes(org.bson.Document.class).into(new ArrayList<>()) // + .stream() // + .filter(it -> it.get("name").equals("someString")) // + .findFirst(); + }); + + assertThat(indexInfo).isPresent(); + assertThat(indexInfo.get()).hasEntrySatisfying("expireAfterSeconds", timeout -> { + + // MongoDB 5 returns int not long + assertThat(timeout).isIn(11, 11L); + }); + } + + @Test // GH-4099 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + @DirtiesContext + public void evaluatesTimeSeriesTimeoutSpelExpressionWithBeanReference() { + + operations.createCollection(TimeSeriesWithSpelIndexTimeout.class); + + final Optional<org.bson.Document> collectionInfo = operations.execute(db -> { + return 
db.listCollections().into(new ArrayList<>()) + .stream() + .filter(c -> "timeSeriesWithSpelIndexTimeout".equals(c.get("name"))) + .findFirst(); + }); + + assertThat(collectionInfo).isPresent(); + assertThat(collectionInfo.get()).hasEntrySatisfying("options", options -> { + final org.bson.Document optionsDoc = (org.bson.Document) options; + // MongoDB 5 returns int not long + assertThat(optionsDoc.get("expireAfterSeconds")).isIn(11, 11L); + }); } @Target({ ElementType.FIELD }) @@ -99,24 +199,59 @@ class IndexedPerson { @Field("_lastname") @IndexedFieldAnnotation String lastname; } + static class TimeoutResolver { + final String timeout; + + public TimeoutResolver(String timeout) { + this.timeout = timeout; + } + + public String getTimeout() { + return this.timeout; + } + } + + @Document + class WithSpelIndexTimeout { + @Indexed(expireAfter = "#{@myTimeoutResolver?.timeout}") String someString; + } + + @TimeSeries(expireAfter = "#{@myTimeoutResolver?.timeout}", timeField = "timestamp") + class TimeSeriesWithSpelIndexTimeout { + Instant timestamp; + } + /** * Returns whether an index with the given name exists for the given entity type. - * + * * @param indexName * @param entityType * @return */ - private boolean hasIndex(final String indexName, Class entityType) { - - return operations.execute(entityType, new CollectionCallback() { - public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException { - for (DBObject indexInfo : collection.getIndexInfo()) { - if (indexName.equals(indexInfo.get("name"))) { - return true; - } + private boolean hasIndex(String indexName, Class entityType) { + return hasIndex(indexName, operations.getCollectionName(entityType)); + } + + /** + * Returns whether an index with the given name exists for the given collection. + * + * @param indexName + * @param collectionName + * @return + */ + private boolean hasIndex(String indexName, String collectionName) { + + return operations.execute(collectionName, collection -> { + + List indexes = new ArrayList<>(); + collection.listIndexes(org.bson.Document.class).into(indexes); + + for (org.bson.Document indexInfo : indexes) { + if (indexName.equals(indexInfo.get("name"))) { + return true; } - return false; } + return false; }); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java index 529828fe41..a506decf67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2015 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,11 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import java.net.UnknownHostException; import java.util.Arrays; import java.util.List; -import org.hamcrest.Matchers; import org.hamcrest.core.IsInstanceOf; import org.junit.ClassRule; import org.junit.Rule; @@ -30,41 +27,43 @@ import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.test.util.CleanMongoDB; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.data.mongodb.test.util.MongoVersionRule; import org.springframework.data.util.Version; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.MongoClient; import com.mongodb.MongoCommandException; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link MongoPersistentEntityIndexCreator}. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoPersistentEntityIndexCreatorIntegrationTests { static final String SAMPLE_TYPE_COLLECTION_NAME = "sampleEntity"; static final String RECURSIVE_TYPE_COLLECTION_NAME = "recursiveGenericTypes"; - public static @ClassRule RuleChain rules = RuleChain.outerRule(MongoVersionRule.atLeast(new Version(2, 6))).around( - CleanMongoDB.indexes(Arrays.asList(SAMPLE_TYPE_COLLECTION_NAME, RECURSIVE_TYPE_COLLECTION_NAME))); + public static @ClassRule RuleChain rules = RuleChain.outerRule(MongoVersionRule.atLeast(new Version(2, 6))) + .around(CleanMongoDB.indexes(Arrays.asList(SAMPLE_TYPE_COLLECTION_NAME, RECURSIVE_TYPE_COLLECTION_NAME))); public @Rule ExpectedException expectedException = ExpectedException.none(); @@ -76,30 +75,24 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests { public void createsIndexForConfiguredMappingContextOnly() { List indexInfo = templateOne.indexOps(SampleEntity.class).getIndexInfo(); - assertThat(indexInfo, hasSize(greaterThan(0))); - assertThat(indexInfo, Matchers. 
hasItem(hasProperty("name", is("prop")))); + assertThat(indexInfo).isNotEmpty(); + assertThat(indexInfo).extracting(IndexInfo::getName).contains("prop"); indexInfo = templateTwo.indexOps(SAMPLE_TYPE_COLLECTION_NAME).getIndexInfo(); - assertThat(indexInfo, hasSize(0)); + assertThat(indexInfo).hasSize(0); } - /** - * @see DATAMONGO-1202 - */ - @Test + @Test // DATAMONGO-1202 public void shouldHonorIndexedPropertiesWithRecursiveMappings() { List indexInfo = templateOne.indexOps(RecursiveConcreteType.class).getIndexInfo(); - assertThat(indexInfo, hasSize(greaterThan(0))); - assertThat(indexInfo, Matchers. hasItem(hasProperty("name", is("firstName")))); + assertThat(indexInfo).isNotEmpty(); + assertThat(indexInfo).extracting(IndexInfo::getName).contains("firstName"); } - /** - * @DATAMONGO-1125 - */ - @Test - public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() throws UnknownHostException { + @Test // DATAMONGO-1125 + public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() { expectedException.expect(DataIntegrityViolationException.class); expectedException.expectMessage("collection 'datamongo-1125'"); @@ -107,17 +100,21 @@ public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() th expectedException.expectMessage("lastname"); expectedException.expectCause(IsInstanceOf. instanceOf(MongoCommandException.class)); - MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(), - new SimpleMongoDbFactory(new MongoClient(), "issue")); + try (MongoClient client = MongoTestUtils.client()) { + MongoTemplate mongoTemplate = new MongoTemplate(client, "issue"); + + MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(), + mongoTemplate); - indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", new Index().named("stormlight") - .on("lastname", Direction.ASC).unique(), "datamongo-1125")); + indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", + new Index().named("stormlight").on("lastname", Direction.ASC).unique(), "datamongo-1125")); - indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", new Index().named("stormlight") - .on("lastname", Direction.ASC).sparse(), "datamongo-1125")); + indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", + new Index().named("stormlight").on("lastname", Direction.ASC).sparse(), "datamongo-1125")); + } } - @Document(collection = RECURSIVE_TYPE_COLLECTION_NAME) + @Document(RECURSIVE_TYPE_COLLECTION_NAME) static abstract class RecursiveGenericType> { @Id Long id; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java index 093286aab7..0e48b39e56 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2015 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,234 +15,213 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.Collections; import java.util.Date; +import java.util.concurrent.TimeUnit; -import org.hamcrest.core.IsEqual; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.context.ApplicationContext; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; import org.springframework.data.mapping.context.MappingContextEvent; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.MongoException; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; /** * Unit tests for {@link MongoPersistentEntityIndexCreator}. 
- * + * * @author Oliver Gierke * @author Philipp Schneider * @author Johno Crawford * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class MongoPersistentEntityIndexCreatorUnitTests { - private @Mock MongoDbFactory factory; - private @Mock ApplicationContext context; - private @Mock DB db; - private @Mock DBCollection collection; + private @Mock MongoDatabaseFactory factory; + private @Mock MongoDatabase db; + private @Mock MongoCollection<org.bson.Document> collection; + private MongoTemplate mongoTemplate; - ArgumentCaptor<DBObject> keysCaptor; - ArgumentCaptor<DBObject> optionsCaptor; - ArgumentCaptor<String> collectionCaptor; + private ArgumentCaptor<org.bson.Document> keysCaptor; + private ArgumentCaptor<IndexOptions> optionsCaptor; + private ArgumentCaptor<String> collectionCaptor; - @Before - public void setUp() { + @BeforeEach + void setUp() { - keysCaptor = ArgumentCaptor.forClass(DBObject.class); - optionsCaptor = ArgumentCaptor.forClass(DBObject.class); + keysCaptor = ArgumentCaptor.forClass(org.bson.Document.class); + optionsCaptor = ArgumentCaptor.forClass(IndexOptions.class); collectionCaptor = ArgumentCaptor.forClass(String.class); - when(factory.getDb()).thenReturn(db); - when(db.getCollection(collectionCaptor.capture())).thenReturn(collection); + when(factory.getMongoDatabase()).thenReturn(db); + when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(db.getCollection(collectionCaptor.capture(), eq(org.bson.Document.class))) + .thenReturn((MongoCollection) collection); + + mongoTemplate = new MongoTemplate(factory); - doNothing().when(collection).createIndex(keysCaptor.capture(), optionsCaptor.capture()); + when(collection.createIndex(keysCaptor.capture(), optionsCaptor.capture())).thenReturn("OK"); } @Test - public void buildsIndexDefinitionUsingFieldName() { + void buildsIndexDefinitionUsingFieldName() { MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), is(notNullValue())); - assertThat(keysCaptor.getValue().keySet(), hasItem("fieldname")); - assertThat(optionsCaptor.getValue().get("name").toString(), is("indexName")); - assertThat(optionsCaptor.getValue().get("background"), nullValue()); - assertThat(optionsCaptor.getValue().get("expireAfterSeconds"), nullValue()); + assertThat(keysCaptor.getValue()).isNotNull().containsKey("fieldname"); + assertThat(optionsCaptor.getValue().getName()).isEqualTo("indexName"); + assertThat(optionsCaptor.getValue().isBackground()).isFalse(); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isNull(); } @Test - public void doesNotCreateIndexForEntityComingFromDifferentMappingContext() { + void doesNotCreateIndexForEntityComingFromDifferentMappingContext() { MongoMappingContext mappingContext = new MongoMappingContext(); MongoMappingContext personMappingContext = prepareMappingContext(Person.class); - MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory); + MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - MongoPersistentEntity<?> entity = personMappingContext.getPersistentEntity(Person.class); + MongoPersistentEntity<?> entity = personMappingContext.getRequiredPersistentEntity(Person.class); 
MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event = new MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>( personMappingContext, entity); creator.onApplicationEvent(event); - verifyZeroInteractions(collection); + verifyNoInteractions(collection); } - /** - * @see DATAMONGO-530 - */ - @Test - public void isIndexCreatorForMappingContextHandedIntoConstructor() { + @Test // DATAMONGO-530 + void isIndexCreatorForMappingContextHandedIntoConstructor() { MongoMappingContext mappingContext = new MongoMappingContext(); mappingContext.initialize(); - MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory); - assertThat(creator.isIndexCreatorFor(mappingContext), is(true)); - assertThat(creator.isIndexCreatorFor(new MongoMappingContext()), is(false)); + MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); + assertThat(creator.isIndexCreatorFor(mappingContext)).isTrue(); + assertThat(creator.isIndexCreatorFor(new MongoMappingContext())).isFalse(); } - /** - * @see DATAMONGO-554 - */ - @Test - public void triggersBackgroundIndexingIfConfigured() { + @Test // DATAMONGO-554 + void triggersBackgroundIndexingIfConfigured() { MongoMappingContext mappingContext = prepareMappingContext(AnotherPerson.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), is(notNullValue())); - assertThat(keysCaptor.getValue().keySet(), hasItem("lastname")); - assertThat(optionsCaptor.getValue().get("name").toString(), is("lastname")); - assertThat(optionsCaptor.getValue().get("background"), IsEqual.<Object> equalTo(true)); - assertThat(optionsCaptor.getValue().get("expireAfterSeconds"), nullValue()); + assertThat(keysCaptor.getValue()).isNotNull().containsKey("lastname"); + assertThat(optionsCaptor.getValue().getName()).isEqualTo("lastname"); + assertThat(optionsCaptor.getValue().isBackground()).isTrue(); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isNull(); } - /** - * @see DATAMONGO-544 - */ - @Test - public void expireAfterSecondsIfConfigured() { + @Test // DATAMONGO-544 + void expireAfterSecondsIfConfigured() { MongoMappingContext mappingContext = prepareMappingContext(Milk.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), is(notNullValue())); - assertThat(keysCaptor.getValue().keySet(), hasItem("expiry")); - assertThat(optionsCaptor.getValue().get("expireAfterSeconds"), IsEqual.<Object> 
equalTo(60L)); + assertThat(keysCaptor.getValue()).isNotNull().containsKey("expiry"); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isEqualTo(60); } - /** - * @see DATAMONGO-899 - */ - @Test - public void createsNotNestedGeoSpatialIndexCorrectly() { + @Test // DATAMONGO-899 + void createsNotNestedGeoSpatialIndexCorrectly() { MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); + + assertThat(keysCaptor.getValue()).isEqualTo(new org.bson.Document("company.address.location", "2d")); - assertThat(keysCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("company.address.location", "2d").get())); - assertThat(optionsCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("name", "company.address.location") - .add("min", -180).add("max", 180).add("bits", 26).get())); + IndexOptions opts = optionsCaptor.getValue(); + assertThat(opts.getName()).isEqualTo("company.address.location"); + assertThat(opts.getMin()).isCloseTo(-180d, offset(0d)); + assertThat(opts.getMax()).isCloseTo(180d, offset(0d)); + assertThat(opts.getBits()).isEqualTo(26); } - /** - * @see DATAMONGO-827 - */ - @Test - public void autoGeneratedIndexNameShouldGenerateNoName() { + @Test // DATAMONGO-827 + void autoGeneratedIndexNameShouldGenerateNoName() { MongoMappingContext mappingContext = prepareMappingContext(EntityWithGeneratedIndexName.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue().containsField("name"), is(false)); - assertThat(keysCaptor.getValue().keySet(), hasItem("lastname")); - assertThat(optionsCaptor.getValue(), is(new BasicDBObjectBuilder().get())); + assertThat(keysCaptor.getValue()).doesNotContainKey("name").containsKey("lastname"); + assertThat(optionsCaptor.getValue().getName()).isNull(); } - /** - * @see DATAMONGO-367 - */ - @Test - public void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() { + @Test // DATAMONGO-367 + void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() { MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); ArgumentCaptor collectionNameCapturer = ArgumentCaptor.forClass(String.class); - verify(db, times(1)).getCollection(collectionNameCapturer.capture()); - assertThat(collectionNameCapturer.getValue(), equalTo("wrapper")); + verify(db, times(1)).getCollection(collectionNameCapturer.capture(), any()); + assertThat(collectionNameCapturer.getValue()).isEqualTo("wrapper"); } - /** - * @see DATAMONGO-367 - */ - @Test - public void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() { + @Test // DATAMONGO-367 + void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() { MongoMappingContext mappingContext = prepareMappingContext(IndexedDocumentWrapper.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); ArgumentCaptor collectionNameCapturer = ArgumentCaptor.forClass(String.class); - verify(db, times(1)).getCollection(collectionNameCapturer.capture()); - assertThat(collectionNameCapturer.getValue(), equalTo("indexedDocumentWrapper")); + 
verify(db, times(1)).getCollection(collectionNameCapturer.capture(), any()); + assertThat(collectionNameCapturer.getValue()).isEqualTo("indexedDocumentWrapper"); } - /** - * @see DATAMONGO-1125 - */ - @Test(expected = DataAccessException.class) - public void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() { + @Test // DATAMONGO-1125 + void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() { - when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); - doThrow(new MongoException(6, "HostUnreachable")).when(collection).createIndex(Mockito.any(DBObject.class), - Mockito.any(DBObject.class)); + doThrow(new MongoException(6, "HostUnreachable")).when(collection).createIndex(any(org.bson.Document.class), + any(IndexOptions.class)); MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + assertThatThrownBy(() -> new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate)) + .isInstanceOf(DataAccessException.class); } - /** - * @see DATAMONGO-1125 - */ - @Test(expected = ClassCastException.class) - public void createIndexShouldNotConvertUnknownExceptionTypes() { + @Test // DATAMONGO-1125 + void createIndexShouldNotConvertUnknownExceptionTypes() { - when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); - doThrow(new ClassCastException("o_O")).when(collection).createIndex(Mockito.any(DBObject.class), - Mockito.any(DBObject.class)); + doThrow(new ClassCastException("o_O")).when(collection).createIndex(any(org.bson.Document.class), + any(IndexOptions.class)); MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + assertThatThrownBy(() -> new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate)) + .isInstanceOf(ClassCastException.class); } private static MongoMappingContext prepareMappingContext(Class type) { @@ -257,8 +236,8 @@ private static MongoMappingContext prepareMappingContext(Class type) { @Document static class Person { - @Indexed(name = "indexName")// - @Field("fieldname")// + @Indexed(name = "indexName") // + @Field("fieldname") // String field; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java index 97554b5582..aa26445f2d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2016 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,29 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.util.Collections; +import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.core.annotation.AliasFor; +import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.CompoundIndexResolutionTests; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.GeoSpatialIndexResolutionTests; @@ -49,252 +52,306 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.util.ClassTypeInformation; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; - /** + * Tests for {@link MongoPersistentEntityIndexResolver}. + * * @author Christoph Strobl * @author Mark Paluch + * @author Dave Perryman + * @author Stefan Tirea */ @RunWith(Suite.class) @SuiteClasses({ IndexResolutionTests.class, GeoSpatialIndexResolutionTests.class, CompoundIndexResolutionTests.class, TextIndexedResolutionTests.class, MixedIndexResolutionTests.class }) +@SuppressWarnings("unused") public class MongoPersistentEntityIndexResolverUnitTests { /** * Test resolution of {@link Indexed}. 
- * + * * @author Christoph Strobl + * @author Mark Paluch */ public static class IndexResolutionTests { - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void indexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnLevelZero.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("indexedProperty", "Zero", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void indexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelOne.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("zero.indexedProperty", "One", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test - public void depplyNestedIndexPathIsResolvedCorrectly() { + @Test // DATAMONGO-899, DATAMONGO-2188 + public void shouldResolveIndexViaClass() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + IndexResolver indexResolver = IndexResolver.create(mappingContext); + Iterable definitions = indexResolver.resolveIndexFor(IndexOnLevelOne.class); + + assertThat(definitions).isNotEmpty(); + } + + @Test // DATAMONGO-899 + public void deeplyNestedIndexPathIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelTwo.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("one.zero.indexedProperty", "Two", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void resolvesIndexPathNameForNamedPropertiesCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnLevelOneWithExplicitlyNamedField.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("customZero.customFieldName", "indexOnLevelOneWithExplicitlyNamedField", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void resolvesIndexDefinitionCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnLevelZero.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), - equalTo(new BasicDBObjectBuilder().add("name", "indexedProperty").get())); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document("name", "indexedProperty")); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void resolvesIndexDefinitionOptionsCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( WithOptionsOnIndexedProperty.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), - equalTo(new BasicDBObjectBuilder().add("name", "indexedProperty").add("unique", true).add("dropDups", true) - .add("sparse", true).add("background", true).add("expireAfterSeconds", 10L).get())); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document().append("name", "indexedProperty") + .append("unique", true).append("sparse", true).append("background", true).append("expireAfterSeconds", 10L)); } - /** - * @see DATAMONGO-899 - */ - @Test - public void 
resolvesIndexCollectionNameCorrectlyWhenDefinedInAnnotation() { - - List indexDefinitions = prepareMappingContextAndResolveIndexForType( - WithOptionsOnIndexedProperty.class); - assertThat(indexDefinitions.get(0).getCollection(), equalTo("CollectionOverride")); - } - - /** - * @see DATAMONGO-1297 - */ - @Test + @Test // DATAMONGO-1297 public void resolvesIndexOnDbrefWhenDefined() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(WithDbRef.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getCollection(), equalTo("withDbRef")); - assertThat(indexDefinitions.get(0).getIndexKeys(), - equalTo(new BasicDBObjectBuilder().add("indexedDbRef", 1).get())); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getCollection()).isEqualTo("withDbRef"); + assertThat(indexDefinitions.get(0).getIndexKeys()).isEqualTo(new org.bson.Document("indexedDbRef", 1)); } - /** - * @see DATAMONGO-1297 - */ - @Test + @Test // DATAMONGO-1297 public void resolvesIndexOnDbrefWhenDefinedOnNestedElement() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( WrapperOfWithDbRef.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getCollection(), equalTo("wrapperOfWithDbRef")); - assertThat(indexDefinitions.get(0).getIndexKeys(), - equalTo(new BasicDBObjectBuilder().add("nested.indexedDbRef", 1).get())); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getCollection()).isEqualTo("wrapperOfWithDbRef"); + assertThat(indexDefinitions.get(0).getIndexKeys()).isEqualTo(new org.bson.Document("nested.indexedDbRef", 1)); } - /** - * @see DATAMONGO-1163 - */ - @Test + @Test // DATAMONGO-1163 public void resolveIndexDefinitionInMetaAnnotatedFields() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnMetaAnnotatedField.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getCollection(), equalTo("indexOnMetaAnnotatedField")); - assertThat(indexDefinitions.get(0).getIndexOptions(), - equalTo(new BasicDBObjectBuilder().add("name", "_name").get())); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getCollection()).isEqualTo("indexOnMetaAnnotatedField"); + assertThat(indexDefinitions.get(0).getIndexOptions()).isEqualTo(new org.bson.Document("name", "_name")); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void resolveIndexDefinitionInComposedAnnotatedFields() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexedDocumentWithComposedAnnotations.class); - assertThat(indexDefinitions, hasSize(2)); + assertThat(indexDefinitions).hasSize(2); IndexDefinitionHolder indexDefinitionHolder = indexDefinitions.get(1); - assertThat(indexDefinitionHolder.getIndexKeys(), isBsonObject().containing("fieldWithMyIndexName", 1)); - assertThat(indexDefinitionHolder.getIndexOptions(), - isBsonObject().containing("sparse", true).containing("unique", true).containing("name", "my_index_name")); + assertThat(indexDefinitionHolder.getIndexKeys()).containsEntry("fieldWithMyIndexName", 1); + assertThat(indexDefinitionHolder.getIndexOptions()) // + .containsEntry("sparse", true) // + .containsEntry("unique", true) // + .containsEntry("name", "my_index_name"); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void resolveIndexDefinitionInCustomComposedAnnotatedFields() { List indexDefinitions = 
prepareMappingContextAndResolveIndexForType( IndexedDocumentWithComposedAnnotations.class); - assertThat(indexDefinitions, hasSize(2)); + assertThat(indexDefinitions).hasSize(2); IndexDefinitionHolder indexDefinitionHolder = indexDefinitions.get(0); - assertThat(indexDefinitionHolder.getIndexKeys(), isBsonObject().containing("fieldWithDifferentIndexName", 1)); - assertThat(indexDefinitionHolder.getIndexOptions(), - isBsonObject().containing("sparse", true).containing("name", "different_name").notContaining("unique")); + assertThat(indexDefinitionHolder.getIndexKeys()).containsEntry("fieldWithDifferentIndexName", 1); + assertThat(indexDefinitionHolder.getIndexOptions()) // + .containsEntry("sparse", true) // + .containsEntry("name", "different_name") // + .doesNotContainKey("unique"); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromString() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterAsPlainString.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 600L); + } + + @Test // GH-4844 + public void shouldResolveZeroTimeoutFromString() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterZeroSecondsAsPlainString.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 0L); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromIso8601String() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithIso8601Style.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 86400L); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromExpression() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterAsExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 11L); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromExpressionReturningDuration() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterAsExpressionResultingInDuration.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 100L); + } + + @Test // DATAMONGO-2112 + public void shouldErrorOnInvalidTimeoutExpression() { + + MongoMappingContext mappingContext = prepareMappingContext(WithInvalidExpireAfter.class); + MongoPersistentEntityIndexResolver indexResolver = new MongoPersistentEntityIndexResolver(mappingContext); + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> indexResolver + .resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(WithInvalidExpireAfter.class))); + } + + @Test // DATAMONGO-2112 + public void shouldErrorOnDuplicateTimeoutExpression() { + + MongoMappingContext mappingContext = prepareMappingContext(WithDuplicateExpiry.class); + MongoPersistentEntityIndexResolver indexResolver = new MongoPersistentEntityIndexResolver(mappingContext); + + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> indexResolver + .resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(WithDuplicateExpiry.class))); } - @Document(collection = "Zero") - static class IndexOnLevelZero { + @Test // DATAMONGO-2112 + public void resolveExpressionIndexName() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithIndexNameAsExpression.class); + + 
assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "my1st"); + } + + @Test // DATAMONGO-1569 + public void resolvesPartialFilter() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithPartialFilter.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("partialFilterExpression", + org.bson.Document.parse("{'value': {'$exists': true}}")); + } + + @Document("Zero") + class IndexOnLevelZero { @Indexed String indexedProperty; } - @Document(collection = "One") - static class IndexOnLevelOne { + @Document("One") + class IndexOnLevelOne { IndexOnLevelZero zero; } - @Document(collection = "Two") - static class IndexOnLevelTwo { + @Document("Two") + class IndexOnLevelTwo { IndexOnLevelOne one; } - @Document(collection = "WithOptionsOnIndexedProperty") - static class WithOptionsOnIndexedProperty { + @Document("WithOptionsOnIndexedProperty") + class WithOptionsOnIndexedProperty { - @Indexed(background = true, collection = "CollectionOverride", direction = IndexDirection.DESCENDING, - dropDups = true, expireAfterSeconds = 10, sparse = true, unique = true) // - String indexedProperty; + @Indexed(background = true, direction = IndexDirection.DESCENDING, expireAfterSeconds = 10, sparse = true, + unique = true) // + String indexedProperty; } @Document - static class IndexOnLevelOneWithExplicitlyNamedField { + class IndexOnLevelOneWithExplicitlyNamedField { @Field("customZero") IndexOnLevelZeroWithExplicityNamedField zero; } - static class IndexOnLevelZeroWithExplicityNamedField { + class IndexOnLevelZeroWithExplicityNamedField { - @Indexed @Field("customFieldName") String namedProperty; + @Indexed + @Field("customFieldName") String namedProperty; } @Document - static class WrapperOfWithDbRef { + class WrapperOfWithDbRef { WithDbRef nested; } @Document - static class WithDbRef { + class WithDbRef { @Indexed // @DBRef // NoIndex indexedDbRef; } - @Document(collection = "no-index") - static class NoIndex { + @Document("no-index") + class NoIndex { @Id String id; } @Document - static class IndexedDocumentWithComposedAnnotations { + class IndexedDocumentWithComposedAnnotations { @Id String id; @CustomIndexedAnnotation String fieldWithDifferentIndexName; @@ -304,13 +361,13 @@ static class IndexedDocumentWithComposedAnnotations { @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD }) @ComposedIndexedAnnotation(indexName = "different_name", beUnique = false) - static @interface CustomIndexedAnnotation { + @interface CustomIndexedAnnotation { } @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Indexed - static @interface ComposedIndexedAnnotation { + @interface ComposedIndexedAnnotation { @AliasFor(annotation = Indexed.class, attribute = "unique") boolean beUnique() default true; @@ -325,75 +382,108 @@ static class IndexedDocumentWithComposedAnnotations { @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @org.springframework.data.mongodb.core.mapping.Field - static @interface ComposedFieldAnnotation { + @interface ComposedFieldAnnotation { @AliasFor(annotation = org.springframework.data.mongodb.core.mapping.Field.class, attribute = "value") String name() default "_id"; } + + @Document + class WithExpireAfterAsPlainString { + @Indexed(expireAfter = "10m") String withTimeout; + } + + @Document + class WithExpireAfterZeroSecondsAsPlainString { + @Indexed(expireAfter = "0s") String withTimeout; + } + + @Document + class WithIso8601Style { + @Indexed(expireAfter = "P1D") 
String withTimeout; + } + + @Document + class WithExpireAfterAsExpression { + @Indexed(expireAfter = "#{10 + 1 + 's'}") String withTimeout; + } + + @Document + class WithExpireAfterAsExpressionResultingInDuration { + @Indexed(expireAfter = "#{T(java.time.Duration).ofSeconds(100)}") String withTimeout; + } + + @Document + class WithInvalidExpireAfter { + @Indexed(expireAfter = "123ops") String withTimeout; + } + + @Document + class WithDuplicateExpiry { + @Indexed(expireAfter = "1s", expireAfterSeconds = 2) String withTimeout; + } + + @Document + class WithIndexNameAsExpression { + @Indexed(name = "#{'my' + 1 + 'st'}") String spelIndexName; + } + + @Document + class WithPartialFilter { + @Indexed(partialFilter = "{'value': {'$exists': true}}") String withPartialFilter; + } } @Target({ ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) @Indexed @interface IndexedFieldAnnotation { - } @Document - static class IndexOnMetaAnnotatedField { - @Field("_name") @IndexedFieldAnnotation String lastname; + class IndexOnMetaAnnotatedField { + @Field("_name") + @IndexedFieldAnnotation String lastname; } /** * Test resolution of {@link GeoSpatialIndexed}. - * + * * @author Christoph Strobl */ public static class GeoSpatialIndexResolutionTests { - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void geoSpatialIndexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( GeoSpatialIndexOnLevelZero.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("geoIndexedProperty", "Zero", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void geoSpatialIndexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( GeoSpatialIndexOnLevelOne.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("zero.geoIndexedProperty", "One", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void depplyNestedGeoSpatialIndexPathIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( GeoSpatialIndexOnLevelTwo.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("one.zero.geoIndexedProperty", "Two", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void resolvesIndexDefinitionOptionsCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( @@ -401,14 +491,11 @@ public void resolvesIndexDefinitionOptionsCorrectly() { IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo( - new BasicDBObjectBuilder().add("name", "location").add("min", 1).add("max", 100).add("bits", 2).get())); + assertThat(indexDefinition.getIndexOptions()).isEqualTo( + new org.bson.Document().append("name", "location").append("min", 1).append("max", 100).append("bits", 2)); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void resolvesComposedAnnotationIndexDefinitionOptionsCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( @@ -416,37 +503,45 @@ public void resolvesComposedAnnotationIndexDefinitionOptionsCorrectly() { IndexDefinition indexDefinition = 
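// Sketch of the SpEL index name and partial filter resolution covered by the fixtures
// above; the Account type and the 'balance' field are hypothetical.
//
// @Document
// class Account {
//
//   // The name is evaluated at resolution time: 'my' + 1 + 'st' -> "my1st".
//   @Indexed(name = "#{'my' + 1 + 'st'}") String nickname;
//
//   // Maps onto the 'partialFilterExpression' index option, so only documents in
//   // which 'balance' exists are indexed.
//   @Indexed(partialFilter = "{'balance': {'$exists': true}}") Long balance;
// }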
indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexKeys(), - isBsonObject().containing("location", "geoHaystack").containing("What light?", 1)); - assertThat(indexDefinition.getIndexOptions(), - isBsonObject().containing("name", "my_geo_index_name").containing("bucketSize", 2.0)); + assertThat(indexDefinition.getIndexKeys()).containsEntry("location", "geoHaystack").containsEntry("What light?", + 1); + assertThat(indexDefinition.getIndexOptions()).containsEntry("name", "my_geo_index_name") + .containsEntry("bucketSize", 2.0); + } + + @Test // DATAMONGO-2112 + public void resolveExpressionIndexNameForGeoIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + GeoIndexWithNameAsExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "my1st"); } - @Document(collection = "Zero") - static class GeoSpatialIndexOnLevelZero { + @Document("Zero") + class GeoSpatialIndexOnLevelZero { @GeoSpatialIndexed Point geoIndexedProperty; } - @Document(collection = "One") - static class GeoSpatialIndexOnLevelOne { + @Document("One") + class GeoSpatialIndexOnLevelOne { GeoSpatialIndexOnLevelZero zero; } - @Document(collection = "Two") - static class GeoSpatialIndexOnLevelTwo { + @Document("Two") + class GeoSpatialIndexOnLevelTwo { GeoSpatialIndexOnLevelOne one; } - @Document(collection = "WithOptionsOnGeoSpatialIndexProperty") - static class WithOptionsOnGeoSpatialIndexProperty { + @Document("WithOptionsOnGeoSpatialIndexProperty") + class WithOptionsOnGeoSpatialIndexProperty { - @GeoSpatialIndexed(collection = "CollectionOverride", bits = 2, max = 100, min = 1, - type = GeoSpatialIndexType.GEO_2D) // - Point location; + @GeoSpatialIndexed(bits = 2, max = 100, min = 1, type = GeoSpatialIndexType.GEO_2D) // + Point location; } - @Document(collection = "WithComposedAnnotation") - static class GeoSpatialIndexedDocumentWithComposedAnnotation { + @Document("WithComposedAnnotation") + class GeoSpatialIndexedDocumentWithComposedAnnotation { @ComposedGeoSpatialIndexed // Point location; @@ -470,175 +565,232 @@ static class GeoSpatialIndexedDocumentWithComposedAnnotation { GeoSpatialIndexType indexType() default GeoSpatialIndexType.GEO_HAYSTACK; } + @Document + class GeoIndexWithNameAsExpression { + @GeoSpatialIndexed(name = "#{'my' + 1 + 'st'}") Point spelIndexName; + } + } /** * Test resolution of {@link CompoundIndexes}. 
- * + * * @author Christoph Strobl */ public static class CompoundIndexResolutionTests { - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void compoundIndexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelZero.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void compoundIndexOptionsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelZero.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "compound_index") - .add("unique", true).add("dropDups", true).add("sparse", true).add("background", true).get())); - assertThat(indexDefinition.getIndexKeys(), - equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get())); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document("name", "compound_index") + .append("unique", true).append("sparse", true).append("background", true)); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1).append("bar", -1)); } - /** - * @see DATAMONGO-909 - */ - @Test + @Test // DATAMONGO-909 public void compoundIndexOnSuperClassResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexDefinedOnSuperClass.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "compound_index") - .add("unique", true).add("dropDups", true).add("sparse", true).add("background", true).get())); - assertThat(indexDefinition.getIndexKeys(), - equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get())); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document().append("name", "compound_index") + .append("unique", true).append("sparse", true).append("background", true)); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1).append("bar", -1)); } - /** - * @see DATAMONGO-827 - */ - @Test + @Test // DATAMONGO-827 public void compoundIndexDoesNotSpecifyNameWhenUsingGenerateName() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( ComountIndexWithAutogeneratedName.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("unique", true) - .add("dropDups", true).add("sparse", true).add("background", true).get())); - assertThat(indexDefinition.getIndexKeys(), - equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get())); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("unique", true).append("sparse", true).append("background", true)); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1).append("bar", -1)); } - /** - * @see DATAMONGO-929 - */ - @Test + @Test // DATAMONGO-929 public void compoundIndexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelOne.class); - 
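// For orientation, a sketch of how the annotation under test maps onto the resolved
// index definition (the Example type is hypothetical; keys and options are taken from
// the assertions in this section):
//
// @Document("CompoundIndexOnLevelZero")
// @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}",
//     background = true, sparse = true, unique = true)
// class Example {}
//
// resolved keys:    { "foo" : 1, "bar" : -1 }
// resolved options: { "name" : "compound_index", "unique" : true, "sparse" : true, "background" : true }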
assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "zero.foo", "zero.bar" }, "CompoundIndexOnLevelOne", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-929 - */ - @Test + @Test // DATAMONGO-929 public void emptyCompoundIndexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelOneWithEmptyIndexDefinition.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "zero" }, "CompoundIndexOnLevelZeroWithEmptyIndexDef", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-929 - */ - @Test + @Test // DATAMONGO-929 public void singleCompoundIndexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( SingleCompoundIndex.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void singleCompoundIndexUsingComposedAnnotationsOnTypeResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexDocumentWithComposedAnnotation.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getIndexKeys(), isBsonObject().containing("foo", 1).containing("bar", -1)); - assertThat(indexDefinitions.get(0).getIndexOptions(), isBsonObject().containing("name", "my_compound_index_name") - .containing("unique", true).containing("background", true)); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexKeys()).containsEntry("foo", 1).containsEntry("bar", -1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "my_compound_index_name") + .containsEntry("unique", true).containsEntry("background", true); } - @Document(collection = "CompoundIndexOnLevelOne") - static class CompoundIndexOnLevelOne { + @Test // DATAMONGO-2112 + public void resolveExpressionIndexNameForCompoundIndex() { - CompoundIndexOnLevelZero zero; + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + CompoundIndexWithNameExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "cmp2name"); } - @Document(collection = "CompoundIndexOnLevelZeroWithEmptyIndexDef") - static class CompoundIndexOnLevelOneWithEmptyIndexDefinition { + @Test // DATAMONGO-2112 + public void resolveExpressionDefForCompoundIndex() { - CompoundIndexOnLevelZeroWithEmptyIndexDef zero; + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + CompoundIndexWithDefExpression.class); + + assertThat(indexDefinitions).hasSize(1); + assertIndexPathAndCollection(new String[] { "foo", "bar" }, "compoundIndexWithDefExpression", + indexDefinitions.get(0)); } - @Document(collection = "CompoundIndexOnLevelZero") - @CompoundIndexes({ @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, - dropDups = true, sparse = true, unique = true) }) - static class CompoundIndexOnLevelZero {} + @Test // DATAMONGO-2067 + public void shouldIdentifyRepeatedAnnotationCorrectly() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + RepeatedCompoundIndex.class); + + assertThat(indexDefinitions).hasSize(2); + assertIndexPathAndCollection(new 
String[] { "firstname", "lastname" }, "repeatedCompoundIndex", + indexDefinitions.get(0)); + assertIndexPathAndCollection(new String[] { "address.city", "address.street" }, "repeatedCompoundIndex", + indexDefinitions.get(1)); + } - @CompoundIndexes({ - @CompoundIndex(name = "compound_index", background = true, dropDups = true, sparse = true, unique = true) }) - static class CompoundIndexOnLevelZeroWithEmptyIndexDef {} + @Test // DATAMONGO-1569 + public void singleIndexWithPartialFilter() { - @Document(collection = "CompoundIndexOnLevelZero") - @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, dropDups = true, - sparse = true, unique = true) - static class SingleCompoundIndex {} + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + SingleCompoundIndexWithPartialFilter.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexKeys()).containsEntry("foo", 1).containsEntry("bar", -1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "compound_index_with_partial") + .containsEntry("unique", true).containsEntry("background", true); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("partialFilterExpression", + org.bson.Document.parse("{'value': {'$exists': true}}")); + } - static class IndexDefinedOnSuperClass extends CompoundIndexOnLevelZero { + @Test // GH-3002 + public void compoundIndexWithCollation() { + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + CompoundIndexWithCollation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "compound_index_with_collation").append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1)); } - @Document(collection = "ComountIndexWithAutogeneratedName") - @CompoundIndexes({ @CompoundIndex(useGeneratedName = true, def = "{'foo': 1, 'bar': -1}", background = true, - dropDups = true, sparse = true, unique = true) }) - static class ComountIndexWithAutogeneratedName { + @Test // GH-3002 + public void compoundIndexWithCollationFromDocumentAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithCompoundCollationFromDocument.class); + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "compound_index_with_collation").append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1)); } - @Document(collection = "WithComposedAnnotation") - @ComposedCompoundIndex - static class CompoundIndexDocumentWithComposedAnnotation { + @Test // GH-3002 + public void compoundIndexWithEvaluatedCollationFromAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithEvaluatedCollationFromCompoundIndex.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "compound_index_with_collation").append("collation", + new org.bson.Document().append("locale", "de_AT"))); + 
assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1)); + } + + @Document("CompoundIndexOnLevelOne") + class CompoundIndexOnLevelOne { + + CompoundIndexOnLevelZero zero; + } + + @Document("CompoundIndexOnLevelZeroWithEmptyIndexDef") + class CompoundIndexOnLevelOneWithEmptyIndexDefinition { + CompoundIndexOnLevelZeroWithEmptyIndexDef zero; } + @Document("CompoundIndexOnLevelZero") + @CompoundIndexes({ @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, + sparse = true, unique = true) }) + class CompoundIndexOnLevelZero {} + + @CompoundIndexes({ @CompoundIndex(name = "compound_index", background = true, sparse = true, unique = true) }) + class CompoundIndexOnLevelZeroWithEmptyIndexDef {} + + @Document("CompoundIndexOnLevelZero") + @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, sparse = true, + unique = true) + class SingleCompoundIndex {} + + class IndexDefinedOnSuperClass extends CompoundIndexOnLevelZero {} + + @Document("ComountIndexWithAutogeneratedName") + @CompoundIndexes({ @CompoundIndex(useGeneratedName = true, def = "{'foo': 1, 'bar': -1}", background = true, + sparse = true, unique = true) }) + class ComountIndexWithAutogeneratedName {} + + @Document("WithComposedAnnotation") + @ComposedCompoundIndex + class CompoundIndexDocumentWithComposedAnnotation {} + @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE }) @CompoundIndex @@ -661,169 +813,187 @@ static class CompoundIndexDocumentWithComposedAnnotation { } + @Document + @CompoundIndex(name = "#{'cmp' + 2 + 'name'}", def = "{'foo': 1, 'bar': -1}") + class CompoundIndexWithNameExpression {} + + @Document + @CompoundIndex(def = "#{T(org.bson.Document).parse(\"{ 'foo': 1, 'bar': -1 }\")}") + class CompoundIndexWithDefExpression {} + + @Document + @CompoundIndex(name = "cmp-idx-one", def = "{'firstname': 1, 'lastname': -1}") + @CompoundIndex(name = "cmp-idx-two", def = "{'address.city': -1, 'address.street': 1}") + class RepeatedCompoundIndex {} + + @Document("SingleCompoundIndexWithPartialFilter") + @CompoundIndex(name = "compound_index_with_partial", def = "{'foo': 1, 'bar': -1}", background = true, + unique = true, partialFilter = "{'value': {'$exists': true}}") + class SingleCompoundIndexWithPartialFilter {} + + @Document + @CompoundIndex(name = "compound_index_with_collation", def = "{'foo': 1}", + collation = "{'locale': 'en_US', 'strength': 2}") + class CompoundIndexWithCollation {} + + @Document(collation = "{'locale': 'en_US', 'strength': 2}") + @CompoundIndex(name = "compound_index_with_collation", def = "{'foo': 1}") + class WithCompoundCollationFromDocument {} + + @Document(collation = "{'locale': 'en_US', 'strength': 2}") + @CompoundIndex(name = "compound_index_with_collation", def = "{'foo': 1}", + collation = "#{{ 'locale' : 'de' + '_' + 'AT' }}") + class WithEvaluatedCollationFromCompoundIndex {} } public static class TextIndexedResolutionTests { - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937 public void shouldResolveSingleFieldTextIndexCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnSinglePropertyInRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("bar", "textIndexOnSinglePropertyInRoot", indexDefinitions.get(0)); + assertThat(indexDefinitions.get(0).getIndexOptions()).doesNotContainKey("collation"); + } + + @Test // DATAMONGO-2316 + 
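// Context for the following test: MongoDB text indexes only support the simple binary
// comparison collation, so even when a collation such as "de_AT" is configured on
// @Document, the resolver is expected to pin the text index to { "locale" : "simple" }.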
public void shouldEnforceSimpleCollationOnTextIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + TextIndexWithCollation.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("collation", + new org.bson.Document("locale", "simple")); } - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937 public void shouldResolveMultiFieldTextIndexCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( - TextIndexOnMutiplePropertiesInRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); - assertIndexPathAndCollection(new String[] { "foo", "bar" }, "textIndexOnMutiplePropertiesInRoot", + TextIndexOnMultiplePropertiesInRoot.class); + + assertThat(indexDefinitions).hasSize(1); + assertIndexPathAndCollection(new String[] { "foo", "bar" }, "textIndexOnMultiplePropertiesInRoot", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937 public void shouldResolveTextIndexOnElementCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnNestedRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "nested.foo" }, "textIndexOnNestedRoot", indexDefinitions.get(0)); } - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937 public void shouldResolveTextIndexOnElementWithWeightCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnNestedWithWeightRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "nested.foo" }, "textIndexOnNestedWithWeightRoot", indexDefinitions.get(0)); - DBObject weights = DBObjectTestUtils.getAsDBObject(indexDefinitions.get(0).getIndexOptions(), "weights"); - assertThat(weights.get("nested.foo"), is((Object) 5F)); + org.bson.Document weights = DocumentTestUtils.getAsDocument(indexDefinitions.get(0).getIndexOptions(), "weights"); + assertThat(weights.get("nested.foo")).isEqualTo(5F); } - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937 public void shouldResolveTextIndexOnElementWithMostSpecificWeightCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnNestedWithMostSpecificValueRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "nested.foo", "nested.bar" }, "textIndexOnNestedWithMostSpecificValueRoot", indexDefinitions.get(0)); - DBObject weights = DBObjectTestUtils.getAsDBObject(indexDefinitions.get(0).getIndexOptions(), "weights"); - assertThat(weights.get("nested.foo"), is((Object) 5F)); - assertThat(weights.get("nested.bar"), is((Object) 10F)); + org.bson.Document weights = DocumentTestUtils.getAsDocument(indexDefinitions.get(0).getIndexOptions(), "weights"); + assertThat(weights.get("nested.foo")).isEqualTo(5F); + assertThat(weights.get("nested.bar")).isEqualTo(10F); } - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937 public void shouldSetDefaultLanguageCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithDefaultLanguage.class); - assertThat(indexDefinitions.get(0).getIndexOptions().get("default_language"), is((Object) "spanish")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("default_language", 
"spanish"); } - /** - * @see DATAMONGO-937, DATAMONGO-1049 - */ - @Test + @Test // DATAMONGO-937, DATAMONGO-1049 public void shouldResolveTextIndexLanguageOverrideCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithLanguageOverride.class); - assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is((Object) "lang")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("language_override", "lang"); } - /** - * @see DATAMONGO-1049 - */ - @Test + @Test // DATAMONGO-1049 public void shouldIgnoreTextIndexLanguageOverrideOnNestedElements() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithLanguageOverrideOnNestedElement.class); - assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is(nullValue())); + assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override")).isNull(); } - /** - * @see DATAMONGO-1049 - */ - @Test + @Test // DATAMONGO-1049 public void shouldNotCreateIndexDefinitionWhenOnlyLanguageButNoTextIndexPresent() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNoTextIndexPropertyButReservedFieldLanguage.class); - assertThat(indexDefinitions, is(empty())); + + assertThat(indexDefinitions).isEmpty(); } - /** - * @see DATAMONGO-1049 - */ - @Test + @Test // DATAMONGO-1049 public void shouldNotCreateIndexDefinitionWhenOnlyAnnotatedLanguageButNoTextIndexPresent() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated.class); - assertThat(indexDefinitions, is(empty())); + + assertThat(indexDefinitions).isEmpty(); } - /** - * @see DATAMONGO-1049 - */ - @Test + @Test // DATAMONGO-1049 public void shouldPreferExplicitlyAnnotatedLanguageProperty() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithOverlappingLanguageProps.class); - assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is((Object) "lang")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("language_override", "lang"); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void shouldResolveComposedAnnotationCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexedDocumentWithComposedAnnotation.class); - DBObject weights = DBObjectTestUtils.getAsDBObject(indexDefinitions.get(0).getIndexOptions(), "weights"); - assertThat(weights, isBsonObject().containing("foo", 99f)); + org.bson.Document weights = DocumentTestUtils.getAsDocument(indexDefinitions.get(0).getIndexOptions(), "weights"); + assertThat(weights).containsEntry("foo", 99f); } @Document - static class TextIndexOnSinglePropertyInRoot { + class TextIndexOnSinglePropertyInRoot { String foo; @TextIndexed String bar; } + @Document(collation = "de_AT") + class TextIndexWithCollation { + + @TextIndexed String foo; + } + @Document - static class TextIndexOnMutiplePropertiesInRoot { + class TextIndexOnMultiplePropertiesInRoot { @TextIndexed String foo; @@ -831,48 +1001,48 @@ static class TextIndexOnMutiplePropertiesInRoot { } @Document - static class TextIndexOnNestedRoot { + class TextIndexOnNestedRoot { String bar; @TextIndexed TextIndexOnNested nested; } - static class TextIndexOnNested { + class TextIndexOnNested { String foo; } @Document - static class TextIndexOnNestedWithWeightRoot { + class TextIndexOnNestedWithWeightRoot { @TextIndexed(weight = 5) TextIndexOnNested 
nested; } @Document - static class TextIndexOnNestedWithMostSpecificValueRoot { + class TextIndexOnNestedWithMostSpecificValueRoot { @TextIndexed(weight = 5) TextIndexOnNestedWithMostSpecificValue nested; } - static class TextIndexOnNestedWithMostSpecificValue { + class TextIndexOnNestedWithMostSpecificValue { String foo; @TextIndexed(weight = 10) String bar; } @Document(language = "spanish") - static class DocumentWithDefaultLanguage { + class DocumentWithDefaultLanguage { @TextIndexed String foo; } @Document - static class DocumentWithLanguageOverrideOnNestedElement { + class DocumentWithLanguageOverrideOnNestedElement { DocumentWithLanguageOverride nested; } @Document - static class DocumentWithLanguageOverride { + class DocumentWithLanguageOverride { @TextIndexed String foo; @@ -880,19 +1050,19 @@ static class DocumentWithLanguageOverride { } @Document - static class DocumentWithNoTextIndexPropertyButReservedFieldLanguage { + class DocumentWithNoTextIndexPropertyButReservedFieldLanguage { String language; } @Document - static class DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated { + class DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated { @Field("language") String lang; } @Document - static class DocumentWithOverlappingLanguageProps { + class DocumentWithOverlappingLanguageProps { @TextIndexed String foo; String language; @@ -900,7 +1070,7 @@ static class DocumentWithOverlappingLanguageProps { } @Document - static class TextIndexedDocumentWithComposedAnnotation { + class TextIndexedDocumentWithComposedAnnotation { @ComposedTextIndexedAnnotation String foo; String lang; @@ -909,7 +1079,7 @@ static class TextIndexedDocumentWithComposedAnnotation { @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @TextIndexed - static @interface ComposedTextIndexedAnnotation { + @interface ComposedTextIndexedAnnotation { @AliasFor(annotation = TextIndexed.class, attribute = "weight") float heavyweight() default 99f; @@ -918,132 +1088,105 @@ static class TextIndexedDocumentWithComposedAnnotation { public static class MixedIndexResolutionTests { - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void multipleIndexesResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(MixedIndexRoot.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat(indexDefinitions.get(0).getIndexDefinition(), instanceOf(Index.class)); - assertThat(indexDefinitions.get(1).getIndexDefinition(), instanceOf(GeospatialIndex.class)); + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexDefinition()).isInstanceOf(Index.class); + assertThat(indexDefinitions.get(1).getIndexDefinition()).isInstanceOf(GeospatialIndex.class); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void cyclicPropertyReferenceOverDBRefShouldNotBeTraversed() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(Inner.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getIndexDefinition().getIndexKeys(), - equalTo(new BasicDBObjectBuilder().add("outer", 1).get())); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexDefinition().getIndexKeys()) + .isEqualTo(new org.bson.Document().append("outer", 1)); } - /** - * @see DATAMONGO-899 - */ - @Test + @Test // DATAMONGO-899 public void associationsShouldNotBeTraversed() { List indexDefinitions = 
prepareMappingContextAndResolveIndexForType(Outer.class); - assertThat(indexDefinitions, empty()); + + assertThat(indexDefinitions).isEmpty(); } - /** - * @see DATAMONGO-926 - */ - @Test + @Test // DATAMONGO-926 public void shouldNotRunIntoStackOverflow() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CycleStartingInBetween.class); - assertThat(indexDefinitions, hasSize(1)); + + assertThat(indexDefinitions).hasSize(1); } - /** - * @see DATAMONGO-926 - */ - @Test + @Test // DATAMONGO-926 public void indexShouldBeFoundEvenForCyclePropertyReferenceOnLevelZero() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleLevelZero.class); assertIndexPathAndCollection("indexedProperty", "cycleLevelZero", indexDefinitions.get(0)); assertIndexPathAndCollection("cyclicReference.indexedProperty", "cycleLevelZero", indexDefinitions.get(1)); - assertThat(indexDefinitions, hasSize(2)); + assertThat(indexDefinitions).hasSize(2); } - /** - * @see DATAMONGO-926 - */ - @Test + @Test // DATAMONGO-926 public void indexShouldBeFoundEvenForCyclePropertyReferenceOnLevelOne() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleOnLevelOne.class); assertIndexPathAndCollection("reference.indexedProperty", "cycleOnLevelOne", indexDefinitions.get(0)); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); } - /** - * @see DATAMONGO-926 - */ - @Test + @Test // DATAMONGO-926 public void indexBeResolvedCorrectlyWhenPropertiesOfDifferentTypesAreNamedEqually() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( NoCycleButIdenticallyNamedProperties.class); + + assertThat(indexDefinitions).hasSize(3); assertIndexPathAndCollection("foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(0)); assertIndexPathAndCollection("reference.foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(1)); assertIndexPathAndCollection("reference.deep.foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(2)); - assertThat(indexDefinitions, hasSize(3)); } - /** - * @see DATAMONGO-949 - */ - @Test + @Test // DATAMONGO-949 public void shouldNotDetectCycleInSimilarlyNamedProperties() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( SimilarityHolingBean.class); assertIndexPathAndCollection("norm", "similarityHolingBean", indexDefinitions.get(0)); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); } - /** - * @see DATAMONGO-962 - */ - @Test + @Test // DATAMONGO-962 public void shouldDetectSelfCycleViaCollectionTypeCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( SelfCyclingViaCollectionType.class); - assertThat(indexDefinitions, empty()); + + assertThat(indexDefinitions).isEmpty(); } - /** - * @see DATAMONGO-962 - */ - @Test + @Test // DATAMONGO-962 public void shouldNotDetectCycleWhenTypeIsUsedMoreThanOnce() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( MultipleObjectsOfSameType.class); - assertThat(indexDefinitions, empty()); + + assertThat(indexDefinitions).isEmpty(); } - /** - * @see DATAMONGO-962 - */ - @Test + @Test // DATAMONGO-962 @SuppressWarnings({ "rawtypes", "unchecked" }) public void shouldCatchCyclicReferenceExceptionOnRoot() { - MongoPersistentEntity entity = new BasicMongoPersistentEntity(ClassTypeInformation.from(Object.class)); + MongoPersistentEntity entity = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(Object.class)); MongoPersistentProperty 
propertyMock = mock(MongoPersistentProperty.class); when(propertyMock.isEntity()).thenReturn(true); @@ -1051,210 +1194,417 @@ public void shouldCatchCyclicReferenceExceptionOnRoot() { when(propertyMock.getActualType()).thenThrow( new MongoPersistentEntityIndexResolver.CyclicPropertyReferenceException("foo", Object.class, "bar")); - MongoPersistentEntity selfCyclingEntity = new BasicMongoPersistentEntity( + MongoPersistentEntity selfCyclingEntity = new BasicMongoPersistentEntity<>( ClassTypeInformation.from(SelfCyclingViaCollectionType.class)); new MongoPersistentEntityIndexResolver(prepareMappingContext(SelfCyclingViaCollectionType.class)) .resolveIndexForEntity(selfCyclingEntity); } - /** - * @see DATAMONGO-1025 - */ - @Test + @Test // DATAMONGO-1782 + public void shouldAllowMultiplePathsToDeeplyType() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + NoCycleManyPathsToDeepValueObject.class); + + assertThat(indexDefinitions).hasSize(2); + assertIndexPathAndCollection("l3.valueObject.value", "rules", indexDefinitions.get(0)); + assertIndexPathAndCollection("l2.l3.valueObject.value", "rules", indexDefinitions.get(1)); + } + + @Test // DATAMONGO-1025 public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedCompoundIndexFixedOnCollection() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedDocumentHavingNamedCompoundIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedCompoundIndex.c_index"); } - /** - * @see DATAMONGO-1025 - */ - @Test + @Test // DATAMONGO-1025 public void shouldUseIndexNameForNestedTypesWithNamedCompoundIndexDefinition() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedTypeHavingNamedCompoundIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedCompoundIndex.c_index"); } - /** - * @see DATAMONGO-1025 - */ - @Test + @Test // DATAMONGO-1025 public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedIndexFixedOnCollection() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedDocumentHavingNamedIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedIndex.property_index")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedIndex.property_index"); } - /** - * @see DATAMONGO-1025 - */ - @Test + @Test // DATAMONGO-1025 public void shouldUseIndexNameForNestedTypesWithNamedIndexDefinition() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedTypeHavingNamedIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedIndex.property_index")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedIndex.property_index"); } - /** - * @see DATAMONGO-1025 - */ - @Test + @Test // DATAMONGO-1025 public void shouldUseIndexNameOnRootLevel() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNamedIndex.class); - 
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("property_index")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "property_index"); } - /** - * @see DATAMONGO-1087 - */ - @Test + @Test // DATAMONGO-1087 public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnRoot() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( MultiplePropertiesOfSameTypeWithMatchingStartLetters.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("name.component")); - assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("nameLast.component")); + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "name.component"); + assertThat(indexDefinitions.get(1).getIndexOptions()).containsEntry("name", "nameLast.component"); } - /** - * @see DATAMONGO-1087 - */ - @Test + @Test // DATAMONGO-1087 public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("component.nameLast")); - assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("component.name")); + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "component.nameLast"); + assertThat(indexDefinitions.get(1).getIndexOptions()).containsEntry("name", "component.name"); } - /** - * @see DATAMONGO-1121 - */ - @Test + @Test // DATAMONGO-1121 public void shouldOnlyConsiderEntitiesAsPotentialCycleCandidates() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("path1.foo")); - assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), - equalTo("path2.propertyWithIndexedStructure.foo")); - + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "path1.foo"); + assertThat(indexDefinitions.get(1).getIndexOptions()).containsEntry("name", + "path2.propertyWithIndexedStructure.foo"); } - /** - * @see DATAMONGO-1263 - */ - @Test + @Test // DATAMONGO-1263 public void shouldConsiderGenericTypeArgumentsOfCollectionElements() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( EntityWithGenericTypeWrapperAsElement.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("listWithGeneircTypeElement.entity.property_index")); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "listWithGeneircTypeElement.entity.property_index"); + } + + @Test // DATAMONGO-1183 + public void hashedIndexOnId() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithHashedIndexOnId.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + 
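// Sketch of @HashIndexed as exercised below (the Token type is hypothetical): on its
// own it yields a hashed key, and combined with @Indexed the resolver emits two
// separate index definitions for the same property.
//
// @Document
// class Token {
//   @HashIndexed @Id String id;            // -> { "_id" : "hashed" }
//   @Indexed @HashIndexed String value;    // -> { "value" : 1 } and { "value" : "hashed" }
// }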
assertThat(it.getIndexKeys()).hasSize(1).containsEntry("_id", "hashed"); + }); + } + + @Test // DATAMONGO-1183 + public void hashedIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType(WithHashedIndex.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).hasSize(1).containsEntry("value", "hashed"); + }); + } + + @Test // DATAMONGO-1183 + public void hashedIndexAndIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithHashedIndexAndIndex.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", 1); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", "hashed"); + }); + } + + @Test // DATAMONGO-1183 + public void hashedIndexAndIndexViaComposedAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithComposedHashedIndexAndIndex.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", 1); + assertThat(it.getIndexOptions()).containsEntry("name", "idx-name"); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", "hashed"); + }); + } + + @Test // DATAMONGO-1902 + public void resolvedIndexOnUnwrappedType() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType(WithUnwrapped.class, + UnwrappableType.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("stringValue", 1); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("with-at-field-annotation", 1); + }); + } + + @Test // DATAMONGO-1902 + public void resolvedIndexOnNestedUnwrappedType() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WrapperAroundWithUnwrapped.class, WithUnwrapped.class, UnwrappableType.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withEmbedded.stringValue", 1); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withEmbedded.with-at-field-annotation", 1); + }); + } + + @Test // DATAMONGO-1902 + public void errorsOnIndexOnEmbedded() { + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> prepareMappingContextAndResolveIndexForType(InvalidIndexOnUnwrapped.class)); + + } + + @Test // GH-3225 + public void resolvesWildcardOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).isEmpty(); + }); + } + + @Test // GH-3225 + public void resolvesWildcardWithProjectionOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexHavingProjectionOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).containsEntry("wildcardProjection", 
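// Sketch of the wildcard index resolution asserted below (type names hypothetical):
// @WildcardIndexed on the entity indexes all fields, optionally narrowed through a
// wildcardProjection; on a property it indexes everything underneath that path.
//
// @Document
// @WildcardIndexed(wildcardProjection = "{'_id' : 1, 'value' : 0}")
// class Catalog {}    // -> { "$**" : 1 } plus the wildcardProjection option
//
// @Document
// class Product {
//   @WildcardIndexed Map<String, Object> attributes;   // -> { "attributes.$**" : 1 }
// }
//
// A wildcardProjection is only valid at the root; on nested paths the resolver raises
// a MappingException, as tested below.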
+ org.bson.Document.parse("{'_id' : 1, 'value' : 0}")); + }); + } + + @Test // GH-3225 + public void resolvesWildcardOnProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnProperty.class); + assertThat(indices).hasSize(3); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + }); + assertThat(indices.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("the_field.$**", 1); + }); + assertThat(indices.get(2)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withOptions.$**", 1); + assertThat(it.getIndexOptions()).containsEntry("name", "withOptions.idx") + .containsEntry("collation", new org.bson.Document("locale", "en_US")) + .containsEntry("partialFilterExpression", new org.bson.Document("$eq", 1)); + }); + } + + @Test // GH-3225 + public void resolvesWildcardTypeOfNestedProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardOnEntityOfNested.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + assertThat(it.getIndexOptions()).hasSize(1).containsKey("name"); + }); + } + + @Test // GH-3225 + public void rejectsWildcardProjectionOnNestedPaths() { + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> { + prepareMappingContextAndResolveIndexForType(WildcardIndexedProjectionOnNestedPath.class); + }); + } + + @Test // GH-3914 + public void shouldSkipMapStructuresUnlessAnnotatedWithWildcardIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithMapStructures.class); + + assertThat(indexDefinitions).hasSize(1); + } + + @Test // GH-3002 + public void indexedWithCollation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithCollationFromIndexedAnnotation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "value").append("unique", true).append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + } + + @Test // GH-3002 + public void indexedWithCollationFromDocumentAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithCollationFromDocumentAnnotation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "value").append("unique", true).append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + } + + @Test // GH-3002 + public void indexedWithEvaluatedCollation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithEvaluatedCollationFromIndexedAnnotation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document().append("name", "value") + .append("collation", new org.bson.Document().append("locale", "de_AT"))); } @Document - static class MixedIndexRoot { + class MixedIndexRoot { @Indexed String first; NestedGeoIndex nestedGeo; } - static class NestedGeoIndex { + class NestedGeoIndex { @GeoSpatialIndexed Point location; } @Document - static class Outer { + class Outer { @DBRef Inner inner; } @Document - static class Inner { + class 
Inner { @Indexed Outer outer; } @Document - static class CycleLevelZero { + class CycleLevelZero { @Indexed String indexedProperty; CycleLevelZero cyclicReference; } @Document - static class CycleOnLevelOne { + class CycleOnLevelOne { CycleOnLevelOneReferenced reference; } - static class CycleOnLevelOneReferenced { + class CycleOnLevelOneReferenced { @Indexed String indexedProperty; CycleOnLevelOne cyclicReference; } @Document - public static class CycleStartingInBetween { + static class CycleStartingInBetween { CycleOnLevelOne referenceToCycleStart; } @Document - static class NoCycleButIdenticallyNamedProperties { + class NoCycleButIdenticallyNamedProperties { @Indexed String foo; NoCycleButIdenticallyNamedPropertiesNested reference; } - static class NoCycleButIdenticallyNamedPropertiesNested { + class NoCycleButIdenticallyNamedPropertiesNested { @Indexed String foo; NoCycleButIndenticallNamedPropertiesDeeplyNested deep; } - static class NoCycleButIndenticallNamedPropertiesDeeplyNested { + class NoCycleButIndenticallNamedPropertiesDeeplyNested { @Indexed String foo; } + @Document("rules") + class NoCycleManyPathsToDeepValueObject { + + private NoCycleLevel3 l3; + private NoCycleLevel2 l2; + } + + class NoCycleLevel2 { + private NoCycleLevel3 l3; + } + + class NoCycleLevel3 { + private ValueObject valueObject; + } + + class ValueObject { + @Indexed private String value; + } + @Document - static class SimilarityHolingBean { + class SimilarityHolingBean { - @Indexed @Field("norm") String normalProperty; + @Indexed + @Field("norm") String normalProperty; @Field("similarityL") private List listOfSimilarilyNamedEntities = null; } - static class SimilaritySibling { + class SimilaritySibling { @Field("similarity") private String similarThoughNotEqualNamedProperty; } @Document - static class MultipleObjectsOfSameType { + class MultipleObjectsOfSameType { SelfCyclingViaCollectionType cycleOne; @@ -1262,7 +1612,7 @@ static class MultipleObjectsOfSameType { } @Document - static class SelfCyclingViaCollectionType { + class SelfCyclingViaCollectionType { List cyclic; @@ -1270,55 +1620,55 @@ static class SelfCyclingViaCollectionType { @Document @CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }") - static class DocumentWithNamedCompoundIndex { + class DocumentWithNamedCompoundIndex { String property; } @Document - static class DocumentWithNamedIndex { + class DocumentWithNamedIndex { @Indexed(name = "property_index") String property; } - static class TypeWithNamedIndex { + class TypeWithNamedIndex { @Indexed(name = "property_index") String property; } @Document - static class DocumentWithNestedDocumentHavingNamedCompoundIndex { + class DocumentWithNestedDocumentHavingNamedCompoundIndex { DocumentWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex; } @CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }") - static class TypeWithNamedCompoundIndex { + class TypeWithNamedCompoundIndex { String property; } @Document - static class DocumentWithNestedTypeHavingNamedCompoundIndex { + class DocumentWithNestedTypeHavingNamedCompoundIndex { TypeWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex; } @Document - static class DocumentWithNestedDocumentHavingNamedIndex { + class DocumentWithNestedDocumentHavingNamedIndex { DocumentWithNamedIndex propertyOfTypeHavingNamedIndex; } @Document - static class DocumentWithNestedTypeHavingNamedIndex { + class DocumentWithNestedTypeHavingNamedIndex { TypeWithNamedIndex propertyOfTypeHavingNamedIndex; } @Document - public class 
MultiplePropertiesOfSameTypeWithMatchingStartLetters { + class MultiplePropertiesOfSameTypeWithMatchingStartLetters { - public class NameComponent { + class NameComponent { @Indexed String component; } @@ -1328,9 +1678,9 @@ public class NameComponent { } @Document - public class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty { + class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty { - public class NameComponent { + class NameComponent { @Indexed String nameLast; @Indexed String name; @@ -1340,38 +1690,182 @@ public class NameComponent { } @Document - public static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths { + static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths { NoCycleButIndenticallNamedPropertiesDeeplyNested path1; AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument path2; } - public static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument { + @Document + static class WrapperAroundWithUnwrapped { + + String id; + WithUnwrapped withEmbedded; + } + + @Document + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappableType; + } + + @Document + class InvalidIndexOnUnwrapped { + + @Indexed // + @Unwrapped.Nullable // + UnwrappableType unwrappableType; + + } + + static class UnwrappableType { + + @Indexed String stringValue; + + List listValue; + + @Indexed // + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + } + + static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument { NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure; } - static class GenericEntityWrapper { + class GenericEntityWrapper { T entity; } @Document - static class EntityWithGenericTypeWrapperAsElement { + class WithMapStructures { + Map rootMap; + NestedInMapWithStructures nested; + ValueObject plainValue; + } + + class NestedInMapWithStructures { + Map nestedMap; + } + + @Document + class EntityWithGenericTypeWrapperAsElement { List> listWithGeneircTypeElement; } + @Document + class WithHashedIndexOnId { + + @HashIndexed + @Id String id; + } + + @Document + class WithHashedIndex { + + @HashIndexed String value; + } + + @Document + @WildcardIndexed + class WithWildCardIndexOnEntity { + + String value; + } + + @Document + @WildcardIndexed(wildcardProjection = "{'_id' : 1, 'value' : 0}") + class WithWildCardIndexHavingProjectionOnEntity { + + String value; + } + + @Document + class WithWildCardIndexOnProperty { + + @WildcardIndexed // + Map value; + + @WildcardIndexed // + @Field("the_field") // + Map renamedField; + + @WildcardIndexed(name = "idx", partialFilter = "{ '$eq' : 1 }", collation = "en_US") // + Map withOptions; + + } + + @Document + class WildcardIndexedProjectionOnNestedPath { + + @WildcardIndexed(wildcardProjection = "{}") String foo; + } + + @Document + class WithWildCardOnEntityOfNested { + + WithWildCardIndexHavingProjectionOnEntity value; + + } + + @Document + class WithHashedIndexAndIndex { + + @Indexed // + @HashIndexed // + String value; + } + + @Document + class WithComposedHashedIndexAndIndex { + + @ComposedHashIndexed(name = "idx-name") String value; + } + + @Document + class WithCollationFromIndexedAnnotation { + + @Indexed(collation = "{'locale': 'en_US', 'strength': 2}", unique = true) // + private String value; + } + + @Document(collation = "{'locale': 'en_US', 'strength': 2}") + class WithCollationFromDocumentAnnotation { + + 
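// Collation resolution covered by these fixtures: a collation declared on the index
// annotation itself takes precedence over one inherited from @Document, and either may
// be a SpEL expression ("#{{'locale' : 'de' + '_' + 'AT'}}" -> { "locale" : "de_AT" }).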
@Indexed(unique = true) // + private String value; + } + + @Document(collation = "en_US") + class WithEvaluatedCollationFromIndexedAnnotation { + + @Indexed(collation = "#{{'locale' : 'de' + '_' + 'AT'}}") // + private String value; + } + + @HashIndexed + @Indexed + @Retention(RetentionPolicy.RUNTIME) + @interface ComposedHashIndexed { + + @AliasFor(annotation = Indexed.class, attribute = "name") + String name() default ""; + } } - private static List prepareMappingContextAndResolveIndexForType(Class type) { + private static List prepareMappingContextAndResolveIndexForType(Class... types) { - MongoMappingContext mappingContext = prepareMappingContext(type); + MongoMappingContext mappingContext = prepareMappingContext(types); MongoPersistentEntityIndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext); - return resolver.resolveIndexForEntity(mappingContext.getPersistentEntity(type)); + return resolver.resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(types[0])); } - private static MongoMappingContext prepareMappingContext(Class type) { + private static MongoMappingContext prepareMappingContext(Class... types) { MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(Collections.singleton(type)); + mappingContext.setInitialEntitySet(new LinkedHashSet<>(Arrays.asList(types))); mappingContext.initialize(); return mappingContext; @@ -1386,9 +1880,9 @@ private static void assertIndexPathAndCollection(String[] expectedPaths, String IndexDefinitionHolder holder) { for (String expectedPath : expectedPaths) { - assertThat(holder.getIndexDefinition().getIndexKeys().containsField(expectedPath), equalTo(true)); + assertThat(holder.getIndexDefinition().getIndexKeys()).containsKey(expectedPath); } - assertThat(holder.getCollection(), equalTo(expectedCollection)); + assertThat(holder.getCollection()).isEqualTo(expectedCollection); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java index 9e88c00c1b..d8f6b9b698 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import org.junit.Before; @@ -24,17 +23,19 @@ import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.MockitoJUnitRunner; + import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; /** * Unit tests for {@link Path}. - * + * * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@RunWith(MockitoJUnitRunner.Silent.class) public class PathUnitTests { @Mock MongoPersistentEntity entityMock; @@ -45,48 +46,50 @@ public void setUp() { when(entityMock.getType()).thenReturn((Class) Object.class); } - /** - * @see DATAMONGO-962 - */ - @Test - public void shouldIdentifyCycleForOwnerOfSameTypeAndMatchingPath() { + @Test // DATAMONGO-962, DATAMONGO-1782 + public void shouldIdentifyCycle() { + + MongoPersistentProperty foo = createPersistentPropertyMock(entityMock, "foo"); + MongoPersistentProperty bar = createPersistentPropertyMock(entityMock, "bar"); + + Path path = Path.of(foo).append(bar).append(bar); - MongoPersistentProperty property = createPersistentPropertyMock(entityMock, "foo"); - assertThat(new Path(property, "foo.bar").cycles(property, "foo.bar.bar"), is(true)); + assertThat(path.isCycle()).isTrue(); + assertThat(path.toCyclePath()).isEqualTo("bar -> bar"); + assertThat(path.toString()).isEqualTo("foo -> bar -> bar"); } - /** - * @see DATAMONGO-962 - */ - @Test - @SuppressWarnings("rawtypes") - public void shouldAllowMatchingPathForDifferentOwners() { + @Test // DATAMONGO-1782 + public void isCycleShouldReturnFalseWhenNoCyclePresent() { - MongoPersistentProperty existing = createPersistentPropertyMock(entityMock, "foo"); + MongoPersistentProperty foo = createPersistentPropertyMock(entityMock, "foo"); + MongoPersistentProperty bar = createPersistentPropertyMock(entityMock, "bar"); - MongoPersistentEntity entityOfDifferentType = Mockito.mock(MongoPersistentEntity.class); - when(entityOfDifferentType.getType()).thenReturn(String.class); - MongoPersistentProperty toBeVerified = createPersistentPropertyMock(entityOfDifferentType, "foo"); + Path path = Path.of(foo).append(bar); - assertThat(new Path(existing, "foo.bar").cycles(toBeVerified, "foo.bar.bar"), is(false)); + assertThat(path.isCycle()).isFalse(); + assertThat(path.toCyclePath()).isEqualTo(""); + assertThat(path.toString()).isEqualTo("foo -> bar"); } - /** - * @see DATAMONGO-962 - */ - @Test - public void shouldAllowEqaulPropertiesOnDifferentPaths() { + @Test // DATAMONGO-1782 + public void isCycleShouldReturnFalseCycleForNonEqualProperties() { - MongoPersistentProperty property = createPersistentPropertyMock(entityMock, "foo"); - assertThat(new Path(property, "foo.bar").cycles(property, "foo2.bar.bar"), is(false)); + MongoPersistentProperty foo = createPersistentPropertyMock(entityMock, "foo"); 
+ MongoPersistentProperty bar = createPersistentPropertyMock(entityMock, "bar"); + MongoPersistentProperty bar2 = createPersistentPropertyMock(mock(MongoPersistentEntity.class), "bar"); + + assertThat(Path.of(foo).append(bar).append(bar2).isCycle()).isFalse(); } @SuppressWarnings({ "rawtypes", "unchecked" }) - private MongoPersistentProperty createPersistentPropertyMock(MongoPersistentEntity owner, String fieldname) { + private static MongoPersistentProperty createPersistentPropertyMock(MongoPersistentEntity owner, String fieldname) { MongoPersistentProperty property = Mockito.mock(MongoPersistentProperty.class); + when(property.getOwner()).thenReturn(owner); - when(property.getFieldName()).thenReturn(fieldname); + when(property.getName()).thenReturn(fieldname); + return property; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreatorUnitTests.java new file mode 100644 index 0000000000..4b4693ed75 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreatorUnitTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoException; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Unit tests for {@link ReactiveMongoPersistentEntityIndexCreator}. 
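+ * Verifies that index definitions are derived from the mapping metadata and that errors raised during index creation are translated by the configured exception translator.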
+ * + * @author Mark Paluch + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class ReactiveMongoPersistentEntityIndexCreatorUnitTests { + + private ReactiveIndexOperations indexOperations; + + @Mock ReactiveMongoDatabaseFactory factory; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + + private ArgumentCaptor keysCaptor; + private ArgumentCaptor optionsCaptor; + private ArgumentCaptor collectionCaptor; + + @BeforeEach + @SuppressWarnings("unchecked") + void setUp() { + + when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(factory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(db.getCollection(any(), any(Class.class))).thenReturn(collection); + + indexOperations = new ReactiveMongoTemplate(factory).indexOps("foo"); + + keysCaptor = ArgumentCaptor.forClass(org.bson.Document.class); + optionsCaptor = ArgumentCaptor.forClass(IndexOptions.class); + collectionCaptor = ArgumentCaptor.forClass(String.class); + + when(collection.createIndex(keysCaptor.capture(), optionsCaptor.capture())).thenReturn(Mono.just("OK")); + } + + @Test // DATAMONGO-1928 + void buildsIndexDefinitionUsingFieldName() { + + MongoMappingContext mappingContext = prepareMappingContext(Person.class); + + Mono publisher = checkForIndexes(mappingContext); + + verifyNoInteractions(collection); + + publisher.as(StepVerifier::create).verifyComplete(); + + assertThat(keysCaptor.getValue()).isNotNull().containsKey("fieldname"); + assertThat(optionsCaptor.getValue().getName()).isEqualTo("indexName"); + assertThat(optionsCaptor.getValue().isBackground()).isFalse(); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isNull(); + } + + @Test // DATAMONGO-1928 + void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() { + + when(collection.createIndex(any(org.bson.Document.class), any(IndexOptions.class))) + .thenReturn(Mono.error(new MongoException(6, "HostUnreachable"))); + + MongoMappingContext mappingContext = prepareMappingContext(Person.class); + + Mono publisher = checkForIndexes(mappingContext); + + publisher.as(StepVerifier::create).expectError(DataAccessResourceFailureException.class).verify(); + } + + @Test // DATAMONGO-1928 + void createIndexShouldNotConvertUnknownExceptionTypes() { + + when(collection.createIndex(any(org.bson.Document.class), any(IndexOptions.class))) + .thenReturn(Mono.error(new ClassCastException("o_O"))); + + MongoMappingContext mappingContext = prepareMappingContext(Person.class); + + Mono publisher = checkForIndexes(mappingContext); + + publisher.as(StepVerifier::create).expectError(ClassCastException.class).verify(); + } + + private static MongoMappingContext prepareMappingContext(Class type) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setInitialEntitySet(Collections.singleton(type)); + mappingContext.initialize(); + + return mappingContext; + } + + private Mono checkForIndexes(MongoMappingContext mappingContext) { + + return new ReactiveMongoPersistentEntityIndexCreator(mappingContext, it -> indexOperations) + .checkForIndexes(mappingContext.getRequiredPersistentEntity(Person.class)); + } + + @Document + static class Person { + + @Indexed(name = "indexName") // + @Field("fieldname") // + String field; + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java index a099c4cb08..e419a75012 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java @@ -6,9 +6,7 @@ @Document public class SampleEntity { - @Id - String id; + @Id String id; - @Indexed - String prop; + @Indexed String prop; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SearchIndexInfoUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SearchIndexInfoUnitTests.java new file mode 100644 index 0000000000..1d7e5b63b6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SearchIndexInfoUnitTests.java @@ -0,0 +1,90 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +/** + * @author Christoph Strobl + */ +class SearchIndexInfoUnitTests { + + @ParameterizedTest + @ValueSource(strings = { """ + { + "id": "679b7637a580c270015ef6fb", + "name": "vector_index", + "type": "vectorSearch", + "status": "READY", + "queryable": true, + "latestVersion": 0, + "latestDefinition": { + "fields": [ + { + "type": "vector", + "path": "plot_embedding", + "numDimensions": 1536, + "similarity": "euclidean" + } + ] + } + }""", """ + { + id: '648b4ad4d697b73bf9d2e5e1', + name: 'search-index', + status: 'PENDING', + queryable: false, + latestDefinition: { + mappings: { dynamic: false, fields: { text: { type: 'string' } } } + } + }""", """ + { + name: 'search-index-not-yet-created', + definition: { + mappings: { dynamic: false, fields: { text: { type: 'string' } } } + } + }""", """ + { + name: 'vector-index-with-filter', + type: "vectorSearch", + definition: { + fields: [ + { + type: "vector", + path: "plot_embedding", + numDimensions: 1536, + similarity: "euclidean" + }, { + type: "filter", + path: "year" + } + ] + } + }""" }) + void parsesIndexInfo(String indexInfoSource) { + + SearchIndexInfo indexInfo = SearchIndexInfo.parse(indexInfoSource); + + if (indexInfo.getId() != null) { + assertThat(indexInfo.getId()).isInstanceOf(String.class); + } + assertThat(indexInfo.getStatus()).isNotNull(); + assertThat(indexInfo.getIndexDefinition()).isNotNull(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java index 8047f0fe00..aa37b8bced 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,77 +15,80 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.config.AbstractIntegrationTests; -import org.springframework.data.mongodb.core.IndexOperations; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.CollectionOptions; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Language; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; - -import com.mongodb.WriteConcern; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; /** * @author Christoph Strobl + * @author Mark Paluch */ -public class TextIndexTests extends AbstractIntegrationTests { +@ExtendWith(MongoTemplateExtension.class) +public class TextIndexTests { - public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6)); + @Template(initialEntitySet = TextIndexedDocumentRoot.class) + static MongoTestTemplate template; - private @Autowired MongoTemplate template; private IndexOperations indexOps; - @Before - public void setUp() throws Exception { + @BeforeEach + public void beforeEach() throws Exception { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); this.indexOps = template.indexOps(TextIndexedDocumentRoot.class); + + template.dropDatabase(); + + template.createCollection(TextIndexedDocumentRoot.class, + CollectionOptions.empty().collation(Collation.of("de_AT"))); } - /** - * @see DATAMONGO-937 - */ - @Test + @Test // DATAMONGO-937, DATAMONGO-2316 public void indexInfoShouldHaveBeenCreatedCorrectly() { + IndexResolver indexResolver = IndexResolver.create(template.getConverter().getMappingContext()); + + for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(TextIndexedDocumentRoot.class)) { + indexOps.ensureIndex(indexDefinition); + } + List indexInfos = indexOps.getIndexInfo(); - assertThat(indexInfos.size(), is(2)); + assertThat(indexInfos.size()).isEqualTo(2); List fields = indexInfos.get(0).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC))); + 
assertThat(fields).containsExactly(IndexField.create("_id", Direction.ASC)); IndexInfo textIndexInfo = indexInfos.get(1); List textIndexFields = textIndexInfo.getIndexFields(); - assertThat(textIndexFields.size(), is(4)); - assertThat(textIndexFields, hasItem(IndexField.text("textIndexedPropertyWithDefaultWeight", 1F))); - assertThat(textIndexFields, hasItem(IndexField.text("textIndexedPropertyWithWeight", 5F))); - assertThat(textIndexFields, hasItem(IndexField.text("nestedDocument.textIndexedPropertyInNestedDocument", 1F))); - assertThat(textIndexFields, hasItem(IndexField.create("_ftsx", Direction.ASC))); - assertThat(textIndexInfo.getLanguage(), is("spanish")); + assertThat(textIndexFields).hasSize(4).contains(IndexField.text("textIndexedPropertyWithDefaultWeight", 1F), + IndexField.text("textIndexedPropertyWithWeight", 5F), + IndexField.text("nestedDocument.textIndexedPropertyInNestedDocument", 1F), + IndexField.create("_ftsx", Direction.ASC)); + assertThat(textIndexInfo.getLanguage()).isEqualTo("spanish"); } - @Document(language = "spanish") + @Document(language = "spanish", collation = "de_AT") static class TextIndexedDocumentRoot { @TextIndexed String textIndexedPropertyWithDefaultWeight; @TextIndexed(weight = 5) String textIndexedPropertyWithWeight; - TextIndexedDocumentWihtLanguageOverride nestedDocument; + TextIndexedDocumentWithLanguageOverride nestedDocument; } - static class TextIndexedDocumentWihtLanguageOverride { + static class TextIndexedDocumentWithLanguageOverride { @Language String lang; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/VectorIndexIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/VectorIndexIntegrationTests.java new file mode 100644 index 0000000000..dcd447f81a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/VectorIndexIntegrationTests.java @@ -0,0 +1,223 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import static org.assertj.core.api.Assertions.*; +import static org.awaitility.Awaitility.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.index.VectorIndex.SimilarityFunction; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.AtlasContainer; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.lang.Nullable; + +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import com.mongodb.ConnectionString; +import com.mongodb.client.AggregateIterable; + +/** + * Integration tests for vector index creation. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@Testcontainers(disabledWithoutDocker = true) +class VectorIndexIntegrationTests { + + private static final @Container AtlasContainer atlasLocal = AtlasContainer.bestMatch(); + + MongoTestTemplate template = new MongoTestTemplate(cfg -> { + cfg.configureDatabaseFactory(ctx -> { + ctx.client(MongoTestUtils.client(new ConnectionString(atlasLocal.getConnectionString()))); + }); + cfg.configureMappingContext(ctx -> { + ctx.initialEntitySet(Movie.class); + }); + }); + + SearchIndexOperations indexOps; + + @BeforeEach + void init() { + template.createCollection(Movie.class); + indexOps = template.searchIndexOps(Movie.class); + } + + @AfterEach + void cleanup() { + + template.searchIndexOps(Movie.class).dropAllIndexes(); + template.dropCollection(Movie.class); + } + + @ParameterizedTest // GH-4706 + @ValueSource(strings = { "euclidean", "cosine", "dotProduct" }) + void createsSimpleVectorIndex(String similarityFunction) { + + VectorIndex idx = new VectorIndex("vector_index").addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity(similarityFunction)); + + indexOps.createIndex(idx); + + await().untilAsserted(() -> { + Document raw = readRawIndexInfo(idx.getName()); + assertThat(raw).containsEntry("name", idx.getName()) // + .containsEntry("type", "vectorSearch") // + .containsEntry("latestDefinition.fields.[0].type", "vector") // + .containsEntry("latestDefinition.fields.[0].path", "plot_embedding") // + .containsEntry("latestDefinition.fields.[0].numDimensions", 1536) // + .containsEntry("latestDefinition.fields.[0].similarity", similarityFunction); // + }); + } + + @Test // GH-4706 + void dropIndex() { + + VectorIndex idx = new VectorIndex("vector_index").addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + template.awaitIndexCreation(Movie.class, idx.getName()); + + indexOps.dropIndex(idx.getName()); + + assertThat(readRawIndexInfo(idx.getName())).isNull(); + } + + @Test // GH-4706 + void statusChanges() throws InterruptedException { + + String indexName = "vector_index"; + assertThat(indexOps.status(indexName)).isEqualTo(SearchIndexStatus.DOES_NOT_EXIST); + + VectorIndex idx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> 
builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash. + Thread.sleep(500); + + assertThat(indexOps.status(indexName)).isIn(SearchIndexStatus.PENDING, SearchIndexStatus.BUILDING, + SearchIndexStatus.READY); + } + + @Test // GH-4706 + void exists() throws InterruptedException { + + String indexName = "vector_index"; + assertThat(indexOps.exists(indexName)).isFalse(); + + VectorIndex idx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash. + Thread.sleep(500); + + assertThat(indexOps.exists(indexName)).isTrue(); + } + + @Test // GH-4706 + void updatesVectorIndex() throws InterruptedException { + + String indexName = "vector_index"; + VectorIndex idx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash. + Thread.sleep(500); + + await().untilAsserted(() -> { + Document raw = readRawIndexInfo(idx.getName()); + assertThat(raw).containsEntry("name", idx.getName()) // + .containsEntry("type", "vectorSearch") // + .containsEntry("latestDefinition.fields.[0].type", "vector") // + .containsEntry("latestDefinition.fields.[0].path", "plot_embedding") // + .containsEntry("latestDefinition.fields.[0].numDimensions", 1536) // + .containsEntry("latestDefinition.fields.[0].similarity", "cosine"); // + }); + + VectorIndex updatedIdx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity(SimilarityFunction.DOT_PRODUCT)); + + // updating a vector index does not currently work; one needs to delete and recreate it + assertThatRuntimeException().isThrownBy(() -> indexOps.updateIndex(updatedIdx)); + } + + @Test // GH-4706 + void createsVectorIndexWithFilters() throws InterruptedException { + + VectorIndex idx = new VectorIndex("vector_index") + .addVector("plotEmbedding", builder -> builder.dimensions(1536).cosine()).addFilter("description") + .addFilter("year"); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash.
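+ // A sketch of an alternative to the fixed sleep, polling with the Awaitility API imported above: await().atMost(java.time.Duration.ofSeconds(5)).until(() -> indexOps.exists(idx.getName()));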
+ Thread.sleep(500); + + await().untilAsserted(() -> { + Document raw = readRawIndexInfo(idx.getName()); + assertThat(raw).containsEntry("name", idx.getName()) // + .containsEntry("type", "vectorSearch") // + .containsEntry("latestDefinition.fields.[0].type", "vector") // + .containsEntry("latestDefinition.fields.[1].type", "filter") // + .containsEntry("latestDefinition.fields.[1].path", "plot") // + .containsEntry("latestDefinition.fields.[2].type", "filter") // + .containsEntry("latestDefinition.fields.[2].path", "year"); // + }); + } + + @Nullable + private Document readRawIndexInfo(String name) { + + AggregateIterable indexes = template.execute(Movie.class, collection -> { + return collection.aggregate(List.of(new Document("$listSearchIndexes", new Document("name", name)))); + }); + + return indexes.first(); + } + + static class Movie { + + @Id String id; + String title; + + @Field("plot") String description; + int year; + + @Field("plot_embedding") Double[] plotEmbedding; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java index a8d50afe87..d9d59c081b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java index ef47b638aa..e50bffc1fc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.mapping; /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java index eef85ea23a..ed7ba975ff 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; /** @@ -21,8 +20,7 @@ */ public class Address implements Comparable
<Address> { - @SuppressWarnings("unused") - private String id; + @SuppressWarnings("unused") private String id; private String[] lines; private String city; private String provinceOrState; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java index 8d2aec0655..3f3a326873 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,16 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import com.querydsl.core.annotations.QuerySupertype; /** * {@link QuerySupertype} is necessary for Querydsl 2.2.0-beta4 to compile the query classes directly. Can be removed as - * soon as {@link https://bugs.launchpad.net/querydsl/+bug/776219} is fixed. - - * @see https://bugs.launchpad.net/querydsl/+bug/776219 + * soon as https://bugs.launchpad.net/querydsl/+bug/776219 + * is fixed. + * + * @see https://bugs.launchpad.net/querydsl/+bug/776219 * @author Jon Brisbin * @author Oliver Gierke */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java index e5f8c87252..9a39042349 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,113 +15,123 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.SetSystemProperty; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.context.ApplicationContext; import org.springframework.core.annotation.AliasFor; -import org.springframework.data.mapping.model.MappingException; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.util.TypeInformation; +import org.springframework.mock.env.MockEnvironment; /** * Unit tests for {@link BasicMongoPersistentEntity}. 
- * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class BasicMongoPersistentEntityUnitTests { @Mock ApplicationContext context; @Mock MongoPersistentProperty propertyMock; @Test - public void subclassInheritsAtDocumentAnnotation() { + void subclassInheritsAtDocumentAnnotation() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(Person.class)); - assertThat(entity.getCollection(), is("contacts")); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(Person.class)); + assertThat(entity.getCollection()).isEqualTo("contacts"); } @Test - public void evaluatesSpELExpression() { + void evaluatesSpELExpression() { - MongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(Company.class)); - assertThat(entity.getCollection(), is("35")); + MongoPersistentEntity entity = new BasicMongoPersistentEntity<>(TypeInformation.of(Company.class)); + assertThat(entity.getCollection()).isEqualTo("35"); } - /** - * @see DATAMONGO-65, DATAMONGO-1108 - */ - @Test - public void collectionAllowsReferencingSpringBean() { + @Test // DATAMONGO-65, DATAMONGO-1108 + void collectionAllowsReferencingSpringBean() { CollectionProvider provider = new CollectionProvider(); provider.collectionName = "reference"; when(context.getBean("myBean")).thenReturn(provider); - when(context.containsBean("myBean")).thenReturn(true); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DynamicallyMapped.class)); - entity.setApplicationContext(context); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DynamicallyMapped.class)); + entity.setEvaluationContextProvider(new ExtensionAwareEvaluationContextProvider(context)); - assertThat(entity.getCollection(), is("reference")); + assertThat(entity.getCollection()).isEqualTo("reference"); provider.collectionName = "otherReference"; - assertThat(entity.getCollection(), is("otherReference")); + assertThat(entity.getCollection()).isEqualTo("otherReference"); } - /** - * @see DATAMONGO-937 - */ - @Test - public void shouldDetectLanguageCorrectly() { + @Test // GH-2764 + void collectionAllowsReferencingProperties() { + + MockEnvironment environment = new MockEnvironment(); + environment.setProperty("collectionName", "reference"); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DynamicallyMappedUsingPropertyPlaceholder.class)); + entity.setEnvironment(environment); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DocumentWithLanguage.class)); - assertThat(entity.getLanguage(), is("spanish")); + assertThat(entity.getCollection()).isEqualTo("reference_cat"); } - /** - * @see DATAMONGO-1053 - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(expected = MappingException.class) - public void verifyShouldThrowExceptionForInvalidTypeOfExplicitLanguageProperty() { + @Test // DATAMONGO-937 + void shouldDetectLanguageCorrectly() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DocumentWithLanguage.class)); - when(propertyMock.isExplicitLanguageProperty()).thenReturn(true); - 
when(propertyMock.getActualType()).thenReturn((Class) Number.class); + assertThat(entity.getLanguage()).isEqualTo("spanish"); + } + + @Test // DATAMONGO-1053 + void verifyShouldThrowExceptionForInvalidTypeOfExplicitLanguageProperty() { + + doReturn(true).when(propertyMock).isExplicitLanguageProperty(); + doReturn(Number.class).when(propertyMock).getActualType(); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); - entity.verify(); + + assertThatExceptionOfType(MappingException.class).isThrownBy(entity::verify); } - /** - * @see DATAMONGO-1053 - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void verifyShouldPassForStringAsExplicitLanguageProperty() { + @Test // DATAMONGO-1053 + void verifyShouldPassForStringAsExplicitLanguageProperty() { + + doReturn(true).when(propertyMock).isExplicitLanguageProperty(); + doReturn(String.class).when(propertyMock).getActualType(); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); - when(propertyMock.isExplicitLanguageProperty()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) String.class); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); entity.verify(); @@ -130,17 +140,12 @@ public void verifyShouldPassForStringAsExplicitLanguageProperty() { verify(propertyMock, times(1)).getActualType(); } - /** - * @see DATAMONGO-1053 - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void verifyShouldIgnoreNonExplicitLanguageProperty() { + @Test // DATAMONGO-1053 + void verifyShouldIgnoreNonExplicitLanguageProperty() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); when(propertyMock.isExplicitLanguageProperty()).thenReturn(false); - when(propertyMock.getActualType()).thenReturn((Class) Number.class); entity.addPersistentProperty(propertyMock); entity.verify(); @@ -149,124 +154,195 @@ public void verifyShouldIgnoreNonExplicitLanguageProperty() { verify(propertyMock, never()).getActualType(); } - /** - * @see DATAMONGO-1157 - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(expected = MappingException.class) - public void verifyShouldThrowErrorForLazyDBRefOnFinalClass() { + @Test // DATAMONGO-1157 + void verifyShouldThrowErrorForLazyDBRefOnFinalClass() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) Class.class); + + doReturn(Class.class).when(propertyMock).getActualType(); + doReturn(true).when(propertyMock).isDbReference(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(true).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); - entity.verify(); + 
assertThatExceptionOfType(MappingException.class).isThrownBy(entity::verify); } - /** - * @see DATAMONGO-1157 - */ - @Test(expected = MappingException.class) - public void verifyShouldThrowErrorForLazyDBRefArray() { + @Test // DATAMONGO-1157 + void verifyShouldThrowErrorForLazyDBRefArray() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(true); - when(propertyMock.isArray()).thenReturn(true); + + doReturn(true).when(propertyMock).isDbReference(); + doReturn(true).when(propertyMock).isArray(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(true).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); - entity.verify(); + assertThatExceptionOfType(MappingException.class).isThrownBy(entity::verify); } - /** - * @see DATAMONGO-1157 - */ - @Test - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void verifyShouldPassForLazyDBRefOnNonArrayNonFinalClass() { + @Test // DATAMONGO-1157 + void verifyShouldPassForLazyDBRefOnNonArrayNonFinalClass() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) Object.class); - entity.addPersistentProperty(propertyMock); + doReturn(true).when(propertyMock).isDbReference(); + doReturn(Object.class).when(propertyMock).getActualType(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(true).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); + entity.addPersistentProperty(propertyMock); entity.verify(); verify(propertyMock, times(1)).isDbReference(); } - /** - * @see DATAMONGO-1157 - */ - @Test - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void verifyShouldPassForNonLazyDBRefOnFinalClass() { + @Test // DATAMONGO-1157 + void verifyShouldPassForNonLazyDBRefOnFinalClass() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(false); - when(propertyMock.getActualType()).thenReturn((Class) Class.class); - entity.addPersistentProperty(propertyMock); + doReturn(true).when(propertyMock).isDbReference(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(false).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); + entity.addPersistentProperty(propertyMock); entity.verify(); verify(dbRefMock, times(1)).lazy(); } - /** - * @see 
DATAMONGO-1291 - */ - @Test - public void metaInformationShouldBeReadCorrectlyFromInheritedDocumentAnnotation() { + @Test // DATAMONGO-1291 + void metaInformationShouldBeReadCorrectlyFromInheritedDocumentAnnotation() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DocumentWithCustomAnnotation.class)); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DocumentWithCustomAnnotation.class)); - assertThat(entity.getCollection(), is("collection-1")); + assertThat(entity.getCollection()).isEqualTo("collection-1"); } - /** - * @see DATAMONGO-1373 - */ - @Test - public void metaInformationShouldBeReadCorrectlyFromComposedDocumentAnnotation() { + @Test // DATAMONGO-1373 + void metaInformationShouldBeReadCorrectlyFromComposedDocumentAnnotation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DocumentWithComposedAnnotation.class)); + + assertThat(entity.getCollection()).isEqualTo("custom-collection"); + } + + @Test // DATAMONGO-1874 + void usesEvaluationContextExtensionInDynamicDocumentName() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(MappedWithExtension.class)); + entity.setEvaluationContextProvider( + new ExtensionAwareEvaluationContextProvider(Collections.singletonList(new SampleExtension()))); + + assertThat(entity.getCollection()).isEqualTo("collectionName"); + } + + @Test // GH-4634 + @SetSystemProperty(key = "mongo.entity.collection", value = "collectionName") + void readsCollectionNameFromSystemProperty() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(MappedWithExtensionPropertyPlaceholderStyle.class)); + entity.setEnvironment(new StandardEnvironment()); + + assertThat(entity.getCollection()).isEqualTo("collectionName"); + } - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DocumentWithComposedAnnotation.class)); + @Test // DATAMONGO-1854 + void readsSimpleCollation() { - assertThat(entity.getCollection(), is("custom-collection")); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(WithSimpleCollation.class)); + + assertThat(entity.getCollation()).isEqualTo(org.springframework.data.mongodb.core.query.Collation.of("en_US")); + } + + @Test // DATAMONGO-1854 + void readsDocumentCollation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(WithDocumentCollation.class)); + + assertThat(entity.getCollation()).isEqualTo(org.springframework.data.mongodb.core.query.Collation.of("en_US")); + } + + @Test // DATAMONGO-2565 + void usesCorrectExpressionsForCollectionAndCollation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(WithCollectionAndCollationFromSpEL.class)); + entity.setEvaluationContextProvider( + new ExtensionAwareEvaluationContextProvider(Collections.singletonList(new SampleExtension()))); + + assertThat(entity.getCollection()).isEqualTo("collectionName"); + assertThat(entity.getCollation()).isEqualTo(Collation.of("en_US")); + } + + @Test // DATAMONGO-2341 + void detectsShardedEntityCorrectly() { + + assertThat(entityOf(WithDefaultShardKey.class).isSharded()).isTrue(); + assertThat(entityOf(Contact.class).isSharded()).isFalse(); + } + + @Test // DATAMONGO-2341 + void readsDefaultShardKey() { + + 
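+ // @Sharded without explicit keys defaults to the shard key { "_id" : 1 }, which the assertion below verifies.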
assertThat(entityOf(WithDefaultShardKey.class).getShardKey().getDocument()) + .isEqualTo(new org.bson.Document("_id", 1)); + } + + @Test // DATAMONGO-2341 + void readsSingleShardKey() { + + assertThat(entityOf(WithSingleShardKey.class).getShardKey().getDocument()) + .isEqualTo(new org.bson.Document("country", 1)); + } + + @Test // DATAMONGO-2341 + void readsMultiShardKey() { + + assertThat(entityOf(WithMultiShardKey.class).getShardKey().getDocument()) + .isEqualTo(new org.bson.Document("country", 1).append("userid", 1)); } - @Document(collection = "contacts") + static BasicMongoPersistentEntity entityOf(Class type) { + return new BasicMongoPersistentEntity<>(TypeInformation.of(type)); + } + + @Document("contacts") class Contact {} class Person extends Contact {} - @Document(collection = "#{35}") + @Document("#{35}") class Company {} - @Document(collection = "#{myBean.collectionName}") + @Document("#{@myBean.collectionName}") class DynamicallyMapped {} + @Document("${collectionName}_cat") + class DynamicallyMappedUsingPropertyPlaceholder {} + class CollectionProvider { String collectionName; @@ -278,17 +354,17 @@ public String getCollectionName() { @Document(language = "spanish") static class DocumentWithLanguage {} - static class AnyDocument {} + private static class AnyDocument {} @CustomDocumentAnnotation - static class DocumentWithCustomAnnotation {} + private static class DocumentWithCustomAnnotation {} @ComposedDocumentAnnotation - static class DocumentWithComposedAnnotation {} + private static class DocumentWithComposedAnnotation {} @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE }) - @Document(collection = "collection-1") + @Document("collection-1") static @interface CustomDocumentAnnotation { } @@ -300,4 +376,52 @@ static class DocumentWithComposedAnnotation {} @AliasFor(annotation = Document.class, attribute = "collection") String name() default "custom-collection"; } + + // DATAMONGO-1874 + @Document("#{myProperty}") + class MappedWithExtension {} + + @Document("${mongo.entity.collection}") + class MappedWithExtensionPropertyPlaceholderStyle {} + + @Document("${value.from.file}") + class MappedWithValue {} + + @Document(collation = "#{myCollation}") + class WithCollationFromSpEL {} + + @Document(collection = "#{myProperty}", collation = "#{myCollation}") + class WithCollectionAndCollationFromSpEL {} + + @Document(collation = "en_US") + class WithSimpleCollation {} + + @Document(collation = "{ 'locale' : 'en_US' }") + class WithDocumentCollation {} + + @Sharded + private class WithDefaultShardKey {} + + @Sharded("country") + private class WithSingleShardKey {} + + @Sharded({ "country", "userid" }) + private class WithMultiShardKey {} + + static class SampleExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "sampleExtension"; + } + + @Override + public Map getProperties() { + + Map properties = new LinkedHashMap<>(); + properties.put("myProperty", "collectionName"); + properties.put("myCollation", "en_US"); + return properties; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index 5f325e7aab..116505143e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,28 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; import java.util.Locale; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.jmolecules.ddd.annotation.Identity; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; import org.springframework.data.util.ClassTypeInformation; @@ -41,192 +44,231 @@ /** * Unit test for {@link BasicMongoPersistentProperty}. 
- * + * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava */ public class BasicMongoPersistentPropertyUnitTests { - MongoPersistentEntity<Person> entity; - - @Rule public ExpectedException exception = ExpectedException.none(); + private MongoPersistentEntity<Person> entity; - @Before - public void setup() { - entity = new BasicMongoPersistentEntity(ClassTypeInformation.from(Person.class)); + @BeforeEach + void setup() { + entity = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(Person.class)); } @Test - public void usesAnnotatedFieldName() { + void usesAnnotatedFieldName() { Field field = ReflectionUtils.findField(Person.class, "firstname"); - assertThat(getPropertyFor(field).getFieldName(), is("foo")); + assertThat(getPropertyFor(field).getFieldName()).isEqualTo("foo"); } @Test - public void returns_IdForIdProperty() { + void returns_IdForIdProperty() { Field field = ReflectionUtils.findField(Person.class, "id"); MongoPersistentProperty property = getPropertyFor(field); - assertThat(property.isIdProperty(), is(true)); - assertThat(property.getFieldName(), is("_id")); + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.getFieldName()).isEqualTo("_id"); } @Test - public void returnsPropertyNameForUnannotatedProperties() { + void returnsPropertyNameForUnannotatedProperties() { Field field = ReflectionUtils.findField(Person.class, "lastname"); - assertThat(getPropertyFor(field).getFieldName(), is("lastname")); + assertThat(getPropertyFor(field).getFieldName()).isEqualTo("lastname"); } @Test - public void preventsNegativeOrder() { + void preventsNegativeOrder() { getPropertyFor(ReflectionUtils.findField(Person.class, "ssn")); } - /** - * @see DATAMONGO-553 - */ - @Test - public void usesPropertyAccessForThrowableCause() { + @Test // DATAMONGO-553 + void usesPropertyAccessForThrowableCause() { - MongoPersistentProperty property = getPropertyFor(ReflectionUtils.findField(Throwable.class, "cause")); - assertThat(property.usePropertyAccess(), is(true)); + BasicMongoPersistentEntity<Throwable> entity = new BasicMongoPersistentEntity<>( + ClassTypeInformation.from(Throwable.class)); + MongoPersistentProperty property = getPropertyFor(entity, "cause"); + + assertThat(property.usePropertyAccess()).isTrue(); } - /** - * @see DATAMONGO-607 - */ - @Test - public void usesCustomFieldNamingStrategyByDefault() throws Exception { + @Test // DATAMONGO-607 + void usesCustomFieldNamingStrategyByDefault() throws Exception { + ClassTypeInformation<Person> type = ClassTypeInformation.from(Person.class); Field field = ReflectionUtils.findField(Person.class, "lastname"); - MongoPersistentProperty property = new BasicMongoPersistentProperty(field, null, entity, new SimpleTypeHolder(), - UppercaseFieldNamingStrategy.INSTANCE); - assertThat(property.getFieldName(), is("LASTNAME")); + MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(type, field), entity, + SimpleTypeHolder.DEFAULT, UppercaseFieldNamingStrategy.INSTANCE); + assertThat(property.getFieldName()).isEqualTo("LASTNAME"); field = ReflectionUtils.findField(Person.class, "firstname"); - property = new BasicMongoPersistentProperty(field, null, entity, new SimpleTypeHolder(), + property = new BasicMongoPersistentProperty(Property.of(type, field), entity, SimpleTypeHolder.DEFAULT, UppercaseFieldNamingStrategy.INSTANCE); - assertThat(property.getFieldName(), is("foo")); + assertThat(property.getFieldName()).isEqualTo("foo"); } - /** - * @see DATAMONGO-607 - */ - @Test - public void
rejectsInvalidValueReturnedByFieldNamingStrategy() { + @Test // DATAMONGO-607 + void rejectsInvalidValueReturnedByFieldNamingStrategy() { + ClassTypeInformation<Person> type = ClassTypeInformation.from(Person.class); Field field = ReflectionUtils.findField(Person.class, "lastname"); - MongoPersistentProperty property = new BasicMongoPersistentProperty(field, null, entity, new SimpleTypeHolder(), - InvalidFieldNamingStrategy.INSTANCE); - exception.expect(MappingException.class); - exception.expectMessage(InvalidFieldNamingStrategy.class.getName()); - exception.expectMessage(property.toString()); + MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(type, field), entity, + SimpleTypeHolder.DEFAULT, InvalidFieldNamingStrategy.INSTANCE); - property.getFieldName(); + assertThatExceptionOfType(MappingException.class).isThrownBy(property::getFieldName) + .withMessageContaining(InvalidFieldNamingStrategy.class.getName()).withMessageContaining(property.toString()); } - /** - * @see DATAMONGO-937 - */ - @Test - public void shouldDetectAnnotatedLanguagePropertyCorrectly() { + @Test // DATAMONGO-937 + void shouldDetectAnnotatedLanguagePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithLanguageProperty.class, "lang"); - assertThat(property.isLanguageProperty(), is(true)); + assertThat(property.isLanguageProperty()).isTrue(); } - /** - * @see DATAMONGO-937 - */ - @Test - public void shouldDetectIplicitLanguagePropertyCorrectly() { + @Test // DATAMONGO-937 + void shouldDetectImplicitLanguagePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithImplicitLanguageProperty.class, "language"); - assertThat(property.isLanguageProperty(), is(true)); + assertThat(property.isLanguageProperty()).isTrue(); } - /** - * @see DATAMONGO-976 - */ - @Test - public void shouldDetectTextScorePropertyCorrectly() { + @Test // DATAMONGO-976 + void shouldDetectTextScorePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithTextScoreProperty.class, "score"); - assertThat(property.isTextScoreProperty(), is(true)); + assertThat(property.isTextScoreProperty()).isTrue(); } - /** - * @see DATAMONGO-976 - */ - @Test - public void shouldDetectTextScoreAsReadOnlyProperty() { + @Test // DATAMONGO-976 + void shouldDetectTextScoreAsReadOnlyProperty() { MongoPersistentProperty property = getPropertyFor(DocumentWithTextScoreProperty.class, "score"); - assertThat(property.isWritable(), is(false)); + assertThat(property.isWritable()).isFalse(); } - /** - * @see DATAMONGO-1050 - */ - @Test - public void shouldNotConsiderExplicitlyNameFieldAsIdProperty() { + @Test // DATAMONGO-1050 + void shouldNotConsiderExplicitlyNameFieldAsIdProperty() { MongoPersistentProperty property = getPropertyFor(DocumentWithExplicitlyRenamedIdProperty.class, "id"); - assertThat(property.isIdProperty(), is(false)); + assertThat(property.isIdProperty()).isFalse(); } - /** - * @see DATAMONGO-1050 - */ - @Test - public void shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExplicitlyNamePresent() { + @Test // DATAMONGO-1050 + void shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExplicitlyNamePresent() { MongoPersistentProperty property = getPropertyFor(DocumentWithExplicitlyRenamedIdPropertyHavingIdAnnotation.class, "id"); - assertThat(property.isIdProperty(), is(true)); + assertThat(property.isIdProperty()).isTrue(); } - /** - * @see DATAMONGO-1373 - */ - @Test - public void shouldConsiderComposedAnnotationsForIdField() { + @Test //
DATAMONGO-1373 + void shouldConsiderComposedAnnotationsForIdField() { MongoPersistentProperty property = getPropertyFor(DocumentWithComposedAnnotations.class, "myId"); - assertThat(property.isIdProperty(), is(true)); - assertThat(property.getFieldName(), is("_id")); + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.getFieldName()).isEqualTo("_id"); } - /** - * @see DATAMONGO-1373 - */ - @Test - public void shouldConsiderComposedAnnotationsForFields() { + @Test // DATAMONGO-1373 + void shouldConsiderComposedAnnotationsForFields() { MongoPersistentProperty property = getPropertyFor(DocumentWithComposedAnnotations.class, "myField"); - assertThat(property.getFieldName(), is("myField")); + assertThat(property.getFieldName()).isEqualTo("myField"); + } + + @Test // DATAMONGO-1737 + void honorsFieldOrderWhenIteratingOverProperties() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity<?> entity = context.getPersistentEntity(Sample.class); + + List<String> properties = new ArrayList<>(); + + entity.doWithProperties((MongoPersistentProperty property) -> properties.add(property.getName())); + + assertThat(properties).containsExactly("first", "second", "third"); + } + + @Test // GH-3407 + void shouldDetectWritability() { + + assertThat(getPropertyFor(WithFieldWrite.class, "fieldWithDefaults").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "fieldWithField").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "writeNonNull").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "writeAlways").writeNullValues()).isTrue(); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldReturnActualTypeForNonIdProperties() { + + MongoPersistentProperty property = getPropertyFor(Person.class, "lastname"); + assertThat(property.getFieldType()).isEqualTo(String.class); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithCommonsId() { + + MongoPersistentProperty property = getPropertyFor(Person.class, "id"); + assertThat(property.getFieldType()).isEqualTo(ObjectId.class); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldBeImplicitForPropertiesAnnotatedWithMongoId() { + + MongoPersistentProperty property = getPropertyFor(WithStringMongoId.class, "id"); + assertThat(property.getFieldType()).isEqualTo(String.class); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithMongoIdAndTargetTypeObjectId() { + + MongoPersistentProperty property = getPropertyFor(WithStringMongoIdMappedToObjectId.class, "id"); + assertThat(property.getFieldType()).isEqualTo(ObjectId.class); + } + + @Test // DATAMONGO-2460 + void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTypeImplicit() { + + MongoPersistentProperty property = getPropertyFor(WithComplexId.class, "id"); + assertThat(property.getFieldType()).isEqualTo(Document.class); + } + + @Test // GH-3803 + void considersJMoleculesIdentityExplicitlyAnnotatedIdentifier() { + + MongoPersistentProperty property = getPropertyFor(WithJMoleculesIdentity.class, "identifier"); + + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.isExplicitIdProperty()).isTrue(); } private MongoPersistentProperty getPropertyFor(Field field) { return getPropertyFor(entity, field); } - private MongoPersistentProperty getPropertyFor(Class<?> type, String fieldname) { - return getPropertyFor(new BasicMongoPersistentEntity(ClassTypeInformation.from(type)), fieldname); + private
static MongoPersistentProperty getPropertyFor(Class<?> type, String fieldname) { + return getPropertyFor(new BasicMongoPersistentEntity<>(ClassTypeInformation.from(type)), fieldname); } - private MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> persistentEntity, String fieldname) { - return getPropertyFor(persistentEntity, ReflectionUtils.findField(persistentEntity.getType(), fieldname)); + private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> entity, String fieldname) { + return getPropertyFor(entity, ReflectionUtils.findField(entity.getType(), fieldname)); } - private MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> persistentEntity, Field field) { - return new BasicMongoPersistentProperty(field, null, persistentEntity, new SimpleTypeHolder(), + private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> entity, Field field) { + BasicMongoPersistentProperty property = new BasicMongoPersistentProperty( + Property.of(entity.getTypeInformation(), field), entity, SimpleTypeHolder.DEFAULT, PropertyNameFieldNamingStrategy.INSTANCE); + + entity.addPersistentProperty(property); + return property; } class Person { @@ -239,6 +281,13 @@ class Person { @org.springframework.data.mongodb.core.mapping.Field(order = -20) String ssn; } + class Sample { + + @org.springframework.data.mongodb.core.mapping.Field(order = 2) String second; + @org.springframework.data.mongodb.core.mapping.Field(order = 3) String third; + @org.springframework.data.mongodb.core.mapping.Field(order = 1) String first; + } + enum UppercaseFieldNamingStrategy implements FieldNamingStrategy { INSTANCE; @@ -271,6 +320,18 @@ static class DocumentWithTextScoreProperty { @TextScore Float score; } + static class WithFieldWrite { + + int fieldWithDefaults; + @org.springframework.data.mongodb.core.mapping.Field int fieldWithField; + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Integer writeNonNull; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Integer writeAlways; + + } + static class DocumentWithExplicitlyRenamedIdProperty { @org.springframework.data.mongodb.core.mapping.Field("id") String id; @@ -278,12 +339,14 @@ static class DocumentWithExplicitlyRenamedIdProperty { static class DocumentWithExplicitlyRenamedIdPropertyHavingIdAnnotation { - @Id @org.springframework.data.mongodb.core.mapping.Field("id") String id; + @Id + @org.springframework.data.mongodb.core.mapping.Field("id") String id; } static class DocumentWithComposedAnnotations { - @ComposedIdAnnotation @ComposedFieldAnnotation String myId; + @ComposedIdAnnotation + @ComposedFieldAnnotation String myId; @ComposedFieldAnnotation(name = "myField") String myField; } @@ -301,4 +364,29 @@ static class DocumentWithComposedAnnotations { @Id static @interface ComposedIdAnnotation { } + + static class WithStringMongoId { + + @MongoId String id; + } + + static class WithStringMongoIdMappedToObjectId { + + @MongoId(FieldType.OBJECT_ID) String id; + } + + static class ComplexId { + + String value; + } + + static class WithComplexId { + + @Id + @org.springframework.data.mongodb.core.mapping.Field ComplexId id; + } + + static class WithJMoleculesIdentity { + @Identity ObjectId identifier; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java
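Before the next file, a short sketch of the id conventions the fieldType tests above pin down; the document classes here are hypothetical, while the annotations and FieldType come straight from the hunks above:

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.FieldType;
import org.springframework.data.mongodb.core.mapping.MongoId;

class IdMappingSketch {

	// @MongoId without a field type keeps the declared Java type, here String.
	static class WithImplicitIdType {
		@MongoId String id;
	}

	// @MongoId(FieldType.OBJECT_ID) stores the String id as an ObjectId.
	static class WithObjectIdStorage {
		@MongoId(FieldType.OBJECT_ID) String id;
	}

	// A plain @Id property is targeted at ObjectId, matching
	// fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithCommonsId above.
	static class WithCommonsId {
		@Id String id;
	}
}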
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java index 5445840ad1..44727cd81a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,13 +21,11 @@ /** * @author Jon Brisbin */ -@Document(collection = "foobar") +@Document("foobar") public class CustomCollectionWithIndex { - @Id - private String id; - @Indexed - private String name; + @Id private String id; + @Indexed private String name; public CustomCollectionWithIndex(String name) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java index 20a4edd8c6..1eb2628c75 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,10 +24,8 @@ @Document public class DetectedCollectionWithIndex { - @Id - private String id; - @Indexed - private String name; + @Id private String id; + @Indexed private String name; public DetectedCollectionWithIndex(String name) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java index ab0de594f1..8259e3ed95 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; + import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -26,10 +25,8 @@ @Document public class GeneratedId { - @Id - private ObjectId id; - @SuppressWarnings("unused") - private String name; + @Id private ObjectId id; + @SuppressWarnings("unused") private String name; public GeneratedId(String name) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java index 29a2cadc95..7933dbb70f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,41 +13,38 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collections; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - /** * Unit tests for testing the mapping works with generic types. 
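The next hunks rewrite this test against org.bson.Document. As a rough sketch of the round trip it verifies (self-contained and hypothetical; it swaps the test's mocked DbRefResolver for NoOpDbRefResolver and assumes Wrapper and Container fixtures shaped like the test's):

import org.bson.Document;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class GenericRoundTripSketch {

	static class Wrapper<S> {
		Container<S> container;
	}

	static class Container<T> {
		T content;
	}

	static class StringWrapper extends Wrapper<String> {}

	public static void main(String[] args) {

		MongoMappingContext context = new MongoMappingContext();
		context.afterPropertiesSet();

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context);
		converter.afterPropertiesSet();

		StringWrapper wrapper = new StringWrapper();
		wrapper.container = new Container<>();
		wrapper.container.content = "Foo";

		// Writing resolves the generic type and nests the container document.
		Document sink = new Document();
		converter.write(wrapper, sink); // { "container" : { "content" : "Foo" } }

		// Reading restores the generic content as a String.
		StringWrapper read = converter.read(StringWrapper.class, sink);
		System.out.println(read.container.content); // Foo
	}
}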
- * + * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class GenericMappingTests { +@ExtendWith(MockitoExtension.class) +class GenericMappingTests { - MongoMappingContext context; - MongoConverter converter; + private MongoMappingContext context; + private MongoConverter converter; @Mock DbRefResolver resolver; - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws Exception { context = new MongoMappingContext(); context.setInitialEntitySet(Collections.singleton(StringWrapper.class)); @@ -57,36 +54,36 @@ public void setUp() throws Exception { } @Test - public void writesGenericTypeCorrectly() { + void writesGenericTypeCorrectly() { StringWrapper wrapper = new StringWrapper(); - wrapper.container = new Container(); - wrapper.container.content = "Foo!"; + wrapper.container = new Container<>(); + wrapper.container.content = "Foo"; - DBObject dbObject = new BasicDBObject(); - converter.write(wrapper, dbObject); + Document document = new Document(); + converter.write(wrapper, document); - Object container = dbObject.get("container"); - assertThat(container, is(notNullValue())); - assertTrue(container instanceof DBObject); + Object container = document.get("container"); + assertThat(container).isNotNull(); + assertThat(container instanceof Document).isTrue(); - Object content = ((DBObject) container).get("content"); - assertTrue(content instanceof String); - assertThat((String) content, is("Foo!")); + Object content = ((Document) container).get("content"); + assertThat(content instanceof String).isTrue(); + assertThat((String) content).isEqualTo("Foo"); } @Test - public void readsGenericTypeCorrectly() { + void readsGenericTypeCorrectly() { - DBObject content = new BasicDBObject("content", "Foo!"); - BasicDBObject container = new BasicDBObject("container", content); + Document content = new Document("content", "Foo"); + Document container = new Document("container", content); StringWrapper result = converter.read(StringWrapper.class, container); - assertThat(result.container, is(notNullValue())); - assertThat(result.container.content, is("Foo!")); + assertThat(result.container).isNotNull(); + assertThat(result.container.content).isEqualTo("Foo"); } - static class StringWrapper extends Wrapper { + private static class StringWrapper extends Wrapper { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java index 3c49752f6d..0f58e6ace5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java @@ -1,50 +1,64 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.mapping; - -import org.springframework.context.annotation.Bean; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; -import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener; - -import com.mongodb.Mongo; -import com.mongodb.MongoClient; - -public class GeoIndexedAppConfig extends AbstractMongoConfiguration { - - public static String GEO_DB = "database"; - public static String GEO_COLLECTION = "geolocation"; - - @Override - public String getDatabaseName() { - return GEO_DB; - } - - @Override - @Bean - public Mongo mongo() throws Exception { - return new MongoClient("127.0.0.1"); - } - - @Override - public String getMappingBasePackage() { - return "org.springframework.data.mongodb.core.core.mapping"; - } - - @Bean - public LoggingEventListener mappingEventsListener() { - return new LoggingEventListener(); - } -} +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import org.springframework.context.annotation.Bean; +import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.client.MongoClient; + +public class GeoIndexedAppConfig extends MongoClientClosingTestConfiguration { + + public static String GEO_DB = "database"; + public static String GEO_COLLECTION = "geolocation"; + + @Override + public String getDatabaseName() { + return GEO_DB; + } + + @Override + @Bean + public MongoClient mongoClient() { + return MongoTestUtils.client(); + } + + @Override + protected Collection<String> getMappingBasePackages() { + return Collections.singleton("org.springframework.data.mongodb.core.core.mapping"); + } + + @Bean + public LoggingEventListener mappingEventsListener() { + return new LoggingEventListener(); + } + + @Override + protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Override + protected boolean autoIndexCreation() { + return true; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java index 1b6ed4ff30..1e1978853f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors.
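The autoIndexCreation() override above is the notable addition: newer Spring Data MongoDB no longer creates indexes for annotated entities automatically unless the mapping context opts in. Outside such a test base class, the equivalent wiring looks roughly like this (a sketch; the client parameter and database name are placeholders):

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import com.mongodb.client.MongoClient;

class AutoIndexSketch {

	static MongoTemplate template(MongoClient client) {

		// Opt in to automatic creation of @Indexed/@GeoSpatialIndexed indexes.
		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.setAutoIndexCreation(true);

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
		return new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "database"), converter);
	}
}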
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,12 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import java.net.UnknownHostException; +import java.util.ArrayList; import java.util.List; +import org.bson.Document; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -29,21 +30,21 @@ import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; /** * @author Jon Brisbin * @author Oliver Gierke + * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration(classes = GeoIndexedAppConfig.class) public class GeoIndexedTests { @@ -54,35 +55,41 @@ public class GeoIndexedTests { @Autowired MongoMappingContext mappingContext; @Before - public void setUp() throws Exception { + public void setUp() { cleanDb(); } @After - public void cleanUp() throws Exception { + public void cleanUp() { cleanDb(); } - private void cleanDb() throws UnknownHostException { + private void cleanDb() { - Mongo mongo = new MongoClient(); - DB db = mongo.getDB(GeoIndexedAppConfig.GEO_DB); + try (MongoClient mongo = MongoTestUtils.client()) { - for (String coll : collectionsToDrop) { - db.getCollection(coll).drop(); + MongoDatabase db = mongo.getDatabase(GeoIndexedAppConfig.GEO_DB); + + for (String coll : collectionsToDrop) { + db.getCollection(coll).drop(); + } } } @Test public void testGeoLocation() { + GeoLocation geo = new GeoLocation(new double[] { 40.714346, -74.005966 }); template.insert(geo); boolean hasIndex = template.execute("geolocation", new CollectionCallback<Boolean>() { - public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException { - List<DBObject> indexes = collection.getIndexInfo(); - for (DBObject dbo : indexes) { - if ("location".equals(dbo.get("name"))) { + public Boolean doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException { + + List<Document> indexes = new ArrayList<Document>(); + collection.listIndexes(Document.class).into(indexes); + + for (Document document : indexes) { + if ("location".equals(document.get("name"))) { return true; } } @@ -90,6 +97,6 @@ public Boolean doInCollection(DBCollection collection) throws MongoException, Da } }); - assertTrue(hasIndex); + assertThat(hasIndex).isTrue(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java
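The callback above also shows the driver-API shift: DBCollection.getIndexInfo() is gone, and index metadata now comes from listIndexes(..). Extracted as a reusable sketch (the helper name is hypothetical):

import java.util.ArrayList;
import java.util.List;

import org.bson.Document;

import com.mongodb.client.MongoCollection;

class IndexLookupSketch {

	// Returns true if the collection has an index with the given name,
	// mirroring the CollectionCallback in the test above.
	static boolean hasIndexNamed(MongoCollection<Document> collection, String indexName) {

		List<Document> indexes = new ArrayList<>();
		collection.listIndexes(Document.class).into(indexes);

		for (Document index : indexes) {
			if (indexName.equals(index.get("name"))) {
				return true;
			}
		}
		return false;
	}
}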
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java index 59552a97c5..e1032e6d84 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,24 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; + import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin */ -@Document(collection = "geolocation") +@Document("geolocation") public class GeoLocation { - @Id - private ObjectId id; - @GeoSpatialIndexed(collection = "geolocation") - private double[] location; + @Id private ObjectId id; + @GeoSpatialIndexed private double[] location; public GeoLocation(double[] location) { this.location = location; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java index b04bd413a1..d9626343aa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,16 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin */ -@Document(collection = "places") +@Document("places") public class Location { private ObjectId id; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java index b101ae6ba1..eee407701c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.core.query.Update.*; @@ -28,40 +27,63 @@ import java.util.List; import java.util.Map; +import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.dao.DataAccessException; import org.springframework.dao.DuplicateKeyException; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.MongoCollectionUtils; -import org.springframework.data.mongodb.config.AbstractIntegrationTests; import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; /** * @author Jon Brisbin * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch */ -public class MappingTests extends 
AbstractIntegrationTests { +@ExtendWith(MongoTemplateExtension.class) +public class MappingTests { - @Autowired MongoOperations template; + static final String DB_NAME = "mapping-tests"; + + static @Client MongoClient client; + + @Template(database = DB_NAME, + initialEntitySet = { PersonWithDbRef.class, GeoLocation.class, PersonPojoStringId.class, Account.class, + DetectedCollectionWithIndex.class, Item.class, Container.class, Person.class, PersonCustomCollection1.class, + GeneratedId.class, PersonWithObjectId.class, PersonCustomIdName.class, PersonMapProperty.class }) // + static MongoTestTemplate template; + + @AfterEach + void afterEach() { + template.flush(); + } @Test public void testGeneratedId() { GeneratedId genId = new GeneratedId("test"); template.insert(genId); - assertNotNull(genId.getId()); + assertThat(genId.getId()).isNotNull(); } @Test @@ -69,12 +91,12 @@ public void testPersonPojo() throws Exception { PersonWithObjectId p = new PersonWithObjectId(12345, "Person", "Pojo"); template.insert(p); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); List<PersonWithObjectId> result = template.find(new Query(Criteria.where("ssn").is(12345)), PersonWithObjectId.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getSsn(), is(12345)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getSsn()).isEqualTo(12345); } @Test @@ -85,24 +107,24 @@ public void testPersonWithCustomIdName() { List<PersonCustomIdName> result = template.find(new Query(Criteria.where("lastName").is(p.getLastName())), PersonCustomIdName.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getFirstName(), is("Custom Id")); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getFirstName()).isEqualTo("Custom Id"); PersonCustomIdName p2 = new PersonCustomIdName(654321, "Custom Id", "LastName"); template.insert(p2); List<PersonCustomIdName> result2 = template.find(new Query(Criteria.where("lastName").is("LastName")), PersonCustomIdName.class); - assertThat(result2.size(), is(1)); - assertNotNull(result2.get(0).getLastName()); - assertThat(result2.get(0).getLastName(), is("LastName")); + assertThat(result2.size()).isEqualTo(1); + assertThat(result2.get(0).getLastName()).isNotNull(); + assertThat(result2.get(0).getLastName()).isEqualTo("LastName"); // Test "in" query List<PersonCustomIdName> result3 = template.find(new Query(Criteria.where("lastName").in("LastName")), PersonCustomIdName.class); - assertThat(result3.size(), is(1)); - assertNotNull(result3.get(0).getLastName()); - assertThat(result3.get(0).getLastName(), is("LastName")); + assertThat(result3.size()).isEqualTo(1); + assertThat(result3.get(0).getLastName()).isNotNull(); + assertThat(result3.get(0).getLastName()).isEqualTo("LastName"); } @Test @@ -118,13 +140,13 @@ public void testPersonMapProperty() { p.setAccounts(accounts); template.insert(p); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); List<PersonMapProperty> result = template.find(new Query(Criteria.where("ssn").is(1234567)), PersonMapProperty.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getAccounts().size(), is(2)); - assertThat(result.get(0).getAccounts().get("checking").getBalance(), is(1000.0f)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getAccounts().size()).isEqualTo(2); + assertThat(result.get(0).getAccounts().get("checking").getBalance()).isEqualTo(1000.0f); } @Test @@ -155,18 +177,23 @@ public void testWriteEntity() { accounts.add(newAcct); template.save(p, "person"); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull();
List<Person> result = template.find(new Query(Criteria.where("ssn").is(123456789)), Person.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getAddress().getCountry(), is("USA")); - assertThat(result.get(0).getAccounts(), notNullValue()); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getAddress().getCountry()).isEqualTo("USA"); + assertThat(result.get(0).getAccounts()).isNotNull(); } - @Test(expected = DuplicateKeyException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void testUniqueIndex() { + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setAutoIndexCreation(true); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + Address addr = new Address(); addr.setLines(new String[] { "1234 W. 1st Street", "Apt. 12" }); addr.setCity("Anytown"); @@ -176,7 +203,7 @@ public void testUniqueIndex() { Person p1 = new Person(1234567890, "John", "Doe", 37, addr); Person p2 = new Person(1234567890, "Jane", "Doe", 38, addr); - template.insertAll(Arrays.asList(p1, p2)); + assertThatExceptionOfType(DuplicateKeyException.class).isThrownBy(() -> template.insertAll(Arrays.asList(p1, p2))); } @Test @@ -190,8 +217,8 @@ public void testCustomCollectionInList() { PersonCustomCollection1.class, "person1"); List<PersonCustomCollection2> p2Results = template.find(new Query(Criteria.where("ssn").is(66666)), PersonCustomCollection2.class, "person2"); - assertThat(p1Results.size(), is(1)); - assertThat(p2Results.size(), is(1)); + assertThat(p1Results.size()).isEqualTo(1); + assertThat(p2Results.size()).isEqualTo(1); } @Test @@ -200,43 +227,57 @@ public void testPrimitivesAndCustomCollectionName() { template.insert(loc); List<Location> result = template.find(new Query(Criteria.where("_id").is(loc.getId())), Location.class, "places"); - assertThat(result.size(), is(1)); + assertThat(result.size()).isEqualTo(1); } @Test public void testIndexesCreatedInRightCollection() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setAutoIndexCreation(true); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + + CustomCollectionWithIndex ccwi = new CustomCollectionWithIndex("test"); template.insert(ccwi); - assertTrue(template.execute("foobar", new CollectionCallback<Boolean>() { - public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException { - List<DBObject> indexes = collection.getIndexInfo(); - for (DBObject dbo : indexes) { - if (dbo.get("name") != null && dbo.get("name") instanceof String - && ((String) dbo.get("name")).startsWith("name")) { + assertThat(template.execute("foobar", new CollectionCallback<Boolean>() { + public Boolean doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException { + + List<Document> indexes = new ArrayList<Document>(); + collection.listIndexes(Document.class).into(indexes); + + for (Document document : indexes) { + if (document.get("name") != null && document.get("name") instanceof String + && ((String) document.get("name")).startsWith("name")) { return true; } } return false; } - })); + })).isTrue(); DetectedCollectionWithIndex dcwi = new DetectedCollectionWithIndex("test"); template.insert(dcwi); - assertTrue(template.execute(MongoCollectionUtils.getPreferredCollectionName(DetectedCollectionWithIndex.class), +
assertThat(template.execute(MongoCollectionUtils.getPreferredCollectionName(DetectedCollectionWithIndex.class), new CollectionCallback<Boolean>() { - public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException { - List<DBObject> indexes = collection.getIndexInfo(); - for (DBObject dbo : indexes) { - if (dbo.get("name") != null && dbo.get("name") instanceof String - && ((String) dbo.get("name")).startsWith("name")) { + public Boolean doInCollection(MongoCollection<Document> collection) + throws MongoException, DataAccessException { + + List<Document> indexes = new ArrayList<Document>(); + collection.listIndexes(Document.class).into(indexes); + + for (Document document : indexes) { + if (document.get("name") != null && document.get("name") instanceof String + && ((String) document.get("name")).startsWith("name")) { return true; } } return false; } - })); + })).isTrue(); } @Test @@ -248,9 +289,9 @@ public void testMultiDimensionalArrayProperties() { template.insert(p); List<PersonMultiDimArrays> result = template.find(new Query(Criteria.where("ssn").is(123)), PersonMultiDimArrays.class); - assertThat(result.size(), is(1)); + assertThat(result.size()).isEqualTo(1); - assertThat(result.get(0).getGrid().length, is(3)); + assertThat(result.get(0).getGrid().length).isEqualTo(3); } @Test @@ -268,9 +309,9 @@ public void testMultiDimensionalCollectionProperties() { List<PersonMultiCollection> result = template.find(new Query(Criteria.where("ssn").is(321)), PersonMultiCollection.class); - assertThat(result.size(), is(1)); + assertThat(result.size()).isEqualTo(1); - assertThat(result.get(0).getGrid().size(), is(1)); + assertThat(result.get(0).getGrid().size()).isEqualTo(1); } @Test @@ -283,8 +324,8 @@ public void testDbRef() { template.insert(p); List<PersonWithDbRef> result = template.find(new Query(Criteria.where("ssn").is(4321)), PersonWithDbRef.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getHome().getLocation(), is(pos)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getHome().getLocation()).isEqualTo(pos); } @Test @@ -292,7 +333,7 @@ public void testPersonWithNullProperties() { PersonNullProperties p = new PersonNullProperties(); template.insert(p); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); } @Test @@ -311,7 +352,7 @@ public void testQueryUpdate() { template.updateFirst(query(where("ssn").is(1111)), update("address", addr), Person.class); Person p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertThat(p2.getAddress().getCity(), is("New Town")); + assertThat(p2.getAddress().getCity()).isEqualTo("New Town"); } @Test @@ -324,19 +365,19 @@ public void testUpsert() { addr.setCountry("USA"); Person p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertNull(p2); + assertThat(p2).isNull(); template.upsert(query(where("ssn").is(1111).and("firstName").is("Query").and("lastName").is("Update")), update("address", addr), Person.class); p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertThat(p2.getAddress().getCity(), is("Anytown")); + assertThat(p2.getAddress().getCity()).isEqualTo("Anytown"); template.dropCollection(Person.class); template.upsert(query(where("ssn").is(1111).and("firstName").is("Query").and("lastName").is("Update")), update("address", addr), "person"); p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertThat(p2.getAddress().getCity(), is("Anytown")); + assertThat(p2.getAddress().getCity()).isEqualTo("Anytown"); } @@ -347,12 +388,12 @@ public void testOrQuery() { PersonWithObjectId p2 = new
PersonWithObjectId(2, "second", ""); template.save(p2); - List<PersonWithObjectId> results = template.find( - new Query(new Criteria().orOperator(where("ssn").is(1), where("ssn").is(2))), PersonWithObjectId.class); + List<PersonWithObjectId> results = template + .find(new Query(new Criteria().orOperator(where("ssn").is(1), where("ssn").is(2))), PersonWithObjectId.class); - assertNotNull(results); - assertThat(results.size(), is(2)); - assertThat(results.get(1).getSsn(), is(2)); + assertThat(results).isNotNull(); + assertThat(results.size()).isEqualTo(2); + assertThat(results.get(1).getSsn()).isEqualTo(2); } @Test @@ -363,7 +404,7 @@ public void testPrimitivesAsIds() { template.save(p); PrimitiveId p2 = template.findOne(query(where("id").is(1)), PrimitiveId.class); - assertNotNull(p2); + assertThat(p2).isNotNull(); } @Test @@ -373,13 +414,13 @@ public void testNoMappingAnnotationsUsingIntAsId() { template.updateFirst(query(where("id").is(1)), update("text", "New Text"), PersonPojoIntId.class); PersonPojoIntId p2 = template.findOne(query(where("id").is(1)), PersonPojoIntId.class); - assertEquals("New Text", p2.getText()); + assertThat(p2.getText()).isEqualTo("New Text"); p.setText("Different Text"); template.save(p); PersonPojoIntId p3 = template.findOne(query(where("id").is(1)), PersonPojoIntId.class); - assertEquals("Different Text", p3.getText()); + assertThat(p3.getText()).isEqualTo("Different Text"); } @@ -390,13 +431,13 @@ public void testNoMappingAnnotationsUsingLongAsId() { template.updateFirst(query(where("id").is(1)), update("text", "New Text"), PersonPojoLongId.class); PersonPojoLongId p2 = template.findOne(query(where("id").is(1)), PersonPojoLongId.class); - assertEquals("New Text", p2.getText()); + assertThat(p2.getText()).isEqualTo("New Text"); p.setText("Different Text"); template.save(p); PersonPojoLongId p3 = template.findOne(query(where("id").is(1)), PersonPojoLongId.class); - assertEquals("Different Text", p3.getText()); + assertThat(p3.getText()).isEqualTo("Different Text"); } @@ -408,21 +449,21 @@ public void testNoMappingAnnotationsUsingStringAsId() { template.updateFirst(query(where("id").is("1")), update("text", "New Text"), PersonPojoStringId.class); PersonPojoStringId p2 = template.findOne(query(where("id").is("1")), PersonPojoStringId.class); - assertEquals("New Text", p2.getText()); + assertThat(p2.getText()).isEqualTo("New Text"); p.setText("Different Text"); template.save(p); PersonPojoStringId p3 = template.findOne(query(where("id").is("1")), PersonPojoStringId.class); - assertEquals("Different Text", p3.getText()); + assertThat(p3.getText()).isEqualTo("Different Text"); PersonPojoStringId p4 = new PersonPojoStringId("2", "Text-2"); template.insert(p4); Query q = query(where("id").in("1", "2")); - q.with(new Sort(Direction.ASC, "id")); + q.with(Sort.by(Direction.ASC, "id")); List<PersonPojoStringId> people = template.find(q, PersonPojoStringId.class); - assertEquals(2, people.size()); + assertThat(people.size()).isEqualTo(2); } @@ -436,15 +477,12 @@ public void testPersonWithLongDBRef() { Query q = query(where("ssn").is(21)); PersonWithLongDBRef p2 = template.findOne(q, PersonWithLongDBRef.class); - assertNotNull(p2); - assertNotNull(p2.getPersonPojoLongId()); - assertEquals(12L, p2.getPersonPojoLongId().getId()); + assertThat(p2).isNotNull(); + assertThat(p2.getPersonPojoLongId()).isNotNull(); + assertThat(p2.getPersonPojoLongId().getId()).isEqualTo(12L); } - /** - * @see DATADOC-275 - */ - @Test + @Test // DATADOC-275 public void readsAndWritesDBRefsCorrectly() { template.dropCollection(Item.class); @@ -462,15 +500,12
@@ public void readsAndWritesDBRefsCorrectly() { template.insert(container); Container result = template.findOne(query(where("id").is(container.id)), Container.class); - assertThat(result.item.id, is(item.id)); - assertThat(result.items.size(), is(1)); - assertThat(result.items.get(0).id, is(items.id)); + assertThat(result.item.id).isEqualTo(item.id); + assertThat(result.items.size()).isEqualTo(1); + assertThat(result.items.get(0).id).isEqualTo(items.id); } - /** - * @see DATAMONGO-805 - */ - @Test + @Test // DATAMONGO-805 public void supportExcludeDbRefAssociation() { template.dropCollection(Item.class); @@ -488,14 +523,11 @@ public void supportExcludeDbRefAssociation() { query.fields().exclude("item"); Container result = template.findOne(query, Container.class); - assertThat(result, is(notNullValue())); - assertThat(result.item, is(nullValue())); + assertThat(result).isNotNull(); + assertThat(result.item).isNull(); } - /** - * @see DATAMONGO-805 - */ - @Test + @Test // DATAMONGO-805 public void shouldMapFieldsOfIterableEntity() { template.dropCollection(IterableItem.class); @@ -513,14 +545,14 @@ public void shouldMapFieldsOfIterableEntity() { Query query = new Query(Criteria.where("id").is("foo")); Container result = template.findOne(query, Container.class); - assertThat(result, is(notNullValue())); - assertThat(result.item, is(notNullValue())); - assertThat(result.item.value, is("bar")); + assertThat(result).isNotNull(); + assertThat(result.item).isNotNull(); + assertThat(result.item.value).isEqualTo("bar"); } static class Container { - @Id final String id; + @Id String id; public Container() { id = new ObjectId().toString(); @@ -536,7 +568,7 @@ public Container(String id) { static class Item { - @Id final String id; + @Id String id; String value; public Item() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java index 99e74c523e..4a4dd54717 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,44 +15,43 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.time.temporal.ChronoUnit; import java.util.AbstractMap; import java.util.Collections; import java.util.Locale; import java.util.Map; +import java.util.Optional; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.context.ApplicationContext; import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.model.MappingException; import com.mongodb.DBRef; /** * Unit tests for {@link MongoMappingContext}. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class MongoMappingContextUnitTests { @Mock ApplicationContext applicationContext; - @Rule public ExpectedException exception = ExpectedException.none(); - @Test - public void addsSelfReferencingPersistentEntityCorrectly() throws Exception { + void addsSelfReferencingPersistentEntityCorrectly() throws Exception { MongoMappingContext context = new MongoMappingContext(); @@ -61,27 +60,21 @@ public void addsSelfReferencingPersistentEntityCorrectly() throws Exception { } @Test - public void doesNotReturnPersistentEntityForMongoSimpleType() { + void doesNotReturnPersistentEntityForMongoSimpleType() { MongoMappingContext context = new MongoMappingContext(); - assertThat(context.getPersistentEntity(DBRef.class), is(nullValue())); + assertThat(context.getPersistentEntity(DBRef.class)).isNull(); } - /** - * @see DATAMONGO-638 - */ - @Test - public void doesNotCreatePersistentEntityForAbstractMap() { + @Test // DATAMONGO-638 + void doesNotCreatePersistentEntityForAbstractMap() { MongoMappingContext context = new MongoMappingContext(); - assertThat(context.getPersistentEntity(AbstractMap.class), is(nullValue())); + assertThat(context.getPersistentEntity(AbstractMap.class)).isNull(); } - /** - * @see DATAMONGO-607 - */ - @Test - public void populatesPersistentPropertyWithCustomFieldNamingStrategy() { + @Test // DATAMONGO-607 + void populatesPersistentPropertyWithCustomFieldNamingStrategy() { MongoMappingContext context = new MongoMappingContext(); context.setApplicationContext(applicationContext); @@ -92,108 +85,113 @@ public String getFieldName(PersistentProperty<?> property) { } }); - MongoPersistentEntity<?> entity = context.getPersistentEntity(Person.class); - assertThat(entity.getPersistentProperty("firstname").getFieldName(), is("FIRSTNAME")); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(Person.class); + 
assertThat(entity.getRequiredPersistentProperty("firstname").getFieldName()).isEqualTo("FIRSTNAME"); } - /** - * @see DATAMONGO-607 - */ - @Test - public void rejectsClassWithAmbiguousFieldMappings() { - - exception.expect(MappingException.class); - exception.expectMessage("firstname"); - exception.expectMessage("lastname"); - exception.expectMessage("foo"); - exception.expectMessage("@Field"); + @Test // DATAMONGO-607 + void rejectsClassWithAmbiguousFieldMappings() { MongoMappingContext context = new MongoMappingContext(); context.setApplicationContext(applicationContext); - context.getPersistentEntity(InvalidPerson.class); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> context.getPersistentEntity(InvalidPerson.class)) .withMessageContaining("firstname").withMessageContaining("lastname").withMessageContaining("foo") .withMessageContaining("@Field"); } - /** - * @see DATAMONGO-694 - */ - @Test - public void doesNotConsiderOverrridenAccessorANewField() { + @Test // DATAMONGO-694 + void doesNotConsiderOverrridenAccessorANewField() { MongoMappingContext context = new MongoMappingContext(); context.setApplicationContext(applicationContext); context.getPersistentEntity(Child.class); } - /** - * @see DATAMONGO-688 - */ - @Test - public void mappingContextShouldAcceptClassWithImplicitIdProperty() { + @Test // DATAMONGO-688 + void mappingContextShouldAcceptClassWithImplicitIdProperty() { MongoMappingContext context = new MongoMappingContext(); - BasicMongoPersistentEntity<?> pe = context.getPersistentEntity(ClassWithImplicitId.class); + MongoPersistentEntity<?> pe = context.getRequiredPersistentEntity(ClassWithImplicitId.class); - assertThat(pe, is(not(nullValue()))); - assertThat(pe.isIdProperty(pe.getPersistentProperty("id")), is(true)); + assertThat(pe).isNotNull(); + assertThat(pe.isIdProperty(pe.getRequiredPersistentProperty("id"))).isTrue(); } - /** - * @see DATAMONGO-688 - */ - @Test - public void mappingContextShouldAcceptClassWithExplicitIdProperty() { + @Test // DATAMONGO-688 + void mappingContextShouldAcceptClassWithExplicitIdProperty() { MongoMappingContext context = new MongoMappingContext(); - BasicMongoPersistentEntity<?> pe = context.getPersistentEntity(ClassWithExplicitId.class); + MongoPersistentEntity<?> pe = context.getRequiredPersistentEntity(ClassWithExplicitId.class); - assertThat(pe, is(not(nullValue()))); - assertThat(pe.isIdProperty(pe.getPersistentProperty("myId")), is(true)); + assertThat(pe).isNotNull(); + assertThat(pe.isIdProperty(pe.getRequiredPersistentProperty("myId"))).isTrue(); } - /** - * @see DATAMONGO-688 - */ - @Test - public void mappingContextShouldAcceptClassWithExplicitAndImplicitIdPropertyByGivingPrecedenceToExplicitIdProperty() { + @Test // DATAMONGO-688 + void mappingContextShouldAcceptClassWithExplicitAndImplicitIdPropertyByGivingPrecedenceToExplicitIdProperty() { MongoMappingContext context = new MongoMappingContext(); - BasicMongoPersistentEntity<?> pe = context.getPersistentEntity(ClassWithExplicitIdAndImplicitId.class); - assertThat(pe, is(not(nullValue()))); + MongoPersistentEntity<?> pe = context.getRequiredPersistentEntity(ClassWithExplicitIdAndImplicitId.class); + assertThat(pe).isNotNull(); } - /** - * @see DATAMONGO-688 - */ - @Test(expected = MappingException.class) - public void rejectsClassWithAmbiguousExplicitIdPropertyFieldMappings() { + @Test // DATAMONGO-688 + void rejectsClassWithAmbiguousExplicitIdPropertyFieldMappings() { MongoMappingContext context = new MongoMappingContext(); - 
context.getPersistentEntity(ClassWithMultipleExplicitIds.class); + assertThatThrownBy(() -> context.getPersistentEntity(ClassWithMultipleExplicitIds.class)) .isInstanceOf(MappingException.class); } - /** - * @see DATAMONGO-688 - */ - @Test(expected = MappingException.class) - public void rejectsClassWithAmbiguousImplicitIdPropertyFieldMappings() { + @Test // DATAMONGO-688 + void rejectsClassWithAmbiguousImplicitIdPropertyFieldMappings() { MongoMappingContext context = new MongoMappingContext(); - context.getPersistentEntity(ClassWithMultipleImplicitIds.class); + assertThatThrownBy(() -> context.getPersistentEntity(ClassWithMultipleImplicitIds.class)) .isInstanceOf(MappingException.class); } - /** - * @see DATAMONGO-976 - */ - @Test - public void shouldRejectClassWithInvalidTextScoreProperty() { + @Test // DATAMONGO-976 + void shouldRejectClassWithInvalidTextScoreProperty() { - exception.expect(MappingException.class); - exception.expectMessage("score"); - exception.expectMessage("Float"); - exception.expectMessage("Double"); + MongoMappingContext context = new MongoMappingContext(); + + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> context.getPersistentEntity(ClassWithInvalidTextScoreProperty.class)) + .withMessageContaining("score").withMessageContaining("Float").withMessageContaining("Double"); + } + + @Test // DATAMONGO-2599 + void shouldNotCreateEntityForEnum() { MongoMappingContext context = new MongoMappingContext(); - context.getPersistentEntity(ClassWithInvalidTextScoreProperty.class); + + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(ClassWithChronoUnit.class); + + assertThat(entity.getPersistentProperty("unit").isEntity()).isFalse(); + assertThat(context.hasPersistentEntityFor(ChronoUnit.class)).isFalse(); + assertThat(context.getPersistentEntity(ChronoUnit.class)).isNull(); + } + + @Test // GH-3656 + void shouldNotCreateEntityForOptionalGetter() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(InterfaceWithMethodReturningOptional.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); + } + + @Test // GH-3656 + void shouldNotCreateEntityForOptionalField() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(ClassWithOptionalField.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); } public class SampleClass { @@ -262,4 +260,18 @@ class ClassWithInvalidTextScoreProperty { @TextScore Locale score; } + + class ClassWithChronoUnit { + + ChronoUnit unit; + } + + interface InterfaceWithMethodReturningOptional { + + Optional<Person> getPerson(); + } + + class ClassWithOptionalField { + Optional<Person> person; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java index 56139d4ac5..9ebeb68ceb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java @@ -1,40 +1,35 @@
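The hunks above all apply the same migration recipe: JUnit 4's ExpectedException rule and @Test(expected = ...) attribute give way to AssertJ's exception assertions. A minimal, self-contained sketch of the resulting pattern (TwoPropertiesOneField is an illustrative fixture mirroring the InvalidPerson case above, not a class from this diff):

import static org.assertj.core.api.Assertions.*;

import org.junit.jupiter.api.Test;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class ExceptionAssertionSketch {

	@Test // stands in for the former @Test(expected = MappingException.class)
	void rejectsAmbiguousFieldMapping() {

		MongoMappingContext context = new MongoMappingContext();

		// isThrownBy(..) captures the exception; withMessageContaining(..)
		// replaces the former ExpectedException.expectMessage(..) calls
		assertThatExceptionOfType(MappingException.class)
				.isThrownBy(() -> context.getPersistentEntity(TwoPropertiesOneField.class))
				.withMessageContaining("foo");
	}

	// illustrative fixture: two properties mapped onto the same Mongo field
	static class TwoPropertiesOneField {
		@Field("foo") String firstname;
		@Field("foo") String lastname;
	}
}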
package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity.MongoPersistentPropertyComparator; +import org.mockito.junit.jupiter.MockitoExtension; -import static org.mockito.Mockito.*; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity.MongoPersistentPropertyComparator; /** * Unit tests for {@link MongoPersistentPropertyComparator}. - * + * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class MongoPersistentPropertyComparatorUnitTests { +@ExtendWith(MockitoExtension.class) class MongoPersistentPropertyComparatorUnitTests { - @Mock - MongoPersistentProperty firstName; + @Mock MongoPersistentProperty firstName; - @Mock - MongoPersistentProperty lastName; + @Mock MongoPersistentProperty lastName; - @Mock - MongoPersistentProperty ssn; + @Mock MongoPersistentProperty ssn; @Test - public void ordersPropertiesCorrectly() { + void ordersPropertiesCorrectly() { when(ssn.getFieldOrder()).thenReturn(10); when(firstName.getFieldOrder()).thenReturn(20); @@ -43,8 +38,8 @@ public void ordersPropertiesCorrectly() { List<MongoPersistentProperty> properties = Arrays.asList(firstName, lastName, ssn); Collections.sort(properties, MongoPersistentPropertyComparator.INSTANCE); - assertThat(properties.get(0), is(ssn)); - assertThat(properties.get(1), is(firstName)); - assertThat(properties.get(2), is(lastName)); + assertThat(properties.get(0)).isEqualTo(ssn); + assertThat(properties.get(1)).isEqualTo(firstName); + assertThat(properties.get(2)).isEqualTo(lastName); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java index 20a9e72b3a..06f0db6c35 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.mapping; import java.util.List; @@ -24,8 +23,6 @@ import org.springframework.data.mongodb.core.index.CompoundIndex; import org.springframework.data.mongodb.core.index.CompoundIndexes; import org.springframework.data.mongodb.core.index.Indexed; -import org.springframework.data.mongodb.core.mapping.DBRef; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -34,18 +31,13 @@ @CompoundIndexes({ @CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}") }) public class Person<T extends Address> { - @Id - private String id; - @Indexed(unique = true) - private Integer ssn; + @Id private String id; + @Indexed(unique = true) private Integer ssn; private String firstName; - @Indexed - private String lastName; + @Indexed private String lastName; private Integer age; - @Transient - private Integer accountTotal; - @DBRef - private List<Account> accounts; + @Transient private Integer accountTotal; + @DBRef private List<Account> accounts; private T address; public Person(Integer ssn) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java index 3e713b035d..f45c07984a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,10 @@ /** * @author Jon Brisbin */ -@Document(collection = "person1") +@Document("person1") public class PersonCustomCollection1 extends BasePerson { - @Id - private String id; + @Id private String id; public PersonCustomCollection1(Integer ssn, String firstName, String lastName) { super(ssn, firstName, lastName); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java index d1b2d3b77d..581c56ca7a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,10 @@ /** * @author Jon Brisbin */ -@Document(collection = "person2") +@Document("person2") public class PersonCustomCollection2 extends BasePerson { - @Id - private String id; + @Id private String id; public PersonCustomCollection2(Integer ssn, String firstName, String lastName) { super(ssn, firstName, lastName); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java index 2fe08566fe..a68fe0d531 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import org.springframework.data.annotation.Id; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java index 7f706a094f..ce746eed10 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
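The fixture cleanups in the hunks above follow two recurring conventions: @Document's collection attribute moves to its value alias, and member annotations share a line with the declaration they annotate. Roughly, on an illustrative class (PersonFixture is not part of this diff):

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

// value is an alias for the collection attribute, so this is
// equivalent to the older @Document(collection = "people") form
@Document("people")
public class PersonFixture {

	@Id private String id; // annotation and declaration on one line, per the project formatter

	String name;
}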
*/ - package org.springframework.data.mongodb.core.mapping; import java.util.Map; import org.bson.types.ObjectId; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java index edae367a16..6ce07bbae8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import java.util.List; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java index d556e0f2e9..d98c617c1f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java index c533c157f7..8846fb161a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; + import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -26,11 +25,9 @@ @Document public class PersonNullProperties extends BasePerson { - @Id - private ObjectId id; + @Id private ObjectId id; - public PersonNullProperties() { - } + public PersonNullProperties() {} public ObjectId getId() { return id; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java index 81c0731753..6133f8f5a9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java index 857501c37a..e3034e0f73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.mapping; /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java index 542839cd17..29d8682355 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java index 14ab360d84..e228c5b832 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import java.util.List; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java index 06890e1bf9..86baa78a2b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,10 +13,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; - /** * @author Jon Brisbin */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java index d09c8a2d88..68cfa2653b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java index 8611fdee6c..f21e1ade23 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.core.mapping; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -25,8 +23,7 @@ @Document public class PrimitiveId { - @Id - int id; + @Id int id; String text; public PrimitiveId(Integer id) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java index 27847da9c0..7ce3a8b9c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,12 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import java.time.Instant; + +import org.bson.Document; +import org.junit.jupiter.api.Test; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.data.mongodb.core.mapping.Account; @@ -26,11 +28,10 @@ import org.springframework.data.mongodb.repository.Person; import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; /** * Unit tests for {@link AbstractMongoEventListener}. 
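AbstractMongoEventListener filters events by the listener's generic domain type, which is what the SamplePersonEventListener/SampleAccountEventListener tests below assert. A stripped-down sketch of such a listener (flag-based, as in the tests; Person is the repository test type imported above):

import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.repository.Person;

public class PersonOnlyEventListener extends AbstractMongoEventListener<Person> {

	boolean invokedOnBeforeConvert;

	@Override
	public void onBeforeConvert(BeforeConvertEvent<Person> event) {
		// only reached for BeforeConvertEvent<Person>; events for other
		// domain types are dropped by the type-filtering dispatch
		this.invokedOnBeforeConvert = true;
	}
}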
- * + * * @author Oliver Gierke * @author Martin Baumgartner */ @@ -42,7 +43,7 @@ public void invokesCallbackForEventForPerson() { MongoMappingEvent event = new BeforeConvertEvent(new Person("Dave", "Matthews"), "collection-1"); SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeConvert, is(true)); + assertThat(listener.invokedOnBeforeConvert).isTrue(); } @Test @@ -55,135 +56,113 @@ public void dropsEventIfNotForCorrectDomainType() { context.addApplicationListener(listener); context.publishEvent(new BeforeConvertEvent(new Person("Dave", "Matthews"), "collection-1")); - assertThat(listener.invokedOnBeforeConvert, is(true)); + assertThat(listener.invokedOnBeforeConvert).isTrue(); listener.invokedOnBeforeConvert = false; context.publishEvent(new BeforeConvertEvent("Test", "collection-1")); - assertThat(listener.invokedOnBeforeConvert, is(false)); + assertThat(listener.invokedOnBeforeConvert).isFalse(); context.close(); } - /** - * @see DATAMONGO-289 - */ - @Test + @Test // DATAMONGO-289 public void afterLoadEffectGetsHandledCorrectly() { SamplePersonEventListener listener = new SamplePersonEventListener(); - listener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Person.class, "collection-1")); - assertThat(listener.invokedOnAfterLoad, is(true)); + listener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); + assertThat(listener.invokedOnAfterLoad).isTrue(); } - /** - * @see DATAMONGO-289 - */ - @Test + @Test // DATAMONGO-289 public void afterLoadEventGetsFilteredForDomainType() { SamplePersonEventListener personListener = new SamplePersonEventListener(); SampleAccountEventListener accountListener = new SampleAccountEventListener(); - personListener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Person.class, "collection-1")); - accountListener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Person.class, "collection-1")); + personListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); + accountListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); - assertThat(personListener.invokedOnAfterLoad, is(true)); - assertThat(accountListener.invokedOnAfterLoad, is(false)); + assertThat(personListener.invokedOnAfterLoad).isTrue(); + assertThat(accountListener.invokedOnAfterLoad).isFalse(); } - /** - * @see DATAMONGO-289 - */ - @Test + @Test // DATAMONGO-289 public void afterLoadEventGetsFilteredForDomainTypeWorksForSubtypes() { SamplePersonEventListener personListener = new SamplePersonEventListener(); SampleContactEventListener contactListener = new SampleContactEventListener(); - personListener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Person.class, "collection-1")); - contactListener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Person.class, "collection-1")); + personListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); + contactListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); - assertThat(personListener.invokedOnAfterLoad, is(true)); - assertThat(contactListener.invokedOnAfterLoad, is(true)); + assertThat(personListener.invokedOnAfterLoad).isTrue(); + assertThat(contactListener.invokedOnAfterLoad).isTrue(); } - /** - * @see DATAMONGO-289 - */ - @Test + @Test // DATAMONGO-289 public void afterLoadEventGetsFilteredForDomainTypeWorksForSubtypes2() 
{ SamplePersonEventListener personListener = new SamplePersonEventListener(); SampleContactEventListener contactListener = new SampleContactEventListener(); - personListener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Contact.class, "collection-1")); - contactListener.onApplicationEvent(new AfterLoadEvent(new BasicDBObject(), Contact.class, "collection-1")); + personListener.onApplicationEvent(new AfterLoadEvent(new Document(), Contact.class, "collection-1")); + contactListener.onApplicationEvent(new AfterLoadEvent(new Document(), Contact.class, "collection-1")); - assertThat(personListener.invokedOnAfterLoad, is(false)); - assertThat(contactListener.invokedOnAfterLoad, is(true)); + assertThat(personListener.invokedOnAfterLoad).isFalse(); + assertThat(contactListener.invokedOnAfterLoad).isTrue(); } - /** - * @see DATAMONGO-333 - */ - @Test + @Test // DATAMONGO-333 @SuppressWarnings({ "rawtypes", "unchecked" }) public void handlesUntypedImplementations() { UntypedEventListener listener = new UntypedEventListener(); - listener.onApplicationEvent(new MongoMappingEvent(new Object(), new BasicDBObject(), "collection")); + listener.onApplicationEvent(new MongoMappingEvent(new Object(), new Document(), "collection-1")); } - /** - * @see DATAMONGO-545 - */ - @Test + @Test // DATAMONGO-545 public void invokeContactCallbackForPersonEvent() { - MongoMappingEvent event = new BeforeDeleteEvent(new BasicDBObject(), Person.class, - "collection-1"); + MongoMappingEvent event = new BeforeDeleteEvent(new Document(), Person.class, "collection-1"); SampleContactEventListener listener = new SampleContactEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(true)); + assertThat(listener.invokedOnBeforeDelete).isTrue(); } - /** - * @see DATAMONGO-545 - */ - @Test + @Test // DATAMONGO-545 public void invokePersonCallbackForPersonEvent() { - MongoMappingEvent event = new BeforeDeleteEvent(new BasicDBObject(), Person.class, - "collection-1"); + MongoMappingEvent event = new BeforeDeleteEvent(new Document(), Person.class, "collection-1"); SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(true)); + assertThat(listener.invokedOnBeforeDelete).isTrue(); } - /** - * @see DATAMONGO-545 - */ - @Test + @Test // DATAMONGO-545 public void dontInvokePersonCallbackForAccountEvent() { - MongoMappingEvent event = new BeforeDeleteEvent(new BasicDBObject(), Account.class, - "collection-1"); + MongoMappingEvent event = new BeforeDeleteEvent(new Document(), Account.class, "collection-1"); SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(false)); + assertThat(listener.invokedOnBeforeDelete).isFalse(); } - /** - * @see DATAMONGO-545 - */ - @Test + @Test // DATAMONGO-545 public void donInvokePersonCallbackForUntypedEvent() { - MongoMappingEvent event = new BeforeDeleteEvent(new BasicDBObject(), null, "collection-1"); + MongoMappingEvent event = new BeforeDeleteEvent(new Document(), null, "collection-1"); SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(false)); + assertThat(listener.invokedOnBeforeDelete).isFalse(); + } + + @Test // GH-3968 + public void debugLogShouldNotFailMongoDBCodecError() { + + MongoMappingEvent event = new BeforeConvertEvent<>(new 
BasicDBObject("date", Instant.now()), "collection-1"); + UntypedEventListener listener = new UntypedEventListener(); + listener.onApplicationEvent(event); } class SamplePersonEventListener extends AbstractMongoEventListener { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java index 95b681a1a8..bef2ae92fa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,11 @@ import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationListener; -public class AfterSaveListener implements ApplicationListener> { +public class AfterSaveListener implements ApplicationListener> { public final ArrayList seenEvents = new ArrayList(); - public void onApplicationEvent(AfterSaveEvent event) { + public void onApplicationEvent(AfterSaveEvent event) { this.seenEvents.add(event); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java index 2e03229a37..9bc1dc78aa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,61 +15,85 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.springframework.context.ApplicationContext; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.PersonPojoStringId; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.QPerson; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.annotation.DirtiesContext; -import com.mongodb.DB; -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; /** * Integration test for Mapping Events. 
- * + * * @author Mark Pollack * @author Christoph Strobl + * @author Jordi Llach + * @author Mark Paluch */ +@ExtendWith({ MongoClientExtension.class }) public class ApplicationContextEventTests { private static final String COLLECTION_NAME = "personPojoStringId"; + private static final String ROOT_COLLECTION_NAME = "root"; + private static final String RELATED_COLLECTION_NAME = "related"; + + private final String[] collectionsToDrop = new String[] { COLLECTION_NAME, ROOT_COLLECTION_NAME, + RELATED_COLLECTION_NAME }; - private final String[] collectionsToDrop = new String[] { COLLECTION_NAME }; + static @Client MongoClient mongoClient; - private ApplicationContext applicationContext; + private ConfigurableApplicationContext applicationContext; private MongoTemplate template; + private SimpleMappingEventListener listener; + + @BeforeEach + public void setUp() { - @Before - public void setUp() throws Exception { cleanDb(); + applicationContext = new AnnotationConfigApplicationContext(ApplicationContextEventTestsAppConfig.class); template = applicationContext.getBean(MongoTemplate.class); - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + template.setWriteConcern(WriteConcern.JOURNALED); + listener = applicationContext.getBean(SimpleMappingEventListener.class); } - @After - public void cleanUp() throws Exception { + @AfterEach + public void cleanUp() { + cleanDb(); + applicationContext.close(); } - private void cleanDb() throws UnknownHostException { + private void cleanDb() { - Mongo mongo = new MongoClient(); - DB db = mongo.getDB("database"); + MongoDatabase db = mongoClient.getDatabase("database"); for (String coll : collectionsToDrop) { db.getCollection(coll).drop(); } @@ -78,122 +102,444 @@ private void cleanDb() throws UnknownHostException { @Test @SuppressWarnings("unchecked") public void beforeSaveEvent() { + PersonBeforeSaveListener personBeforeSaveListener = applicationContext.getBean(PersonBeforeSaveListener.class); AfterSaveListener afterSaveListener = applicationContext.getBean(AfterSaveListener.class); - SimpleMappingEventListener simpleMappingEventListener = applicationContext - .getBean(SimpleMappingEventListener.class); - assertEquals(0, personBeforeSaveListener.seenEvents.size()); - assertEquals(0, afterSaveListener.seenEvents.size()); + assertThat(personBeforeSaveListener.seenEvents).isEmpty(); + assertThat(afterSaveListener.seenEvents).isEmpty(); - assertEquals(0, simpleMappingEventListener.onBeforeSaveEvents.size()); - assertEquals(0, simpleMappingEventListener.onAfterSaveEvents.size()); + assertThat(listener.onBeforeSaveEvents).isEmpty(); + assertThat(listener.onAfterSaveEvents).isEmpty(); PersonPojoStringId p = new PersonPojoStringId("1", "Text"); template.insert(p); - assertEquals(1, personBeforeSaveListener.seenEvents.size()); - assertEquals(1, afterSaveListener.seenEvents.size()); + assertThat(personBeforeSaveListener.seenEvents).hasSize(1); + assertThat(afterSaveListener.seenEvents).hasSize(1); - assertEquals(1, simpleMappingEventListener.onBeforeSaveEvents.size()); - assertEquals(1, simpleMappingEventListener.onAfterSaveEvents.size()); + assertThat(listener.onBeforeSaveEvents).hasSize(1); + assertThat(listener.onAfterSaveEvents).hasSize(1); - assertEquals(COLLECTION_NAME, simpleMappingEventListener.onBeforeSaveEvents.get(0).getCollectionName()); - assertEquals(COLLECTION_NAME, simpleMappingEventListener.onAfterSaveEvents.get(0).getCollectionName()); + assertThat(listener.onBeforeSaveEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); + 
assertThat(listener.onAfterSaveEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); - Assert.assertTrue(personBeforeSaveListener.seenEvents.get(0) instanceof BeforeSaveEvent); - Assert.assertTrue(afterSaveListener.seenEvents.get(0) instanceof AfterSaveEvent); + assertThat(personBeforeSaveListener.seenEvents.get(0) instanceof BeforeSaveEvent).isTrue(); + assertThat(afterSaveListener.seenEvents.get(0) instanceof AfterSaveEvent).isTrue(); BeforeSaveEvent beforeSaveEvent = (BeforeSaveEvent) personBeforeSaveListener.seenEvents .get(0); PersonPojoStringId p2 = beforeSaveEvent.getSource(); - DBObject dbo = beforeSaveEvent.getDBObject(); + org.bson.Document document = beforeSaveEvent.getDocument(); - comparePersonAndDbo(p, p2, dbo); + comparePersonAndDocument(p, p2, document); AfterSaveEvent afterSaveEvent = (AfterSaveEvent) afterSaveListener.seenEvents.get(0); - Assert.assertTrue(afterSaveEvent.getSource() instanceof PersonPojoStringId); + assertThat(afterSaveEvent.getSource() instanceof PersonPojoStringId).isTrue(); p2 = (PersonPojoStringId) afterSaveEvent.getSource(); - dbo = beforeSaveEvent.getDBObject(); + document = beforeSaveEvent.getDocument(); - comparePersonAndDbo(p, p2, dbo); + comparePersonAndDocument(p, p2, document); } - /** - * @see DATAMONGO-1256 - */ - @Test + @Test // DATAMONGO-1256 public void loadAndConvertEvents() { - SimpleMappingEventListener simpleMappingEventListener = applicationContext - .getBean(SimpleMappingEventListener.class); - PersonPojoStringId entity = new PersonPojoStringId("1", "Text"); template.insert(entity); template.findOne(query(where("id").is(entity.getId())), PersonPojoStringId.class); - assertThat(simpleMappingEventListener.onAfterLoadEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onAfterLoadEvents).hasSize(1); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); - assertThat(simpleMappingEventListener.onBeforeConvertEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onBeforeConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onBeforeConvertEvents).hasSize(1); + assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); - assertThat(simpleMappingEventListener.onAfterConvertEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onAfterConvertEvents).hasSize(1); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); } - /** - * @see DATAMONGO-1256 - */ - @Test - public void loadEventsOnAggregation() { + @Test // GH-4107 + @DirtiesContext + public void configurationShouldDisableLifecycleEvents() { - SimpleMappingEventListener simpleMappingEventListener = applicationContext - .getBean(SimpleMappingEventListener.class); + template.setEntityLifecycleEventsEnabled(false); + + PersonPojoStringId entity = new PersonPojoStringId("1", "Text"); + template.insert(entity); + + template.findOne(query(where("id").is(entity.getId())), PersonPojoStringId.class); + + assertThat(listener.onAfterLoadEvents).isEmpty(); + assertThat(listener.onBeforeConvertEvents).isEmpty(); + assertThat(listener.onAfterConvertEvents).isEmpty(); + } + + @Test // DATAMONGO-1256 + public void loadEventsOnAggregation() { template.insert(new PersonPojoStringId("1", "Text")); 
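The GH-4107 hunk above introduces a MongoTemplate switch that suppresses publication of the mapping lifecycle events entirely. A minimal usage sketch (the connection and database name are illustrative assumptions):

import com.mongodb.client.MongoClients;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;

class LifecycleEventToggleSketch {

	void insertWithoutLifecycleEvents() {

		// assumes a MongoDB instance on localhost; "database" is illustrative
		MongoTemplate template = new MongoTemplate(MongoClients.create(), "database");

		// no BeforeConvert/BeforeSave/AfterSave events are published from here on
		template.setEntityLifecycleEventsEnabled(false);
		template.insert(new PersonPojoStringId("1", "Text"));
	}
}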
template.aggregate(Aggregation.newAggregation(Aggregation.project("text")), PersonPojoStringId.class, PersonPojoStringId.class); - assertThat(simpleMappingEventListener.onAfterLoadEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onAfterLoadEvents).hasSize(1); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); - assertThat(simpleMappingEventListener.onBeforeConvertEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onBeforeConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onBeforeConvertEvents).hasSize(1); + assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); - assertThat(simpleMappingEventListener.onAfterConvertEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onAfterConvertEvents).hasSize(1); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); } - /** - * @see DATAMONGO-1256 - */ - @Test + @Test // DATAMONGO-1256 public void deleteEvents() { - SimpleMappingEventListener simpleMappingEventListener = applicationContext - .getBean(SimpleMappingEventListener.class); - PersonPojoStringId entity = new PersonPojoStringId("1", "Text"); template.insert(entity); template.remove(entity); - assertThat(simpleMappingEventListener.onBeforeDeleteEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onBeforeDeleteEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onBeforeDeleteEvents).hasSize(1); + assertThat(listener.onBeforeDeleteEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); + + assertThat(listener.onAfterDeleteEvents).hasSize(1); + assertThat(listener.onAfterDeleteEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); + } + + @Test // DATAMONGO-1271 + public void publishesAfterLoadAndAfterConvertEventsForDBRef() { + + Related ref1 = new Related(2L, "related desc1"); + + template.insert(ref1); + + Root source = new Root(); + source.id = 1L; + source.reference = ref1; + + template.insert(source); + + template.findOne(query(where("id").is(source.getId())), Root.class); + + assertThat(listener.onAfterLoadEvents).hasSize(2); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); - assertThat(simpleMappingEventListener.onAfterDeleteEvents.size(), is(1)); - assertThat(simpleMappingEventListener.onAfterDeleteEvents.get(0).getCollectionName(), is(COLLECTION_NAME)); + assertThat(listener.onAfterConvertEvents).hasSize(2); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); } - private void comparePersonAndDbo(PersonPojoStringId p, PersonPojoStringId p2, DBObject dbo) { - assertEquals(p.getId(), p2.getId()); - assertEquals(p.getText(), p2.getText()); + @Test // DATAMONGO-1271 + public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingDBRef() { + + Related ref1 = new Related(2L, "related desc1"); + + template.insert(ref1); + + Root source = new Root(); + source.id = 1L; + source.lazyReference = ref1; + + template.insert(source); + + Root target = 
template.findOne(query(where("id").is(source.getId())), Root.class); + + assertThat(listener.onAfterLoadEvents).hasSize(1); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + + assertThat(listener.onAfterConvertEvents).hasSize(1); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + + target.getLazyReference().getDescription(); + + assertThat(listener.onAfterLoadEvents).hasSize(2); + assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + + assertThat(listener.onAfterConvertEvents).hasSize(2); + assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + } + + @Test // DATAMONGO-1271 + public void publishesAfterLoadAndAfterConvertEventsForListOfDBRef() { + + List references = Arrays.asList(new Related(20L, "ref 1"), new Related(30L, "ref 2")); + + template.insert(references, Related.class); + + Root source = new Root(); + source.id = 1L; + source.listOfReferences = references; + + template.insert(source); + + template.findOne(query(where("id").is(source.getId())), Root.class); + + assertThat(listener.onAfterLoadEvents).hasSize(3); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + + assertThat(listener.onAfterConvertEvents).hasSize(3); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + } + + @Test // DATAMONGO-1271 + public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingListOfDBRef() { + + List references = Arrays.asList(new Related(20L, "ref 1"), new Related(30L, "ref 2")); + + template.insert(references, Related.class); + + Root source = new Root(); + source.id = 1L; + source.lazyListOfReferences = references; + + template.insert(source); + + Root target = template.findOne(query(where("id").is(source.getId())), Root.class); + + assertThat(listener.onAfterLoadEvents).hasSize(1); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + assertThat(listener.onAfterConvertEvents).hasSize(1); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME); + + target.getLazyListOfReferences().size(); + + assertThat(listener.onAfterLoadEvents).hasSize(3); + assertThat(listener.onAfterConvertEvents).hasSize(3); + + assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME); + } + + @Test // DATAMONGO-1271 + public void publishesAfterLoadAndAfterConvertEventsForMapOfDBRef() { + + Map references = new LinkedHashMap(); + references.put("ref-1", new Related(20L, "ref 1")); + references.put("ref-2", new Related(30L, "ref 2")); + + 
template.insert(references.values(), Related.class);
+
+		Root source = new Root();
+		source.id = 1L;
+		source.mapOfReferences = references;
+
+		template.insert(source);
+
+		template.findOne(query(where("id").is(source.getId())), Root.class);
+
+		assertThat(listener.onAfterLoadEvents).hasSize(3);
+		assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
+		assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+		assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+
+		assertThat(listener.onAfterConvertEvents).hasSize(3);
+		assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+		assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+		assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
+	}
+
+	@Test // DATAMONGO-1271
+	public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingMapOfDBRef() {
+
+		Map<String, Related> references = new LinkedHashMap<String, Related>();
+		references.put("ref-1", new Related(20L, "ref 1"));
+		references.put("ref-2", new Related(30L, "ref 2"));
+
+		template.insert(references.values(), Related.class);
+
+		Root source = new Root();
+		source.id = 1L;
+		source.lazyMapOfReferences = references;
+
+		template.insert(source);
+
+		Root target = template.findOne(query(where("id").is(source.getId())), Root.class);
+
+		assertThat(listener.onAfterLoadEvents).hasSize(1);
+		assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
-		assertEquals("org.springframework.data.mongodb.core.mapping.PersonPojoStringId", dbo.get("_class"));
-		assertEquals("1", dbo.get("_id"));
-		assertEquals("Text", dbo.get("text"));
+		assertThat(listener.onAfterConvertEvents).hasSize(1);
+		assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
+
+		target.getLazyMapOfReferences().size();
+
+		assertThat(listener.onAfterLoadEvents).hasSize(3);
+		assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+		assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+
+		assertThat(listener.onAfterConvertEvents).hasSize(3);
+		assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+		assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
+	}
+
+	@Test // DATAMONGO-1823
+	public void publishesAfterConvertEventForFindQueriesUsingProjections() {
+
+		PersonPojoStringId entity = new PersonPojoStringId("1", "Text");
+		template.insert(entity);
+
+		template.query(PersonPojoStringId.class).matching(query(where("id").is(entity.getId()))).all();
+
+		assertThat(listener.onAfterLoadEvents).hasSize(1);
+		assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
+
+		assertThat(listener.onBeforeConvertEvents).hasSize(1);
+		assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
+
+		assertThat(listener.onAfterConvertEvents).hasSize(1);
+		assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
+	}
+
+	@Test // DATAMONGO-700, DATAMONGO-1185, DATAMONGO-1848
+	public void publishesEventsForQuerydslFindQueries() {
+
+		template.dropCollection(Person.class);
+
+		template.save(new Person("Boba", "Fett", 40));
+
+		MongoRepositoryFactory factory = new MongoRepositoryFactory(template);
+		MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
+		QuerydslMongoPredicateExecutor<Person> executor = new QuerydslMongoPredicateExecutor<>(entityInformation, template);
+
+		executor.findOne(QPerson.person.lastname.startsWith("Fe"));
+
+		assertThat(listener.onAfterLoadEvents).hasSize(1);
+		assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo("person");
+
+		assertThat(listener.onBeforeConvertEvents).hasSize(1);
+		assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo("person");
+
+		assertThat(listener.onAfterConvertEvents).hasSize(1);
+		assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo("person");
+	}
+
+	private void comparePersonAndDocument(PersonPojoStringId p, PersonPojoStringId p2, org.bson.Document document) {
+
+		assertThat(p2.getId()).isEqualTo(p.getId());
+		assertThat(p2.getText()).isEqualTo(p.getText());
+
+		assertThat(document.get("_id")).isEqualTo("1");
+		assertThat(document.get("text")).isEqualTo("Text");
+		assertTypeHint(document, PersonPojoStringId.class);
+	}
+
+	@org.springframework.data.mongodb.core.mapping.Document
+	public static class Root {
+
+		@Id Long id;
+
+		@DBRef Related reference;
+		@DBRef(lazy = true) Related lazyReference;
+
+		@DBRef List<Related> listOfReferences;
+		@DBRef(lazy = true) List<Related> lazyListOfReferences;
+
+		@DBRef Map<String, Related> mapOfReferences;
+		@DBRef(lazy = true) Map<String, Related> lazyMapOfReferences;
+
+		public Long getId() {
+			return this.id;
+		}
+
+		public Related getReference() {
+			return this.reference;
+		}
+
+		public Related getLazyReference() {
+			return this.lazyReference;
+		}
+
+		public List<Related> getListOfReferences() {
+			return this.listOfReferences;
+		}
+
+		public List<Related> getLazyListOfReferences() {
+			return this.lazyListOfReferences;
+		}
+
+		public Map<String, Related> getMapOfReferences() {
+			return this.mapOfReferences;
+		}
+
+		public Map<String, Related> getLazyMapOfReferences() {
+			return this.lazyMapOfReferences;
+		}
+
+		public void setId(Long id) {
+			this.id = id;
+		}
+
+		public void setReference(Related reference) {
+			this.reference = reference;
+		}
+
+		public void setLazyReference(Related lazyReference) {
+			this.lazyReference = lazyReference;
+		}
+
+		public void setListOfReferences(List<Related> listOfReferences) {
+			this.listOfReferences = listOfReferences;
+		}
+
+		public void setLazyListOfReferences(List<Related> lazyListOfReferences) {
+			this.lazyListOfReferences = lazyListOfReferences;
+		}
+
+		public void setMapOfReferences(Map<String, Related> mapOfReferences) {
+			this.mapOfReferences = mapOfReferences;
+		}
+
+		public void setLazyMapOfReferences(Map<String, Related> lazyMapOfReferences) {
+			this.lazyMapOfReferences = lazyMapOfReferences;
+		}
+
+		public String toString() {
+			return "ApplicationContextEventTests.Root(id=" + this.getId() + ", reference=" + this.getReference()
+					+ ", lazyReference=" + this.getLazyReference() + ", listOfReferences=" + this.getListOfReferences()
+					+ ", lazyListOfReferences=" + this.getLazyListOfReferences() + ", mapOfReferences="
+					+ this.getMapOfReferences() + ", lazyMapOfReferences=" + this.getLazyMapOfReferences() + ")";
+		}
+	}
+
+	@org.springframework.data.mongodb.core.mapping.Document
+	public static class Related {
+
+		@Id Long id;
+		String description;
+
+		public Related(Long id, String description) {
+			this.id = id;
+			this.description = description;
+		}
+
+		public Long getId() {
+			return this.id;
+		}
+
+		public String getDescription() {
+			return this.description;
+		}
+
+		public void setId(Long id) {
+			this.id = id;
+		}
+
+ public void setDescription(String description) { + this.description = description; + } + + public String toString() { + return "ApplicationContextEventTests.Related(id=" + this.getId() + ", description=" + this.getDescription() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java index b990c23cd2..8c5aad8b1a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,13 +17,13 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; @Configuration -public class ApplicationContextEventTestsAppConfig extends AbstractMongoConfiguration { +public class ApplicationContextEventTestsAppConfig extends MongoClientClosingTestConfiguration { @Override public String getDatabaseName() { @@ -32,8 +32,8 @@ public String getDatabaseName() { @Override @Bean - public Mongo mongo() throws Exception { - return new MongoClient("127.0.0.1"); + public MongoClient mongoClient() { + return MongoTestUtils.client(); } @Bean @@ -50,5 +50,4 @@ public AfterSaveListener afterSaveListener() { public SimpleMappingEventListener simpleMappingEventListener() { return new SimpleMappingEventListener(); } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallbackUnitTests.java new file mode 100644 index 0000000000..7d01c30345 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallbackUnitTests.java @@ -0,0 +1,207 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.Arrays; +import java.util.Date; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.core.Ordered; +import org.springframework.data.annotation.CreatedDate; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; + +/** + * Unit tests for {@link AuditingEntityCallback}. + * + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class AuditingEntityCallbackUnitTests { + + private final MongoMappingContext mappingContext = new MongoMappingContext(); + + private IsNewAwareAuditingHandler handler; + private AuditingEntityCallback callback; + + @BeforeEach + void setUp() { + + mappingContext.getPersistentEntity(Sample.class); + + handler = spy(new IsNewAwareAuditingHandler(new PersistentEntities(Arrays.asList(mappingContext)))); + + callback = new AuditingEntityCallback(() -> handler); + } + + @Test // DATAMONGO-2261 + void rejectsNullAuditingHandler() { + assertThatIllegalArgumentException().isThrownBy(() -> new AuditingEntityCallback(null)); + } + + @Test // DATAMONGO-2261 + void triggersCreationMarkForObjectWithEmptyId() { + + Sample sample = new Sample(); + callback.onBeforeConvert(sample, "foo"); + + verify(handler, times(1)).markCreated(sample); + verify(handler, times(0)).markModified(any()); + } + + @Test // DATAMONGO-2261 + void triggersModificationMarkForObjectWithSetId() { + + Sample sample = new Sample(); + sample.id = "id"; + callback.onBeforeConvert(sample, "foo"); + + verify(handler, times(0)).markCreated(any()); + verify(handler, times(1)).markModified(sample); + } + + @Test // DATAMONGO-2261 + void hasExplicitOrder() { + + assertThat(callback).isInstanceOf(Ordered.class); + assertThat(callback.getOrder()).isEqualTo(100); + } + + @Test // DATAMONGO-2261 + void propagatesChangedInstanceToEvent() { + + ImmutableSample sample = new ImmutableSample(); + + ImmutableSample newSample = new ImmutableSample(); + IsNewAwareAuditingHandler handler = mock(IsNewAwareAuditingHandler.class); + doReturn(newSample).when(handler).markAudited(eq(sample)); + + AuditingEntityCallback listener = new AuditingEntityCallback(() -> handler); + Object result = listener.onBeforeConvert(sample, "foo"); + + assertThat(result).isSameAs(newSample); + } + + @Test // GH-4732 + void shouldApplyAuditingToUnwrappedImmutableObject() { + + WithUnwrapped sample = new WithUnwrapped(); + sample.auditingData = new MyAuditingData(null, null); + + IsNewAwareAuditingHandler handler = new IsNewAwareAuditingHandler(PersistentEntities.of(mappingContext)); + + AuditingEntityCallback listener = new AuditingEntityCallback(() -> handler); + WithUnwrapped result = (WithUnwrapped) listener.onBeforeConvert(sample, "foo"); + + assertThat(result.auditingData.created).isNotNull(); + 
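// For orientation, not part of this change set: the callback under test is registered automatically once
// auditing is enabled, e.g. via a configuration class along these lines (class name hypothetical):
//
//   @Configuration
//   @EnableMongoAuditing
//   class AuditingConfig {}
//
// With that in place, @CreatedDate and @LastModifiedDate properties are stamped during onBeforeConvert,
// which is exactly what the surrounding assertions verify at the unit level.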
assertThat(result.auditingData.modified).isNotNull(); + } + + static class Sample { + + @Id String id; + @CreatedDate Date created; + @LastModifiedDate Date modified; + } + + static class WithUnwrapped { + + @Id String id; + + @Unwrapped(onEmpty = Unwrapped.OnEmpty.USE_NULL) MyAuditingData auditingData; + + } + + record MyAuditingData(@CreatedDate Date created, @LastModifiedDate Date modified) { + + } + + private static final class ImmutableSample { + + @Id private final String id; + @CreatedDate private final Date created; + @LastModifiedDate private final Date modified; + + public ImmutableSample() { + this(null, null, null); + } + + public ImmutableSample(String id, Date created, Date modified) { + this.id = id; + this.created = created; + this.modified = modified; + } + + public String getId() { + return this.id; + } + + public Date getCreated() { + return this.created; + } + + public Date getModified() { + return this.modified; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ImmutableSample that = (ImmutableSample) o; + return Objects.equals(id, that.id) && Objects.equals(created, that.created) + && Objects.equals(modified, that.modified); + } + + @Override + public int hashCode() { + return Objects.hash(id, created, modified); + } + + public String toString() { + return "AuditingEntityCallbackUnitTests.ImmutableSample(id=" + this.getId() + ", created=" + this.getCreated() + + ", modified=" + this.getModified() + ")"; + } + + public ImmutableSample withId(String id) { + return this.id == id ? this : new ImmutableSample(id, this.created, this.modified); + } + + public ImmutableSample withCreated(Date created) { + return this.created == created ? this : new ImmutableSample(this.id, created, this.modified); + } + + public ImmutableSample withModified(Date modified) { + return this.modified == modified ? this : new ImmutableSample(this.id, this.created, modified); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListenerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListenerUnitTests.java deleted file mode 100644 index 6cc1c0043d..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListenerUnitTests.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2012-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.mapping.event; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - -import java.util.Arrays; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.ObjectFactory; -import org.springframework.core.Ordered; -import org.springframework.data.annotation.Id; -import org.springframework.data.auditing.IsNewAwareAuditingHandler; -import org.springframework.data.mapping.context.PersistentEntities; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; - -/** - * Unit tests for {@link AuditingEventListener}. - * - * @author Oliver Gierke - * @author Thomas Darimont - */ -@RunWith(MockitoJUnitRunner.class) -public class AuditingEventListenerUnitTests { - - IsNewAwareAuditingHandler handler; - AuditingEventListener listener; - - @Before - public void setUp() { - - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.getPersistentEntity(Sample.class); - - handler = spy(new IsNewAwareAuditingHandler(new PersistentEntities(Arrays.asList(mappingContext)))); - doNothing().when(handler).markCreated(Mockito.any(Object.class)); - doNothing().when(handler).markModified(Mockito.any(Object.class)); - - listener = new AuditingEventListener(new ObjectFactory() { - - @Override - public IsNewAwareAuditingHandler getObject() throws BeansException { - return handler; - } - }); - } - - /** - * @see DATAMONGO-577 - */ - @Test(expected = IllegalArgumentException.class) - public void rejectsNullAuditingHandler() { - new AuditingEventListener(null); - } - - /** - * @see DATAMONGO-577 - */ - @Test - public void triggersCreationMarkForObjectWithEmptyId() { - - Sample sample = new Sample(); - listener.onApplicationEvent(new BeforeConvertEvent(sample, "collection-1")); - - verify(handler, times(1)).markCreated(sample); - verify(handler, times(0)).markModified(Mockito.any(Sample.class)); - } - - /** - * @see DATAMONGO-577 - */ - @Test - public void triggersModificationMarkForObjectWithSetId() { - - Sample sample = new Sample(); - sample.id = "id"; - listener.onApplicationEvent(new BeforeConvertEvent(sample, "collection-1")); - - verify(handler, times(0)).markCreated(Mockito.any(Sample.class)); - verify(handler, times(1)).markModified(sample); - } - - @Test - public void hasExplicitOrder() { - - assertThat(listener, is(instanceOf(Ordered.class))); - assertThat(listener.getOrder(), is(100)); - } - - static class Sample { - - @Id String id; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java new file mode 100644 index 0000000000..772ed3cecb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java @@ -0,0 +1,120 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import static org.assertj.core.api.Assertions.*;
+
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.core.read.ListAppender;
+
+import org.bson.Document;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests for {@link LoggingEventListener}.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ */
+public class LoggingEventListenerTests {
+
+	ListAppender<ILoggingEvent> appender;
+	ch.qos.logback.classic.Logger logger;
+	LoggingEventListener listener;
+
+	@BeforeEach
+	public void setUp() {
+
+		appender = new ListAppender<>();
+
+		// set log level for LoggingEventListener to "info" and set up an appender capturing events.
+		logger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(LoggingEventListener.class);
+
+		logger.setAdditive(false);
+		logger.setLevel(Level.INFO);
+		logger.addAppender(appender);
+
+		appender.start();
+
+		listener = new LoggingEventListener();
+	}
+
+	@AfterEach
+	public void tearDown() {
+
+		// cleanup
+		if (logger != null) {
+
+			logger.detachAppender(appender);
+			logger.setAdditive(true);
+			logger.setLevel(null);
+		}
+
+		if (appender != null) {
+			appender.stop();
+		}
+	}
+
+	@Test // DATAMONGO-1645
+	public void shouldSerializeAfterConvertEventCorrectly() {
+
+		listener.onAfterConvert(new AfterConvertEvent(new Document("foo", new Foo()), this, "collection"));
+
+		assertThat(appender.list.get(0).getFormattedMessage()).startsWith("onAfterConvert: { \"foo\"");
+	}
+
+	@Test // DATAMONGO-1645
+	public void shouldSerializeBeforeSaveEventEventCorrectly() {
+
+		listener.onBeforeSave(new BeforeSaveEvent(new Foo(), new Document("foo", new Foo()), "collection"));
+
+		assertThat(appender.list.get(0).getFormattedMessage())
+				.startsWith("onBeforeSave: org.springframework.data.mongodb.core.");
+	}
+
+	@Test // DATAMONGO-1645
+	public void shouldSerializeAfterSaveEventEventCorrectly() {
+
+		listener.onAfterSave(new AfterSaveEvent(new Foo(), new Document("foo", new Foo()), "collection"));
+
+		assertThat(appender.list.get(0).getFormattedMessage())
+				.startsWith("onAfterSave: org.springframework.data.mongodb.core.");
+	}
+
+	@Test // DATAMONGO-1645
+	public void shouldSerializeBeforeDeleteEventEventCorrectly() {
+
+		listener.onBeforeDelete(new BeforeDeleteEvent(new Document("foo", new Foo()), Object.class, "collection"));
+
+		assertThat(appender.list.get(0).getFormattedMessage()).startsWith("onBeforeDelete: { \"foo\"");
+	}
+
+	@Test // DATAMONGO-1645
+	public void shouldSerializeAfterDeleteEventEventCorrectly() {
+
+		listener.onAfterDelete(new AfterDeleteEvent(new Document("foo", new Foo()), Object.class, "collection"));
+
+		assertThat(appender.list.get(0).getFormattedMessage()).startsWith("onAfterDelete: { \"foo\"");
+	}
+
+	static class Foo {
+
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java
index eda34e756f..e05efb6a45 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2015 by the original author(s).
+ * Copyright 2011-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -25,10 +25,6 @@ public class PersonBeforeSaveListener extends AbstractMongoEventListener<PersonPojoStringId> {
 
 	public final List<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();
 
-	/*
-	 * (non-Javadoc)
-	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
-	 */
 	@Override
 	public void onBeforeSave(BeforeSaveEvent<PersonPojoStringId> event) {
 		seenEvents.add(event);
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallbackUnitTests.java
new file mode 100644
index 0000000000..c0db92a3d9
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallbackUnitTests.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.mapping.event;
+
+import jakarta.validation.ConstraintViolationException;
+import jakarta.validation.Validation;
+import jakarta.validation.ValidatorFactory;
+import jakarta.validation.constraints.Min;
+import jakarta.validation.constraints.NotNull;
+import reactor.test.StepVerifier;
+
+import org.bson.Document;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Unit tests for {@link ReactiveValidatingEntityCallback}.
+ * + * @author Mark Paluch + * @author Rene Felgenträger + */ +class ReactiveValidatingEntityCallbackUnitTests { + + private ReactiveValidatingEntityCallback callback; + + @BeforeEach + void setUp() { + try (ValidatorFactory factory = Validation.buildDefaultValidatorFactory()) { + callback = new ReactiveValidatingEntityCallback(factory.getValidator()); + } + } + + @Test // GH-4910 + void validationThrowsException() { + + Coordinates coordinates = new Coordinates(-1, -1); + + callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates") // + .as(StepVerifier::create) // + .verifyError(ConstraintViolationException.class); + } + + @Test // GH-4910 + void validateSuccessful() { + + Coordinates coordinates = new Coordinates(0, 0); + + callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates") // + .as(StepVerifier::create) // + .expectNext(coordinates) // + .verifyComplete(); + } + + record Coordinates(@NotNull @Min(0) Integer x, @NotNull @Min(0) Integer y) { + + Document toDocument() { + return Document.parse(""" + { + "x": %d, + "y": %d + } + """.formatted(x, y)); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java index c73727ea28..1d77bb0e2f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java @@ -1,11 +1,11 @@ /* - * Copyright (c) 2011-2015 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java index c638aeadc6..8727f1dfe2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,20 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import javax.validation.constraints.Min; -import javax.validation.constraints.Size; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.Size; /** * Class used to test JSR-303 validation * {@link org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener} - * - * @see DATAMONGO-36 + * * @author Maciej Walkowiak */ public class User { - @Size(min = 10) - private String name; + @Size(min = 10) private String name; - @Min(18) - private Integer age; + @Min(18) private Integer age; public User(String name, Integer age) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallbackUnitTests.java new file mode 100644 index 0000000000..e20da176b3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallbackUnitTests.java @@ -0,0 +1,78 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import static org.assertj.core.api.Assertions.*; + +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validation; +import jakarta.validation.ValidatorFactory; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotNull; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ValidatingEntityCallback}. 
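// For orientation, not part of this change set: like other EntityCallback implementations, the callback
// under test takes effect once it is exposed as a bean, e.g. (bean method name hypothetical):
//
//   @Bean
//   ValidatingEntityCallback validatingEntityCallback(jakarta.validation.Validator validator) {
//       return new ValidatingEntityCallback(validator);
//   }
//
// MongoTemplate then invokes it before saving and rejects entities whose constraints are violated.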
+ * + * @author Rene Felgenträger + * @author Mark Paluch + */ +class ValidatingEntityCallbackUnitTests { + + private ValidatingEntityCallback callback; + + @BeforeEach + void setUp() { + try (ValidatorFactory factory = Validation.buildDefaultValidatorFactory()) { + callback = new ValidatingEntityCallback(factory.getValidator()); + } + } + + @Test // GH-4910 + void validationThrowsException() { + + Coordinates coordinates = new Coordinates(-1, -1); + + assertThatExceptionOfType(ConstraintViolationException.class).isThrownBy( + () -> callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates")) + .satisfies(e -> assertThat(e.getConstraintViolations()).hasSize(2)); + } + + @Test // GH-4910 + void validateSuccessful() { + + Coordinates coordinates = new Coordinates(0, 0); + Object entity = callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates"); + + assertThat(entity).isEqualTo(coordinates); + } + + record Coordinates(@NotNull @Min(0) Integer x, @NotNull @Min(0) Integer y) { + + Document toDocument() { + return Document.parse(""" + { + "x": %d, + "y": %d + } + """.formatted(x, y)); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java index 8a8d06cd3e..a1253bf98a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,44 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import javax.validation.ConstraintViolationException; +import jakarta.validation.ConstraintViolationException; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Integration test for {@link ValidatingMongoEventListener}. 
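// For orientation, not part of this change set: the event-listener variant exercised below is wired the
// classic way, e.g. (bean method name hypothetical):
//
//   @Bean
//   ValidatingMongoEventListener validatingMongoEventListener(LocalValidatorFactoryBean factory) {
//       return new ValidatingMongoEventListener(factory);
//   }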
- * - * @see DATAMONGO-36 + * * @author Maciej Walkowiak * @author Oliver Gierke * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration -public class ValidatingMongoEventListenerTests { - - public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6)); +class ValidatingMongoEventListenerTests { @Autowired MongoTemplate mongoTemplate; - @Test - public void shouldThrowConstraintViolationException() { + @Test // DATAMONGO-36 + void shouldThrowConstraintViolationException() { User user = new User("john", 17); - try { - mongoTemplate.save(user); - fail(); - } catch (ConstraintViolationException e) { - assertThat(e.getConstraintViolations().size(), equalTo(2)); - } + assertThatExceptionOfType(ConstraintViolationException.class).isThrownBy(() -> mongoTemplate.save(user)) + .satisfies(e -> { + assertThat(e.getConstraintViolations()).hasSize(2); + }); } @Test - public void shouldNotThrowAnyExceptions() { + void shouldNotThrowAnyExceptions() { mongoTemplate.save(new User("john smith", 18)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/GroupByTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/GroupByTests.java deleted file mode 100644 index 96e75870b6..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/GroupByTests.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright 2011-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.mapreduce; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.mapreduce.GroupBy.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; - -import java.util.Arrays; -import java.util.HashSet; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.Mongo; - -/** - * Integration tests for group-by operations. 
- *
- * @author Mark Pollack
- * @author Oliver Gierke
- * @author Christoph Strobl
- */
-@RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration("classpath:infrastructure.xml")
-public class GroupByTests {
-
-	@Autowired MongoDbFactory factory;
-	@Autowired ApplicationContext applicationContext;
-
-	MongoTemplate mongoTemplate;
-
-	@Autowired
-	public void setMongo(Mongo mongo) throws Exception {
-
-		MongoMappingContext mappingContext = new MongoMappingContext();
-		mappingContext.setInitialEntitySet(new HashSet<Class<?>>(Arrays.asList(XObject.class)));
-		mappingContext.initialize();
-
-		DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory);
-		MappingMongoConverter mappingConverter = new MappingMongoConverter(dbRefResolver, mappingContext);
-		mappingConverter.afterPropertiesSet();
-
-		this.mongoTemplate = new MongoTemplate(factory, mappingConverter);
-		mongoTemplate.setApplicationContext(applicationContext);
-	}
-
-	@Before
-	public void setUp() {
-		cleanDb();
-	}
-
-	@After
-	public void cleanUp() {
-		cleanDb();
-	}
-
-	protected void cleanDb() {
-		mongoTemplate.dropCollection(mongoTemplate.getCollectionName(XObject.class));
-		mongoTemplate.dropCollection("group_test_collection");
-	}
-
-	@Test
-	public void singleKeyCreation() {
-
-		DBObject gc = new GroupBy("a").getGroupByObject();
-
-		assertThat(gc.toString(), is("{ \"key\" : { \"a\" : 1} , \"$reduce\" : null , \"initial\" : null }"));
-	}
-
-	@Test
-	public void multipleKeyCreation() {
-
-		DBObject gc = GroupBy.key("a", "b").getGroupByObject();
-
-		assertThat(gc.toString(), is("{ \"key\" : { \"a\" : 1 , \"b\" : 1} , \"$reduce\" : null , \"initial\" : null }"));
-	}
-
-	@Test
-	public void keyFunctionCreation() {
-
-		DBObject gc = GroupBy.keyFunction("classpath:keyFunction.js").getGroupByObject();
-
-		assertThat(gc.toString(),
-				is("{ \"$keyf\" : \"classpath:keyFunction.js\" , \"$reduce\" : null , \"initial\" : null }"));
-	}
-
-	@Test
-	public void simpleGroupFunction() {
-
-		createGroupByData();
-		GroupByResults<XObject> results = mongoTemplate.group(
-				"group_test_collection",
-				GroupBy.key("x").initialDocument(new BasicDBObject("count", 0))
-						.reduceFunction("function(doc, prev) { prev.count += 1 }"), XObject.class);
-
-		assertMapReduceResults(results);
-	}
-
-	@Test
-	public void simpleGroupWithKeyFunction() {
-
-		createGroupByData();
-		GroupByResults<XObject> results = mongoTemplate.group(
-				"group_test_collection",
-				GroupBy.keyFunction("function(doc) { return { x : doc.x }; }").initialDocument("{ count: 0 }")
-						.reduceFunction("function(doc, prev) { prev.count += 1 }"), XObject.class);
-
-		assertMapReduceResults(results);
-	}
-
-	@Test
-	public void simpleGroupWithFunctionsAsResources() {
-
-		createGroupByData();
-		GroupByResults<XObject> results = mongoTemplate.group(
-				"group_test_collection",
-				GroupBy.keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }")
-						.reduceFunction("classpath:groupReduce.js"), XObject.class);
-
-		assertMapReduceResults(results);
-	}
-
-	@Test
-	public void simpleGroupWithQueryAndFunctionsAsResources() {
-
-		createGroupByData();
-		GroupByResults<XObject> results = mongoTemplate.group(
-				where("x").gt(0),
-				"group_test_collection",
-				keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction(
-						"classpath:groupReduce.js"), XObject.class);
-
-		assertMapReduceResults(results);
-	}
-
-	private void assertMapReduceResults(GroupByResults<XObject> results) {
-
-		int numResults = 0;
-		for (XObject xObject : results) {
-			if (xObject.getX() == 1) {
-				Assert.assertEquals(2, xObject.getCount(), 0.001);
} - if (xObject.getX() == 2) { - Assert.assertEquals(1, xObject.getCount(), 0.001); - } - if (xObject.getX() == 3) { - Assert.assertEquals(3, xObject.getCount(), 0.001); - } - numResults++; - } - assertThat(numResults, is(3)); - assertThat(results.getKeys(), is(3)); - assertEquals(6, results.getCount(), 0.001); - } - - private void createGroupByData() { - - DBCollection c = mongoTemplate.getDb().getCollection("group_test_collection"); - - c.save(new BasicDBObject("x", 1)); - c.save(new BasicDBObject("x", 1)); - c.save(new BasicDBObject("x", 2)); - c.save(new BasicDBObject("x", 3)); - c.save(new BasicDBObject("x", 3)); - c.save(new BasicDBObject("x", 3)); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java index a0da06bc0e..687786456c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,43 +15,36 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link MapReduceCounts}. 
- * + * * @author Oliver Gierke */ public class MapReduceCountsUnitTests { - /** - * @see DATACMNS-378 - */ - @Test + @Test // DATACMNS-378 public void equalsForSameNumberValues() { MapReduceCounts left = new MapReduceCounts(1L, 1L, 1L); MapReduceCounts right = new MapReduceCounts(1L, 1L, 1L); - assertThat(left, is(right)); - assertThat(right, is(left)); - assertThat(left.hashCode(), is(right.hashCode())); + assertThat(left).isEqualTo(right); + assertThat(right).isEqualTo(left); + assertThat(left.hashCode()).isEqualTo(right.hashCode()); } - /** - * @see DATACMNS-378 - */ - @Test + @Test // DATACMNS-378 public void notEqualForDifferentNumberValues() { MapReduceCounts left = new MapReduceCounts(1L, 1L, 1L); MapReduceCounts right = new MapReduceCounts(1L, 2L, 1L); - assertThat(left, is(not(right))); - assertThat(right, is(not(left))); - assertThat(left.hashCode(), is(not(right.hashCode()))); + assertThat(left).isNotEqualTo(right); + assertThat(right).isNotEqualTo(left); + assertThat(left.hashCode()).isNotEqualTo(right.hashCode()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java index 9cc1728de4..a3a2161845 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,9 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Mark Pollack @@ -32,23 +31,17 @@ public void testFinalize() { new MapReduceOptions().finalizeFunction("code"); } - /** - * @see DATAMONGO-1334 - */ - @Test + @Test // DATAMONGO-1334 public void limitShouldBeIncludedCorrectly() { MapReduceOptions options = new MapReduceOptions(); options.limit(10); - assertThat(options.getOptionsObject(), isBsonObject().containing("limit", 10)); + assertThat(options.getOptionsObject()).containsEntry("limit", 10); } - /** - * @see DATAMONGO-1334 - */ - @Test - public void limitShouldNotBePresentInDboWhenNotSet() { - assertThat(new MapReduceOptions().getOptionsObject(), isBsonObject().notContaining("limit")); + @Test // DATAMONGO-1334 + public void limitShouldNotBePresentInDocumentWhenNotSet() { + assertThat(new MapReduceOptions().getOptionsObject()).doesNotContainKey("limit"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java index 087ee786eb..c34fa32be0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,72 +15,57 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collections; -import org.junit.Test; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link MapReduceResults}. 
- * + * * @author Oliver Gierke */ public class MapReduceResultsUnitTests { - /** - * @see DATAMONGO-428 - */ - @Test + @Test // DATAMONGO-428 public void resolvesOutputCollectionForPlainResult() { - DBObject rawResult = new BasicDBObject("result", "FOO"); + Document rawResult = new Document("result", "FOO"); MapReduceResults results = new MapReduceResults(Collections.emptyList(), rawResult); - assertThat(results.getOutputCollection(), is("FOO")); + assertThat(results.getOutputCollection()).isEqualTo("FOO"); } - /** - * @see DATAMONGO-428 - */ - @Test - public void resolvesOutputCollectionForDBObjectResult() { + @Test // DATAMONGO-428 + public void resolvesOutputCollectionForDocumentResult() { - DBObject rawResult = new BasicDBObject("result", new BasicDBObject("collection", "FOO")); + Document rawResult = new Document("result", new Document("collection", "FOO")); MapReduceResults results = new MapReduceResults(Collections.emptyList(), rawResult); - assertThat(results.getOutputCollection(), is("FOO")); + assertThat(results.getOutputCollection()).isEqualTo("FOO"); } - /** - * @see DATAMONGO-378 - */ - @Test + @Test // DATAMONGO-378 public void handlesLongTotalInResult() { - DBObject inner = new BasicDBObject("total", 1L); + Document inner = new Document("total", 1L); inner.put("mapTime", 1L); inner.put("emitLoop", 1); - DBObject source = new BasicDBObject("timing", inner); + Document source = new Document("timing", inner); new MapReduceResults(Collections.emptyList(), source); } - /** - * @see DATAMONGO-378 - */ - @Test + @Test // DATAMONGO-378 public void handlesLongResultsForCounts() { - DBObject inner = new BasicDBObject("input", 1L); + Document inner = new Document("input", 1L); inner.put("emit", 1L); inner.put("output", 1); - DBObject source = new BasicDBObject("counts", inner); + Document source = new Document("counts", inner); new MapReduceResults(Collections.emptyList(), source); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java index d971168334..c265a9b739 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,69 +15,47 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; +import org.bson.Document; import org.junit.After; import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.geo.Box; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.query.Query; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.Mongo; +import com.mongodb.client.MongoCollection; /** * Integration test for {@link MongoTemplate}'s Map-Reduce operations - * + * * @author Mark Pollack * @author Thomas Darimont + * @author Mark Paluch + * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:infrastructure.xml") public class MapReduceTests { - private String mapFunction = "function(){ for ( var i=0; i>(Arrays.asList(ValueObject.class))); - mappingContext.initialize(); - - DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); - MappingMongoConverter mappingConverter = new MappingMongoConverter(dbRefResolver, mappingContext); - mappingConverter.afterPropertiesSet(); - this.mongoTemplate = new MongoTemplate(factory, mappingConverter); - } + @Autowired MongoTemplate mongoTemplate; @Before public void setUp() { @@ -90,162 +68,104 @@ public void cleanUp() { } protected void cleanDb() { + template.dropCollection(template.getCollectionName(ValueObject.class)); template.dropCollection("jmr2"); template.dropCollection("jmr2_out"); template.dropCollection("jmr1_out"); template.dropCollection("jmr1"); + template.dropCollection("jmrWithGeo"); + template.getMongoDatabaseFactory().getMongoDatabase("jmr1-out-db").drop(); } - @Test - @Ignore - public void testForDocs() { - createMapReduceData(); - MapReduceResults results = mongoTemplate.mapReduce("jmr1", mapFunction, reduceFunction, - ValueObject.class); - for (ValueObject valueObject : results) { - System.out.println(valueObject); - } - } - - @Test + @Test // DATAMONGO-260 public void testIssue260() { + createContentAndVersionData(); String map = "function () { emit(this.document_id, this.version); }"; String reduce = "function (key, values) { return 
Math.max.apply(Math, values); }"; + MapReduceResults results = mongoTemplate.mapReduce("jmr2", map, reduce, new MapReduceOptions().outputCollection("jmr2_out"), ContentAndVersion.class); - int size = 0; + assertThat(results).hasSize(3); for (ContentAndVersion cv : results) { + if ("Resume".equals(cv.getId())) { - assertEquals(6, cv.getValue().longValue()); + assertThat(cv.getValue().longValue()).isEqualTo(6); } if ("Schema".equals(cv.getId())) { - assertEquals(2, cv.getValue().longValue()); + assertThat(cv.getValue().longValue()).isEqualTo(2); } if ("mongoDB How-To".equals(cv.getId())) { - assertEquals(2, cv.getValue().longValue()); + assertThat(cv.getValue().longValue()).isEqualTo(2); } - size++; } - assertEquals(3, size); + } - @Test + @Test // DATAMONGO-260 public void testIssue260Part2() { + createNumberAndVersionData(); String map = "function () { emit(this.number, this.version); }"; String reduce = "function (key, values) { return Math.max.apply(Math, values); }"; + MapReduceResults results = mongoTemplate.mapReduce("jmr2", map, reduce, new MapReduceOptions().outputCollection("jmr2_out"), NumberAndVersion.class); - int size = 0; + for (NumberAndVersion nv : results) { if ("1".equals(nv.getId())) { - assertEquals(2, nv.getValue().longValue()); + assertThat(nv.getValue().longValue()).isEqualTo(2); } if ("2".equals(nv.getId())) { - assertEquals(6, nv.getValue().longValue()); + assertThat(nv.getValue().longValue()).isEqualTo(6); } if ("3".equals(nv.getId())) { - assertEquals(2, nv.getValue().longValue()); + assertThat(nv.getValue().longValue()).isEqualTo(2); } - size++; } - assertEquals(3, size); - } - - private void createNumberAndVersionData() { - NumberAndVersion nv1 = new NumberAndVersion(); - nv1.setNumber(1L); - nv1.setVersion(1L); - template.save(nv1, "jmr2"); - - NumberAndVersion nv2 = new NumberAndVersion(); - nv2.setNumber(1L); - nv2.setVersion(2L); - template.save(nv2, "jmr2"); - - NumberAndVersion nv3 = new NumberAndVersion(); - nv3.setNumber(2L); - nv3.setVersion(6L); - template.save(nv3, "jmr2"); - - NumberAndVersion nv4 = new NumberAndVersion(); - nv4.setNumber(3L); - nv4.setVersion(1L); - template.save(nv4, "jmr2"); - - NumberAndVersion nv5 = new NumberAndVersion(); - nv5.setNumber(3L); - nv5.setVersion(2L); - template.save(nv5, "jmr2"); + assertThat(results).hasSize(3); } - private void createContentAndVersionData() { - /* - { "_id" : 1, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1 } - { "_id" : 2, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1.1 } - { "_id" : 3, "document_id" : "Resume", "author" : "Author", "content" : "...", "version" : 6 } - { "_id" : 4, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 0.9 } - { "_id" : 5, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 1 } + @Test // DATADOC-7, DATAMONGO-2027 + public void testMapReduce() { - */ - ContentAndVersion cv1 = new ContentAndVersion(); - cv1.setDocumentId("mongoDB How-To"); - cv1.setAuthor("Amos King"); - cv1.setContent("..."); - cv1.setVersion(1L); - template.save(cv1, "jmr2"); + performMapReduce(false, false); - ContentAndVersion cv2 = new ContentAndVersion(); - cv2.setDocumentId("mongoDB How-To"); - cv2.setAuthor("Amos King"); - cv2.setContent("..."); - cv2.setVersion(2L); - template.save(cv2, "jmr2"); + List results = mongoTemplate.find(new Query(), ValueObject.class, "jmr1_out"); + assertMapReduceResults(copyToMap(results)); + } - 
ContentAndVersion cv3 = new ContentAndVersion(); - cv3.setDocumentId("Resume"); - cv3.setAuthor("Author"); - cv3.setContent("..."); - cv3.setVersion(6L); - template.save(cv3, "jmr2"); + @Test // DATADOC-7, DATAMONGO-2027 + public void testMapReduceInline() { - ContentAndVersion cv4 = new ContentAndVersion(); - cv4.setDocumentId("Schema"); - cv4.setAuthor("Someone Else"); - cv4.setContent("..."); - cv4.setVersion(1L); - template.save(cv4, "jmr2"); + performMapReduce(true, false); + assertThat(template.collectionExists("jmr1_out")).isFalse(); + } - ContentAndVersion cv5 = new ContentAndVersion(); - cv5.setDocumentId("Schema"); - cv5.setAuthor("Someone Else"); - cv5.setContent("..."); - cv5.setVersion(2L); - template.save(cv5, "jmr2"); + @Test // DATAMONGO-2027 + public void mapReduceWithOutputDatabaseShouldWorkCorrectly() { - } + createMapReduceData(); - @Test - public void testMapReduce() { - performMapReduce(false, false); - } + mongoTemplate.mapReduce("jmr1", MAP_FUNCTION, REDUCE_FUNCTION, + options().outputDatabase("jmr1-out-db").outputCollection("jmr1-out"), ValueObject.class); - @Test - public void testMapReduceInline() { - performMapReduce(true, false); + assertThat( + template.getMongoDatabaseFactory().getMongoDatabase("jmr1-out-db").listCollectionNames().into(new ArrayList<>())) + .contains("jmr1-out"); } - @Test + @Test // DATADOC-7 public void testMapReduceWithQuery() { performMapReduce(false, true); } - @Test + @Test // DATADOC-7 public void testMapReduceInlineWithScope() { + createMapReduceData(); Map scopeVariables = new HashMap(); @@ -253,57 +173,56 @@ public void testMapReduceInlineWithScope() { String mapWithExcludeFunction = "function(){ for ( var i=0; i results = mongoTemplate.mapReduce("jmr1", mapWithExcludeFunction, reduceFunction, - new MapReduceOptions().scopeVariables(scopeVariables).outputTypeInline(), ValueObject.class); - Map m = copyToMap(results); - assertEquals(3, m.size()); - assertEquals(2, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); + MapReduceResults results = mongoTemplate.mapReduce("jmr1", mapWithExcludeFunction, REDUCE_FUNCTION, + new MapReduceOptions().scopeVariables(scopeVariables), ValueObject.class); + + assertThat(copyToMap(results)) // + .hasSize(3) // + .containsEntry("b", 2F) // + .containsEntry("c", 2F) // + .containsEntry("d", 1F); } - @Test + @Test // DATADOC-7 public void testMapReduceExcludeQuery() { + createMapReduceData(); Query query = new Query(where("x").ne(new String[] { "a", "b" })); - MapReduceResults results = mongoTemplate.mapReduce(query, "jmr1", mapFunction, reduceFunction, + MapReduceResults results = mongoTemplate.mapReduce(query, "jmr1", MAP_FUNCTION, REDUCE_FUNCTION, ValueObject.class); - Map m = copyToMap(results); - assertEquals(3, m.size()); - assertEquals(1, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); - + assertThat(copyToMap(results)) // + .hasSize(3) // + .containsEntry("b", 1F) // + .containsEntry("c", 2F) // + .containsEntry("d", 1F); } - /** - * @see DATAMONGO-938 - */ - @Test + @Test // DATAMONGO-938 public void mapReduceShouldUseQueryMapper() { - DBCollection c = mongoTemplate.getDb().getCollection("jmrWithGeo"); + MongoCollection c = mongoTemplate.getDb().getCollection("jmrWithGeo", Document.class); - c.save(new BasicDBObject("x", new String[] { "a", "b" }).append("loc", new double[] { 0, 0 })); - c.save(new BasicDBObject("x", new String[] { "b", "c" }).append("loc", new double[] { 0, 0 
})); - c.save(new BasicDBObject("x", new String[] { "c", "d" }).append("loc", new double[] { 0, 0 })); + c.insertOne(new Document("x", Arrays.asList("a", "b")).append("loc", Arrays.asList(0D, 0D))); + c.insertOne(new Document("x", Arrays.asList("b", "c")).append("loc", Arrays.asList(0D, 0D))); + c.insertOne(new Document("x", Arrays.asList("c", "d")).append("loc", Arrays.asList(0D, 0D))); Query query = new Query(where("x").ne(new String[] { "a", "b" }).and("loc") .within(new Box(new double[] { 0, 0 }, new double[] { 1, 1 }))); - MapReduceResults results = template.mapReduce(query, "jmrWithGeo", mapFunction, reduceFunction, + MapReduceResults results = template.mapReduce(query, "jmrWithGeo", MAP_FUNCTION, REDUCE_FUNCTION, ValueObject.class); - Map m = copyToMap(results); - assertEquals(3, m.size()); - assertEquals(1, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); + assertThat(copyToMap(results)) // + .hasSize(3) // + .containsEntry("b", 1F) // + .containsEntry("c", 2F) // + .containsEntry("d", 1F); } private void performMapReduce(boolean inline, boolean withQuery) { + createMapReduceData(); MapReduceResults results; if (inline) { @@ -311,47 +230,124 @@ private void performMapReduce(boolean inline, boolean withQuery) { results = mongoTemplate.mapReduce(new Query(), "jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class); } else { - results = mongoTemplate.mapReduce("jmr1", mapFunction, reduceFunction, ValueObject.class); + results = mongoTemplate.mapReduce("jmr1", MAP_FUNCTION, REDUCE_FUNCTION, ValueObject.class); } } else { if (withQuery) { - results = mongoTemplate.mapReduce(new Query(), "jmr1", mapFunction, reduceFunction, + results = mongoTemplate.mapReduce(new Query(), "jmr1", MAP_FUNCTION, REDUCE_FUNCTION, options().outputCollection("jmr1_out"), ValueObject.class); } else { - results = mongoTemplate.mapReduce("jmr1", mapFunction, reduceFunction, + results = mongoTemplate.mapReduce("jmr1", MAP_FUNCTION, REDUCE_FUNCTION, new MapReduceOptions().outputCollection("jmr1_out"), ValueObject.class); } } - Map m = copyToMap(results); - assertMapReduceResults(m); + + assertMapReduceResults(copyToMap(results)); } private void createMapReduceData() { - DBCollection c = mongoTemplate.getDb().getCollection("jmr1"); - c.save(new BasicDBObject("x", new String[] { "a", "b" })); - c.save(new BasicDBObject("x", new String[] { "b", "c" })); - c.save(new BasicDBObject("x", new String[] { "c", "d" })); + + MongoCollection c = mongoTemplate.getDb().getCollection("jmr1", Document.class); + c.insertOne(new Document("x", Arrays.asList("a", "b"))); + c.insertOne(new Document("x", Arrays.asList("b", "c"))); + c.insertOne(new Document("x", Arrays.asList("c", "d"))); } - private Map copyToMap(MapReduceResults results) { - List valueObjects = new ArrayList(); + private Map copyToMap(Iterable results) { + + List valueObjects = new ArrayList<>(); for (ValueObject valueObject : results) { valueObjects.add(valueObject); } - Map m = new HashMap(); + Map m = new HashMap<>(); for (ValueObject vo : valueObjects) { m.put(vo.getId(), vo.getValue()); } return m; } - private void assertMapReduceResults(Map m) { - assertEquals(4, m.size()); - assertEquals(1, m.get("a").intValue()); - assertEquals(2, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); + private void assertMapReduceResults(Map map) { + + assertThat(map) // + .hasSize(4) // + .containsEntry("a", 1F) // + .containsEntry("b", 2F) // + 
.containsEntry("c", 2F) // + .containsEntry("d", 1F); + } + + private void createNumberAndVersionData() { + + NumberAndVersion nv1 = new NumberAndVersion(); + nv1.setNumber(1L); + nv1.setVersion(1L); + template.save(nv1, "jmr2"); + + NumberAndVersion nv2 = new NumberAndVersion(); + nv2.setNumber(1L); + nv2.setVersion(2L); + template.save(nv2, "jmr2"); + + NumberAndVersion nv3 = new NumberAndVersion(); + nv3.setNumber(2L); + nv3.setVersion(6L); + template.save(nv3, "jmr2"); + + NumberAndVersion nv4 = new NumberAndVersion(); + nv4.setNumber(3L); + nv4.setVersion(1L); + template.save(nv4, "jmr2"); + + NumberAndVersion nv5 = new NumberAndVersion(); + nv5.setNumber(3L); + nv5.setVersion(2L); + template.save(nv5, "jmr2"); + } + private void createContentAndVersionData() { + /* + { "_id" : 1, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1 } + { "_id" : 2, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1.1 } + { "_id" : 3, "document_id" : "Resume", "author" : "Author", "content" : "...", "version" : 6 } + { "_id" : 4, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 0.9 } + { "_id" : 5, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 1 } + + */ + ContentAndVersion cv1 = new ContentAndVersion(); + cv1.setDocumentId("mongoDB How-To"); + cv1.setAuthor("Amos King"); + cv1.setContent("..."); + cv1.setVersion(1L); + template.save(cv1, "jmr2"); + + ContentAndVersion cv2 = new ContentAndVersion(); + cv2.setDocumentId("mongoDB How-To"); + cv2.setAuthor("Amos King"); + cv2.setContent("..."); + cv2.setVersion(2L); + template.save(cv2, "jmr2"); + + ContentAndVersion cv3 = new ContentAndVersion(); + cv3.setDocumentId("Resume"); + cv3.setAuthor("Author"); + cv3.setContent("..."); + cv3.setVersion(6L); + template.save(cv3, "jmr2"); + + ContentAndVersion cv4 = new ContentAndVersion(); + cv4.setDocumentId("Schema"); + cv4.setAuthor("Someone Else"); + cv4.setContent("..."); + cv4.setVersion(1L); + template.save(cv4, "jmr2"); + + ContentAndVersion cv5 = new ContentAndVersion(); + cv5.setDocumentId("Schema"); + cv5.setAuthor("Someone Else"); + cv5.setContent("..."); + cv5.setVersion(2L); + template.save(cv5, "jmr2"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ReactiveMapReduceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ReactiveMapReduceTests.java new file mode 100644 index 0000000000..5bd7b284f5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ReactiveMapReduceTests.java @@ -0,0 +1,218 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapreduce; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.test.StepVerifier; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @author Mathieu Ouellet + * @currentRead Beyond the Shadows - Brent Weeks + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class ReactiveMapReduceTests { + + @Autowired SimpleReactiveMongoDatabaseFactory factory; + @Autowired ReactiveMongoTemplate template; + + private String mapFunction = "function(){ for ( var i=0; i { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("a", 1), new ValueObject("b", 2), + new ValueObject("c", 2), new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2027 + public void shouldStoreResultInCollection() { + + createMapReduceData(); + + template.mapReduce(new Query(), Person.class, "jmr1", ValueObject.class, mapFunction, reduceFunction, // + MapReduceOptions.options().outputCollection("mapreduceout")).as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); + + template.find(new Query(), ValueObject.class, "mapreduceout").buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("a", 1), new ValueObject("b", 2), + new ValueObject("c", 2), new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void mapReduceWithInlineAndFilterQuery() { + + createMapReduceData(); + + template + .mapReduce(query(where("x").ne(new String[] { "a", "b" })), ValueObject.class, "jmr1", ValueObject.class, + mapFunction, reduceFunction, MapReduceOptions.options()) + .buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("b", 1), new ValueObject("c", 2), + new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890, DATAMONGO-2027 + public void mapReduceWithOutputCollection() { + + createMapReduceData(); + + template + .mapReduce(new Query(), ValueObject.class, "jmr1", ValueObject.class, mapFunction, reduceFunction, + MapReduceOptions.options().outputCollection("jmr1_out")) + .as(StepVerifier::create).expectNextCount(4).verifyComplete(); + + template.find(new Query(), ValueObject.class, "jmr1_out").buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("a", 1), new ValueObject("b", 2), + new ValueObject("c", 2), new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2027 + public void 
mapReduceWithOutputDatabase() { + + createMapReduceData(); + + template + .mapReduce(new Query(), ValueObject.class, "jmr1", ValueObject.class, mapFunction, reduceFunction, + MapReduceOptions.options().outputDatabase("reactive-jrm1-out-db").outputCollection("jmr1_out")) + .as(StepVerifier::create).expectNextCount(4).verifyComplete(); + + factory.getMongoDatabase("reactive-jrm1-out-db").flatMapMany(MongoDatabase::listCollectionNames).buffer(10) + .map(list -> list.contains("jmr1_out")).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void mapReduceWithInlineAndMappedFilterQuery() { + + createMapReduceData(); + + template + .mapReduce(query(where("values").ne(new String[] { "a", "b" })), MappedFieldsValueObject.class, "jmr1", + ValueObject.class, mapFunction, reduceFunction, MapReduceOptions.options()) + .buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("b", 1), new ValueObject("c", 2), + new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void mapReduceWithInlineFilterQueryAndExtractedCollection() { + + createMapReduceData(); + + template + .mapReduce(query(where("values").ne(new String[] { "a", "b" })), MappedFieldsValueObject.class, + ValueObject.class, mapFunction, reduceFunction, MapReduceOptions.options()) + .buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("b", 1), new ValueObject("c", 2), + new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void throwsExceptionWhenTryingToLoadFunctionsFromDisk() { + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.mapReduce(new Query(), + Person.class, "foo", ValueObject.class, "classpath:map.js", "classpath:reduce.js", MapReduceOptions.options())) + .withMessageContaining("classpath:map.js"); + } + + private void createMapReduceData() { + + factory.getMongoDatabase() + .flatMapMany(db -> db.getCollection("jmr1", Document.class) + .insertMany(Arrays.asList(new Document("x", Arrays.asList("a", "b")), + new Document("x", Arrays.asList("b", "c")), new Document("x", Arrays.asList("c", "d"))))) + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @org.springframework.data.mongodb.core.mapping.Document("jmr1") + static class MappedFieldsValueObject { + + @Field("x") String[] values; + + public String[] getValues() { + return this.values; + } + + public void setValues(String[] values) { + this.values = values; + } + + public String toString() { + return "ReactiveMapReduceTests.MappedFieldsValueObject(values=" + Arrays.deepToString(this.getValues()) + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java index b491d1246a..34753e2172 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java @@ -1,17 +1,50 @@ +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.springframework.data.mongodb.core.mapreduce; +import java.util.Objects; + +/** + * @author Mark Pollack + * @author Oliver Gierke + * @author Christoph Strobl + */ public class ValueObject { private String id; - public String getId() { - return id; + private float value; + + public ValueObject() {} + + public ValueObject(String id, float value) { + this.id = id; + this.value = value; } - private float value; + public String getId() { + return this.id; + } public float getValue() { - return value; + return this.value; + } + + public void setId(String id) { + this.id = id; } public void setValue(float value) { @@ -23,4 +56,20 @@ public String toString() { return "ValueObject [id=" + id + ", value=" + value + "]"; } + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValueObject that = (ValueObject) o; + return Float.compare(that.value, value) == 0 && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTaskUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTaskUnitTests.java new file mode 100644 index 0000000000..12b57ca47d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTaskUnitTests.java @@ -0,0 +1,148 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.messaging; + +import static org.mockito.Mockito.*; + +import java.util.UUID; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.client.ChangeStreamIterable; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; + +/** + * Unit tests for {@link ChangeStreamTask}. + * + * @author Christoph Strobl + * @author Myroslav Kosinskyi + */ +@ExtendWith(MockitoExtension.class) +@SuppressWarnings({ "unchecked", "rawtypes" }) +class ChangeStreamTaskUnitTests { + + @Mock MongoTemplate template; + @Mock MongoDatabase mongoDatabase; + @Mock MongoCollection mongoCollection; + @Mock ChangeStreamIterable changeStreamIterable; + + MongoConverter converter; + + @BeforeEach + void setUp() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + + when(template.getConverter()).thenReturn(converter); + when(template.getDb()).thenReturn(mongoDatabase); + + when(mongoDatabase.getCollection(any())).thenReturn(mongoCollection); + when(mongoCollection.watch(eq(Document.class))).thenReturn(changeStreamIterable); + when(changeStreamIterable.fullDocument(any())).thenReturn(changeStreamIterable); + } + + @Test // DATAMONGO-2258 + void shouldNotBreakLovelaceBehavior() { + + BsonDocument resumeToken = new BsonDocument("token", new BsonString(UUID.randomUUID().toString())); + when(changeStreamIterable.resumeAfter(any())).thenReturn(changeStreamIterable); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .resumeToken(resumeToken) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).resumeAfter(resumeToken); + } + + @Test // DATAMONGO-2258 + void shouldApplyResumeAfterToChangeStream() { + + when(changeStreamIterable.resumeAfter(any())).thenReturn(changeStreamIterable); + + BsonDocument resumeToken = new BsonDocument("token", new BsonString(UUID.randomUUID().toString())); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .resumeAfter(resumeToken) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).resumeAfter(resumeToken); + } + + @Test // DATAMONGO-2258 + void shouldApplyStartAfterToChangeStream() { + + when(changeStreamIterable.startAfter(any())).thenReturn(changeStreamIterable); + + BsonDocument resumeToken = new BsonDocument("token", new BsonString(UUID.randomUUID().toString())); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .startAfter(resumeToken) // + .publishTo(message -> 
{}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).startAfter(resumeToken); + } + + @Test // GH-4495 + void shouldApplyFullDocumentBeforeChangeToChangeStream() { + + when(changeStreamIterable.fullDocumentBeforeChange(any())).thenReturn(changeStreamIterable); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).fullDocumentBeforeChange(FullDocumentBeforeChange.REQUIRED); + } + + private MongoCursor> initTask(ChangeStreamRequest request, Class targetType) { + + ChangeStreamTask task = new ChangeStreamTask(template, request, targetType, er -> {}); + return task.initCursor(template, request.getRequestOptions(), targetType); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java new file mode 100644 index 0000000000..53d093897e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java @@ -0,0 +1,841 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.messaging; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.messaging.SubscriptionUtils.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import org.bson.BsonDocument; +import org.bson.Document; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.RepeatFailedTest; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.ChangeStreamOptions; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; +import org.springframework.data.mongodb.core.messaging.ChangeStreamTask.ChangeStreamEventMessage; +import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoVersion; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; + +/** + * Integration test for subscribing to a {@link com.mongodb.operation.ChangeStreamBatchCursor} inside the + * {@link DefaultMessageListenerContainer} using {@link ChangeStreamRequest}. 
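+ * Each test registers a listener through the container, awaits the subscription and then asserts on the
+ * collected messages. A minimal registration sketch (the {@code listener} variable and collection name are
+ * illustrative, not part of this fixture):
+ *
+ * <pre class="code">
+ * MessageListenerContainer container = new DefaultMessageListenerContainer(template);
+ * container.start();
+ * Subscription subscription = container.register(
+ * 		ChangeStreamRequest.builder(listener).collection("user").maxAwaitTime(Duration.ofMillis(10)).build(),
+ * 		User.class);
+ * </pre>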
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi + */ +@ExtendWith({ MongoTemplateExtension.class }) +@EnableIfReplicaSetAvailable +class ChangeStreamTests { + + private static ThreadPoolExecutor executor; + + @Template(initialEntitySet = User.class, replicaSet = true) // + private static MongoTestTemplate template; + + private MessageListenerContainer container; + + private User jellyBelly; + private User huffyFluffy; + private User sugarSplashy; + + @BeforeAll + static void beforeClass() { + executor = new ThreadPoolExecutor(2, 2, 1, TimeUnit.SECONDS, new LinkedBlockingDeque<>()); + } + + @BeforeEach + void setUp() { + + template.dropCollection(User.class); + + container = new DefaultMessageListenerContainer(template, executor); + container.start(); + + jellyBelly = new User(); + jellyBelly.id = "id-1"; + jellyBelly.userName = "jellyBelly"; + jellyBelly.age = 7; + + huffyFluffy = new User(); + huffyFluffy.id = "id-2"; + huffyFluffy.userName = "huffyFluffy"; + huffyFluffy.age = 7; + + sugarSplashy = new User(); + sugarSplashy.id = "id-3"; + sugarSplashy.userName = "sugarSplashy"; + sugarSplashy.age = 5; + } + + @AfterEach + void tearDown() { + container.stop(); + } + + @AfterAll + static void afterClass() { + executor.shutdown(); + } + + @Test // DATAMONGO-1803 + void readsPlainDocumentMessageCorrectly() throws InterruptedException { + + CollectingMessageListener, Document> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); + + Subscription subscription = container.register(request, Document.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + awaitMessages(messageListener, 1); + + Message, Document> message1 = messageListener.getFirstMessage(); + + assertThat(message1.getRaw()).isNotNull(); + assertThat(message1.getProperties()) + .isEqualTo(MessageProperties.builder().collectionName("user").databaseName("change-stream-tests").build()); + assertThat(message1.getBody()).isEqualTo(new Document("_id", "id-1").append("user_name", "jellyBelly") + .append("age", 7).append("_class", User.class.getName())); + } + + @Test // DATAMONGO-1803 + void useSimpleAggregationToFilterMessages() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder(messageListener) // + .collection("user") // + .filter(newAggregation(match(where("age").is(7)))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2).doesNotContain(sugarSplashy); + } + + @Test // DATAMONGO-1803 + @MongoVersion(asOf = "4.0") + void useAggregationToFilterMessages() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder(messageListener) // + .collection("user") // + .filter(newAggregation(match( + new Criteria().orOperator(where("user_name").is("huffyFluffy"), 
where("user_name").is("jellyBelly"))))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2).doesNotContain(sugarSplashy); + } + + @RepeatFailedTest(3) // DATAMONGO-1803 + void mapsTypedAggregationToFilterMessages() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .publishTo(messageListener) // + .filter(newAggregation(User.class, + match(new Criteria().orOperator(where("userName").is("huffyFluffy"), where("userName").is("jellyBelly"))))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener, 2); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2).doesNotContain(sugarSplashy); + } + + @Test // DATAMONGO-1803 + void mapsReservedWordsCorrectly() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .publishTo(messageListener) // + .filter(newAggregation(User.class, match(where("operationType").is("replace")))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + template.save(sugarSplashy); + + User replacement = new User(); + replacement.id = jellyBelly.id; + replacement.userName = new StringBuilder(jellyBelly.userName).reverse().toString(); + replacement.age = jellyBelly.age; + + template.save(replacement); + + awaitMessages(messageListener, 1); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(1).containsExactly(replacement); + } + + @Test // DATAMONGO-1803 + void plainAggregationPipelineToFilterMessages() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .publishTo(messageListener) // + .filter(new Document("$match", new Document("fullDocument.user_name", "sugarSplashy"))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener, 1); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(1).containsExactly(sugarSplashy); + } + + @Test // DATAMONGO-1803 + void resumesCorrectly() throws InterruptedException { 
+ + CollectingMessageListener, User> messageListener1 = new CollectingMessageListener<>(); + Subscription subscription1 = container.register( + new ChangeStreamRequest<>(messageListener1, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())), + User.class); + + awaitSubscription(subscription1); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener1, 3); + + BsonDocument resumeToken = messageListener1.getFirstMessage().getRaw().getResumeToken(); + + CollectingMessageListener, User> messageListener2 = new CollectingMessageListener<>(); + ChangeStreamRequest subSequentRequest = ChangeStreamRequest.builder().collection("user") + .publishTo(messageListener2).resumeToken(resumeToken).maxAwaitTime(Duration.ofMillis(10)).build(); + + Subscription subscription2 = container.register(subSequentRequest, User.class); + awaitSubscription(subscription2); + + awaitMessages(messageListener2, 2); + + List messageBodies = messageListener2.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2).doesNotContain(jellyBelly); + } + + @Test // DATAMONGO-1803 + void readsAndConvertsMessageBodyCorrectly() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + awaitMessages(messageListener, 1); + + Message, User> message1 = messageListener.getFirstMessage(); + + assertThat(message1.getRaw()).isNotNull(); + assertThat(message1.getProperties()) + .isEqualTo(MessageProperties.builder().collectionName("user").databaseName("change-stream-tests").build()); + assertThat(message1.getBody()).isEqualTo(jellyBelly); + } + + @Test // DATAMONGO-1803 + void readsAndConvertsUpdateMessageBodyCorrectly() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + assertThat(messageListener.getLastMessage().getBody()).isNotNull().hasFieldOrPropertyWithValue("age", 8); + } + + @Test // DATAMONGO-1803 + void readsOnlyDiffForUpdateWhenNotMappedToDomainType() throws InterruptedException { + + CollectingMessageListener, Document> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); + + Subscription subscription = container.register(request, Document.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + 
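+ // for the unmapped Document target the update event carries only the diff (updateDescription) and no
+ // full document, so the body of the second message is expected to be null below.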
template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(new Document("_id", "id-1") + .append("user_name", "jellyBelly").append("age", 7).append("_class", User.class.getName())); + assertThat(messageListener.getLastMessage().getBody()).isNull(); + } + + @Test // DATAMONGO-1803 + void readsOnlyDiffForUpdateWhenOptionsDeclareDefaultExplicitly() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.DEFAULT) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + assertThat(messageListener.getLastMessage().getBody()).isNull(); + } + + @Test // DATAMONGO-1803 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + void readsFullDocumentForUpdateWhenNotMappedToDomainTypeButLookupSpecified() throws InterruptedException { + + CollectingMessageListener, Document> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.UPDATE_LOOKUP) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, Document.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(new Document("_id", "id-1") + .append("user_name", "jellyBelly").append("age", 7).append("_class", User.class.getName())); + assertThat(messageListener.getLastMessage().getBody()).isEqualTo(new Document("_id", "id-1") + .append("user_name", "jellyBelly").append("age", 8).append("_class", User.class.getName())); + } + + @Test // DATAMONGO-2012, DATAMONGO-2113 + @MongoVersion(asOf = "4.0") + void resumeAtTimestampCorrectly() throws InterruptedException { + + CollectingMessageListener, User> messageListener1 = new CollectingMessageListener<>(); + Subscription subscription1 = container.register( + new ChangeStreamRequest<>(messageListener1, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())), + User.class); + + awaitSubscription(subscription1); + + template.save(jellyBelly); + + Thread.sleep(1000); // cluster timestamp is in seconds, so we need to wait at least one. 
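+ // resumeAt(Instant) resumes at the stream's cluster/operation time; the sleep above makes sure the next
+ // insert falls into a distinct, later cluster timestamp to resume from.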
+ + template.save(sugarSplashy); + + awaitMessages(messageListener1, 2); + + Instant resumeAt = ((ChangeStreamEventMessage) messageListener1.getLastMessage()).getTimestamp(); + + template.save(huffyFluffy); + + awaitMessages(messageListener1, 3); + + CollectingMessageListener<ChangeStreamDocument<Document>, User> messageListener2 = new CollectingMessageListener<>(); + ChangeStreamRequest<User> subSequentRequest = ChangeStreamRequest.builder() // + .collection("user") // + .resumeAt(resumeAt) // + .publishTo(messageListener2) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription2 = container.register(subSequentRequest, User.class); + awaitSubscription(subscription2); + + awaitMessages(messageListener2, 2); + + List<User> messageBodies = messageListener2.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2).doesNotContain(jellyBelly); + } + + @Test // DATAMONGO-1996 + void filterOnNestedElementWorksCorrectly() throws InterruptedException { + + CollectingMessageListener<ChangeStreamDocument<Document>, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest<User> request = ChangeStreamRequest.builder(messageListener) // + .collection("user") // + .filter(newAggregation(User.class, match(where("address.street").is("flower street")))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + jellyBelly.address = new Address(); + jellyBelly.address.street = "candy ave"; + + huffyFluffy.address = new Address(); + huffyFluffy.address.street = "flower street"; + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener); + + List<User> messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(1).contains(huffyFluffy); + } + + @Test // DATAMONGO-1996 + void filterOnUpdateDescriptionElement() throws InterruptedException { + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + CollectingMessageListener<ChangeStreamDocument<Document>, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest<User> request = ChangeStreamRequest.builder(messageListener) // + .collection("user") // + .filter(newAggregation(User.class, match(where("updateDescription.updatedFields.address").exists(true)))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .fullDocumentLookup(FullDocument.UPDATE_LOOKUP).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))) + .apply(Update.update("address", new Address("candy ave"))).first(); + + template.update(User.class).matching(query(where("id").is(sugarSplashy.id))).apply(new Update().inc("age", 1)) + .first(); + + template.update(User.class).matching(query(where("id").is(huffyFluffy.id))) + .apply(Update.update("address", new Address("flower street"))).first(); + + awaitMessages(messageListener); + + List<User> messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredWhenAvailable() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + 
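+ // pre- and post-images must be enabled on the collection (MongoDB 6.0+) for the server to expose a
+ // before-change document to fullDocumentBeforeChange lookups.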
CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.WHEN_AVAILABLE) // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isEqualTo(jellyBelly); + assertThat(messageListener.getLastMessage().getBody()).isEqualTo(jellyBelly.withAge(8)); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredRequired() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.WHEN_AVAILABLE) // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isEqualTo(jellyBelly); + assertThat(messageListener.getLastMessage().getBody()).isEqualTo(jellyBelly.withAge(8)); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionIsNotDeclared() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredDefault() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> 
messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.DEFAULT).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredOff() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.OFF).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredWhenAvailableAndChangeStreamPreAndPostImagesDisabled() + throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @Disabled("Flakey test failing occasionally due to timing issues") + void readsFullDocumentBeforeChangeWhenOptionDeclaredRequiredAndMongoVersionIsLessThan6() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + 
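+ // without pre- and post-images enabled on the collection, WHEN_AVAILABLE has no image to return,
+ // so bodyBeforeChange is expected to stay null for both messages.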
template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + private void createUserCollectionWithChangeStreamPreAndPostImagesEnabled() { + template.createCollection(User.class, CollectionOptions.emitChangedRevisions()); + } + + static class User { + + @Id String id; + @Field("user_name") String userName; + int age; + + Address address; + + User withAge(int age) { + + User user = new User(); + user.id = id; + user.userName = userName; + user.age = age; + + return user; + } + + public String getId() { + return this.id; + } + + public String getUserName() { + return this.userName; + } + + public int getAge() { + return this.age; + } + + public Address getAddress() { + return this.address; + } + + public void setId(String id) { + this.id = id; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public void setAge(int age) { + this.age = age; + } + + public void setAddress(Address address) { + this.address = address; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + User user = (User) o; + return age == user.age && Objects.equals(id, user.id) && Objects.equals(userName, user.userName) + && Objects.equals(address, user.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, userName, age, address); + } + + public String toString() { + return "ChangeStreamTests.User(id=" + this.getId() + ", userName=" + this.getUserName() + ", age=" + this.getAge() + + ", address=" + this.getAddress() + ")"; + } + } + + static class Address { + + @Field("s") String street; + + public Address(String street) { + this.street = street; + } + + public Address() {} + + public String getStreet() { + return this.street; + } + + public void setStreet(String street) { + this.street = street; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(street, address.street); + } + + @Override + public int hashCode() { + return Objects.hash(street); + } + + public String toString() { + return "ChangeStreamTests.Address(street=" + this.getStreet() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java new file mode 100644 index 0000000000..5e9acbdcda --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java @@ -0,0 +1,268 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import static edu.umd.cs.mtc.TestFramework.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import edu.umd.cs.mtc.MultithreadedTestCase; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.core.messaging.Task.State; +import org.springframework.util.ErrorHandler; + +import com.mongodb.ServerAddress; +import com.mongodb.ServerCursor; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; + +/** + * Unit test for mainly lifecycle issues of {@link CursorReadingTask}. + * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class CursorReadingTaskUnitTests { + + @Mock MongoDatabase db; + @Mock MongoCursor cursor; + @Mock SubscriptionRequest request; + @Mock MessageListener listener; + @Mock RequestOptions options; + @Mock MongoTemplate template; + @Mock ErrorHandler errorHandler; + + ValueCapturingTaskStub task; + + @BeforeEach + public void setUp() { + + when(request.getRequestOptions()).thenReturn(options); + when(request.getMessageListener()).thenReturn(listener); + when(options.getCollectionName()).thenReturn("collection-name"); + when(template.getDb()).thenReturn(db); + when(template.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(db.getName()).thenReturn("mock-db"); + + task = new ValueCapturingTaskStub(template, request, Object.class, cursor, errorHandler); + } + + @Test // DATAMONGO-1803 + public void stopTaskWhileStarting() throws Throwable { + runOnce(new MultithreadedStopDuringStartupInitialization(task, cursor)); + } + + @Test // DATAMONGO-1803 + public void stopRunningTask() throws Throwable { + + when(cursor.getServerCursor()).thenReturn(new ServerCursor(10, new ServerAddress("mock"))); + + runOnce(new MultithreadedStopRunning(task, cursor)); + } + + @Test // DATAMONGO-1803 + public void stopTaskWhileEmittingMessages() throws Throwable { + + when(cursor.getServerCursor()).thenReturn(new ServerCursor(10, new ServerAddress("mock"))); + when(cursor.tryNext()).thenReturn("hooyah"); + + runOnce(new MultithreadedStopRunningWhileEmittingMessages(task, cursor)); + + verify(listener, times(task.getValues().size())).onMessage(any()); + } + + @Test // DATAMONGO-2173, DATAMONGO-2366 + public void writesErrorOnStartToErrorHandler() { + + ArgumentCaptor errorCaptor = ArgumentCaptor.forClass(Throwable.class); + Task task = new ErrorOnInitCursorTaskStub(template, request, Object.class, errorHandler); + + task.run(); + verify(errorHandler).handleError(errorCaptor.capture()); + assertThat(errorCaptor.getValue()).hasMessageStartingWith("let's get it started (ha)"); + } + + @Test // DATAMONGO-2366 + public void 
errorOnNextNotifiesErrorHandlerOnlyOnce() { + + ArgumentCaptor errorCaptor = ArgumentCaptor.forClass(Throwable.class); + when(cursor.getServerCursor()).thenReturn(new ServerCursor(10, new ServerAddress("mock"))); + when(cursor.tryNext()).thenThrow(new IllegalStateException()); + + task.run(); + verify(errorHandler).handleError(errorCaptor.capture()); + assertThat(errorCaptor.getValue()).isInstanceOf(IllegalStateException.class); + } + + private static class MultithreadedStopRunningWhileEmittingMessages extends MultithreadedTestCase { + + CursorReadingTask task; + MongoCursor cursor; + + public MultithreadedStopRunningWhileEmittingMessages(CursorReadingTask task, MongoCursor cursor) { + + this.task = task; + this.cursor = cursor; + } + + public void thread1() { + + assertTick(0); + + assertThat(task.getState()).isEqualTo(State.CREATED); + task.run(); + + waitForTick(1); + assertThat(task.isActive()).isFalse(); + assertThat(task.getState()).isEqualTo(State.CANCELLED); + verify(cursor).close(); + } + + public void thread2() throws InterruptedException { + + while (!task.isActive()) { + Thread.sleep(20); + } + + verify(cursor, never()).close(); + task.cancel(); + } + } + + private static class MultithreadedStopRunning extends MultithreadedTestCase { + + CursorReadingTask task; + MongoCursor cursor; + + public MultithreadedStopRunning(CursorReadingTask task, MongoCursor cursor) { + + this.task = task; + this.cursor = cursor; + } + + public void thread1() { + + assertTick(0); + + assertThat(task.getState()).isEqualTo(State.CREATED); + task.run(); + + waitForTick(2); + assertThat(task.isActive()).isFalse(); + assertThat(task.getState()).isEqualTo(State.CANCELLED); + verify(cursor).close(); + } + + public void thread2() throws InterruptedException { + + waitForTick(1); + assertThat(task.isActive()).isTrue(); + assertThat(task.getState()).isEqualTo(State.RUNNING); + verify(cursor, never()).close(); + + task.cancel(); + } + } + + private static class MultithreadedStopDuringStartupInitialization extends MultithreadedTestCase { + + CursorReadingTask task; + MongoCursor cursor; + + public MultithreadedStopDuringStartupInitialization(CursorReadingTask task, MongoCursor cursor) { + this.task = task; + this.cursor = cursor; + } + + public void thread1() { + + assertTick(0); + task.run(); + + waitForTick(2); + assertThat(task.isActive()).isFalse(); + assertThat(task.getState()).isEqualTo(State.CANCELLED); + verify(cursor).close(); + } + + public void thread2() throws InterruptedException { + + waitForTick(1); + assertThat(task.isActive()).isFalse(); + assertThat(task.getState()).isEqualTo(State.STARTING); + + task.cancel(); + } + } + + static class ValueCapturingTaskStub extends CursorReadingTask { + + final MongoCursor cursor; + final List values = new CopyOnWriteArrayList<>(); + + public ValueCapturingTaskStub(MongoTemplate template, SubscriptionRequest request, Class targetType, + MongoCursor cursor, ErrorHandler errorHandler) { + + super(template, request, targetType, errorHandler); + this.cursor = cursor; + } + + @Override + protected MongoCursor initCursor(MongoTemplate dbFactory, RequestOptions options, Class targetType) { + return cursor; + } + + @Override + protected Message createMessage(Object source, Class targetType, RequestOptions options) { + + values.add(source); + return super.createMessage(source, targetType, options); + } + + public List getValues() { + return values; + } + } + + static class ErrorOnInitCursorTaskStub extends CursorReadingTask { + + public 
ErrorOnInitCursorTaskStub(MongoTemplate template, SubscriptionRequest request, Class targetType, + ErrorHandler errorHandler) { + super(template, request, targetType, errorHandler); + } + + @Override + protected MongoCursor initCursor(MongoTemplate template, RequestOptions options, Class targetType) { + throw new RuntimeException("let's get it started (ha), let's get it started in here..."); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java new file mode 100644 index 0000000000..9373845a89 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java @@ -0,0 +1,440 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.messaging.SubscriptionUtils.*; + +import java.time.Duration; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.dao.DataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.ChangeStreamOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoServerCondition; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.util.ErrorHandler; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.changestream.ChangeStreamDocument; + +/** + * Integration tests for {@link DefaultMessageListenerContainer}. 
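The integration tests that follow all use the same basic pattern: build a request, register it with a container, start the container, then wait for the subscription to become active before writing data. As a rough sketch of that flow from an application's point of view (the listener, collection name, and class name here are illustrative, not part of this patch):

import java.time.Duration;

import org.bson.Document;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.messaging.*;

import com.mongodb.client.model.changestream.ChangeStreamDocument;

class ChangeStreamSketch {

	void listen(MongoTemplate template) throws InterruptedException {

		MessageListenerContainer container = new DefaultMessageListenerContainer(template);

		// Invoked once per change stream event on the watched collection.
		MessageListener<ChangeStreamDocument<Document>, Document> listener = message -> System.out
				.println("received: " + message.getBody());

		ChangeStreamRequest<Document> request = ChangeStreamRequest.builder(listener).collection("collection-1").build();

		Subscription subscription = container.register(request, Document.class);

		container.start(); // tasks only begin consuming once the container runs
		subscription.await(Duration.ofSeconds(2)); // block until the cursor is live

		container.stop();
	}
}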
+ * + * @author Christoph Strobl + */ +@ExtendWith({ MongoTemplateExtension.class, MongoServerCondition.class }) +public class DefaultMessageListenerContainerTests { + + static final String DATABASE_NAME = "change-stream-events"; + static final String COLLECTION_NAME = "collection-1"; + static final String COLLECTION_2_NAME = "collection-2"; + static final String COLLECTION_3_NAME = "collection-3"; + + static final Duration TIMEOUT = Duration.ofSeconds(2); + + @Client static MongoClient client; + + @Template(database = DATABASE_NAME, initialEntitySet = Person.class) // + static MongoTemplate template; + + MongoDatabaseFactory dbFactory = template.getMongoDatabaseFactory(); + + MongoCollection collection = template.getCollection(COLLECTION_NAME); + MongoCollection collection2 = template.getCollection(COLLECTION_2_NAME); + + private CollectingMessageListener messageListener; + + @BeforeEach + void beforeEach() throws InterruptedException { + + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_2_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_3_NAME, client); + + Thread.sleep(100); + + messageListener = new CollectingMessageListener<>(); + } + + @Test // DATAMONGO-1803 + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void shouldCollectMappedChangeStreamMessagesCorrectly() throws InterruptedException { + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Person.class); + container.start(); + + awaitSubscription(subscription, TIMEOUT); + + collection.insertOne(new Document("_id", "id-1").append("firstname", "foo")); + collection.insertOne(new Document("_id", "id-2").append("firstname", "bar")); + + awaitMessages(messageListener, 2, TIMEOUT); + + assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) + .containsExactly(new Person("id-1", "foo"), new Person("id-2", "bar")); + } + + @Test // DATAMONGO-2322 + @EnableIfReplicaSetAvailable + public void shouldNotifyErrorHandlerOnErrorInListener() throws InterruptedException { + + ErrorHandler errorHandler = mock(ErrorHandler.class); + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + AtomicBoolean thrownException = new AtomicBoolean(); + Subscription subscription = container.register(new ChangeStreamRequest(message -> { + + try { + if (thrownException.compareAndSet(false, true)) { + throw new IllegalStateException("Boom"); + } + } finally { + messageListener.onMessage(message); + } + + }, options()), Person.class, errorHandler); + container.start(); + + awaitSubscription(subscription, TIMEOUT); + + collection.insertOne(new Document("_id", "id-1").append("firstname", "foo")); + collection.insertOne(new Document("_id", "id-2").append("firstname", "bar")); + + awaitMessages(messageListener, 2, TIMEOUT); + + verify(errorHandler, atLeast(1)).handleError(any(IllegalStateException.class)); + assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); + } + + @Test // DATAMONGO-1803 + @EnableIfReplicaSetAvailable + public void shouldNoLongerReceiveMessagesWhenContainerStopped() throws InterruptedException { + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + Subscription subscription = container.register(new 
ChangeStreamRequest(messageListener, options()), Document.class); + container.start(); + + awaitSubscription(subscription, TIMEOUT); + + collection.insertOne(new Document("_id", "id-1").append("value", "foo")); + collection.insertOne(new Document("_id", "id-2").append("value", "bar")); + + awaitMessages(messageListener, 2, TIMEOUT); + + container.stop(); + + collection.insertOne(new Document("_id", "id-3").append("value", "bar")); + + Thread.sleep(200); + + assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); + } + + @Test // DATAMONGO-1803 + @EnableIfReplicaSetAvailable + public void shouldReceiveMessagesWhenAddingRequestToAlreadyStartedContainer() throws InterruptedException { + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + container.start(); + + Document unexpected = new Document("_id", "id-1").append("value", "foo"); + collection.insertOne(unexpected); + + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Document.class); + + awaitSubscription(subscription, TIMEOUT); + + Document expected = new Document("_id", "id-2").append("value", "bar"); + collection.insertOne(expected); + + awaitMessages(messageListener, 1, TIMEOUT); + container.stop(); + + assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) + .containsExactly(expected); + } + + @Test // DATAMONGO-1803 + @EnableIfReplicaSetAvailable + public void shouldStartReceivingMessagesWhenContainerStarts() throws InterruptedException { + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Document.class); + + collection.insertOne(new Document("_id", "id-1").append("value", "foo")); + + Thread.sleep(200); + + container.start(); + + awaitSubscription(subscription); + + Document expected = new Document("_id", "id-2").append("value", "bar"); + collection.insertOne(expected); + + awaitMessages(messageListener); + + container.stop(); + + assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) + .containsExactly(expected); + } + + @Test // DATAMONGO-1803 + public void tailableCursor() throws InterruptedException { + + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, + new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); + + collection.insertOne(new Document("_id", "id-1").append("value", "foo")); + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + container.start(); + + awaitSubscription(container.register(new TailableCursorRequest(messageListener, options()), Document.class), + TIMEOUT); + + collection.insertOne(new Document("_id", "id-2").append("value", "bar")); + + awaitMessages(messageListener, 2, TIMEOUT); + container.stop(); + + assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); + } + + @Test // DATAMONGO-1803 + public void tailableCursorOnEmptyCollection() throws InterruptedException { + + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, + new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + container.start(); + + awaitSubscription(container.register(new TailableCursorRequest(messageListener, options()), Document.class), + TIMEOUT); + + collection.insertOne(new 
Document("_id", "id-1").append("value", "foo")); + collection.insertOne(new Document("_id", "id-2").append("value", "bar")); + + awaitMessages(messageListener, 2, TIMEOUT); + container.stop(); + + assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); + } + + @Test // DATAMONGO-1803 + public void abortsSubscriptionOnError() throws InterruptedException { + + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, + new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + container.start(); + + collection.insertOne(new Document("_id", "id-1").append("value", "foo")); + + Subscription subscription = container.register(new TailableCursorRequest(messageListener, options()), + Document.class); + + awaitSubscription(subscription); + + assertThat(subscription.isActive()).isTrue(); + + collection.insertOne(new Document("_id", "id-2").append("value", "bar")); + collection.drop(); + + awaitMessages(messageListener); + + assertThat(subscription.isActive()).isFalse(); + + container.stop(); + } + + @Test // DATAMONGO-1803 + public void callsDefaultErrorHandlerOnError() throws InterruptedException { + + dbFactory.getMongoDatabase().createCollection(COLLECTION_3_NAME, + new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); + + collection.insertOne(new Document("_id", "id-1").append("value", "foo")); + + ErrorHandler errorHandler = mock(ErrorHandler.class); + + DefaultMessageListenerContainer container = new DefaultMessageListenerContainer(template, + new SimpleAsyncTaskExecutor(), errorHandler); + + try { + container.start(); + + Subscription subscription = container.register(new TailableCursorRequest(messageListener, options()), + Document.class); + + SubscriptionUtils.awaitSubscription(subscription); + dbFactory.getMongoDatabase().drop(); + + verify(errorHandler, atLeast(1)).handleError(any(DataAccessException.class)); + } finally { + container.stop(); + } + } + + @Test // DATAMONGO-1803 + @EnableIfReplicaSetAvailable + public void runsMoreThanOneTaskAtOnce() throws InterruptedException { + + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, + new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + container.start(); + + CollectingMessageListener tailableListener = new CollectingMessageListener<>(); + Subscription tailableSubscription = container.register(new TailableCursorRequest(tailableListener, options()), + Document.class); + + CollectingMessageListener, Document> changeStreamListener = new CollectingMessageListener<>(); + Subscription changeStreamSubscription = container.register(new ChangeStreamRequest(changeStreamListener, options()), + Document.class); + + awaitSubscriptions(tailableSubscription, changeStreamSubscription); + + collection.insertOne(new Document("_id", "id-1").append("value", "foo")); + + awaitMessages(tailableListener); + awaitMessages(changeStreamListener); + + assertThat(tailableListener.getTotalNumberMessagesReceived()).isEqualTo(1); + assertThat(tailableListener.getFirstMessage().getRaw()).isInstanceOf(Document.class); + + assertThat(changeStreamListener.getTotalNumberMessagesReceived()).isEqualTo(1); + assertThat(changeStreamListener.getFirstMessage().getRaw()).isInstanceOf(ChangeStreamDocument.class); + } + + @Test // DATAMONGO-2012 + @EnableIfReplicaSetAvailable + 
@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void databaseLevelWatch() throws InterruptedException { + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, RequestOptions.none()), + Person.class); + + container.start(); + + awaitSubscription(subscription, TIMEOUT); + + collection.insertOne(new Document("_id", "col-1-id-1").append("firstname", "foo")); + collection.insertOne(new Document("_id", "col-1-id-2").append("firstname", "bar")); + + collection2.insertOne(new Document("_id", "col-2-id-1").append("firstname", "bar")); + collection2.insertOne(new Document("_id", "col-2-id-2").append("firstname", "foo")); + + awaitMessages(messageListener, 4, TIMEOUT); + + assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) + .containsExactly(new Person("col-1-id-1", "foo"), new Person("col-1-id-2", "bar"), + new Person("col-2-id-1", "bar"), new Person("col-2-id-2", "foo")); + } + + static class Person { + + @Id String id; + private String firstname; + private String lastname; + + public Person() {} + + public Person(String id, String firstname) { + this.id = id; + this.firstname = firstname; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname); + } + + public String toString() { + return "DefaultMessageListenerContainerTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ")"; + } + } + + static ChangeStreamRequestOptions options() { + return new ChangeStreamRequestOptions(DATABASE_NAME, COLLECTION_NAME, Duration.ofMillis(10), + ChangeStreamOptions.builder().build()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java new file mode 100644 index 0000000000..4df47b1c51 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java @@ -0,0 +1,297 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
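One detail worth calling out from the databaseLevelWatch test above: passing RequestOptions.none() leaves the collection unspecified, so the change stream is opened on the whole database rather than on a single collection. Sketched side by side (the listener variable is assumed to exist, as in the earlier sketch):

// Collection-scoped: only events from "collection-1" reach the listener.
ChangeStreamRequest<Document> scoped = ChangeStreamRequest.builder(listener).collection("collection-1").build();

// Database-scoped: with RequestOptions.none() the watch covers every collection
// in the database, which is why the test sees inserts from both collection-1
// and collection-2.
ChangeStreamRequest<Document> dbWide = new ChangeStreamRequest<>(listener, RequestOptions.none());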
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import static edu.umd.cs.mtc.TestFramework.*;
+import static org.assertj.core.api.Assertions.*;
+
+import edu.umd.cs.mtc.MultithreadedTestCase;
+
+import java.time.Duration;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import org.springframework.dao.DataAccessResourceFailureException;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.util.ErrorHandler;
+
+/**
+ * Unit tests for {@link DefaultMessageListenerContainer}.
+ *
+ * @author Christoph Strobl
+ */
+@ExtendWith(MockitoExtension.class)
+class DefaultMessageListenerContainerUnitTests {
+
+	@Mock MongoTemplate template;
+	@Mock ErrorHandler errorHandler;
+
+	private DefaultMessageListenerContainer container;
+
+	@BeforeEach
+	void setUp() {
+		container = new DefaultMessageListenerContainer(template);
+	}
+
+	@Test // DATAMONGO-1803
+	void throwsErrorOnNullTemplate() {
+		assertThatIllegalArgumentException().isThrownBy(() -> new DefaultMessageListenerContainer(null));
+	}
+
+	@Test // DATAMONGO-1803
+	void startStopContainer() throws Throwable {
+		runOnce(new MultithreadedStartStopContainer(container));
+	}
+
+	@Test // DATAMONGO-1803
+	void subscribeToContainerBeforeStartup() throws Throwable {
+		runOnce(new MultithreadedSubscribeBeforeStartup(container));
+	}
+
+	@Test // DATAMONGO-1803
+	void subscribeToContainerAfterStartup() throws Throwable {
+		runOnce(new MultithreadedSubscribeAfterStartup(container));
+	}
+
+	@Test // DATAMONGO-1803
+	void stopSubscriptionWhileRunning() throws Throwable {
+		runOnce(new StopSubscriptionWhileRunning(container));
+	}
+
+	@Test // DATAMONGO-1803
+	void removeSubscriptionWhileRunning() throws Throwable {
+		runOnce(new RemoveSubscriptionWhileRunning(container));
+	}
+
+	private static class RemoveSubscriptionWhileRunning extends MultithreadedTestCase {
+
+		DefaultMessageListenerContainer container;
+		Subscription subscription;
+
+		RemoveSubscriptionWhileRunning(DefaultMessageListenerContainer container) {
+			this.container = container;
+			subscription = container.register(new MockSubscriptionRequest(), new MockTask());
+		}
+
+		public void thread1() {
+
+			assertTick(0);
+			container.start();
+
+			waitForTick(2);
+			assertThat(container.isRunning()).isTrue();
+			container.stop();
+		}
+
+		public void thread2() throws InterruptedException {
+
+			waitForTick(1);
+			assertThat(subscription.isActive()).isTrue();
+
+			container.remove(subscription);
+			assertThat(subscription.isActive()).isFalse();
+		}
+	}
+
+	private static class StopSubscriptionWhileRunning extends MultithreadedTestCase {
+
+		DefaultMessageListenerContainer container;
+		Subscription subscription;
+
+		StopSubscriptionWhileRunning(DefaultMessageListenerContainer container) {
+			this.container = container;
+			subscription = container.register(new MockSubscriptionRequest(), new MockTask());
+		}
+
+		public void thread1() {
+
+			assertTick(0);
+			container.start();
+
+			waitForTick(2);
+			assertThat(container.isRunning()).isTrue();
+			container.stop();
+		}
+
+		public void thread2() throws InterruptedException {
+
+			waitForTick(1);
+			assertThat(subscription.isActive()).isTrue();
+
+			subscription.cancel();
+			assertThat(subscription.isActive()).isFalse();
+		}
+
+	}
+
+	private static class
MultithreadedSubscribeAfterStartup extends MultithreadedTestCase { + + DefaultMessageListenerContainer container; + + MultithreadedSubscribeAfterStartup(DefaultMessageListenerContainer container) { + this.container = container; + } + + public void thread1() { + + assertTick(0); + container.start(); + + waitForTick(2); + container.stop(); + } + + public void thread2() throws InterruptedException { + + waitForTick(1); + Subscription subscription = container.register(new MockSubscriptionRequest(), new MockTask()); + Thread.sleep(10); + assertThat(subscription.isActive()).isTrue(); + + waitForTick(3); + assertThat(subscription.isActive()).isFalse(); + } + + } + + private static class MultithreadedSubscribeBeforeStartup extends MultithreadedTestCase { + + DefaultMessageListenerContainer container; + + MultithreadedSubscribeBeforeStartup(DefaultMessageListenerContainer container) { + this.container = container; + } + + public void thread1() { + + assertTick(0); + + Subscription subscription = container.register(new MockSubscriptionRequest(), new MockTask()); + assertThat(subscription.isActive()).isFalse(); + + waitForTick(2); + assertThat(subscription.isActive()).isTrue(); + + waitForTick(4); + assertThat(subscription.isActive()).isFalse(); + } + + public void thread2() { + + waitForTick(1); + container.start(); + + waitForTick(3); + container.stop(); + } + + } + + private static class MultithreadedStartStopContainer extends MultithreadedTestCase { + + DefaultMessageListenerContainer container; + + MultithreadedStartStopContainer(DefaultMessageListenerContainer container) { + this.container = container; + } + + public void thread1() { + + assertTick(0); + container.start(); + waitForTick(2); + assertThat(container.isRunning()).isFalse(); + } + + public void thread2() { + + waitForTick(1); + assertThat(container.isRunning()).isTrue(); + container.stop(); + } + } + + static class MockTask implements Task { + + volatile State state; + volatile RuntimeException error; + + @Override + public void cancel() throws DataAccessResourceFailureException { + state = State.CANCELLED; + } + + @Override + public boolean isLongLived() { + return true; + } + + @Override + public State getState() { + return state; + } + + @Override + public void run() { + + state = State.RUNNING; + + while (isActive()) { + + if (error != null) { + throw error; + } + + try { + Thread.sleep(10); + } catch (InterruptedException e) { + Thread.interrupted(); + } + } + } + + void emitError(RuntimeException error) { + this.error = error; + } + + @Override + public boolean awaitStart(Duration timeout) throws InterruptedException { + + while (getState() == State.STARTING) { + Thread.sleep(10); + } + + return true; + } + } + + static class MockSubscriptionRequest implements SubscriptionRequest { + + @Override + public MessageListener getMessageListener() { + return message -> {}; + } + + @Override + public RequestOptions getRequestOptions() { + return () -> "foo"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java new file mode 100644 index 0000000000..7cfe859e8e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java @@ -0,0 +1,167 @@ +/* + * Copyright 2018-2025 the original author or authors. 
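The thread choreography in these unit tests comes from MultithreadedTC (edu.umd.cs.mtc): every threadN() method runs concurrently, and the framework's virtual clock advances to the next tick only once all test threads are blocked. waitForTick(n) parks a thread until the clock reaches n, while assertTick(n) asserts the current tick. A minimal illustration of that model (names here are made up; this is not part of the patch):

import static edu.umd.cs.mtc.TestFramework.*;

import edu.umd.cs.mtc.MultithreadedTestCase;

class TickDemo extends MultithreadedTestCase {

	volatile String value;

	public void thread1() {

		assertTick(0); // the virtual clock starts at 0
		value = "ready";
		waitForTick(2); // block; the clock only moves when all threads block
	}

	public void thread2() {

		waitForTick(1); // reached once thread1 blocks in waitForTick(2)
		assertEquals("ready", value); // guaranteed to observe the write
	}
}

// runOnce(new TickDemo()); // runs thread1() and thread2() concurrently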
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Utilities for testing long-running async message retrieval.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ */
+class SubscriptionUtils {
+
+	static final Duration DEFAULT_TIMEOUT = Duration.ofMillis(1500);
+
+	/**
+	 * Wait for the {@link Subscription} {@link Subscription#isActive() to become active} but not longer than
+	 * {@link #DEFAULT_TIMEOUT}.
+	 *
+	 * @param subscription
+	 * @throws InterruptedException
+	 */
+	static void awaitSubscription(Subscription subscription) throws InterruptedException {
+		awaitSubscription(subscription, DEFAULT_TIMEOUT);
+	}
+
+	/**
+	 * Wait for all {@link Subscription Subscriptions} to {@link Subscription#isActive() become active} but not longer
+	 * than {@link #DEFAULT_TIMEOUT}.
+	 *
+	 * @param subscriptions
+	 * @throws InterruptedException
+	 */
+	static void awaitSubscriptions(Subscription... subscriptions) throws InterruptedException {
+		awaitSubscriptions(DEFAULT_TIMEOUT, subscriptions);
+	}
+
+	/**
+	 * Wait for all {@link Subscription Subscriptions} to {@link Subscription#isActive() become active} but not longer
+	 * than {@literal timeout}.
+	 *
+	 * @param timeout
+	 * @param subscriptions
+	 * @throws InterruptedException
+	 */
+	static void awaitSubscriptions(Duration timeout, Subscription... subscriptions) throws InterruptedException {
+
+		long passedMs = 0;
+		long maxMs = timeout.toMillis();
+
+		Collection<Subscription> subscriptionList = Arrays.asList(subscriptions);
+
+		while (!subscriptionList.stream().allMatch(Subscription::isActive) && passedMs < maxMs) {
+
+			Thread.sleep(10);
+			passedMs += 10;
+		}
+	}
+
+	/**
+	 * Wait for the {@link Subscription} {@link Subscription#isActive() to become active} but not longer than
+	 * {@literal timeout}.
+	 *
+	 * @param subscription
+	 * @param timeout
+	 * @throws InterruptedException
+	 */
+	static void awaitSubscription(Subscription subscription, Duration timeout) throws InterruptedException {
+		subscription.await(timeout);
+	}
+
+	/**
+	 * Wait for {@link CollectingMessageListener} to receive messages but not longer than {@link #DEFAULT_TIMEOUT}.
+	 *
+	 * @param listener
+	 * @throws InterruptedException
+	 */
+	static void awaitMessages(CollectingMessageListener<?, ?> listener) throws InterruptedException {
+		awaitMessages(listener, Integer.MAX_VALUE);
+	}
+
+	/**
+	 * Wait for {@link CollectingMessageListener} to receive exactly {@literal nrMessages} messages but not longer than
+	 * {@link #DEFAULT_TIMEOUT}.
+	 *
+	 * @param listener
+	 * @param nrMessages
+	 * @throws InterruptedException
+	 */
+	static void awaitMessages(CollectingMessageListener<?, ?> listener, int nrMessages) throws InterruptedException {
+		awaitMessages(listener, nrMessages, DEFAULT_TIMEOUT);
+	}
+
+	/**
+	 * Wait for {@link CollectingMessageListener} to receive exactly {@literal nrMessages} messages but not longer than
+	 * {@literal timeout}.
+	 *
+	 * @param listener
+	 * @param nrMessages
+	 * @param timeout
+	 * @throws InterruptedException
+	 */
+	static void awaitMessages(CollectingMessageListener<?, ?> listener, int nrMessages, Duration timeout)
+			throws InterruptedException {
+
+		long passedMs = 0;
+		long maxMs = timeout.toMillis();
+
+		while (listener.getTotalNumberMessagesReceived() < nrMessages && passedMs < maxMs) {
+			Thread.sleep(10);
+			passedMs += 10;
+		}
+	}
+
+	/**
+	 * {@link MessageListener} implementation collecting received {@link Message messages}.
+	 *
+	 * @param <S> source message type.
+	 * @param <T> target message type.
+	 */
+	static class CollectingMessageListener<S, T> implements MessageListener<S, T> {
+
+		private volatile List<Message<S, T>> messages = new ArrayList<>();
+
+		@Override
+		public void onMessage(Message<S, T> message) {
+			messages.add(message);
+		}
+
+		int getTotalNumberMessagesReceived() {
+			return messages.size();
+		}
+
+		public List<Message<S, T>> getMessages() {
+			return messages;
+		}
+
+		public Message<S, T> getMessage(int nr) {
+			return messages.get(nr);
+		}
+
+		public Message<S, T> getFirstMessage() {
+			return messages.get(0);
+		}
+
+		public Message<S, T> getLastMessage() {
+			return messages.get(messages.size() - 1);
+		}
+	}
+
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java
new file mode 100644
index 0000000000..f9d4c71eda
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.messaging;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.springframework.data.mongodb.core.query.Criteria.*;
+
+import org.bson.Document;
+import org.junit.jupiter.api.Test;
+
+import org.springframework.data.mongodb.core.messaging.DefaultMessageListenerContainerTests.Person;
+import org.springframework.data.mongodb.core.query.Query;
+
+/**
+ * Unit tests for {@link TailableCursorRequest}.
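The shouldBuildRequest test below exercises the fluent builder; in application code the same builder composes the collection, an optional server-side filter, and the listener in one expression. For instance (the collection name and criteria values are made up for illustration):

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

MessageListener<Document, Person> listener = message -> System.out.println(message.getBody());

TailableCursorRequest<Person> request = TailableCursorRequest.builder(listener)
		.collection("person") // must name a capped collection
		.filter(query(where("firstname").is("luke"))) // applied server-side
		.build();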
+ * + * @author Mark Paluch + */ +public class TailableCursorRequestUnitTests { + + @Test // DATAMONGO-1803 + public void shouldBuildRequest() { + + MessageListener listener = System.out::println; + + TailableCursorRequest request = TailableCursorRequest.builder(listener).collection("foo") + .filter(Query.query(where("firstname").is("bar"))).build(); + + assertThat(request.getRequestOptions().getCollectionName()).isEqualTo("foo"); + assertThat(request.getRequestOptions().getQuery()).isPresent(); + assertThat(request.getMessageListener()).isEqualTo(listener); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java new file mode 100644 index 0000000000..60d9153212 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java @@ -0,0 +1,261 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import static org.springframework.data.mongodb.core.messaging.SubscriptionUtils.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Objects; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; +import org.springframework.data.mongodb.core.messaging.TailableCursorRequest.TailableCursorRequestOptions; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration test for subscribing to a capped {@link com.mongodb.client.MongoCollection} inside the + * {@link DefaultMessageListenerContainer} using {@link TailableCursorRequest}. 
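Before any of the tailable-cursor tests can subscribe, the target collection has to be capped; the server rejects a tailable cursor on a regular collection. The setup used below boils down to the following sketch (template and the User type are the ones defined in this test class):

// Tailable cursors require a capped collection.
template.createCollection(User.class, CollectionOptions.empty().capped().maxDocuments(10000).size(10000));

MessageListenerContainer container = new DefaultMessageListenerContainer(template);
container.start();

MessageListener<Document, User> listener = message -> System.out.println(message.getBody());

// The lambda is a RequestOptions implementation supplying just the collection name.
container.register(new TailableCursorRequest<>(listener, () -> "user"), User.class);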
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class }) +public class TailableCursorTests { + + static final String COLLECTION_NAME = "user"; + + static @Client MongoClient mongoClient; + static ThreadPoolExecutor executor; + MongoTemplate template; + MessageListenerContainer container; + + User jellyBelly; + User huffyFluffy; + User sugarSplashy; + + @BeforeAll + public static void beforeClass() { + executor = new ThreadPoolExecutor(2, 2, 1, TimeUnit.SECONDS, new LinkedBlockingDeque<>()); + } + + @BeforeEach + public void setUp() { + + template = new MongoTemplate(mongoClient, "tailable-cursor-tests"); + + template.dropCollection(User.class); + template.createCollection(User.class, CollectionOptions.empty().capped().maxDocuments(10000).size(10000)); + + container = new DefaultMessageListenerContainer(template, executor); + container.start(); + + jellyBelly = new User(); + jellyBelly.id = "id-1"; + jellyBelly.userName = "jellyBelly"; + jellyBelly.age = 7; + + huffyFluffy = new User(); + huffyFluffy.id = "id-2"; + huffyFluffy.userName = "huffyFluffy"; + huffyFluffy.age = 7; + + sugarSplashy = new User(); + sugarSplashy.id = "id-3"; + sugarSplashy.userName = "sugarSplashy"; + sugarSplashy.age = 5; + } + + @AfterEach + public void tearDown() { + container.stop(); + } + + @AfterAll + public static void afterClass() { + executor.shutdown(); + } + + @Test // DATAMONGO-1803 + public void readsDocumentMessageCorrectly() throws InterruptedException { + + CollectingMessageListener messageListener = new CollectingMessageListener<>(); + + awaitSubscription( + container.register(new TailableCursorRequest<>(messageListener, () -> COLLECTION_NAME), Document.class)); + + template.save(jellyBelly); + + awaitMessages(messageListener, 1); + + Document expected = new Document("_id", "id-1").append("user_name", "jellyBelly").append("age", 7).append("_class", + TailableCursorTests.User.class.getName()); + + assertThat(messageListener.getFirstMessage().getProperties()) + .isEqualTo(MessageProperties.builder().collectionName("user").databaseName("tailable-cursor-tests").build()); + assertThat(messageListener.getFirstMessage().getRaw()).isEqualTo(expected); + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(expected); + } + + @Test // DATAMONGO-1803 + public void convertsMessageCorrectly() throws InterruptedException { + + CollectingMessageListener messageListener = new CollectingMessageListener<>(); + + awaitSubscription( + container.register(new TailableCursorRequest<>(messageListener, () -> COLLECTION_NAME), User.class)); + + template.save(jellyBelly); + + awaitMessages(messageListener, 1); + + Document expected = new Document("_id", "id-1").append("user_name", "jellyBelly").append("age", 7).append("_class", + TailableCursorTests.User.class.getName()); + + assertThat(messageListener.getFirstMessage().getProperties()) + .isEqualTo(MessageProperties.builder().collectionName("user").databaseName("tailable-cursor-tests").build()); + assertThat(messageListener.getFirstMessage().getRaw()).isEqualTo(expected); + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + } + + @Test // DATAMONGO-1803 + public void filtersMessagesCorrectly() throws InterruptedException { + + CollectingMessageListener messageListener = new CollectingMessageListener<>(); + + awaitSubscription(container.register(new TailableCursorRequest<>(messageListener, + 
TailableCursorRequestOptions.builder().collection(COLLECTION_NAME).filter(query(where("age").is(7))).build()), + User.class)); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getMessages().stream().map(Message::getBody)).hasSize(2).doesNotContain(sugarSplashy); + } + + @Test // DATAMONGO-1803 + public void mapsFilterToDomainType() throws InterruptedException { + + CollectingMessageListener messageListener = new CollectingMessageListener<>(); + + awaitSubscription( + container + .register( + new TailableCursorRequest<>(messageListener, TailableCursorRequestOptions.builder() + .collection(COLLECTION_NAME).filter(query(where("userName").is("sugarSplashy"))).build()), + User.class)); + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener, 1); + + assertThat(messageListener.getMessages().stream().map(Message::getBody)).hasSize(1).containsExactly(sugarSplashy); + } + + @Test // DATAMONGO-1803 + public void emitsFromStart() throws InterruptedException { + + template.save(jellyBelly); + template.save(huffyFluffy); + + CollectingMessageListener messageListener = new CollectingMessageListener<>(); + + awaitSubscription( + container.register(new TailableCursorRequest<>(messageListener, () -> COLLECTION_NAME), User.class)); + + template.save(sugarSplashy); + + awaitMessages(messageListener, 3); + + assertThat(messageListener.getMessages().stream().map(Message::getBody)).hasSize(3).containsExactly(jellyBelly, + huffyFluffy, sugarSplashy); + } + + static class User { + + @Id String id; + @Field("user_name") String userName; + int age; + + public String getId() { + return this.id; + } + + public String getUserName() { + return this.userName; + } + + public int getAge() { + return this.age; + } + + public void setId(String id) { + this.id = id; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + User user = (User) o; + return age == user.age && Objects.equals(id, user.id) && Objects.equals(userName, user.userName); + } + + @Override + public int hashCode() { + return Objects.hash(id, userName, age); + } + + public String toString() { + return "TailableCursorTests.User(id=" + this.getId() + ", userName=" + this.getUserName() + ", age=" + + this.getAge() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java new file mode 100644 index 0000000000..6888f9101c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.util.ErrorHandler; + +/** + * Unit tests for {@link TaskFactory}. + * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class TaskFactoryUnitTests { + + @Mock MongoConverter converter; + @Mock MongoTemplate template; + @Mock MessageListener messageListener; + @Mock ErrorHandler errorHandler; + + private TaskFactory factory; + + @BeforeEach + void setUp() { + factory = new TaskFactory(template); + } + + @Test // DATAMONGO-1803 + void requestMustNotBeNull() { + assertThatIllegalArgumentException().isThrownBy(() -> factory.forRequest(null, Object.class, errorHandler)); + } + + @Test // DATAMONGO-1803 + void createsChangeStreamRequestCorrectly() { + + when(template.getConverter()).thenReturn(converter); + + ChangeStreamRequestOptions options = Mockito.mock(ChangeStreamRequestOptions.class); + Task task = factory.forRequest(new ChangeStreamRequest(messageListener, options), Object.class, errorHandler); + + assertThat(task).isInstanceOf(ChangeStreamTask.class); + } + + @Test // DATAMONGO-1803 + void createsTailableRequestCorrectly() { + + when(template.getConverter()).thenReturn(converter); + + RequestOptions options = Mockito.mock(RequestOptions.class); + when(options.getCollectionName()).thenReturn("collection-1"); + Task task = factory.forRequest(new TailableCursorRequest(messageListener, options), Object.class, errorHandler); + + assertThat(task).isInstanceOf(TailableCursorTask.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java index e933768b1d..70bd6dc3d9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
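TaskFactory is internal API: DefaultMessageListenerContainer hands each registered request to it, and the factory picks the task type from the request type, which is exactly what the two tests above verify. Roughly (listener, options, and errorHandler stand in for real instances; raw types mirror the tests):

TaskFactory factory = new TaskFactory(template);

// A ChangeStreamRequest is turned into a ChangeStreamTask ...
Task streamTask = factory.forRequest(new ChangeStreamRequest(listener, options), Document.class, errorHandler);

// ... and a TailableCursorRequest into a TailableCursorTask.
Task cursorTask = factory.forRequest(new TailableCursorRequest(listener, options), Document.class, errorHandler);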
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,23 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; - -import org.junit.Test; -import org.springframework.data.domain.Sort.Direction; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; import nl.jqno.equalsverifier.EqualsVerifier; import nl.jqno.equalsverifier.Warning; +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; + /** * Unit tests for {@link BasicQuery}. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author John Willemin @@ -41,41 +41,39 @@ public class BasicQueryUnitTests { @Test public void createsQueryFromPlainJson() { Query q = new BasicQuery("{ \"name\" : \"Thomas\"}"); - DBObject reference = new BasicDBObject("name", "Thomas"); - assertThat(q.getQueryObject(), is(reference)); + Document reference = new Document("name", "Thomas"); + assertThat(q.getQueryObject()).isEqualTo(reference); } @Test public void addsCriteriaCorrectly() { Query q = new BasicQuery("{ \"name\" : \"Thomas\"}").addCriteria(where("age").lt(80)); - DBObject reference = new BasicDBObject("name", "Thomas"); - reference.put("age", new BasicDBObject("$lt", 80)); - assertThat(q.getQueryObject(), is(reference)); + Document reference = new Document("name", "Thomas"); + reference.put("age", new Document("$lt", 80)); + assertThat(q.getQueryObject()).isEqualTo(reference); } @Test public void overridesSortCorrectly() { BasicQuery query = new BasicQuery("{}"); - query.setSortObject(new BasicDBObject("name", -1)); - query.with(new org.springframework.data.domain.Sort(Direction.ASC, "lastname")); + query.setSortObject(new Document("name", -1)); + query.with(Sort.by(Direction.ASC, "lastname")); - DBObject sortReference = new BasicDBObject("name", -1); + Document sortReference = new Document("name", -1); sortReference.put("lastname", 1); - assertThat(query.getSortObject(), is(sortReference)); + assertThat(query.getSortObject()).isEqualTo(sortReference); } - /** - * @see DATAMONGO-1093 - */ - @Test + @Test // DATAMONGO-1093 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "EqualsVerifier uses reflection on Optional") public void equalsContract() { BasicQuery query1 = new BasicQuery("{ \"name\" : \"Thomas\"}", "{\"name\":1, \"age\":1}"); - query1.setSortObject(new BasicDBObject("name", -1)); + query1.setSortObject(new Document("name", -1)); BasicQuery query2 = new BasicQuery("{ \"name\" : \"Oliver\"}", "{\"name\":1, \"address\":1}"); - query2.setSortObject(new BasicDBObject("name", 1)); + query2.setSortObject(new Document("name", 1)); EqualsVerifier.forExamples(query1, query2) // .withRedefinedSuperclass() // @@ -83,49 +81,40 @@ public void equalsContract() { .verify(); } - /** - * @see DATAMONGO-1093 - */ - @Test + @Test // DATAMONGO-1093 public void handlesEqualsAndHashCodeCorrectlyForExactCopies() { 
String qry = "{ \"name\" : \"Thomas\"}"; String fields = "{\"name\":1, \"age\":1}"; BasicQuery query1 = new BasicQuery(qry, fields); - query1.setSortObject(new BasicDBObject("name", -1)); + query1.setSortObject(new Document("name", -1)); BasicQuery query2 = new BasicQuery(qry, fields); - query2.setSortObject(new BasicDBObject("name", -1)); + query2.setSortObject(new Document("name", -1)); - assertThat(query1, is(equalTo(query1))); - assertThat(query1, is(equalTo(query2))); - assertThat(query1.hashCode(), is(query2.hashCode())); + assertThat(query1).isEqualTo(query1); + assertThat(query1).isEqualTo(query2); + assertThat(query1.hashCode()).isEqualTo(query2.hashCode()); } - /** - * @see DATAMONGO-1093 - */ - @Test + @Test // DATAMONGO-1093 public void handlesEqualsAndHashCodeCorrectlyWhenBasicQuerySettingsDiffer() { String qry = "{ \"name\" : \"Thomas\"}"; String fields = "{\"name\":1, \"age\":1}"; BasicQuery query1 = new BasicQuery(qry, fields); - query1.setSortObject(new BasicDBObject("name", -1)); + query1.setSortObject(new Document("name", -1)); BasicQuery query2 = new BasicQuery(qry, fields); - query2.setSortObject(new BasicDBObject("name", 1)); + query2.setSortObject(new Document("name", 1)); - assertThat(query1, is(not(equalTo(query2)))); - assertThat(query1.hashCode(), is(not(query2.hashCode()))); + assertThat(query1).isNotEqualTo(query2); + assertThat(query1.hashCode()).isNotEqualTo(query2.hashCode()); } - /** - * @see DATAMONGO-1093 - */ - @Test + @Test // DATAMONGO-1093 public void handlesEqualsAndHashCodeCorrectlyWhenQuerySettingsDiffer() { String qry = "{ \"name\" : \"Thomas\"}"; @@ -137,14 +126,11 @@ public void handlesEqualsAndHashCodeCorrectlyWhenQuerySettingsDiffer() { BasicQuery query2 = new BasicQuery(qry, fields); query2.getMeta().setComment("bar"); - assertThat(query1, is(not(equalTo(query2)))); - assertThat(query1.hashCode(), is(not(query2.hashCode()))); + assertThat(query1).isNotEqualTo(query2); + assertThat(query1.hashCode()).isNotEqualTo(query2.hashCode()); } - /** - * @see DATAMONGO-1387 - */ - @Test + @Test // DATAMONGO-1387 public void returnsFieldsCorrectly() { String qry = "{ \"name\" : \"Thomas\"}"; @@ -152,13 +138,10 @@ public void returnsFieldsCorrectly() { BasicQuery query1 = new BasicQuery(qry, fields); - assertThat(query1.getFieldsObject(), isBsonObject().containing("name").containing("age")); + assertThat(query1.getFieldsObject()).containsKeys("name", "age"); } - /** - * @see DATAMONGO-1387 - */ - @Test + @Test // DATAMONGO-1387 public void handlesFieldsIncludeCorrectly() { String qry = "{ \"name\" : \"Thomas\"}"; @@ -166,13 +149,10 @@ public void handlesFieldsIncludeCorrectly() { BasicQuery query1 = new BasicQuery(qry); query1.fields().include("name"); - assertThat(query1.getFieldsObject(), isBsonObject().containing("name")); + assertThat(query1.getFieldsObject()).containsKey("name"); } - /** - * @see DATAMONGO-1387 - */ - @Test + @Test // DATAMONGO-1387 public void combinesFieldsIncludeCorrectly() { String qry = "{ \"name\" : \"Thomas\"}"; @@ -181,7 +161,6 @@ public void combinesFieldsIncludeCorrectly() { BasicQuery query1 = new BasicQuery(qry, fields); query1.fields().include("gender"); - assertThat(query1.getFieldsObject(), isBsonObject().containing("name").containing("age").containing("gender")); + assertThat(query1.getFieldsObject()).containsKeys("name", "age", "gender"); } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicUpdateUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicUpdateUnitTests.java new file mode 100644 index 0000000000..dacc270230 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicUpdateUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.mongodb.core.query.Update.Position; + +/** + * Unit tests for {@link BasicUpdate}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +class BasicUpdateUnitTests { + + @Test // GH-4918 + void setOperationValueShouldAppendsOpsCorrectly() { + + BasicUpdate basicUpdate = new BasicUpdate("{}"); + basicUpdate.setOperationValue("$set", "key1", "alt"); + basicUpdate.setOperationValue("$set", "key2", "nps"); + basicUpdate.setOperationValue("$unset", "key3", "x"); + + assertThat(basicUpdate.getUpdateObject()) + .isEqualTo("{ '$set' : { 'key1' : 'alt', 'key2' : 'nps' }, '$unset' : { 'key3' : 'x' } }"); + } + + @Test // GH-4918 + void setOperationErrorsOnNonMapType() { + + BasicUpdate basicUpdate = new BasicUpdate("{ '$set' : 1 }"); + assertThatExceptionOfType(IllegalStateException.class) + .isThrownBy(() -> basicUpdate.setOperationValue("$set", "k", "v")); + } + + @ParameterizedTest // GH-4918 + @CsvSource({ // + "{ }, k1, false", // + "{ '$set' : { 'k1' : 'v1' } }, k1, true", // + "{ '$set' : { 'k1' : 'v1' } }, k2, false", // + "{ '$set' : { 'k1.k2' : 'v1' } }, k1, false", // + "{ '$set' : { 'k1.k2' : 'v1' } }, k1.k2, true", // + "{ '$set' : { 'k1' : 'v1' } }, '', false", // + "{ '$inc' : { 'k1' : 1 } }, k1, true" }) + void modifiesLooksUpKeyCorrectly(String source, String key, boolean modified) { + + BasicUpdate basicUpdate = new BasicUpdate(source); + assertThat(basicUpdate.modifies(key)).isEqualTo(modified); + } + + @ParameterizedTest // GH-4918 + @MethodSource("updateOpArgs") + void updateOpsShouldNotOverrideExistingValues(String operator, Function updateFunction) { + + Document source = Document.parse("{ '%s' : { 'key-1' : 'value-1' } }".formatted(operator)); + Update update = updateFunction.apply(new BasicUpdate(source)); + + assertThat(update.getUpdateObject()).containsEntry("%s.key-1".formatted(operator), "value-1") + .containsKey("%s.key-2".formatted(operator)); + } + + @Test // GH-4918 + 
void shouldNotOverridePullAll() { + + Document source = Document.parse("{ '$pullAll' : { 'key-1' : ['value-1'] } }"); + Update update = new BasicUpdate(source).pullAll("key-1", new String[] { "value-2" }).pullAll("key-2", + new String[] { "value-3" }); + + assertThat(update.getUpdateObject()).containsEntry("$pullAll.key-1", Arrays.asList("value-1", "value-2")) + .containsEntry("$pullAll.key-2", List.of("value-3")); + } + + static Stream updateOpArgs() { + return Stream.of( // + Arguments.of("$set", (Function) update -> update.set("key-2", "value-2")), + Arguments.of("$unset", (Function) update -> update.unset("key-2")), + Arguments.of("$inc", (Function) update -> update.inc("key-2", 1)), + Arguments.of("$push", (Function) update -> update.push("key-2", "value-2")), + Arguments.of("$addToSet", (Function) update -> update.addToSet("key-2", "value-2")), + Arguments.of("$pop", (Function) update -> update.pop("key-2", Position.FIRST)), + Arguments.of("$pull", (Function) update -> update.pull("key-2", "value-2")), + Arguments.of("$pullAll", + (Function) update -> update.pullAll("key-2", new String[] { "value-2" })), + Arguments.of("$rename", (Function) update -> update.rename("key-2", "value-2"))); + }; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java index dc8e1ce84c..72f42db9b5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java @@ -1,270 +1,225 @@ -/* - * Copyright 2010-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.query; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; - -import org.junit.Test; -import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; -import org.springframework.data.mongodb.core.geo.GeoJsonLineString; -import org.springframework.data.mongodb.core.geo.GeoJsonPoint; - -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; - -/** - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -public class CriteriaTests { - - @Test - public void testSimpleCriteria() { - Criteria c = new Criteria("name").is("Bubba"); - assertEquals("{ \"name\" : \"Bubba\"}", c.getCriteriaObject().toString()); - } - - @Test - public void testNotEqualCriteria() { - Criteria c = new Criteria("name").ne("Bubba"); - assertEquals("{ \"name\" : { \"$ne\" : \"Bubba\"}}", c.getCriteriaObject().toString()); - } - - @Test - public void buildsIsNullCriteriaCorrectly() { - - DBObject reference = new BasicDBObject("name", null); - - Criteria criteria = new Criteria("name").is(null); - assertThat(criteria.getCriteriaObject(), is(reference)); - } - - @Test - public void testChainedCriteria() { - Criteria c = new Criteria("name").is("Bubba").and("age").lt(21); - assertEquals("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}", c.getCriteriaObject().toString()); - } - - @Test(expected = InvalidMongoDbApiUsageException.class) - public void testCriteriaWithMultipleConditionsForSameKey() { - Criteria c = new Criteria("name").gte("M").and("name").ne("A"); - c.getCriteriaObject(); - } - - @Test - public void equalIfCriteriaMatches() { - - Criteria left = new Criteria("name").is("Foo").and("lastname").is("Bar"); - Criteria right = new Criteria("name").is("Bar").and("lastname").is("Bar"); - - assertThat(left, is(not(right))); - assertThat(right, is(not(left))); - } - - /** - * @see DATAMONGO-507 - */ - @Test(expected = IllegalArgumentException.class) - public void shouldThrowExceptionWhenTryingToNegateAndOperation() { - - new Criteria() // - .not() // - .andOperator(Criteria.where("delete").is(true).and("_id").is(42)); // - } - - /** - * @see DATAMONGO-507 - */ - @Test(expected = IllegalArgumentException.class) - public void shouldThrowExceptionWhenTryingToNegateOrOperation() { - - new Criteria() // - .not() // - .orOperator(Criteria.where("delete").is(true).and("_id").is(42)); // - } - - /** - * @see DATAMONGO-507 - */ - @Test(expected = IllegalArgumentException.class) - public void shouldThrowExceptionWhenTryingToNegateNorOperation() { - - new Criteria() // - .not() // - .norOperator(Criteria.where("delete").is(true).and("_id").is(42)); // - } - - /** - * @see DATAMONGO-507 - */ - @Test - public void shouldNegateFollowingSimpleExpression() { - - Criteria c = Criteria.where("age").not().gt(18).and("status").is("student"); - DBObject co = c.getCriteriaObject(); - - assertThat(co, is(notNullValue())); - assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")); - } - - /** - * @see DATAMONGO-1068 - */ - @Test - public void getCriteriaObjectShouldReturnEmptyDBOWhenNoCriteriaSpecified() { - - DBObject dbo = new Criteria().getCriteriaObject(); - - assertThat(dbo, equalTo(new BasicDBObjectBuilder().get())); - } - - /** - * @see DATAMONGO-1068 - */ - @Test - public void 
getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() { - - DBObject dbo = new Criteria().lt("foo").getCriteriaObject(); - - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").get())); - } - - /** - * @see DATAMONGO-1068 - */ - @Test - public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() { - - DBObject dbo = new Criteria().lt("foo").gt("bar").getCriteriaObject(); - - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").add("$gt", "bar").get())); - } - - /** - * @see DATAMONGO-1068 - */ - @Test - public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() { - - DBObject dbo = new Criteria().lt("foo").not().getCriteriaObject(); - - assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$not", new BasicDBObject("$lt", "foo")).get())); - } - - /** - * @see DATAMONGO-1135 - */ - @Test - public void geoJsonTypesShouldBeWrappedInGeometry() { - - DBObject dbo = new Criteria("foo").near(new GeoJsonPoint(100, 200)).getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$near.$geometry", new GeoJsonPoint(100, 200))); - } - - /** - * @see DATAMONGO-1135 - */ - @Test - public void legacyCoordinateTypesShouldNotBeWrappedInGeometry() { - - DBObject dbo = new Criteria("foo").near(new Point(100, 200)).getCriteriaObject(); - - assertThat(dbo, isBsonObject().notContaining("foo.$near.$geometry")); - } - - /** - * @see DATAMONGO-1135 - */ - @Test - public void maxDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { - - DBObject dbo = new Criteria("foo").near(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$near.$maxDistance", 50D)); - } - - /** - * @see DATAMONGO-1135 - */ - @Test - public void maxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { - - DBObject dbo = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$nearSphere.$maxDistance", 50D)); - } - - /** - * @see DATAMONGO-1110 - */ - @Test - public void minDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { - - DBObject dbo = new Criteria("foo").near(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$near.$minDistance", 50D)); - } - - /** - * @see DATAMONGO-1110 - */ - @Test - public void minDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { - - DBObject dbo = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D)); - } - - /** - * @see DATAMONGO-1110 - */ - @Test - public void minAndMaxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { - - DBObject dbo = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).maxDistance(100D) - .getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D)); - assertThat(dbo, isBsonObject().containing("foo.$nearSphere.$maxDistance", 100D)); - } - - /** - * @see DATAMONGO-1134 - */ - @Test(expected = IllegalArgumentException.class) - public void intersectsShouldThrowExceptionWhenCalledWihtNullValue() { - new Criteria("foo").intersects(null); - } - - /** - * @see DATAMONGO-1134 - */ - @Test - public void intersectsShouldWrapGeoJsonTypeInGeometryCorrectly() { - - GeoJsonLineString lineString = new 
GeoJsonLineString(new Point(0, 0), new Point(10, 10)); - DBObject dbo = new Criteria("foo").intersects(lineString).getCriteriaObject(); - - assertThat(dbo, isBsonObject().containing("foo.$geoIntersects.$geometry", lineString)); - } -} +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Arrays; +import java.util.Base64; + +import org.bson.types.Binary; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link Criteria} usage as part of a {@link Query}. + * + * @author Christoph Strobl + * @author Andreas Zink + */ +@ExtendWith(MongoTemplateExtension.class) +class CriteriaTests { + + @Template(initialEntitySet = { DocumentWithBitmask.class }) // + static MongoTestTemplate ops; + + static final DocumentWithBitmask FIFTY_FOUR/*00110110*/ = new DocumentWithBitmask("1", Integer.valueOf(54), + Integer.toBinaryString(54)); + static final DocumentWithBitmask TWENTY_INT/*00010100*/ = new DocumentWithBitmask("2", Integer.valueOf(20), + Integer.toBinaryString(20)); + static final DocumentWithBitmask TWENTY_FLOAT/*00010100*/ = new DocumentWithBitmask("3", Float.valueOf(20), + Integer.toBinaryString(20)); + static final DocumentWithBitmask ONE_HUNDRED_TWO/*01100110*/ = new DocumentWithBitmask("4", + new Binary(Base64.getDecoder().decode("Zg==")), "01100110"); + + @BeforeEach + void beforeEach() { + + ops.flush(); + + ops.insert(FIFTY_FOUR); + ops.insert(TWENTY_INT); + ops.insert(TWENTY_FLOAT); + ops.insert(ONE_HUNDRED_TWO); + } + + @Test // DATAMONGO-1808 + void bitsAllClearWithBitPositions() { + + assertThat(ops.find(query(where("value").bits().allClear(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); + } + + @Test // DATAMONGO-1808 + void bitsAllClearWithNumericBitmask() { + + assertThat(ops.find(query(where("value").bits().allClear(35)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); + } + + @Test // DATAMONGO-1808 + void bitsAllClearWithStringBitmask() { + + assertThat(ops.find(query(where("value").bits().allClear("ID==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); + } + + @Test // DATAMONGO-1808 + void bitsAllSetWithBitPositions() { + + assertThat(ops.find(query(where("value").bits().allSet(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + 
.containsExactlyInAnyOrder(FIFTY_FOUR, ONE_HUNDRED_TWO); + } + + @Test // DATAMONGO-1808 + void bitsAllSetWithNumericBitmask() { + + assertThat(ops.find(query(where("value").bits().allSet(50)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR); + } + + @Test // DATAMONGO-1808 + void bitsAllSetWithStringBitmask() { + + assertThat(ops.find(query(where("value").bits().allSet("MC==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR); + } + + @Test // DATAMONGO-1808 + void bitsAnyClearWithBitPositions() { + + assertThat(ops.find(query(where("value").bits().anyClear(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); + } + + @Test // DATAMONGO-1808 + void bitsAnyClearWithNumericBitmask() { + + assertThat(ops.find(query(where("value").bits().anyClear(35)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, TWENTY_INT, TWENTY_FLOAT, ONE_HUNDRED_TWO); + } + + @Test // DATAMONGO-1808 + void bitsAnyClearWithStringBitmask() { + + assertThat(ops.find(query(where("value").bits().anyClear("MC==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT, ONE_HUNDRED_TWO); + } + + @Test // DATAMONGO-1808 + void bitsAnySetWithBitPositions() { + + assertThat(ops.find(query(where("value").bits().anySet(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, ONE_HUNDRED_TWO); + } + + @Test // DATAMONGO-1808 + void bitsAnySetWithNumericBitmask() { + + assertThat(ops.find(query(where("value").bits().anySet(35)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, ONE_HUNDRED_TWO); + } + + @Test // DATAMONGO-1808 + void bitsAnySetWithStringBitmask() { + + assertThat(ops.find(query(where("value").bits().anySet("MC==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, TWENTY_INT, TWENTY_FLOAT, ONE_HUNDRED_TWO); + } + + static class DocumentWithBitmask { + + @Id String id; + Object value; + String binaryValue; + + public DocumentWithBitmask(String id, Object value, String binaryValue) { + + this.id = id; + this.value = value; + this.binaryValue = binaryValue; + } + + public String getId() { + return this.id; + } + + public Object getValue() { + return this.value; + } + + public String getBinaryValue() { + return this.binaryValue; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(Object value) { + this.value = value; + } + + public void setBinaryValue(String binaryValue) { + this.binaryValue = binaryValue; + } + + public String toString() { + return "CriteriaTests.DocumentWithBitmask(id=" + this.getId() + ", value=" + this.getValue() + ", binaryValue=" + + this.getBinaryValue() + ")"; + } + + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof DocumentWithBitmask)) + return false; + final DocumentWithBitmask other = (DocumentWithBitmask) o; + if (!other.canEqual((Object) this)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? other$id != null : !this$id.equals(other$id)) + return false; + final Object this$binaryValue = this.getBinaryValue(); + final Object other$binaryValue = other.getBinaryValue(); + if (this$binaryValue == null ? 
other$binaryValue != null : !this$binaryValue.equals(other$binaryValue)) + return false; + return true; + } + + protected boolean canEqual(final Object other) { + return other instanceof DocumentWithBitmask; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $binaryValue = this.getBinaryValue(); + result = result * PRIME + ($binaryValue == null ? 43 : $binaryValue.hashCode()); + return result; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java new file mode 100644 index 0000000000..e734dd6aba --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java @@ -0,0 +1,479 @@ +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.regex.Pattern; + +import org.bson.BsonRegularExpression; +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.geo.GeoJsonLineString; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; + +/** + * Unit tests for {@link Criteria}. 
+ * + * @author Oliver Gierke + * @author Thomas Darimont + * @author Christoph Strobl + * @author Andreas Zink + * @author Ziemowit Stolarczyk + * @author Clément Petit + * @author Mark Paluch + * @author James McNee + */ +class CriteriaUnitTests { + + @Test + void testSimpleCriteria() { + Criteria c = new Criteria("name").is("Bubba"); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\"}"); + } + + @Test // GH-4850 + void testCombiningSimpleCriteria() { + + Document expected = Document.parse("{ name : { $eq : 123, $type : ['long'] } }"); + + Criteria c = Criteria.where("name") // + .is(123) // + .type(Type.INT_64); + + assertThat(c.getCriteriaObject()).isEqualTo(expected); + + c = Criteria.where("name") // + .type(Type.INT_64).is(123); + + assertThat(c.getCriteriaObject()).isEqualTo(expected); + } + + @Test // GH-4850 + void testCombiningBsonRegexCriteria() { + + Criteria c = Criteria.where("name").regex(new BsonRegularExpression("^spring$")).type(Type.INT_64); + + assertThat(c.getCriteriaObject()) + .isEqualTo(Document.parse("{ name : { $regex : RegExp('^spring$'), $type : ['long'] } }")); + } + + @Test // GH-4850 + void testCombiningRegexCriteria() { + + Criteria c = Criteria.where("name").regex("^spring$").type(Type.INT_64); + + assertThat(c.getCriteriaObject()).hasEntrySatisfying("name.$regex", + it -> assertThat(it).isInstanceOf(Pattern.class)); + } + + @Test + void testNotEqualCriteria() { + Criteria c = new Criteria("name").ne("Bubba"); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : { \"$ne\" : \"Bubba\"}}"); + } + + @Test + void buildsIsNullCriteriaCorrectly() { + + Document reference = new Document("name", null); + + Criteria criteria = new Criteria("name").is(null); + assertThat(criteria.getCriteriaObject()).isEqualTo(reference); + } + + @Test + void testChainedCriteria() { + Criteria c = new Criteria("name").is("Bubba").and("age").lt(21); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"); + } + + @Test + void testCriteriaWithMultipleConditionsForSameKey() { + Criteria c = new Criteria("name").gte("M").and("name").ne("A"); + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class).isThrownBy(c::getCriteriaObject); + } + + @Test + void equalIfCriteriaMatches() { + + Criteria left = new Criteria("name").is("Foo").and("lastname").is("Bar"); + Criteria right = new Criteria("name").is("Bar").and("lastname").is("Bar"); + + assertThat(left).isNotEqualTo(right); + assertThat(right).isNotEqualTo(left); + } + + @Test // GH-3286 + void shouldBuildCorrectAndOperator() { + + Collection<Criteria> operatorCriteria = Arrays.asList(Criteria.where("x").is(true), Criteria.where("y").is(42), + Criteria.where("z").is("value")); + + Criteria criteria = Criteria.where("foo").is("bar").andOperator(operatorCriteria); + + assertThat(criteria.getCriteriaObject()) + .isEqualTo("{\"$and\":[{\"x\":true}, {\"y\":42}, {\"z\":\"value\"}], \"foo\":\"bar\"}"); + } + + @Test // GH-3286 + void shouldBuildCorrectOrOperator() { + + Collection<Criteria> operatorCriteria = Arrays.asList(Criteria.where("x").is(true), Criteria.where("y").is(42), + Criteria.where("z").is("value")); + + Criteria criteria = Criteria.where("foo").is("bar").orOperator(operatorCriteria); + + assertThat(criteria.getCriteriaObject()) + .isEqualTo("{\"$or\":[{\"x\":true}, {\"y\":42}, {\"z\":\"value\"}], \"foo\":\"bar\"}"); + } + + @Test // GH-3286 + void shouldBuildCorrectNorOperator() { + + Collection<Criteria> operatorCriteria = Arrays.asList(Criteria.where("x").is(true), 
Criteria.where("y").is(42), + Criteria.where("z").is("value")); + + Criteria criteria = Criteria.where("foo").is("bar").norOperator(operatorCriteria); + + assertThat(criteria.getCriteriaObject()) + .isEqualTo("{\"$nor\":[{\"x\":true}, {\"y\":42}, {\"z\":\"value\"}], \"foo\":\"bar\"}"); + } + + @Test // DATAMONGO-507 + void shouldThrowExceptionWhenTryingToNegateAndOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria() // + .not() // + .andOperator(Criteria.where("delete").is(true).and("_id").is(42))); + } + + @Test // DATAMONGO-507 + void shouldThrowExceptionWhenTryingToNegateOrOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria() // + .not() // + .orOperator(Criteria.where("delete").is(true).and("_id").is(42))); + } + + @Test // DATAMONGO-507 + void shouldThrowExceptionWhenTryingToNegateNorOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria() // + .not() // + .norOperator(Criteria.where("delete").is(true).and("_id").is(42))); + } + + @Test // DATAMONGO-507 + void shouldNegateFollowingSimpleExpression() { + + Criteria c = Criteria.where("age").not().gt(18).and("status").is("student"); + Document co = c.getCriteriaObject(); + + assertThat(co).isNotNull(); + assertThat(co).isEqualTo("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"); + } + + @Test // GH-3726 + void shouldBuildCorrectSampleRateOperation() { + Criteria c = new Criteria().sampleRate(0.4); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"$sampleRate\" : 0.4 }"); + } + + @Test // GH-3726 + void shouldThrowExceptionWhenSampleRateIsNegative() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(-1)); + } + + @Test // GH-3726 + void shouldThrowExceptionWhenSampleRateIsGreatedThanOne() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(1.01)); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() { + + Document document = new Criteria().getCriteriaObject(); + + assertThat(document).isEqualTo(new Document()); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() { + + Document document = new Criteria().lt("foo").getCriteriaObject(); + + assertThat(document).isEqualTo(new Document().append("$lt", "foo")); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() { + + Document document = new Criteria().lt("foo").gt("bar").getCriteriaObject(); + + assertThat(document).isEqualTo(new Document().append("$lt", "foo").append("$gt", "bar")); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() { + + Document document = new Criteria().lt("foo").not().getCriteriaObject(); + + assertThat(document).isEqualTo(new Document().append("$not", new Document("$lt", "foo"))); + } + + @Test // GH-4220 + void usesCorrectBsonType() { + + Document document = new Criteria("foo").type(Type.BOOLEAN).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$type", Collections.singletonList("bool")); + } + + @Test // DATAMONGO-1135 + void geoJsonTypesShouldBeWrappedInGeometry() { + + Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$near.$geometry", new GeoJsonPoint(100, 200)); + } + + @Test // DATAMONGO-1135 + void legacyCoordinateTypesShouldNotBeWrappedInGeometry() { + + Document document 
= new Criteria("foo").near(new Point(100, 200)).getCriteriaObject(); + + assertThat(document).doesNotContainKey("foo.$near.$geometry"); + } + + @Test // DATAMONGO-1135 + void maxDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$near.$maxDistance", 50D); + } + + @Test // DATAMONGO-1135 + void maxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$nearSphere.$maxDistance", 50D); + } + + @Test // DATAMONGO-1110 + void minDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$near.$minDistance", 50D); + } + + @Test // DATAMONGO-1110 + void minDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$nearSphere.$minDistance", 50D); + } + + @Test // DATAMONGO-1110 + void minAndMaxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).maxDistance(100D) + .getCriteriaObject(); + + assertThat(document).containsEntry("foo.$nearSphere.$minDistance", 50D); + assertThat(document).containsEntry("foo.$nearSphere.$maxDistance", 100D); + } + + @Test // DATAMONGO-1134 + void intersectsShouldThrowExceptionWhenCalledWihtNullValue() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria("foo").intersects(null)); + } + + @Test // DATAMONGO-1134 + void intersectsShouldWrapGeoJsonTypeInGeometryCorrectly() { + + GeoJsonLineString lineString = new GeoJsonLineString(new Point(0, 0), new Point(10, 10)); + Document document = new Criteria("foo").intersects(lineString).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$geoIntersects.$geometry", lineString); + } + + @Test // DATAMONGO-1835 + void extractsJsonSchemaInChainCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); + Criteria criteria = Criteria.where("foo").is("bar").andDocumentStructureMatches(schema); + + assertThat(criteria.getCriteriaObject()).isEqualTo(new Document("foo", "bar").append("$jsonSchema", + new Document("type", "object").append("required", Collections.singletonList("name")))); + } + + @Test // DATAMONGO-1835 + void extractsJsonSchemaFromFactoryMethodCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); + Criteria criteria = Criteria.matchingDocumentStructure(schema); + + assertThat(criteria.getCriteriaObject()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Collections.singletonList("name")))); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllClearWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().allClear(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAllClear\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllClearWithPositionListCorrectly() { + + Criteria 
bitPositionsBitmaskCriteria = new Criteria("field").bits().allClear(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAllClear\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllSetWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().allSet(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAllSet\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllSetWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allSet(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAllSet\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnyClearWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().anyClear(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnyClearWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anyClear(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnySetWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().anySet(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAnySet\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnySetWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anySet(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAnySet\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-2002 + void shouldEqualForSamePattern() { + + Criteria left = new Criteria("field").regex("foo"); + Criteria right = new Criteria("field").regex("foo"); + + assertThat(left).isEqualTo(right); + } + + @Test // DATAMONGO-2002 + void shouldEqualForDocument() { + + assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))) + .isEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))); + + assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))) + .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two"))); + + assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two"))) + .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))); + + assertThat(new Criteria("field").is(new Document("one", 1).append("null", null).append("two", "two"))) + .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))); + + assertThat(new Criteria("field").is(new Document())).isNotEqualTo(new Criteria("field").is("foo")); + assertThat(new Criteria("field").is("foo")).isNotEqualTo(new Criteria("field").is(new Document())); + } + + @Test // DATAMONGO-2002 + void shouldEqualForCollection() { + + assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))) + .isEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); 
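+
+		// Hedged summary of the assertions that follow: collection-valued criteria
+		// compare element-wise, so differing element types, differing sizes, or a
+		// collection matched against a plain value all render the Criteria unequal.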
+ + assertThat(new Criteria("field").is(Arrays.asList("foo", 1))) + .isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + + assertThat(new Criteria("field").is(Collections.singletonList("foo"))) + .isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + + assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))) + .isNotEqualTo(new Criteria("field").is(Collections.singletonList("foo"))); + + assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))).isNotEqualTo(new Criteria("field").is("foo")); + + assertThat(new Criteria("field").is("foo")).isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + } + + @Test // GH-3414 + void shouldEqualForSamePatternAndFlags() { + + Criteria left = new Criteria("field").regex("foo", "iu"); + Criteria right = new Criteria("field").regex("foo"); + + assertThat(left).isNotEqualTo(right); + } + + @Test // GH-3414 + void shouldEqualForNestedPattern() { + + Criteria left = new Criteria("a").orOperator(new Criteria("foo").regex("value", "i"), + new Criteria("bar").regex("value")); + Criteria right = new Criteria("a").orOperator(new Criteria("foo").regex("value", "i"), + new Criteria("bar").regex("value")); + + assertThat(left).isEqualTo(right); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java index 7c801f5ca8..5299ac08d6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,74 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import java.util.List; /** - * Unit tests for {@link DocumentField}. - * + * Unit tests for {@link Field}. 
+ * * @author Oliver Gierke + * @author Owen Q + * @author Mark Paluch + * @author Kirill Egorov */ -public class FieldUnitTests { +class FieldUnitTests { @Test - public void sameObjectSetupCreatesEqualField() { + void sameObjectSetupCreatesEqualField() { Field left = new Field().elemMatch("key", Criteria.where("foo").is("bar")); Field right = new Field().elemMatch("key", Criteria.where("foo").is("bar")); - assertThat(left, is(right)); - assertThat(right, is(left)); + assertThat(left).isEqualTo(right); + assertThat(right).isEqualTo(left); + assertThat(left.getFieldsObject()).isEqualTo("{key: { $elemMatch: {foo:\"bar\"}}}"); + } + + @Test // DATAMONGO-2294 + void rendersInclusionCorrectly() { + + Field fields = new Field().include("foo", "bar").include("baz"); + + assertThat(fields.getFieldsObject()).isEqualTo("{foo:1, bar:1, baz:1}"); } @Test - public void differentObjectSetupCreatesEqualField() { + void differentObjectSetupCreatesEqualField() { Field left = new Field().elemMatch("key", Criteria.where("foo").is("bar")); Field right = new Field().elemMatch("key", Criteria.where("foo").is("foo")); - assertThat(left, is(not(right))); - assertThat(right, is(not(left))); + assertThat(left).isNotEqualTo(right); + assertThat(right).isNotEqualTo(left); + } + + @Test // DATAMONGO-2294 + void rendersExclusionCorrectly() { + + Field fields = new Field().exclude("foo", "bar").exclude("baz"); + + assertThat(fields.getFieldsObject()).isEqualTo("{foo:0, bar:0, baz:0}"); + } + + @Test // GH-4625 + void overriddenInclusionMethodsCreateEqualFields() { + + Field left = new Field().include("foo", "bar"); + Field right = new Field().include(List.of("foo", "bar")); + + assertThat(left).isEqualTo(right); + } + + @Test // GH-4625 + void overriddenExclusionMethodsCreateEqualFields() { + + Field left = new Field().exclude("foo", "bar"); + Field right = new Field().exclude(List.of("foo", "bar")); + + assertThat(left).isEqualTo(right); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java index 97384eb541..156b5b23c6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java @@ -1,108 +1,94 @@ -/* - * Copyright 2010-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.query; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import org.junit.Test; -import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; -import org.springframework.data.mongodb.core.index.GeospatialIndex; -import org.springframework.data.mongodb.core.index.Index; -import org.springframework.data.mongodb.core.index.Index.Duplicates; - -/** - * Unit tests for {@link Index}. - * - * @author Oliver Gierke - * @author Laurent Canet - */ -public class IndexUnitTests { - - @Test - public void testWithAscendingIndex() { - Index i = new Index().on("name", Direction.ASC); - assertEquals("{ \"name\" : 1}", i.getIndexKeys().toString()); - } - - @Test - public void testWithDescendingIndex() { - Index i = new Index().on("name", Direction.DESC); - assertEquals("{ \"name\" : -1}", i.getIndexKeys().toString()); - } - - @Test - public void testNamedMultiFieldUniqueIndex() { - Index i = new Index().on("name", Direction.ASC).on("age", Direction.DESC); - i.named("test").unique(); - assertEquals("{ \"name\" : 1 , \"age\" : -1}", i.getIndexKeys().toString()); - assertEquals("{ \"name\" : \"test\" , \"unique\" : true}", i.getIndexOptions().toString()); - } - - @Test - public void testWithDropDuplicates() { - Index i = new Index().on("name", Direction.ASC); - i.unique(Duplicates.DROP); - assertEquals("{ \"name\" : 1}", i.getIndexKeys().toString()); - assertEquals("{ \"unique\" : true , \"dropDups\" : true}", i.getIndexOptions().toString()); - } - - @Test - public void testWithSparse() { - Index i = new Index().on("name", Direction.ASC); - i.sparse().unique(); - assertEquals("{ \"name\" : 1}", i.getIndexKeys().toString()); - assertEquals("{ \"unique\" : true , \"sparse\" : true}", i.getIndexOptions().toString()); - } - - @Test - public void testGeospatialIndex() { - GeospatialIndex i = new GeospatialIndex("location").withMin(0); - assertEquals("{ \"location\" : \"2d\"}", i.getIndexKeys().toString()); - assertEquals("{ \"min\" : 0}", i.getIndexOptions().toString()); - } - - /** - * @see DATAMONGO-778 - */ - @Test - public void testGeospatialIndex2DSphere() { - - GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE); - assertEquals("{ \"location\" : \"2dsphere\"}", i.getIndexKeys().toString()); - assertEquals("{ }", i.getIndexOptions().toString()); - } - - /** - * @see DATAMONGO-778 - */ - @Test - public void testGeospatialIndexGeoHaystack() { - - GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_HAYSTACK) - .withAdditionalField("name").withBucketSize(40); - assertEquals("{ \"location\" : \"geoHaystack\" , \"name\" : 1}", i.getIndexKeys().toString()); - assertEquals("{ \"bucketSize\" : 40.0}", i.getIndexOptions().toString()); - } - - @Test - public void ensuresPropertyOrder() { - - Index on = new Index("foo", Direction.ASC).on("bar", Direction.ASC); - assertThat(on.getIndexKeys().toString(), is("{ \"foo\" : 1 , \"bar\" : 1}")); - } -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.index.Index; + +/** + * Unit tests for {@link Index}. + * + * @author Oliver Gierke + * @author Laurent Canet + */ +public class IndexUnitTests { + + @Test + public void testWithAscendingIndex() { + Index i = new Index().on("name", Direction.ASC); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : 1}")); + } + + @Test + public void testWithDescendingIndex() { + Index i = new Index().on("name", Direction.DESC); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : -1}")); + } + + @Test + public void testNamedMultiFieldUniqueIndex() { + Index i = new Index().on("name", Direction.ASC).on("age", Direction.DESC); + i.named("test").unique(); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : 1 , \"age\" : -1}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"name\" : \"test\" , \"unique\" : true}")); + } + + @Test + public void testWithSparse() { + Index i = new Index().on("name", Direction.ASC); + i.sparse().unique(); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : 1}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"unique\" : true , \"sparse\" : true}")); + } + + @Test + public void testGeospatialIndex() { + GeospatialIndex i = new GeospatialIndex("location").withMin(0); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"location\" : \"2d\"}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"min\" : 0}")); + } + + @Test // DATAMONGO-778 + public void testGeospatialIndex2DSphere() { + + GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"location\" : \"2dsphere\"}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ }")); + } + + @Test // DATAMONGO-778 + public void testGeospatialIndexGeoHaystack() { + + GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_HAYSTACK) + .withAdditionalField("name").withBucketSize(40); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"location\" : \"geoHaystack\" , \"name\" : 1}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"bucketSize\" : 40.0}")); + } + + @Test + public void ensuresPropertyOrder() { + + Index on = new Index("foo", Direction.ASC).on("bar", Direction.ASC); + assertThat(on.getIndexKeys()).isEqualTo(Document.parse("{ \"foo\" : 1 , \"bar\" : 1}")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsQuery.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsQuery.java deleted file mode 100644 index 399a55a194..0000000000 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsQuery.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.query; - -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; -import org.hamcrest.core.IsEqual; -import org.springframework.data.domain.Sort.Direction; -import org.springframework.util.StringUtils; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - -/** - * A {@link TypeSafeMatcher} that tests whether a given {@link Query} matches a query specification. - * - * @author Christoph Strobl - * @param - */ -public class IsQuery extends TypeSafeMatcher { - - protected DBObject query; - protected DBObject sort; - protected DBObject fields; - - private int skip; - private int limit; - private String hint; - - protected IsQuery() { - query = new BasicDBObject(); - sort = new BasicDBObject(); - } - - public static IsQuery isQuery() { - return new IsQuery(); - } - - public IsQuery limitingTo(int limit) { - this.limit = limit; - return this; - } - - public IsQuery skippig(int skip) { - this.skip = skip; - return this; - } - - public IsQuery providingHint(String hint) { - this.hint = hint; - return this; - } - - public IsQuery includingField(String fieldname) { - - if (fields == null) { - fields = new BasicDBObject(); - } - fields.put(fieldname, 1); - - return this; - } - - public IsQuery excludingField(String fieldname) { - - if (fields == null) { - fields = new BasicDBObject(); - } - fields.put(fieldname, -1); - - return this; - } - - public IsQuery sortingBy(String fieldname, Direction direction) { - - sort.put(fieldname, Direction.ASC.equals(direction) ? 
1 : -1); - - return this; - } - - public IsQuery where(Criteria criteria) { - - this.query.putAll(criteria.getCriteriaObject()); - return this; - } - - @Override - public void describeTo(Description description) { - - BasicQuery expected = new BasicQuery(this.query, this.fields); - expected.setSortObject(sort); - expected.skip(this.skip); - expected.limit(this.limit); - - if (StringUtils.hasText(this.hint)) { - expected.withHint(this.hint); - } - - description.appendValue(expected); - } - - @Override - protected boolean matchesSafely(T item) { - - if (item == null) { - return false; - } - - if (!new IsEqual(query).matches(item.getQueryObject())) { - return false; - } - - if (item.getSortObject() == null && !sort.toMap().isEmpty()) { - if (!new IsEqual(sort).matches(item.getSortObject())) { - return false; - } - } - - if (!new IsEqual(fields).matches(item.getFieldsObject())) { - return false; - } - - if (!new IsEqual(this.hint).matches(item.getHint())) { - return false; - } - - if (!new IsEqual(this.skip).matches(item.getSkip())) { - return false; - } - - if (!new IsEqual(this.limit).matches(item.getLimit())) { - return false; - } - - return true; - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsTextQuery.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsTextQuery.java deleted file mode 100644 index 6e7d9403c9..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsTextQuery.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.query; - -import org.hamcrest.TypeSafeMatcher; -import org.springframework.data.mongodb.core.query.Criteria; -import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.TextQuery; -import org.springframework.util.StringUtils; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; - -/** - * A {@link TypeSafeMatcher} that tests whether a given {@link TextQuery} matches a query specification. 
- * - * @author Christoph Strobl - * @param - */ -public class IsTextQuery extends IsQuery { - - private final String SCORE_DEFAULT_FIELDNAME = "score"; - private final DBObject META_TEXT_SCORE = new BasicDBObject("$meta", "textScore"); - - private String scoreFieldName = SCORE_DEFAULT_FIELDNAME; - - private IsTextQuery() { - super(); - } - - public static IsTextQuery isTextQuery() { - return new IsTextQuery(); - } - - public IsTextQuery searchingFor(String term) { - appendTerm(term); - return this; - } - - public IsTextQuery inLanguage(String language) { - appendLanguage(language); - return this; - } - - public IsTextQuery returningScore() { - - if (fields == null) { - fields = new BasicDBObject(); - } - fields.put(scoreFieldName, META_TEXT_SCORE); - - return this; - } - - public IsTextQuery returningScoreAs(String fieldname) { - - this.scoreFieldName = fieldname != null ? fieldname : SCORE_DEFAULT_FIELDNAME; - - return this.returningScore(); - } - - public IsTextQuery sortingByScore() { - - sort.put(scoreFieldName, META_TEXT_SCORE); - - return this; - } - - @Override - public IsTextQuery where(Criteria criteria) { - - super.where(criteria); - return this; - } - - @Override - public IsTextQuery excludingField(String fieldname) { - - super.excludingField(fieldname); - return this; - } - - @Override - public IsTextQuery includingField(String fieldname) { - - super.includingField(fieldname); - return this; - } - - @Override - public IsTextQuery limitingTo(int limit) { - - super.limitingTo(limit); - return this; - } - - @Override - public IsQuery skippig(int skip) { - - super.skippig(skip); - return this; - } - - private void appendLanguage(String language) { - - DBObject dbo = getOrCreateTextDbo(); - dbo.put("$language", language); - } - - private DBObject getOrCreateTextDbo() { - - DBObject dbo = (DBObject) query.get("$text"); - if (dbo == null) { - dbo = new BasicDBObject(); - } - - return dbo; - } - - private void appendTerm(String term) { - - DBObject dbo = getOrCreateTextDbo(); - String searchString = (String) dbo.get("$search"); - if (StringUtils.hasText(searchString)) { - searchString += (" " + term); - } else { - searchString = term; - } - dbo.put("$search", searchString); - query.put("$text", dbo); - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MetricConversionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MetricConversionUnitTests.java new file mode 100644 index 0000000000..bbdad047f2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MetricConversionUnitTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.data.Offset.offset; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.Metrics; + +/** + * Unit tests for {@link MetricConversion}. + * + * @author Mark Paluch + */ +public class MetricConversionUnitTests { + + @Test // DATAMONGO-1348 + public void shouldConvertMilesToMeters() { + + Distance distance = new Distance(1, Metrics.MILES); + double distanceInMeters = MetricConversion.getDistanceInMeters(distance); + + assertThat(distanceInMeters).isCloseTo(1609.3438343d, offset(0.000000001)); + } + + @Test // DATAMONGO-1348 + public void shouldConvertKilometersToMeters() { + + Distance distance = new Distance(1, Metrics.KILOMETERS); + double distanceInMeters = MetricConversion.getDistanceInMeters(distance); + + assertThat(distanceInMeters).isCloseTo(1000, offset(0.000000001)); + } + + @Test // DATAMONGO-1348 + public void shouldCalculateMetersToKilometersMultiplier() { + + double multiplier = MetricConversion.getMetersToMetricMultiplier(Metrics.KILOMETERS); + + assertThat(multiplier).isCloseTo(0.001, offset(0.000000001)); + } + + @Test // DATAMONGO-1348 + public void shouldCalculateMetersToMilesMultiplier() { + + double multiplier = MetricConversion.getMetersToMetricMultiplier(Metrics.MILES); + + assertThat(multiplier).isCloseTo(0.00062137, offset(0.000000001)); + } + + @Test // GH-4004 + void shouldConvertMetersToRadians/* on an earth like sphere with r=6378.137km */() { + assertThat(MetricConversion.metersToRadians(1000)).isCloseTo(0.000156785594d, offset(0.000000001)); + } + + @Test // GH-4004 + void shouldConvertKilometersToRadians/* on an earth like sphere with r=6378.137km */() { + assertThat(MetricConversion.toRadians(new Distance(1, Metrics.KILOMETERS))).isCloseTo(0.000156785594d, offset(0.000000001)); + } + + @Test // GH-4004 + void shouldConvertMilesToRadians/* on an earth like sphere with r=6378.137km */() { + assertThat(MetricConversion.toRadians(new Distance(1, Metrics.MILES))).isCloseTo(0.000252321328d, offset(0.000000001)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java new file mode 100644 index 0000000000..d6bceea5d0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java @@ -0,0 +1,98 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.query; + +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.MongoRegexCreatorUnitTests.TestParameter.*; + +import java.util.List; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; +import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode; + +/** + * Tests the creation of regular expressions in {@link MongoRegexCreator}. + * + * @author Jens Schauder + * @author Oliver Gierke + */ +@RunWith(Parameterized.class) +public class MongoRegexCreatorUnitTests { + + @Parameters(name = "{index}: {0}") // + public static List<TestParameter> parameters() { + + return asList(// + test(null, MatchMode.EXACT, null, "Null input string -> null"), // + test("anystring", null, "anystring", "type=null -> input"), // + test("anystring", MatchMode.REGEX, "anystring", "REGEX -> input"), // + test("*", MatchMode.LIKE, ".*", "LIKE * -> .*"), // + test("*.*", MatchMode.LIKE, ".*\\Q.\\E.*", "Wildcards & Punctuation"), // + test("*.", MatchMode.LIKE, ".*\\Q.\\E", "Leading Wildcard & Punctuation"), // + test(".*", MatchMode.LIKE, "\\Q.\\E.*", "Trailing Wildcard & Punctuation"), // + test("other", MatchMode.LIKE, "other", "No Wildcard & Other"), // + test("other*", MatchMode.LIKE, "other.*", "Trailing Wildcard & Other"), // + test("*other", MatchMode.LIKE, ".*other", "Leading Wildcard & Other"), // + test("o*t.*h.er", MatchMode.LIKE, "\\Qo*t.*h.er\\E", "Dots & Stars"), // + test("other", MatchMode.STARTING_WITH, "^other", "Starting With"), // + test("other", MatchMode.ENDING_WITH, "other$", "Ending With"), // + test("other", MatchMode.CONTAINING, ".*other.*", "Containing"), // + test("other", MatchMode.EXACT, "^other$", "Exact Match")); + } + + @Parameter(0) // + public TestParameter parameter; + + @Test + public void testSpecialCases() { + parameter.check(); + } + + static class TestParameter { + + private final String source; + private final MatchMode mode; + private final String expectedResult, comment; + + private TestParameter(String source, MatchMode mode, String expectedResult, String comment) { + this.source = source; + this.mode = mode; + this.expectedResult = expectedResult; + this.comment = comment; + } + + public static TestParameter test(String source, MatchMode mode, String expectedResult, String comment) { + return new TestParameter(source, mode, expectedResult, comment); + } + + void check() { + + assertThat(MongoRegexCreator.INSTANCE.toRegularExpression(source, mode))// + .as(comment)// + .isEqualTo(expectedResult); + } + + @Override + public String toString() { + return String.format("Mode: %s, %s", mode, comment); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java index 10232f56f7..f4e3d26eb1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,40 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import java.math.BigDecimal; +import java.math.RoundingMode; + +import org.junit.jupiter.api.Test; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** * Unit tests for {@link NearQuery}. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ public class NearQueryUnitTests { private static final Distance ONE_FIFTY_KILOMETERS = new Distance(150, Metrics.KILOMETERS); - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullPoint() { - NearQuery.near(null); + assertThatIllegalArgumentException().isThrownBy(() -> NearQuery.near(null)); } @Test @@ -48,9 +56,9 @@ public void settingUpNearWithMetricRecalculatesDistance() { NearQuery query = NearQuery.near(2.5, 2.5, Metrics.KILOMETERS).maxDistance(150); - assertThat(query.getMaxDistance(), is(ONE_FIFTY_KILOMETERS)); - assertThat(query.getMetric(), is((Metric) Metrics.KILOMETERS)); - assertThat(query.isSpherical(), is(true)); + assertThat(query.getMaxDistance()).isEqualTo(ONE_FIFTY_KILOMETERS); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); + assertThat(query.isSpherical()).isTrue(); } @Test @@ -60,102 +68,223 @@ public void settingMetricRecalculatesMaxDistance() { query.inMiles(); - assertThat(query.getMetric(), is((Metric) Metrics.MILES)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.MILES); } @Test public void configuresResultMetricCorrectly() { NearQuery query = NearQuery.near(2.5, 2.1); - assertThat(query.getMetric(), is((Metric) Metrics.NEUTRAL)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.NEUTRAL); query = query.maxDistance(ONE_FIFTY_KILOMETERS); - assertThat(query.getMetric(), is((Metric) Metrics.KILOMETERS)); - assertThat(query.getMaxDistance(), is(ONE_FIFTY_KILOMETERS)); - assertThat(query.isSpherical(), is(true)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); + assertThat(query.getMaxDistance()).isEqualTo(ONE_FIFTY_KILOMETERS); + assertThat(query.isSpherical()).isTrue(); query = query.in(Metrics.MILES); - assertThat(query.getMetric(), is((Metric) Metrics.MILES)); - assertThat(query.getMaxDistance(), is(ONE_FIFTY_KILOMETERS)); - assertThat(query.isSpherical(), is(true)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.MILES); + 
assertThat(query.getMaxDistance()).isEqualTo(ONE_FIFTY_KILOMETERS);
+ assertThat(query.isSpherical()).isTrue();
 query = query.maxDistance(new Distance(200, Metrics.KILOMETERS));
- assertThat(query.getMetric(), is((Metric) Metrics.MILES));
+ assertThat(query.getMetric()).isEqualTo((Metric) Metrics.MILES);
 }
- /**
- * @see DATAMONGO-445
- */
- @Test
+ @Test // DATAMONGO-445, DATAMONGO-2264
 public void shouldTakeSkipAndLimitSettingsFromGivenPageable() {
- Pageable pageable = new PageRequest(3, 5);
+ Pageable pageable = PageRequest.of(3, 5);
 NearQuery query = NearQuery.near(new Point(1, 1)).with(pageable);
- assertThat(query.getSkip(), is(pageable.getPageNumber() * pageable.getPageSize()));
- assertThat((Integer) query.toDBObject().get("num"), is((pageable.getPageNumber() + 1) * pageable.getPageSize()));
+ assertThat(query.getSkip()).isEqualTo((long) pageable.getPageNumber() * pageable.getPageSize());
+ assertThat(query.toDocument().get("num")).isEqualTo((long) pageable.getPageSize());
 }
- /**
- * @see DATAMONGO-445
- */
- @Test
+ @Test // DATAMONGO-445
 public void shouldTakeSkipAndLimitSettingsFromGivenQuery() {
 int limit = 10;
- int skip = 5;
- NearQuery query = NearQuery.near(new Point(1, 1)).query(
- Query.query(Criteria.where("foo").is("bar")).limit(limit).skip(skip));
+ long skip = 5;
+ NearQuery query = NearQuery.near(new Point(1, 1))
+ .query(Query.query(Criteria.where("foo").is("bar")).limit(limit).skip(skip));
- assertThat(query.getSkip(), is(skip));
- assertThat((Integer) query.toDBObject().get("num"), is(limit));
+ assertThat(query.getSkip()).isEqualTo(skip);
+ assertThat((Long) query.toDocument().get("num")).isEqualTo((long) limit);
 }
- /**
- * @see DATAMONGO-445
- */
- @Test
+ @Test // DATAMONGO-445, DATAMONGO-2264
 public void shouldTakeSkipAndLimitSettingsFromPageableEvenIfItWasSpecifiedOnQuery() {
 int limit = 10;
 int skip = 5;
- Pageable pageable = new PageRequest(3, 5);
+ Pageable pageable = PageRequest.of(3, 5);
 NearQuery query = NearQuery.near(new Point(1, 1))
 .query(Query.query(Criteria.where("foo").is("bar")).limit(limit).skip(skip)).with(pageable);
- assertThat(query.getSkip(), is(pageable.getPageNumber() * pageable.getPageSize()));
- assertThat((Integer) query.toDBObject().get("num"), is((pageable.getPageNumber() + 1) * pageable.getPageSize()));
+ assertThat(query.getSkip()).isEqualTo((long) pageable.getPageNumber() * pageable.getPageSize());
+ assertThat(query.toDocument().get("num")).isEqualTo((long) pageable.getPageSize());
 }
- /**
- * @see DATAMONGO-829
- */
- @Test
+ @Test // DATAMONGO-829
 public void nearQueryShouldInoreZeroLimitFromQuery() {
 NearQuery query = NearQuery.near(new Point(1, 2)).query(Query.query(Criteria.where("foo").is("bar")));
- assertThat(query.toDBObject().get("num"), nullValue());
+ assertThat(query.toDocument().get("num")).isNull();
 }
- /**
- * @see DATAMONOGO-829
- */
- @Test(expected = IllegalArgumentException.class)
+ @Test // DATAMONGO-829
 public void nearQueryShouldThrowExceptionWhenGivenANullQuery() {
- NearQuery.near(new Point(1, 2)).query(null);
+ assertThatIllegalArgumentException().isThrownBy(() -> NearQuery.near(new Point(1, 2)).query(null));
 }
- /**
- * @see DATAMONGO-829
- */
- @Test
+ @Test // DATAMONGO-829
 public void numShouldNotBeAlteredByQueryWithoutPageable() {
- int num = 100;
+ long num = 100;
 NearQuery query = NearQuery.near(new Point(1, 2));
- query.num(num);
+ query.limit(num);
 query.query(Query.query(Criteria.where("foo").is("bar")));
- assertThat(DBObjectTestUtils.getTypedValue(query.toDBObject(), "num",
Integer.class), is(num)); + assertThat(DocumentTestUtils.getTypedValue(query.toDocument(), "num", Long.class)).isEqualTo(num); + } + + @Test // DATAMONGO-1348 + public void shouldNotUseSphericalForLegacyPoint() { + + NearQuery query = NearQuery.near(new Point(27.987901, 86.9165379)); + + assertThat(query.toDocument()).containsEntry("spherical", false); + } + + @Test // DATAMONGO-1348 + public void shouldUseSphericalForLegacyPointIfSet() { + + NearQuery query = NearQuery.near(new Point(27.987901, 86.9165379)); + query.spherical(true); + + assertThat(query.toDocument()).containsEntry("spherical", true); + } + + @Test // DATAMONGO-1348 + public void shouldUseSphericalForGeoJsonData() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + + assertThat(query.toDocument()).containsEntry("spherical", true); + } + + @Test // DATAMONGO-1348 + public void shouldUseSphericalForGeoJsonDataIfSphericalIsFalse() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.spherical(false); + + assertThat(query.toDocument()).containsEntry("spherical", true); + } + + @Test // DATAMONGO-1348 + public void shouldUseMetersForGeoJsonData() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(1); + + double meterToRadianMultiplier = BigDecimal.valueOf(1 / Metrics.KILOMETERS.getMultiplier() / 1000).// + setScale(8, RoundingMode.HALF_UP).// + doubleValue(); + assertThat(query.toDocument()).containsEntry("maxDistance", Metrics.KILOMETERS.getMultiplier() * 1000) + .containsEntry("distanceMultiplier", meterToRadianMultiplier); + } + + @Test // DATAMONGO-1348 + public void shouldUseMetersForGeoJsonDataWhenDistanceInKilometers() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.KILOMETERS)); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1000D).containsEntry("distanceMultiplier", 0.001D); + } + + @Test // DATAMONGO-1348 + public void shouldUseMetersForGeoJsonDataWhenDistanceInMiles() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.MILES)); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1609.3438343D).containsEntry("distanceMultiplier", + 0.00062137D); + } + + @Test // DATAMONGO-1348 + public void shouldUseKilometersForDistanceWhenMaxDistanceInMiles() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.MILES)).in(Metrics.KILOMETERS); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1609.3438343D).containsEntry("distanceMultiplier", + 0.001D); + } + + @Test // DATAMONGO-1348 + public void shouldUseMilesForDistanceWhenMaxDistanceInKilometers() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.KILOMETERS)).in(Metrics.MILES); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1000D).containsEntry("distanceMultiplier", 0.00062137D); + } + + @Test // GH-4277 + void fetchesReadPreferenceFromUnderlyingQueryObject() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)) + .query(new Query().withReadPreference(ReadPreference.nearest())); + + assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.nearest()); + } + + @Test // GH-4277 + void fetchesReadConcernFromUnderlyingQueryObject() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).query(new 
Query().withReadConcern(ReadConcern.SNAPSHOT)); + + assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void usesReadPreferenceFromNearQueryIfUnderlyingQueryDoesNotDefineAny() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadPreference(ReadPreference.nearest()) + .query(new Query()); + + assertThat(((Query) ReflectionTestUtils.getField(nearQuery, "query")).getReadPreference()).isNull(); + assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.nearest()); + } + + @Test // GH-4277 + void usesReadConcernFromNearQueryIfUnderlyingQueryDoesNotDefineAny() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadConcern(ReadConcern.SNAPSHOT).query(new Query()); + + assertThat(((Query) ReflectionTestUtils.getField(nearQuery, "query")).getReadConcern()).isNull(); + assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void readPreferenceFromUnderlyingQueryOverridesNearQueryOne() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadPreference(ReadPreference.nearest()) + .query(new Query().withReadPreference(ReadPreference.primary())); + + assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.primary()); + } + + @Test // GH-4277 + void readConcernFromUnderlyingQueryOverridesNearQueryOne() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadConcern(ReadConcern.SNAPSHOT) + .query(new Query().withReadConcern(ReadConcern.MAJORITY)); + + assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.MAJORITY); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java index d1b4b28c90..fa7a8516ca 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2013 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,202 +15,384 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import java.util.Arrays; - -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; import org.springframework.data.mongodb.core.SpecialDoc; /** * Unit tests for {@link Query}. - * + * * @author Thomas Risberg * @author Oliver Gierke * @author Patryk Wasik * @author Thomas Darimont + * @author Christoph Strobl + * @author Mark Paluch */ -public class QueryTests { - - @Rule public ExpectedException exception = ExpectedException.none(); +class QueryTests { @Test - public void testSimpleQuery() { + void testSimpleQuery() { + Query q = new Query(where("name").is("Thomas").and("age").lt(80)); - String expected = "{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}")); } @Test - public void testQueryWithNot() { + void testQueryWithNot() { + Query q = new Query(where("name").is("Thomas").and("age").not().mod(10, 0)); - String expected = "{ \"name\" : \"Thomas\" , \"age\" : { \"$not\" : { \"$mod\" : [ 10 , 0]}}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject()) + .isEqualTo(Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$not\" : { \"$mod\" : [ 10 , 0]}}}")); } @Test - public void testInvalidQueryWithNotIs() { - try { - new Query(where("name").not().is("Thomas")); - Assert.fail("This should have caused an InvalidDocumentStoreApiUsageException"); - } catch (InvalidMongoDbApiUsageException e) {} + void testInvalidQueryWithNotIs() { + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class) + .isThrownBy(() -> new Query(where("name").not().is("Thomas"))); } @Test - public void testOrQuery() { + void testOrQuery() { + Query q = new Query(new Criteria().orOperator(where("name").is("Sven").and("age").lt(50), where("age").lt(50), where("name").is("Thomas"))); - String expected = "{ \"$or\" : [ { \"name\" : \"Sven\" , \"age\" : { \"$lt\" : 50}} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse( + "{ \"$or\" : [ { \"name\" : \"Sven\" , \"age\" : { \"$lt\" : 50}} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}")); } @Test - public void testAndQuery() { + void testAndQuery() { + Query q = new Query(new 
Criteria().andOperator(where("name").is("Sven"), where("age").lt(50))); - String expected = "{ \"$and\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}}]}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + Document expected = Document.parse("{ \"$and\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}}]}"); + assertThat(q.getQueryObject()).isEqualTo(expected); } @Test - public void testNorQuery() { - Query q = new Query(new Criteria().norOperator(where("name").is("Sven"), where("age").lt(50), - where("name").is("Thomas"))); - String expected = "{ \"$nor\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + void testNorQuery() { + + Query q = new Query( + new Criteria().norOperator(where("name").is("Sven"), where("age").lt(50), where("name").is("Thomas"))); + assertThat(q.getQueryObject()).isEqualTo(Document + .parse("{ \"$nor\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}")); } - @Test - public void testQueryWithLimit() { + @Test // GH-4584 + void testQueryWithLimit() { + Query q = new Query(where("name").gte("M").lte("T").and("age").not().gt(22)); q.limit(50); - String expected = "{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); - Assert.assertEquals(50, q.getLimit()); + + assertThat(q.getQueryObject()).isEqualTo(Document + .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}")); + assertThat(q.getLimit()).isEqualTo(50); + + q.limit(Limit.unlimited()); + assertThat(q.getLimit()).isZero(); + assertThat(q.isLimited()).isFalse(); + + q.limit(Limit.of(10)); + assertThat(q.getLimit()).isEqualTo(10); + assertThat(q.isLimited()).isTrue(); + + q.limit(Limit.of(-1)); + assertThat(q.getLimit()).isZero(); + assertThat(q.isLimited()).isFalse(); + + Query other = new Query(where("name").gte("M")).limit(Limit.of(10)); + assertThat(new Query(where("name").gte("M")).limit(10)).isEqualTo(other).hasSameHashCodeAs(other); } @Test - public void testQueryWithFieldsAndSlice() { + void testQueryWithFieldsAndSlice() { + Query q = new Query(where("name").gte("M").lte("T").and("age").not().gt(22)); q.fields().exclude("address").include("name").slice("orders", 10); - String expected = "{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); - String expectedFields = "{ \"address\" : 0 , \"name\" : 1 , \"orders\" : { \"$slice\" : 10}}"; - Assert.assertEquals(expectedFields, q.getFieldsObject().toString()); + assertThat(q.getQueryObject()).isEqualTo(Document + .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}")); + + assertThat(q.getFieldsObject()) + .isEqualTo(Document.parse("{ \"address\" : 0 , \"name\" : 1 , \"orders\" : { \"$slice\" : 10}}")); } - /** - * @see DATAMONGO-652 - */ - @Test - public void testQueryWithFieldsElemMatchAndPositionalOperator() { + @Test // DATAMONGO-652 + void testQueryWithFieldsElemMatchAndPositionalOperator() { Query query = query(where("name").gte("M").lte("T").and("age").not().gt(22)); query.fields().elemMatch("products", where("name").is("milk")).position("comments", 2); - String expected = "{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}"; - 
assertThat(query.getQueryObject().toString(), is(expected)); - String expectedFields = "{ \"products\" : { \"$elemMatch\" : { \"name\" : \"milk\"}} , \"comments.$\" : 2}"; - assertThat(query.getFieldsObject().toString(), is(expectedFields)); + assertThat(query.getQueryObject()).isEqualTo(Document + .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}")); + assertThat(query.getFieldsObject()) + .isEqualTo(Document.parse("{ \"products\" : { \"$elemMatch\" : { \"name\" : \"milk\"}} , \"comments.$\" : 2}")); } @Test - public void testSimpleQueryWithChainedCriteria() { + void testSimpleQueryWithChainedCriteria() { + Query q = new Query(where("name").is("Thomas").and("age").lt(80)); - String expected = "{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}")); } @Test - public void testComplexQueryWithMultipleChainedCriteria() { - Query q = new Query(where("name").regex("^T.*").and("age").gt(20).lt(80).and("city") - .in("Stockholm", "London", "New York")); - String expected = "{ \"name\" : { \"$regex\" : \"^T.*\"} , \"age\" : { \"$gt\" : 20 , \"$lt\" : 80} , " - + "\"city\" : { \"$in\" : [ \"Stockholm\" , \"London\" , \"New York\"]}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + void testComplexQueryWithMultipleChainedCriteria() { + + Query q = new Query( + where("name").regex("^T.*").and("age").gt(20).lt(80).and("city").in("Stockholm", "London", "New York")); + assertThat(q.getQueryObject().toJson()).isEqualTo(Document.parse( + "{ \"name\" : { \"$regex\" : \"^T.*\", \"$options\" : \"\" } , \"age\" : { \"$gt\" : 20 , \"$lt\" : 80} , " + + "\"city\" : { \"$in\" : [ \"Stockholm\" , \"London\" , \"New York\"]}}") + .toJson()); } @Test - public void testAddCriteriaWithComplexQueryWithMultipleChainedCriteria() { - Query q1 = new Query(where("name").regex("^T.*").and("age").gt(20).lt(80).and("city") - .in("Stockholm", "London", "New York")); - Query q2 = new Query(where("name").regex("^T.*").and("age").gt(20).lt(80)).addCriteria(where("city").in( - "Stockholm", "London", "New York")); - Assert.assertEquals(q1.getQueryObject().toString(), q2.getQueryObject().toString()); - Query q3 = new Query(where("name").regex("^T.*")).addCriteria(where("age").gt(20).lt(80)).addCriteria( - where("city").in("Stockholm", "London", "New York")); - Assert.assertEquals(q1.getQueryObject().toString(), q3.getQueryObject().toString()); + void testAddCriteriaWithComplexQueryWithMultipleChainedCriteria() { + + Query q1 = new Query( + where("name").regex("^T.*").and("age").gt(20).lt(80).and("city").in("Stockholm", "London", "New York")); + Query q2 = new Query(where("name").regex("^T.*").and("age").gt(20).lt(80)) + .addCriteria(where("city").in("Stockholm", "London", "New York")); + + assertThat(q1.getQueryObject()).hasToString(q2.getQueryObject().toString()); + + Query q3 = new Query(where("name").regex("^T.*")).addCriteria(where("age").gt(20).lt(80)) + .addCriteria(where("city").in("Stockholm", "London", "New York")); + assertThat(q1.getQueryObject()).hasToString(q3.getQueryObject().toString()); } @Test - public void testQueryWithElemMatch() { + void testQueryWithElemMatch() { + Query q = new Query(where("openingHours").elemMatch(where("dayOfWeek").is("Monday").and("open").lte("1800"))); - String expected = "{ \"openingHours\" : { \"$elemMatch\" : { \"dayOfWeek\" : \"Monday\" , 
\"open\" : { \"$lte\" : \"1800\"}}}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse( + "{ \"openingHours\" : { \"$elemMatch\" : { \"dayOfWeek\" : \"Monday\" , \"open\" : { \"$lte\" : \"1800\"}}}}")); } @Test - public void testQueryWithIn() { + void testQueryWithIn() { + Query q = new Query(where("state").in("NY", "NJ", "PA")); - String expected = "{ \"state\" : { \"$in\" : [ \"NY\" , \"NJ\" , \"PA\"]}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse("{ \"state\" : { \"$in\" : [ \"NY\" , \"NJ\" , \"PA\"]}}")); } @Test - public void testQueryWithRegex() { + void testQueryWithRegex() { + Query q = new Query(where("name").regex("b.*")); - String expected = "{ \"name\" : { \"$regex\" : \"b.*\"}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject().toJson()) + .isEqualTo(Document.parse("{ \"name\" : { \"$regex\" : \"b.*\", \"$options\" : \"\" }}").toJson()); } @Test - public void testQueryWithRegexAndOption() { + void testQueryWithRegexAndOption() { Query q = new Query(where("name").regex("b.*", "i")); - String expected = "{ \"name\" : { \"$regex\" : \"b.*\" , \"$options\" : \"i\"}}"; - Assert.assertEquals(expected, q.getQueryObject().toString()); + assertThat(q.getQueryObject().toJson()) + .isEqualTo(Document.parse("{ \"name\" : { \"$regex\" : \"b.*\" , \"$options\" : \"i\"}}").toJson()); } - /** - * @see DATAMONGO-538 - */ - @Test - public void addsSortCorrectly() { + @Test // DATAMONGO-538 + void addsSortCorrectly() { - Query query = new Query().with(new Sort(Direction.DESC, "foo")); - assertThat(query.getSortObject().toString(), is("{ \"foo\" : -1}")); + Query query = new Query().with(Sort.by(Direction.DESC, "foo")); + assertThat(query.getSortObject()).isEqualTo(Document.parse("{ \"foo\" : -1}")); } @Test - public void rejectsOrderWithIgnoreCase() { - - exception.expect(IllegalArgumentException.class); - exception.expectMessage("foo"); + void rejectsOrderWithIgnoreCase() { - new Query().with(new Sort(new Sort.Order("foo").ignoreCase())); + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> new Query().with(Sort.by(Order.asc("foo").ignoreCase()))); } - /** - * @see DATAMONGO-709 - */ - @Test - @SuppressWarnings("unchecked") - public void shouldReturnClassHierarchyOfRestrictedTypes() { + @Test // DATAMONGO-709, DATAMONGO-1735, // DATAMONGO-2198 + void shouldReturnClassHierarchyOfRestrictedTypes() { Query query = new Query(where("name").is("foo")).restrict(SpecialDoc.class); - assertThat( - query.toString(), - is("Query: { \"name\" : \"foo\", \"_$RESTRICTED_TYPES\" : [ { $java : class org.springframework.data.mongodb.core.SpecialDoc } ] }, Fields: null, Sort: null")); - assertThat(query.getRestrictedTypes(), is(notNullValue())); - assertThat(query.getRestrictedTypes().size(), is(1)); - assertThat(query.getRestrictedTypes(), hasItems(Arrays.asList(SpecialDoc.class).toArray(new Class[0]))); + + assertThat(query.getRestrictedTypes()).containsExactly(SpecialDoc.class); + } + + @Test // DATAMONGO-1421 + void addCriteriaForSamePropertyMultipleTimesShouldThrowAndSafelySerializeErrorMessage() { + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class).isThrownBy(() -> { + + Query query = new Query(); + query.addCriteria(where("value").is(EnumType.VAL_1)); + query.addCriteria(where("value").is(EnumType.VAL_2)); + }).withMessageContaining("second 'value' criteria") + 
.withMessageContaining("already contains '{ \"value\" : { \"$java\" : VAL_1 } }'"); + } + + @Test // DATAMONGO-1783 + void queryOfShouldCreateNewQueryWithEqualBehaviour() { + + Query source = new Query(); + source.addCriteria(where("This you must ken").is(EnumType.VAL_1)); + + compareQueries(Query.of(source), source); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnCriteriaFromSource() { + + Query source = new Query(); + source.addCriteria(where("From one make ten").is("and two let be.")); + Query target = Query.of(source); + + assertThat(target.getQueryObject()).containsAllEntriesOf(new Document("From one make ten", "and two let be.")) + .isNotSameAs(source.getQueryObject()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldAppendCriteria() { + + Query source = new Query(); + source.addCriteria(where("Skip o'er the four").is("From five and six")); + Query target = Query.of(source); + + compareQueries(target, source); + target.addCriteria(where("the Witch's tricks").is("make seven and eight")); + + assertThat(target.getQueryObject()).isEqualTo( + new Document("Skip o'er the four", "From five and six").append("the Witch's tricks", "make seven and eight")); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnCollationFromSource() { + + Query source = new Query().collation(Collation.simple()); + Query target = Query.of(source); + + compareQueries(target, source); + source.collation(Collation.of("Tis finished straight")); + + assertThat(target.getCollation()).contains(Collation.simple()).isNotEqualTo(source.getCollation()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnSortFromSource() { + + Query source = new Query().with(Sort.by("And nine is one")); + Query target = Query.of(source); + + compareQueries(target, source); + source.with(Sort.by("And ten is none")); + + assertThat(target.getSortObject()).isEqualTo(new Document("And nine is one", 1)) + .isNotEqualTo(source.getSortObject()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnFieldsFromSource() { + + Query source = new Query(); + source.fields().include("That is the witch's one-time-one"); + Query target = Query.of(source); + + compareQueries(target, source); + source.fields().exclude("Goethe"); + + assertThat(target.getFieldsObject()).isEqualTo(new Document("That is the witch's one-time-one", 1)) + .isNotEqualTo(source.getFieldsObject()); + } + + @Test // DATAMONGO-1783, DATAMONGO-2572 + void clonedQueryShouldNotDependOnMetaFromSource() { + + Query source = new Query().maxTimeMsec(100); + Query target = Query.of(source); + + compareQueries(target, source); + source.allowSecondaryReads(); + + Meta meta = new Meta(); + meta.setMaxTimeMsec(100); + assertThat(target.getMeta()).isEqualTo(meta).isNotEqualTo(source.getMeta()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnRestrictedTypesFromSource() { + + Query source = new Query(); + source.restrict(EnumType.class); + Query target = Query.of(source); + + compareQueries(target, source); + source.restrict(Query.class); + + assertThat(target.getRestrictedTypes()).containsExactly(EnumType.class).isNotEqualTo(source.getRestrictedTypes()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldApplyRestrictionsFromBasicQuery() { + + BasicQuery source = new BasicQuery("{ 'foo' : 'bar'}"); + Query target = Query.of(source); + + compareQueries(target, source); + + target.addCriteria(where("one").is("10")); + assertThat(target.getQueryObject()).isEqualTo(new Document("foo", "bar").append("one", 
"10")) + .isNotEqualTo(source.getQueryObject()); + } + + @Test // DATAMONGO-2478 + void queryOfShouldWorkOnProxiedObjects() { + + BasicQuery source = new BasicQuery("{ 'foo' : 'bar'}", "{ '_id' : -1, 'foo' : 1 }"); + source.withHint("the hint"); + source.limit(10); + source.setSortObject(new Document("_id", 1)); + + ProxyFactory proxyFactory = new ProxyFactory(source); + proxyFactory.setInterfaces(new Class[0]); + + Query target = Query.of((Query) proxyFactory.getProxy()); + + compareQueries(target, source); + } + + @Test // GH-4771 + void appliesSortOfUnpagedPageable() { + + Query query = new Query(); + query.with(Pageable.unpaged(Sort.by("sortMe"))); + + assertThat(query.isSorted()).isTrue(); + } + + private void compareQueries(Query actual, Query expected) { + + assertThat(actual.getCollation()).isEqualTo(expected.getCollation()); + assertThat(actual.getSortObject()).hasSameSizeAs(expected.getSortObject()) + .containsAllEntriesOf(expected.getSortObject()); + assertThat(actual.getFieldsObject()).hasSameSizeAs(expected.getFieldsObject()) + .containsAllEntriesOf(expected.getFieldsObject()); + assertThat(actual.getQueryObject()).hasSameSizeAs(expected.getQueryObject()) + .containsAllEntriesOf(expected.getQueryObject()); + assertThat(actual.getHint()).isEqualTo(expected.getHint()); + assertThat(actual.getLimit()).isEqualTo(expected.getLimit()); + assertThat(actual.getSkip()).isEqualTo(expected.getSkip()); + assertThat(actual.getMeta()).isEqualTo(expected.getMeta()); + assertThat(actual.getRestrictedTypes()).isEqualTo(expected.getRestrictedTypes()); + } + + enum EnumType { + VAL_1, VAL_2 } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java index 6450ac7d62..1cf4d8b027 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java @@ -1,53 +1,54 @@ -/* - * Copyright 2010-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.query; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import org.junit.Test; -import org.springframework.data.domain.Sort; -import org.springframework.data.domain.Sort.Direction; - -/** - * Unit tests for sorting. 
- * - * @author Oliver Gierke - */ -public class SortTests { - - @Test - public void testWithSortAscending() { - Query s = new Query().with(new Sort(Direction.ASC, "name")); - assertEquals("{ \"name\" : 1}", s.getSortObject().toString()); - } - - @Test - public void testWithSortDescending() { - Query s = new Query().with(new Sort(Direction.DESC, "name")); - assertEquals("{ \"name\" : -1}", s.getSortObject().toString()); - } - - /** - * @see DATADOC-177 - */ - @Test - public void preservesOrderKeysOnMultipleSorts() { - - Query sort = new Query().with(new Sort(Direction.DESC, "foo").and(new Sort(Direction.DESC, "bar"))); - assertThat(sort.getSortObject().toString(), is("{ \"foo\" : -1 , \"bar\" : -1}")); - } -} +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; + +/** + * Unit tests for sorting. + * + * @author Oliver Gierke + * @author Mark Paluch + */ +public class SortTests { + + @Test + public void testWithSortAscending() { + + Query s = new Query().with(Sort.by(Direction.ASC, "name")); + assertThat(s.getSortObject()).isEqualTo(Document.parse("{ \"name\" : 1}")); + } + + @Test + public void testWithSortDescending() { + + Query s = new Query().with(Sort.by(Direction.DESC, "name")); + assertThat(s.getSortObject()).isEqualTo(Document.parse("{ \"name\" : -1}")); + } + + @Test // DATADOC-177 + public void preservesOrderKeysOnMultipleSorts() { + + Query sort = new Query().with(Sort.by(Direction.DESC, "foo").and(Sort.by(Direction.DESC, "bar"))); + assertThat(sort.getSortObject()).isEqualTo(Document.parse("{ \"foo\" : -1 , \"bar\" : -1}")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java index 9a14e7d421..b5da29f5e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,108 +15,119 @@
 */
 package org.springframework.data.mongodb.core.query;
-import org.hamcrest.core.IsEqual;
-import org.junit.Assert;
-import org.junit.Test;
-import org.springframework.data.mongodb.core.DBObjectTestUtils;
+import static org.assertj.core.api.Assertions.*;
-import com.mongodb.BasicDBObject;
-import com.mongodb.DBObject;
-import com.mongodb.util.JSON;
+import org.bson.Document;
+import org.junit.jupiter.api.Test;
+
+import org.springframework.data.mongodb.core.DocumentTestUtils;
 /**
 * Unit tests for {@link TextCriteria}.
- *
+ *
 * @author Christoph Strobl
+ * @author Daniel Debray
 */
-public class TextCriteriaUnitTests {
+class TextCriteriaUnitTests {
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldNotHaveLanguageField() {
+ @Test // DATAMONGO-850
+ void shouldNotHaveLanguageField() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage();
- Assert.assertThat(criteria.getCriteriaObject(), IsEqual.equalTo(searchObject("{ }")));
+
+ assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ }"));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldNotHaveLanguageForNonDefaultLanguageField() {
+ @Test // DATAMONGO-850
+ void shouldNotHaveLanguageForNonDefaultLanguageField() {
 TextCriteria criteria = TextCriteria.forLanguage("spanish");
- Assert.assertThat(criteria.getCriteriaObject(), IsEqual.equalTo(searchObject("{ \"$language\" : \"spanish\" }")));
+
+ assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$language\" : \"spanish\" }"));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldCreateSearchFieldForSingleTermCorrectly() {
+ @Test // DATAMONGO-850
+ void shouldCreateSearchFieldForSingleTermCorrectly() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage().matching("cake");
- Assert.assertThat(criteria.getCriteriaObject(), IsEqual.equalTo(searchObject("{ \"$search\" : \"cake\" }")));
+
+ assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"cake\" }"));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldCreateSearchFieldCorrectlyForMultipleTermsCorrectly() {
+ @Test // DATAMONGO-850
+ void shouldCreateSearchFieldCorrectlyForMultipleTermsCorrectly() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("bake", "coffee", "cake");
- Assert.assertThat(criteria.getCriteriaObject(),
- IsEqual.equalTo(searchObject("{ \"$search\" : \"bake coffee cake\" }")));
+
+ assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"bake coffee cake\" }"));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldCreateSearchFieldForPhraseCorrectly() {
+ @Test // DATAMONGO-850
+ void shouldCreateSearchFieldForPhraseCorrectly() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingPhrase("coffee cake");
- Assert.assertThat(DBObjectTestUtils.getAsDBObject(criteria.getCriteriaObject(), "$text"),
- IsEqual.<DBObject> equalTo(new BasicDBObject("$search", "\"coffee cake\"")));
+
+ assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"))
+ .isEqualTo(new Document("$search", "\"coffee cake\""));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldCreateNotFieldCorrectly() {
+ @Test // DATAMONGO-850
+ void shouldCreateNotFieldCorrectly() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage().notMatching("cake");
- Assert.assertThat(criteria.getCriteriaObject(), IsEqual.equalTo(searchObject("{ \"$search\" : \"-cake\" }")));
+
+ assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"-cake\" }"));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldCreateSearchFieldCorrectlyForNotMultipleTermsCorrectly() {
+ @Test // DATAMONGO-850
+ void shouldCreateSearchFieldCorrectlyForNotMultipleTermsCorrectly() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage().notMatchingAny("bake", "coffee", "cake");
- Assert.assertThat(criteria.getCriteriaObject(),
- IsEqual.equalTo(searchObject("{ \"$search\" : \"-bake -coffee -cake\" }")));
+
+ assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"-bake -coffee -cake\" }"));
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
- public void shouldCreateSearchFieldForNotPhraseCorrectly() {
+ @Test // DATAMONGO-850
+ void shouldCreateSearchFieldForNotPhraseCorrectly() {
 TextCriteria criteria = TextCriteria.forDefaultLanguage().notMatchingPhrase("coffee cake");
- Assert.assertThat(DBObjectTestUtils.getAsDBObject(criteria.getCriteriaObject(), "$text"),
- IsEqual.<DBObject> equalTo(new BasicDBObject("$search", "-\"coffee cake\"")));
+
+ assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"))
+ .isEqualTo(new Document("$search", "-\"coffee cake\""));
+ }
+
+ @Test // DATAMONGO-1455
+ void caseSensitiveOperatorShouldBeSetCorrectly() {
+
+ TextCriteria criteria = TextCriteria.forDefaultLanguage().matching("coffee").caseSensitive(true);
+
+ assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"))
+ .isEqualTo(new Document("$search", "coffee").append("$caseSensitive", true));
+ }
+
+ @Test // DATAMONGO-1456
+ void diacriticSensitiveOperatorShouldBeSetCorrectly() {
+
+ TextCriteria criteria = TextCriteria.forDefaultLanguage().matching("coffee").diacriticSensitive(true);
+
+ assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"))
+ .isEqualTo(new Document("$search", "coffee").append("$diacriticSensitive", true));
+ }
+
+ @Test // DATAMONGO-2504
+ void twoIdenticalCriteriaShouldBeEqual() {
+
+ TextCriteria criteriaOne = TextCriteria.forDefaultLanguage().matching("coffee");
+ TextCriteria criteriaTwo = TextCriteria.forDefaultLanguage().matching("coffee");
+
+ assertThat(criteriaOne).isEqualTo(criteriaTwo);
+ assertThat(criteriaOne).hasSameHashCodeAs(criteriaTwo);
+ assertThat(criteriaOne).isNotEqualTo(criteriaTwo.diacriticSensitive(false));
+ assertThat(criteriaOne.hashCode()).isNotEqualTo(criteriaTwo.diacriticSensitive(false).hashCode());
 }
- private DBObject searchObject(String json) {
- return new BasicDBObject("$text", JSON.parse(json));
+ private Document searchObject(String json) {
+ return new Document("$text", Document.parse(json));
 }
} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java index 94b0637ef8..6ea0f5aa9c 100644 ---
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,34 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.collection.IsCollectionWithSize.*; -import static org.hamcrest.collection.IsEmptyCollection.*; -import static org.hamcrest.collection.IsIterableContainingInOrder.*; -import static org.hamcrest.core.AnyOf.*; -import static org.hamcrest.core.IsCollectionContaining.*; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import java.util.List; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.domain.PageRequest; -import org.springframework.data.mongodb.config.AbstractIntegrationTests; -import org.springframework.data.mongodb.core.IndexOperations; -import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.IndexDefinition; -import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.Language; import org.springframework.data.mongodb.core.mapping.TextScore; -import org.springframework.data.mongodb.core.query.TextCriteria; -import org.springframework.data.mongodb.core.query.TextQuery; import org.springframework.data.mongodb.core.query.TextQueryTests.FullTextDoc.FullTextDocBuilder; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.lang.Nullable; /** * @author Christoph Strobl + * @author Mark Paluch */ -public class TextQueryTests extends AbstractIntegrationTests { - - public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6)); +@ExtendWith(MongoTemplateExtension.class) +public class TextQueryTests { private static final FullTextDoc BAKE = new FullTextDocBuilder().headline("bake").build(); private static final FullTextDoc COFFEE = new FullTextDocBuilder().subHeadline("coffee").build(); @@ -65,19 +53,22 @@ public class TextQueryTests extends AbstractIntegrationTests { 
private static final FullTextDoc FRENCH_MILK = new FullTextDocBuilder().headline("leche").lanugage("french").build();
 private static final FullTextDoc MILK_AND_SUGAR = new FullTextDocBuilder().headline("milk and sugar").build();
- private @Autowired MongoOperations template;
+ @Template(initialEntitySet = FullTextDoc.class) //
+ static MongoTestTemplate template;
- @Before
+ @BeforeEach
 public void setUp() {
+ template.flush();
+
 IndexOperations indexOps = template.indexOps(FullTextDoc.class);
 indexOps.dropAllIndexes();
 indexOps.ensureIndex(new IndexDefinition() {
 @Override
- public DBObject getIndexOptions() {
- DBObject options = new BasicDBObject();
+ public Document getIndexOptions() {
+ Document options = new Document();
 options.put("weights", weights());
 options.put("name", "TextQueryTests_TextIndex");
 options.put("language_override", "lang");
@@ -86,16 +77,16 @@ public DBObject getIndexOptions() {
 }
 @Override
- public DBObject getIndexKeys() {
- DBObject keys = new BasicDBObject();
+ public Document getIndexKeys() {
+ Document keys = new Document();
 keys.put("headline", "text");
 keys.put("subheadline", "text");
 keys.put("body", "text");
 return keys;
 }
- private DBObject weights() {
- DBObject weights = new BasicDBObject();
+ private Document weights() {
+ Document weights = new Document();
 weights.put("headline", 10);
 weights.put("subheadline", 5);
 weights.put("body", 1);
@@ -104,35 +95,26 @@ private DBObject weights() {
 });
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldOnlyFindDocumentsMatchingAnyWordOfGivenQuery() {
 initWithDefaultDocuments();
 List<FullTextDoc> result = template.find(new TextQuery("bake coffee cake"), FullTextDoc.class);
- assertThat(result, hasSize(3));
- assertThat(result, hasItems(BAKE, COFFEE, CAKE));
+ assertThat(result).hasSize(3);
+ assertThat(result).contains(BAKE, COFFEE, CAKE);
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldNotFindDocumentsWhenQueryDoesNotMatchAnyDocumentInIndex() {
 initWithDefaultDocuments();
 List<FullTextDoc> result = template.find(new TextQuery("tasmanian devil"), FullTextDoc.class);
- assertThat(result, hasSize(0));
+ assertThat(result).hasSize(0);
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldApplySortByScoreCorrectly() {
 initWithDefaultDocuments();
@@ -140,55 +122,43 @@ public void shouldApplySortByScoreCorrectly() {
 template.insert(coffee2);
 List<FullTextDoc> result = template.find(new TextQuery("bake coffee cake").sortByScore(), FullTextDoc.class);
- assertThat(result, hasSize(4));
- assertThat(result.get(0), anyOf(equalTo(BAKE), equalTo(coffee2)));
- assertThat(result.get(1), anyOf(equalTo(BAKE), equalTo(coffee2)));
- assertThat(result.get(2), equalTo(COFFEE));
- assertThat(result.get(3), equalTo(CAKE));
+ assertThat(result).hasSize(4);
+ assertThat(result.get(0)).isIn(BAKE, coffee2);
+ assertThat(result.get(1)).isIn(BAKE, coffee2);
+ assertThat(result.get(2)).isEqualTo(COFFEE);
+ assertThat(result.get(3)).isEqualTo(CAKE);
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldFindTextInAnyLanguage() {
 initWithDefaultDocuments();
 List<FullTextDoc> result = template.find(new TextQuery("leche"), FullTextDoc.class);
- assertThat(result, hasSize(2));
- assertThat(result, hasItems(SPANISH_MILK, FRENCH_MILK));
+ assertThat(result).hasSize(2);
+ assertThat(result).contains(SPANISH_MILK, FRENCH_MILK);
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldOnlyFindTextInSpecificLanguage() {
 initWithDefaultDocuments();
 List<FullTextDoc> result = template.find(new TextQuery("leche").addCriteria(where("language").is("spanish")),
 FullTextDoc.class);
- assertThat(result, hasSize(1));
- assertThat(result.get(0), equalTo(SPANISH_MILK));
+ assertThat(result).hasSize(1);
+ assertThat(result.get(0)).isEqualTo(SPANISH_MILK);
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldNotFindDocumentsWithNegatedTerms() {
 initWithDefaultDocuments();
 List<FullTextDoc> result = template.find(new TextQuery("bake coffee -cake"), FullTextDoc.class);
- assertThat(result, hasSize(2));
- assertThat(result, hasItems(BAKE, COFFEE));
+ assertThat(result).hasSize(2);
+ assertThat(result).contains(BAKE, COFFEE);
 }
- /**
- * @see DATAMONGO-976
- */
- @Test
+ @Test // DATAMONGO-976
 public void shouldInlcudeScoreCorreclty() {
 initWithDefaultDocuments();
@@ -196,16 +166,13 @@ public void shouldInlcudeScoreCorreclty() {
 List<FullTextDoc> result = template.find(new TextQuery("bake coffee -cake").includeScore().sortByScore(),
 FullTextDoc.class);
- assertThat(result, hasSize(2));
+ assertThat(result).hasSize(2);
 for (FullTextDoc scoredDoc : result) {
- assertTrue(scoredDoc.score > 0F);
+ assertThat(scoredDoc.score > 0F).isTrue();
 }
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldApplyPhraseCorrectly() {
 initWithDefaultDocuments();
@@ -213,14 +180,11 @@ public void shouldApplyPhraseCorrectly() {
 TextQuery query = TextQuery.queryText(TextCriteria.forDefaultLanguage().matchingPhrase("milk and sugar"));
 List<FullTextDoc> result = template.find(query, FullTextDoc.class);
- assertThat(result, hasSize(1));
- assertThat(result, contains(MILK_AND_SUGAR));
+ assertThat(result).hasSize(1);
+ assertThat(result).containsExactly(MILK_AND_SUGAR);
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldReturnEmptyListWhenNoDocumentsMatchGivenPhrase() {
 initWithDefaultDocuments();
@@ -228,28 +192,25 @@ public void shouldReturnEmptyListWhenNoDocumentsMatchGivenPhrase() {
 TextQuery query = TextQuery.queryText(TextCriteria.forDefaultLanguage().matchingPhrase("milk no sugar"));
 List<FullTextDoc> result = template.find(query, FullTextDoc.class);
- assertThat(result, empty());
+ assertThat(result).isEmpty();
 }
- /**
- * @see DATAMONGO-850
- */
- @Test
+ @Test // DATAMONGO-850
 public void shouldApplyPaginationCorrectly() {
 initWithDefaultDocuments();
 // page 1
- List<FullTextDoc> result = template.find(new TextQuery("bake coffee cake").sortByScore()
- .with(new PageRequest(0, 2)), FullTextDoc.class);
- assertThat(result, hasSize(2));
- assertThat(result, contains(BAKE, COFFEE));
+ List<FullTextDoc> result = template.find(new TextQuery("bake coffee cake").sortByScore().with(PageRequest.of(0, 2)),
+ FullTextDoc.class);
+ assertThat(result).hasSize(2);
+ assertThat(result).containsExactly(BAKE, COFFEE);
 // page 2
- result = template.find(new TextQuery("bake coffee cake").sortByScore().with(new PageRequest(1, 2)),
+ result = template.find(new TextQuery("bake coffee cake").sortByScore().with(PageRequest.of(1, 2)),
 FullTextDoc.class);
- assertThat(result, hasSize(1));
- assertThat(result, contains(CAKE));
+ assertThat(result).hasSize(1);
+ assertThat(result).containsExactly(CAKE);
 }
 private void initWithDefaultDocuments() {
@@ -262,7 +223,7 @@ private void initWithDefaultDocuments() {
 this.template.save(MILK_AND_SUGAR);
 }
- @Document(collection = "fullTextDoc")
+ @org.springframework.data.mongodb.core.mapping.Document(collection = "fullTextDoc")
 static class FullTextDoc {
 @Id String id;
@@ -288,7 +249,7 @@ public int hashCode() {
 }
@Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } @@ -337,6 +298,10 @@ public boolean equals(Object obj) { return true; } + public String toString() { + return "TextQueryTests.FullTextDoc(id=" + this.id + ", language=" + this.language + ", headline=" + this.headline + ", subheadline=" + this.subheadline + ", body=" + this.body + ", score=" + this.score + ")"; + } + static class FullTextDocBuilder { private FullTextDoc instance; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java index 2a0fa9c0bc..155fcd3f99 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,89 +15,106 @@ */ package org.springframework.data.mongodb.core.query; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.query.IsTextQuery.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; /** * Unit tests for {@link TextQuery}. 
- * + * * @author Christoph Strobl + * @author Mark Paluch */ public class TextQueryUnitTests { private static final String QUERY = "bake coffee cake"; private static final String LANGUAGE_SPANISH = "spanish"; - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldCreateQueryObjectCorrectly() { - assertThat(new TextQuery(QUERY), isTextQuery().searchingFor(QUERY)); + assertThat(new TextQuery(QUERY).getQueryObject()).containsEntry("$text.$search", QUERY); } - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldIncludeLanguageInQueryObjectWhenNotNull() { - assertThat(new TextQuery(QUERY, LANGUAGE_SPANISH), isTextQuery().searchingFor(QUERY).inLanguage(LANGUAGE_SPANISH)); + assertThat(new TextQuery(QUERY, LANGUAGE_SPANISH).getQueryObject()).containsEntry("$text.$search", QUERY) + .containsEntry("$text.$language", LANGUAGE_SPANISH); } - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldIncludeScoreFieldCorrectly() { - assertThat(new TextQuery(QUERY).includeScore(), isTextQuery().searchingFor(QUERY).returningScore()); + + TextQuery textQuery = new TextQuery(QUERY).includeScore(); + assertThat(textQuery.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(textQuery.getFieldsObject()).containsKey("score"); } - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldNotOverrideExistingProjections() { TextQuery query = new TextQuery(TextCriteria.forDefaultLanguage().matching(QUERY)).includeScore(); query.fields().include("foo"); - assertThat(query, isTextQuery().searchingFor(QUERY).returningScore().includingField("foo")); + assertThat(query.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(query.getFieldsObject()).containsKeys("score", "foo"); } - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldIncludeSortingByScoreCorrectly() { - assertThat(new TextQuery(QUERY).sortByScore(), isTextQuery().searchingFor(QUERY).returningScore().sortingByScore()); + + TextQuery textQuery = new TextQuery(QUERY).sortByScore(); + + assertThat(textQuery.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(textQuery.getFieldsObject()).containsKey("score"); + assertThat(textQuery.getSortObject()).containsKey("score"); } - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldNotOverrideExistingSort() { TextQuery query = new TextQuery(QUERY); - query.with(new Sort(Direction.DESC, "foo")); + query.with(Sort.by(Direction.DESC, "foo")); query.sortByScore(); - assertThat(query, - isTextQuery().searchingFor(QUERY).returningScore().sortingByScore().sortingBy("foo", Direction.DESC)); + assertThat(query.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(query.getFieldsObject()).containsKeys("score"); + assertThat(query.getSortObject()).containsEntry("foo", -1).containsKey("score"); } - /** - * @see DATAMONGO-850 - */ - @Test + @Test // DATAMONGO-850 public void shouldUseCustomFieldnameForScoring() { + TextQuery query = new TextQuery(QUERY).includeScore("customFieldForScore").sortByScore(); - assertThat(query, isTextQuery().searchingFor(QUERY).returningScoreAs("customFieldForScore").sortingByScore()); + assertThat(query.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(query.getFieldsObject()).containsKeys("customFieldForScore"); + assertThat(query.getSortObject()).containsKey("customFieldForScore"); + } + + @Test // GH-3896 + public void 
retainsSortOrderWhenUsingScore() { + + TextQuery query = new TextQuery(QUERY); + query.with(Sort.by(Direction.DESC, "one")); + query.sortByScore(); + query.with(Sort.by(Direction.DESC, "two")); + + assertThat(query.getSortObject().keySet().stream()).containsExactly("one", "score", "two"); + + query = new TextQuery(QUERY); + query.with(Sort.by(Direction.DESC, "one")); + query.sortByScore(); + + assertThat(query.getSortObject().keySet().stream()).containsExactly("one", "score"); + + query = new TextQuery(QUERY); + query.sortByScore(); + query.with(Sort.by(Direction.DESC, "one")); + query.with(Sort.by(Direction.DESC, "two")); + + assertThat(query.getSortObject().keySet().stream()).containsExactly("score", "one", "two"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java new file mode 100644 index 0000000000..91a0e43ee9 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java @@ -0,0 +1,185 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.data.domain.ExampleMatcher; +import org.springframework.data.domain.ExampleMatcher.NullHandler; +import org.springframework.data.domain.ExampleMatcher.StringMatcher; + +/** + * @author Christoph Strobl + */ +public class UntypedExampleMatcherUnitTests { + + ExampleMatcher matcher; + + @BeforeEach + public void setUp() { + matcher = UntypedExampleMatcher.matching(); + } + + @Test // DATAMONGO-1768 + public void defaultStringMatcherShouldReturnDefault() { + assertThat(matcher.getDefaultStringMatcher()).isEqualTo(StringMatcher.DEFAULT); + } + + @Test // DATAMONGO-1768 + public void ignoreCaseShouldReturnFalseByDefault() { + assertThat(matcher.isIgnoreCaseEnabled()).isFalse(); + } + + @Test // DATAMONGO-1768 + public void ignoredPathsIsEmptyByDefault() { + assertThat(matcher.getIgnoredPaths()).isEmpty(); + } + + @Test // DATAMONGO-1768 + public void nullHandlerShouldReturnIgnoreByDefault() { + assertThat(matcher.getNullHandler()).isEqualTo(NullHandler.IGNORE); + } + + @Test // DATAMONGO-1768 + public void ignoredPathsIsNotModifiable() { + assertThatExceptionOfType(UnsupportedOperationException.class) + .isThrownBy(() -> matcher.getIgnoredPaths().add("¯\\_(ツ)_/¯")); + } + + @Test // DATAMONGO-1768 + public void ignoreCaseShouldReturnTrueWhenIgnoreCaseEnabled() { + + matcher = UntypedExampleMatcher.matching().withIgnoreCase(); + + assertThat(matcher.isIgnoreCaseEnabled()).isTrue(); + } + + @Test // DATAMONGO-1768 + public void ignoreCaseShouldReturnTrueWhenIgnoreCaseSet() { + + matcher = UntypedExampleMatcher.matching().withIgnoreCase(true); + + assertThat(matcher.isIgnoreCaseEnabled()).isTrue(); + } + + @Test // DATAMONGO-1768 + public void nullHandlerShouldReturnInclude() throws Exception { + + matcher = UntypedExampleMatcher.matching().withIncludeNullValues(); + + assertThat(matcher.getNullHandler()).isEqualTo(NullHandler.INCLUDE); + } + + @Test // DATAMONGO-1768 + public void nullHandlerShouldReturnIgnore() { + + matcher = UntypedExampleMatcher.matching().withIgnoreNullValues(); + + assertThat(matcher.getNullHandler()).isEqualTo(NullHandler.IGNORE); + } + + @Test // DATAMONGO-1768 + public void nullHandlerShouldReturnConfiguredValue() { + + matcher = UntypedExampleMatcher.matching().withNullHandler(NullHandler.INCLUDE); + + assertThat(matcher.getNullHandler()).isEqualTo(NullHandler.INCLUDE); + } + + @Test // DATAMONGO-1768 + public void ignoredPathsShouldReturnCorrectProperties() { + + matcher = UntypedExampleMatcher.matching().withIgnorePaths("foo", "bar", "baz"); + + assertThat(matcher.getIgnoredPaths()).contains("foo", "bar", "baz"); + assertThat(matcher.getIgnoredPaths()).hasSize(3); + } + + @Test // DATAMONGO-1768 + public void ignoredPathsShouldReturnUniqueProperties() { + + matcher = UntypedExampleMatcher.matching().withIgnorePaths("foo", "bar", "foo"); + + assertThat(matcher.getIgnoredPaths()).contains("foo", "bar"); + assertThat(matcher.getIgnoredPaths()).hasSize(2); + } + + @Test // DATAMONGO-1768 + public void withCreatesNewInstance() { + + matcher = UntypedExampleMatcher.matching().withIgnorePaths("foo", "bar", "foo"); + ExampleMatcher configuredExampleSpec = matcher.withIgnoreCase(); + + assertThat(matcher).isNotSameAs(configuredExampleSpec); + assertThat(matcher.getIgnoredPaths()).hasSize(2); + assertThat(matcher.isIgnoreCaseEnabled()).isFalse(); + + 
assertThat(configuredExampleSpec.getIgnoredPaths()).hasSize(2); + assertThat(configuredExampleSpec.isIgnoreCaseEnabled()).isTrue(); + } + + @Test // DATAMONGO-1768 + public void defaultMatcherRequiresAllMatching() { + + assertThat(UntypedExampleMatcher.matching().isAllMatching()).isTrue(); + assertThat(UntypedExampleMatcher.matching().isAnyMatching()).isFalse(); + } + + @Test // DATAMONGO-1768 + public void allMatcherRequiresAllMatching() { + + assertThat(UntypedExampleMatcher.matchingAll().isAllMatching()).isTrue(); + assertThat(UntypedExampleMatcher.matchingAll().isAnyMatching()).isFalse(); + } + + @Test // DATAMONGO-1768 + public void anyMatcherYieldsAnyMatching() { + + assertThat(UntypedExampleMatcher.matchingAny().isAnyMatching()).isTrue(); + assertThat(UntypedExampleMatcher.matchingAny().isAllMatching()).isFalse(); + } + + @Test // DATAMONGO-1768 + public void shouldCompareUsingHashCodeAndEquals() { + + matcher = UntypedExampleMatcher.matching() // + .withIgnorePaths("foo", "bar", "baz") // + .withNullHandler(NullHandler.IGNORE) // + .withIgnoreCase("ignored-case") // + .withMatcher("hello", ExampleMatcher.GenericPropertyMatchers.contains().caseSensitive()) // + .withMatcher("world", matcher -> matcher.endsWith()); + + ExampleMatcher sameAsMatcher = UntypedExampleMatcher.matching() // + .withIgnorePaths("foo", "bar", "baz") // + .withNullHandler(NullHandler.IGNORE) // + .withIgnoreCase("ignored-case") // + .withMatcher("hello", ExampleMatcher.GenericPropertyMatchers.contains().caseSensitive()) // + .withMatcher("world", matcher -> matcher.endsWith()); + + ExampleMatcher different = UntypedExampleMatcher.matching() // + .withIgnorePaths("foo", "bar", "baz") // + .withNullHandler(NullHandler.IGNORE) // + .withMatcher("hello", ExampleMatcher.GenericPropertyMatchers.contains().ignoreCase()); + + assertThat(matcher.hashCode()).isEqualTo(sameAsMatcher.hashCode()); + assertThat(matcher.hashCode()).isNotEqualTo(different.hashCode()); + assertThat(matcher).isEqualTo(sameAsMatcher); + assertThat(matcher).isNotEqualTo(different); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java index 37d9370326..f5b8684687 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,26 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collections; +import java.util.Date; import java.util.Map; -import org.joda.time.DateTime; -import org.junit.Test; -import org.springframework.data.mongodb.core.DBObjectTestUtils; - -import com.mongodb.BasicDBObject; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.query.Update.Position; /** * Test cases for {@link Update}. - * + * * @author Oliver Gierke * @author Thomas Risberg * @author Becca Gaspard * @author Christoph Strobl * @author Thomas Darimont + * @author Alexey Plotnik */ public class UpdateTests { @@ -44,103 +42,77 @@ public class UpdateTests { public void testSet() { Update u = new Update().set("directory", "/Users/Test/Desktop"); - assertThat(u.getUpdateObject().toString(), is("{ \"$set\" : { \"directory\" : \"/Users/Test/Desktop\"}}")); + assertThat(u.getUpdateObject()) + .isEqualTo(Document.parse("{ \"$set\" : { \"directory\" : \"/Users/Test/Desktop\"}}")); } @Test public void testSetSet() { Update u = new Update().set("directory", "/Users/Test/Desktop").set("size", 0); - assertThat(u.getUpdateObject().toString(), - is("{ \"$set\" : { \"directory\" : \"/Users/Test/Desktop\" , \"size\" : 0}}")); + assertThat(u.getUpdateObject()) + .isEqualTo((Document.parse("{ \"$set\" : { \"directory\" : \"/Users/Test/Desktop\" , \"size\" : 0}}"))); } @Test public void testInc() { Update u = new Update().inc("size", 1); - assertThat(u.getUpdateObject().toString(), is("{ \"$inc\" : { \"size\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$inc\" : { \"size\" : 1}}")); } @Test public void testIncInc() { Update u = new Update().inc("size", 1).inc("count", 1); - assertThat(u.getUpdateObject().toString(), is("{ \"$inc\" : { \"size\" : 1 , \"count\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$inc\" : { \"size\" : 1 , \"count\" : 1}}")); } @Test public void testIncAndSet() { Update u = new Update().inc("size", 1).set("directory", "/Users/Test/Desktop"); - assertThat(u.getUpdateObject().toString(), - is("{ \"$inc\" : { \"size\" : 1} , \"$set\" : { \"directory\" : \"/Users/Test/Desktop\"}}")); + assertThat(u.getUpdateObject()).isEqualTo( + Document.parse("{ \"$inc\" : { \"size\" : 1} , \"$set\" : { \"directory\" : \"/Users/Test/Desktop\"}}")); } @Test public void testUnset() { Update u = new Update().unset("directory"); - assertThat(u.getUpdateObject().toString(), is("{ \"$unset\" : { \"directory\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$unset\" : { \"directory\" : 1}}")); } @Test public void testPush() { Update u = new Update().push("authors", Collections.singletonMap("name", "Sven")); - assertThat(u.getUpdateObject().toString(), is("{ \"$push\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); - } - - @Test - public void testPushAll() { - - Map m1 = Collections.singletonMap("name", "Sven"); - Map m2 = 
Collections.singletonMap("name", "Maria"); - - Update u = new Update().pushAll("authors", new Object[] { m1, m2 }); - assertThat(u.getUpdateObject().toString(), - is("{ \"$pushAll\" : { \"authors\" : [ { \"name\" : \"Sven\"} , { \"name\" : \"Maria\"}]}}")); - } - - /** - * @see DATAMONGO-354 - */ - @Test - public void testMultiplePushAllShouldBePossibleWhenUsingDifferentFields() { - - Map m1 = Collections.singletonMap("name", "Sven"); - Map m2 = Collections.singletonMap("name", "Maria"); - - Update u = new Update().pushAll("authors", new Object[] { m1, m2 }); - u.pushAll("books", new Object[] { "Spring in Action" }); - - assertThat( - u.getUpdateObject().toString(), - is("{ \"$pushAll\" : { \"authors\" : [ { \"name\" : \"Sven\"} , { \"name\" : \"Maria\"}] , \"books\" : [ \"Spring in Action\"]}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$push\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); } @Test public void testAddToSet() { Update u = new Update().addToSet("authors", Collections.singletonMap("name", "Sven")); - assertThat(u.getUpdateObject().toString(), is("{ \"$addToSet\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); + assertThat(u.getUpdateObject()) + .isEqualTo(Document.parse("{ \"$addToSet\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); } @Test public void testPop() { Update u = new Update().pop("authors", Update.Position.FIRST); - assertThat(u.getUpdateObject().toString(), is("{ \"$pop\" : { \"authors\" : -1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$pop\" : { \"authors\" : -1}}")); u = new Update().pop("authors", Update.Position.LAST); - assertThat(u.getUpdateObject().toString(), is("{ \"$pop\" : { \"authors\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$pop\" : { \"authors\" : 1}}")); } @Test public void testPull() { Update u = new Update().pull("authors", Collections.singletonMap("name", "Sven")); - assertThat(u.getUpdateObject().toString(), is("{ \"$pull\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$pull\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); } @Test @@ -150,154 +122,116 @@ public void testPullAll() { Map m2 = Collections.singletonMap("name", "Maria"); Update u = new Update().pullAll("authors", new Object[] { m1, m2 }); - assertThat(u.getUpdateObject().toString(), - is("{ \"$pullAll\" : { \"authors\" : [ { \"name\" : \"Sven\"} , { \"name\" : \"Maria\"}]}}")); + assertThat(u.getUpdateObject()).isEqualTo( + Document.parse("{ \"$pullAll\" : { \"authors\" : [ { \"name\" : \"Sven\"} , { \"name\" : \"Maria\"}]}}")); } @Test public void testRename() { Update u = new Update().rename("directory", "folder"); - assertThat(u.getUpdateObject().toString(), is("{ \"$rename\" : { \"directory\" : \"folder\"}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$rename\" : { \"directory\" : \"folder\"}}")); } @Test public void testBasicUpdateInc() { Update u = new Update().inc("size", 1); - assertThat(u.getUpdateObject().toString(), is("{ \"$inc\" : { \"size\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$inc\" : { \"size\" : 1}}")); } @Test public void testBasicUpdateIncAndSet() { Update u = new BasicUpdate("{ \"$inc\" : { \"size\" : 1}}").set("directory", "/Users/Test/Desktop"); - assertThat(u.getUpdateObject().toString(), - is("{ \"$inc\" : { \"size\" : 1} , \"$set\" : { \"directory\" : \"/Users/Test/Desktop\"}}")); + assertThat(u.getUpdateObject()).isEqualTo( + Document.parse("{ \"$inc\" 
: { \"size\" : 1} , \"$set\" : { \"directory\" : \"/Users/Test/Desktop\"}}")); } - /** - * @see DATAMONGO-630 - */ - @Test + @Test // DATAMONGO-630 public void testSetOnInsert() { Update u = new Update().setOnInsert("size", 1); - assertThat(u.getUpdateObject().toString(), is("{ \"$setOnInsert\" : { \"size\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$setOnInsert\" : { \"size\" : 1}}")); } - /** - * @see DATAMONGO-630 - */ - @Test + @Test // DATAMONGO-630 public void testSetOnInsertSetOnInsert() { Update u = new Update().setOnInsert("size", 1).setOnInsert("count", 1); - assertThat(u.getUpdateObject().toString(), is("{ \"$setOnInsert\" : { \"size\" : 1 , \"count\" : 1}}")); + assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$setOnInsert\" : { \"size\" : 1 , \"count\" : 1}}")); } - /** - * @see DATAMONGO-852 - */ - @Test + @Test // DATAMONGO-852 public void testUpdateAffectsFieldShouldReturnTrueWhenMultiFieldOperationAddedForField() { Update update = new Update().set("foo", "bar"); - assertThat(update.modifies("foo"), is(true)); + assertThat(update.modifies("foo")).isTrue(); } - /** - * @see DATAMONGO-852 - */ - @Test + @Test // DATAMONGO-852 public void testUpdateAffectsFieldShouldReturnFalseWhenMultiFieldOperationAddedForField() { Update update = new Update().set("foo", "bar"); - assertThat(update.modifies("oof"), is(false)); + assertThat(update.modifies("oof")).isFalse(); } - /** - * @see DATAMONGO-852 - */ - @Test + @Test // DATAMONGO-852 public void testUpdateAffectsFieldShouldReturnTrueWhenSingleFieldOperationAddedForField() { Update update = new Update().pullAll("foo", new Object[] { "bar" }); - assertThat(update.modifies("foo"), is(true)); + assertThat(update.modifies("foo")).isTrue(); } - /** - * @see DATAMONGO-852 - */ - @Test + @Test // DATAMONGO-852 public void testUpdateAffectsFieldShouldReturnFalseWhenSingleFieldOperationAddedForField() { Update update = new Update().pullAll("foo", new Object[] { "bar" }); - assertThat(update.modifies("oof"), is(false)); + assertThat(update.modifies("oof")).isFalse(); } - /** - * @see DATAMONGO-852 - */ - @Test + @Test // DATAMONGO-852 public void testUpdateAffectsFieldShouldReturnFalseWhenCalledOnEmptyUpdate() { - assertThat(new Update().modifies("foo"), is(false)); + assertThat(new Update().modifies("foo")).isFalse(); } - /** - * @see DATAMONGO-852 - */ - @Test - public void testUpdateAffectsFieldShouldReturnTrueWhenUpdateWithKeyCreatedFromDbObject() { + @Test // DATAMONGO-852 + public void testUpdateAffectsFieldShouldReturnTrueWhenUpdateWithKeyCreatedFromDocument() { Update update = new Update().set("foo", "bar"); - Update clone = Update.fromDBObject(update.getUpdateObject()); + Update clone = Update.fromDocument(update.getUpdateObject()); - assertThat(clone.modifies("foo"), is(true)); + assertThat(clone.modifies("foo")).isTrue(); } - /** - * @see DATAMONGO-852 - */ - @Test - public void testUpdateAffectsFieldShouldReturnFalseWhenUpdateWithoutKeyCreatedFromDbObject() { + @Test // DATAMONGO-852 + public void testUpdateAffectsFieldShouldReturnFalseWhenUpdateWithoutKeyCreatedFromDocument() { Update update = new Update().set("foo", "bar"); - Update clone = Update.fromDBObject(update.getUpdateObject()); + Update clone = Update.fromDocument(update.getUpdateObject()); - assertThat(clone.modifies("oof"), is(false)); + assertThat(clone.modifies("oof")).isFalse(); } - /** - * @see DATAMONGO-853 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-853 public void 
testAddingMultiFieldOperationThrowsExceptionWhenCalledWithNullKey() { - new Update().addMultiFieldOperation("$op", null, "exprected to throw IllegalArgumentException."); + assertThatIllegalArgumentException().isThrownBy( + () -> new Update().addMultiFieldOperation("$op", null, "expected to throw IllegalArgumentException.")); } - /** - * @see DATAMONGO-853 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-853 public void testAddingSingleFieldOperationThrowsExceptionWhenCalledWithNullKey() { - new Update().addFieldOperation("$op", null, "exprected to throw IllegalArgumentException."); + assertThatIllegalArgumentException().isThrownBy( + () -> new Update().addMultiFieldOperation("$op", null, "expected to throw IllegalArgumentException.")); } - /** - * @see DATAMONGO-853 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-853 public void testCreatingUpdateWithNullKeyThrowsException() { - Update.update(null, "value"); + assertThatIllegalArgumentException().isThrownBy(() -> Update.update(null, "value")); } - /** - * @see DATAMONGO-953 - */ - @Test + @Test // DATAMONGO-953 public void testEquality() { - Update actualUpdate = new Update() // .inc("size", 1) // .set("nl", null) // @@ -314,16 +248,13 @@ public void testEquality() { .pop("authors", Update.Position.FIRST) // .set("foo", "bar"); - assertThat(actualUpdate, is(equalTo(actualUpdate))); - assertThat(actualUpdate.hashCode(), is(equalTo(actualUpdate.hashCode()))); - assertThat(actualUpdate, is(equalTo(expectedUpdate))); - assertThat(actualUpdate.hashCode(), is(equalTo(expectedUpdate.hashCode()))); + assertThat(actualUpdate).isEqualTo(actualUpdate); + assertThat(actualUpdate.hashCode()).isEqualTo(actualUpdate.hashCode()); + assertThat(actualUpdate).isEqualTo(expectedUpdate); + assertThat(actualUpdate.hashCode()).isEqualTo(expectedUpdate.hashCode()); } - /** - * @see DATAMONGO-953 - */ - @Test + @Test // DATAMONGO-953 public void testToString() { Update actualUpdate = new Update() // @@ -342,166 +273,228 @@ public void testToString() { .pop("authors", Update.Position.FIRST) // .set("foo", "bar"); - assertThat(actualUpdate.toString(), is(equalTo(expectedUpdate.toString()))); - assertThat(actualUpdate.toString(), is("{ \"$inc\" : { \"size\" : 1} ," // + assertThat(actualUpdate.toString()).isEqualTo(expectedUpdate.toString()); + assertThat(actualUpdate.getUpdateObject()).isEqualTo(Document.parse("{ \"$inc\" : { \"size\" : 1} ," // + " \"$set\" : { \"nl\" : null , \"directory\" : \"/Users/Test/Desktop\" , \"foo\" : \"bar\"} , " // + "\"$push\" : { \"authors\" : { \"name\" : \"Sven\"}} " // + ", \"$pop\" : { \"authors\" : -1}}")); // } - /** - * @see DATAMONGO-944 - */ - @Test + @Test // DATAMONGO-944 public void getUpdateObjectShouldReturnCurrentDateCorrectlyForSingleFieldWhenUsingDate() { Update update = new Update().currentDate("foo"); - assertThat(update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$currentDate", new BasicDBObject("foo", true)).get())); + assertThat(update.getUpdateObject()).isEqualTo(new Document().append("$currentDate", new Document("foo", true))); } - /** - * @see DATAMONGO-944 - */ - @Test + @Test // DATAMONGO-944 public void getUpdateObjectShouldReturnCurrentDateCorrectlyForMultipleFieldsWhenUsingDate() { Update update = new Update().currentDate("foo").currentDate("bar"); - assertThat(update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$currentDate", new BasicDBObject("foo", true).append("bar", true)) - .get())); + 
assertThat(update.getUpdateObject()) + .isEqualTo(new Document().append("$currentDate", new Document("foo", true).append("bar", true))); } - /** - * @see DATAMONGO-944 - */ - @Test + @Test // DATAMONGO-944 public void getUpdateObjectShouldReturnCurrentDateCorrectlyForSingleFieldWhenUsingTimestamp() { Update update = new Update().currentTimestamp("foo"); - assertThat( - update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$currentDate", - new BasicDBObject("foo", new BasicDBObject("$type", "timestamp"))).get())); + assertThat(update.getUpdateObject()) + .isEqualTo(new Document().append("$currentDate", new Document("foo", new Document("$type", "timestamp")))); } - /** - * @see DATAMONGO-944 - */ - @Test + @Test // DATAMONGO-944 public void getUpdateObjectShouldReturnCurrentDateCorrectlyForMultipleFieldsWhenUsingTimestamp() { Update update = new Update().currentTimestamp("foo").currentTimestamp("bar"); - assertThat( - update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add( - "$currentDate", - new BasicDBObject("foo", new BasicDBObject("$type", "timestamp")).append("bar", new BasicDBObject("$type", - "timestamp"))).get())); + assertThat(update.getUpdateObject()).isEqualTo(new Document().append("$currentDate", + new Document("foo", new Document("$type", "timestamp")).append("bar", new Document("$type", "timestamp")))); } - /** - * @see DATAMONGO-944 - */ - @Test + @Test // DATAMONGO-944 public void getUpdateObjectShouldReturnCurrentDateCorrectlyWhenUsingMixedDateAndTimestamp() { Update update = new Update().currentDate("foo").currentTimestamp("bar"); - assertThat( - update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$currentDate", - new BasicDBObject("foo", true).append("bar", new BasicDBObject("$type", "timestamp"))).get())); + assertThat(update.getUpdateObject()).isEqualTo(new Document().append("$currentDate", + new Document("foo", true).append("bar", new Document("$type", "timestamp")))); } - /** - * @see DATAMONGO-1002 - */ - @Test + @Test // DATAMONGO-1002 public void toStringWorksForUpdateWithComplexObject() { - Update update = new Update().addToSet("key", new DateTime()); - assertThat(update.toString(), is(notNullValue())); + Update update = new Update().addToSet("key", new Date()); + assertThat(update.toString()).isNotNull(); } - /** - * @see DATAMONGO-1097 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-1097 public void multiplyShouldThrowExceptionForNullMultiplier() { - new Update().multiply("key", null); + assertThatIllegalArgumentException().isThrownBy(() -> new Update().multiply("key", null)); } - /** - * @see DATAMONGO-1097 - */ - @Test + @Test // DATAMONGO-1097 public void multiplyShouldAddMultiplierAsItsDoubleValue() { Update update = new Update().multiply("key", 10); - assertThat(update.getUpdateObject(), equalTo(new BasicDBObjectBuilder().add("$mul", new BasicDBObject("key", 10D)) - .get())); + assertThat(update.getUpdateObject()).isEqualTo(new Document().append("$mul", new Document("key", 10D))); } - /** - * @see DATAMONGO-1101 - */ - @Test + @Test // DATAMONGO-1101 public void getUpdateObjectShouldReturnCorrectRepresentationForBitwiseAnd() { Update update = new Update().bitwise("key").and(10L); - assertThat(update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$bit", new BasicDBObject("key", new BasicDBObject("and", 10L))).get())); + assertThat(update.getUpdateObject()) + .isEqualTo(new Document().append("$bit", new Document("key", new Document("and", 10L)))); } - /** - * @see 
DATAMONGO-1101 - */ - @Test + @Test // DATAMONGO-1101 public void getUpdateObjectShouldReturnCorrectRepresentationForBitwiseOr() { Update update = new Update().bitwise("key").or(10L); - assertThat(update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$bit", new BasicDBObject("key", new BasicDBObject("or", 10L))).get())); + assertThat(update.getUpdateObject()) + .isEqualTo(new Document().append("$bit", new Document("key", new Document("or", 10L)))); } - /** - * @see DATAMONGO-1101 - */ - @Test + @Test // DATAMONGO-1101 public void getUpdateObjectShouldReturnCorrectRepresentationForBitwiseXor() { Update update = new Update().bitwise("key").xor(10L); - assertThat(update.getUpdateObject(), - equalTo(new BasicDBObjectBuilder().add("$bit", new BasicDBObject("key", new BasicDBObject("xor", 10L))).get())); - } - - /** - * @see DATAMONGO-943 - */ - @Test(expected = IllegalArgumentException.class) - public void pushShouldThrowExceptionWhenGivenNegativePosition() { - new Update().push("foo").atPosition(-1).each("booh"); + assertThat(update.getUpdateObject()) + .isEqualTo(new Document().append("$bit", new Document("key", new Document("xor", 10L)))); } - /** - * @see DATAMONGO-1346 - */ - @Test + @Test // DATAMONGO-1346 public void registersMultiplePullAllClauses() { Update update = new Update(); update.pullAll("field1", new String[] { "foo" }); update.pullAll("field2", new String[] { "bar" }); - DBObject updateObject = update.getUpdateObject(); + Document updateObject = update.getUpdateObject(); + + Document pullAll = DocumentTestUtils.getAsDocument(updateObject, "$pullAll"); + + assertThat(pullAll.get("field1")).isNotNull(); + assertThat(pullAll.get("field2")).isNotNull(); + } + + @Test // DATAMONGO-1404 + public void maxShouldThrowExceptionForNullMultiplier() { + assertThatIllegalArgumentException().isThrownBy(() -> new Update().max("key", null)); + } + + @Test // DATAMONGO-1404 + public void minShouldThrowExceptionForNullMultiplier() { + assertThatIllegalArgumentException().isThrownBy(() -> new Update().min("key", null)); + } + + @Test // DATAMONGO-1404 + public void getUpdateObjectShouldReturnCorrectRepresentationForMax() { + + Update update = new Update().max("key", 10); + + assertThat(update.getUpdateObject()).isEqualTo(new Document("$max", new Document("key", 10))); + } + + @Test // DATAMONGO-1404 + public void getUpdateObjectShouldReturnCorrectRepresentationForMin() { + + Update update = new Update().min("key", 10); + + assertThat(update.getUpdateObject()).isEqualTo(new Document("$min", new Document("key", 10))); + } + + @Test // DATAMONGO-1404 + public void shouldSuppressPreviousValueForMax() { + + Update update = new Update().max("key", 10); + update.max("key", 99); + + assertThat(update.getUpdateObject()).isEqualTo(new Document("$max", new Document("key", 99))); + } + + @Test // DATAMONGO-1404 + public void shouldSuppressPreviousValueForMin() { + + Update update = new Update().min("key", 10); + update.min("key", 99); + + assertThat(update.getUpdateObject()).isEqualTo(new Document("$min", new Document("key", 99))); + } + + @Test // DATAMONGO-1404 + public void getUpdateObjectShouldReturnCorrectDateRepresentationForMax() { + + Date date = new Date(); + Update update = new Update().max("key", date); + + assertThat(update.getUpdateObject()).isEqualTo(new Document("$max", new Document("key", date))); + } + + @Test // DATAMONGO-1404 + public void getUpdateObjectShouldReturnCorrectDateRepresentationForMin() { + + Date date = new Date(); + Update update = new Update().min("key", 
date); + + assertThat(update.getUpdateObject()).isEqualTo(new Document("$min", new Document("key", date))); + } + + @Test // DATAMONGO-1777, DATAMONGO-2199 + public void toStringShouldPrettyPrintModifiers() { + + assertThat(new Update().push("key").atPosition(Position.FIRST).value("Arya").toString()).isEqualTo( + "{ \"$push\" : { \"key\" : { \"$java\" : { \"$position\" : { \"$java\" : { \"$position\" : 0} }, \"$each\" : { \"$java\" : { \"$each\" : [ \"Arya\" ] } } } } } }"); + } + + @Test // DATAMONGO-1777, DATAMONGO-2198 + public void toStringConsidersIsolated() { + + assertThat(new Update().set("key", "value").isolated().toString()).contains("\"$isolated\""); + } + + @Test // DATAMONGO-1778 + public void equalsShouldConsiderModifiers() { + + Update update1 = new Update().inc("version", 1).push("someField").slice(-10).each("test"); + Update update2 = new Update().inc("version", 1).push("someField").slice(-10).each("test"); + Update update3 = new Update().inc("version", 1).push("someField").slice(10).each("test"); + + assertThat(update1).isEqualTo(update2); + assertThat(update1).isNotEqualTo(update3); + } + + @Test // DATAMONGO-1778 + public void equalsShouldConsiderIsolated() { + + Update update1 = new Update().inc("version", 1).isolated(); + Update update2 = new Update().inc("version", 1).isolated(); + + assertThat(update1).isEqualTo(update2); + } + + @Test // DATAMONGO-1778 + public void hashCodeShouldConsiderModifiers() { + + Update update1 = new Update().inc("version", 1).push("someField").slice(-10).each("test"); + Update update2 = new Update().inc("version", 1).push("someField").slice(-10).each("test"); + Update update3 = new Update().inc("version", 1).push("someField").slice(10).each("test"); + + assertThat(update1.hashCode()).isEqualTo(update2.hashCode()); + assertThat(update1.hashCode()).isNotEqualTo(update3.hashCode()); + } + + @Test // DATAMONGO-1778 + public void hashCodeShouldConsiderIsolated() { - DBObject pullAll = DBObjectTestUtils.getAsDBObject(updateObject, "$pullAll"); + Update update1 = new Update().inc("version", 1).isolated(); + Update update2 = new Update().inc("version", 1).isolated(); + Update update3 = new Update().inc("version", 1); - assertThat(pullAll.get("field1"), is(notNullValue())); - assertThat(pullAll.get("field2"), is(notNullValue())); + assertThat(update1.hashCode()).isEqualTo(update2.hashCode()); + assertThat(update1.hashCode()).isNotEqualTo(update3.hashCode()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java new file mode 100644 index 0000000000..0c411dcb4f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java @@ -0,0 +1,393 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.schema; + +import static org.springframework.data.domain.Range.from; +import static org.springframework.data.domain.Range.Bound.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaObject.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaObject.array; +import static org.springframework.data.mongodb.core.schema.JsonSchemaObject.of; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.*; + +/** + * Tests verifying {@link org.bson.Document} representation of {@link JsonSchemaObject}s. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Michał Kurcius + */ +class JsonSchemaObjectUnitTests { + + // ----------------- + // type from class + // ----------------- + + @Test // DATAMONGO-1849 + void primitiveType() { + + assertThat(JsonSchemaObject.of(boolean.class).getTypes()).containsExactly(Type.booleanType()); + assertThat(JsonSchemaObject.of(int.class).getTypes()).containsExactly(Type.intType()); + assertThat(JsonSchemaObject.of(long.class).getTypes()).containsExactly(Type.longType()); + assertThat(JsonSchemaObject.of(float.class).getTypes()).containsExactly(Type.doubleType()); + assertThat(JsonSchemaObject.of(double.class).getTypes()).containsExactly(Type.doubleType()); + assertThat(JsonSchemaObject.of(short.class).getTypes()).containsExactly(Type.numberType()); + } + + @Test // DATAMONGO-1849 + void objectType() { + + assertThat(JsonSchemaObject.of(Object.class).getTypes()).containsExactly(Type.objectType()); + assertThat(JsonSchemaObject.of(Map.class).getTypes()).containsExactly(Type.objectType()); + assertThat(JsonSchemaObject.of(Document.class).getTypes()).containsExactly(Type.objectType()); + } + + @Test // DATAMONGO-1849 + void binaryData() { + assertThat(JsonSchemaObject.of(byte[].class).getTypes()).containsExactly(Type.binaryType()); + } + + @Test // DATAMONGO-1849 + void collectionType() { + + assertThat(JsonSchemaObject.of(Object[].class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(Collection.class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(List.class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(Set.class).getTypes()).containsExactly(Type.arrayType()); + } + + @Test // DATAMONGO-1849 + void dateType() { + assertThat(JsonSchemaObject.of(Date.class).getTypes()).containsExactly(Type.dateType()); + } + + // ----------------- + // type : 'object' + // ----------------- + + @Test // DATAMONGO-1835 + void objectObjectShouldRenderTypeCorrectly() { + + assertThat(object().generatedDescription().toDocument()) + .isEqualTo(new Document("type", "object").append("description", "Must be an object.")); + } + + @Test // DATAMONGO-1835 + void objectObjectShouldRenderNrPropertiesCorrectly() { + + assertThat(object().propertiesCount(from(inclusive(10)).to(inclusive(20))).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "object").append("description", "Must be an object with [10-20] properties.") + .append("minProperties", 10).append("maxProperties", 20)); + } + + @Test // DATAMONGO-1835 + void 
objectObjectShouldRenderRequiredPropertiesCorrectly() { + + assertThat(object().required("spring", "data", "mongodb").generatedDescription().toDocument()) + .isEqualTo(new Document("type", "object") + .append("description", "Must be an object where spring, data, mongodb are mandatory.") + .append("required", Arrays.asList("spring", "data", "mongodb"))); + } + + @Test // DATAMONGO-1835 + void objectObjectShouldRenderAdditionalPropertiesCorrectlyWhenBoolean() { + + assertThat(object().additionalProperties(true).generatedDescription().toDocument()).isEqualTo( + new Document("type", "object").append("description", "Must be an object allowing additional properties.") + .append("additionalProperties", true)); + + assertThat(object().additionalProperties(false).generatedDescription().toDocument()).isEqualTo( + new Document("type", "object").append("description", "Must be an object not allowing additional properties.") + .append("additionalProperties", false)); + } + + @Test // DATAMONGO-1835 + void objectObjectShouldRenderPropertiesCorrectly() { + + Document expected = new Document("type", "object") + .append("description", "Must be an object defining restrictions for name, active.").append("properties", + new Document("name", new Document("type", "string") + .append("description", "Must be a string with length unbounded-10].").append("maxLength", 10)) + .append("active", new Document("type", "boolean"))); + + assertThat(object().generatedDescription() + .properties(JsonSchemaProperty.string("name").maxLength(10).generatedDescription(), + JsonSchemaProperty.bool("active")) + .generatedDescription().toDocument()).isEqualTo(expected); + } + + @Test // DATAMONGO-1835 + void objectObjectShouldRenderNestedObjectPropertiesCorrectly() { + + Document expected = new Document("type", "object") + .append("description", "Must be an object defining restrictions for address.") + .append("properties", new Document("address", + new Document("type", "object").append("description", "Must be an object defining restrictions for city.") + .append("properties", new Document("city", new Document("type", "string") + .append("description", "Must be a string with length [3-unbounded.").append("minLength", 3))))); + + assertThat(object() + .properties(JsonSchemaProperty.object("address") + .properties(JsonSchemaProperty.string("city").minLength(3).generatedDescription()).generatedDescription()) + .generatedDescription().toDocument()).isEqualTo(expected); + } + + @Test // DATAMONGO-1835 + void objectObjectShouldRenderPatternPropertiesCorrectly() { + + Document expected = new Document("type", "object") + .append("description", "Must be an object defining restrictions for patterns na.*.") + .append("patternProperties", new Document("na.*", new Document("type", "string") + .append("description", "Must be a string with length unbounded-10].").append("maxLength", 10))); + + assertThat(object().patternProperties(JsonSchemaProperty.string("na.*").maxLength(10).generatedDescription()) + .generatedDescription().toDocument()).isEqualTo(expected); + } + + @Test // DATAMONGO-1849 + void objectShouldIncludeRequiredNestedCorrectly() { + + assertThat(object() // + .properties( // + JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")) // + ).toDocument()) + .isEqualTo(new Document("type", "object").append("required", Collections.singletonList("lastname")) + .append("properties", new Document("lastname", new Document("type", "string")))); + } + + // ----------------- + // type : 'string' + // ----------------- + + @Test // 
DATAMONGO-1835 + void stringObjectShouldRenderTypeCorrectly() { + + assertThat(string().generatedDescription().toDocument()) + .isEqualTo(new Document("type", "string").append("description", "Must be a string.")); + } + + @Test // DATAMONGO-1835 + void stringObjectShouldRenderDescriptionCorrectly() { + + assertThat(string().description("error msg").toDocument()) + .isEqualTo(new Document("type", "string").append("description", "error msg")); + } + + @Test // DATAMONGO-1835 + void stringObjectShouldRenderRangeCorrectly() { + + assertThat(string().length(from(inclusive(10)).to(inclusive(20))).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "string").append("description", "Must be a string with length [10-20].") + .append("minLength", 10).append("maxLength", 20)); + } + + @Test // DATAMONGO-1835 + void stringObjectShouldRenderPatternCorrectly() { + + assertThat(string().matching("^spring$").generatedDescription().toDocument()) + .isEqualTo(new Document("type", "string").append("description", "Must be a string matching ^spring$.") + .append("pattern", "^spring$")); + } + + // ----------------- + // type : 'number' + // ----------------- + + @Test // DATAMONGO-1835 + void numberObjectShouldRenderMultipleOfCorrectly() { + + assertThat(number().multipleOf(3.141592F).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "number").append("description", "Must be a numeric value multiple of 3.141592.") + .append("multipleOf", 3.141592F)); + } + + @Test // DATAMONGO-1835 + void numberObjectShouldRenderMaximumCorrectly() { + + assertThat( + number().within(Range.of(Bound.unbounded(), Bound.inclusive(3.141592F))).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "number") + .append("description", "Must be a numeric value within range unbounded-3.141592].") + .append("maximum", 3.141592F)); + + assertThat( + number().within(Range.of(Bound.unbounded(), Bound.exclusive(3.141592F))).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "number") + .append("description", "Must be a numeric value within range unbounded-3.141592).") + .append("maximum", 3.141592F).append("exclusiveMaximum", true)); + } + + @Test // DATAMONGO-1835 + void numberObjectShouldRenderMinimumCorrectly() { + + assertThat( + number().within(Range.of(Bound.inclusive(3.141592F), Bound.unbounded())).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "number") + .append("description", "Must be a numeric value within range [3.141592-unbounded.") + .append("minimum", 3.141592F)); + + assertThat( + number().within(Range.of(Bound.exclusive(3.141592F), Bound.unbounded())).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "number") + .append("description", "Must be a numeric value within range (3.141592-unbounded.") + .append("minimum", 3.141592F).append("exclusiveMinimum", true)); + } + + // ----------------- + // type : 'arrays' + // ----------------- + + @Test // DATAMONGO-1835 + void arrayObjectShouldRenderItemsCorrectly() { + + assertThat(array().items(Arrays.asList(string(), bool())).toDocument()).isEqualTo(new Document("type", "array") + .append("items", Arrays.asList(new Document("type", "string"), new Document("type", "boolean")))); + } + + @Test // DATAMONGO-2613 + void arrayObjectShouldRenderItemsCorrectlyAsObjectIfContainsOnlyOneElement() { + + assertThat(array().items(Collections.singletonList(string())).toDocument()) + .isEqualTo(new Document("type", "array").append("items", new Document("type", "string"))); + } + + 
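+	// Illustrative sketch, not part of this change set: the array facets exercised in this section
+	// compose on a single builder. The combined shape is an assumption derived from the individual
+	// assertions above and below (field order in the resulting Document may differ).
+	//
+	//   Document doc = array().items(Collections.singletonList(string())).minItems(1).uniqueItems(true).toDocument();
+	//   // -> { "type" : "array", "items" : { "type" : "string" }, "minItems" : 1, "uniqueItems" : true }
+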
@Test // DATAMONGO-1835 + void arrayObjectShouldRenderMaxItemsCorrectly() { + + assertThat(array().maxItems(5).generatedDescription().toDocument()).isEqualTo(new Document("type", "array") + .append("description", "Must be an array having size unbounded-5].").append("maxItems", 5)); + } + + @Test // DATAMONGO-1835 + void arrayObjectShouldRenderMinItemsCorrectly() { + + assertThat(array().minItems(5).generatedDescription().toDocument()).isEqualTo(new Document("type", "array") + .append("description", "Must be an array having size [5-unbounded.").append("minItems", 5)); + } + + @Test // DATAMONGO-1835 + void arrayObjectShouldRenderUniqueItemsCorrectly() { + + assertThat(array().uniqueItems(true).generatedDescription().toDocument()).isEqualTo(new Document("type", "array") + .append("description", "Must be an array of unique values.").append("uniqueItems", true)); + } + + @Test // DATAMONGO-1835 + void arrayObjectShouldRenderAdditionalItemsItemsCorrectly() { + + assertThat(array().additionalItems(true).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "array").append("description", "Must be an array with additional items.") + .append("additionalItems", true)); + assertThat(array().additionalItems(false).generatedDescription().toDocument()) + .isEqualTo(new Document("type", "array").append("description", "Must be an array with no additional items.") + .append("additionalItems", false)); + } + + // ----------------- + // type : 'boolean' + // ----------------- + + @Test // DATAMONGO-1835 + void booleanShouldRenderCorrectly() { + + assertThat(bool().generatedDescription().toDocument()) + .isEqualTo(new Document("type", "boolean").append("description", "Must be a boolean")); + } + + // ----------------- + // type : 'null' + // ----------------- + + @Test // DATAMONGO-1835 + void nullShouldRenderCorrectly() { + + assertThat(nil().generatedDescription().toDocument()) + .isEqualTo(new Document("type", "null").append("description", "Must be null")); + } + + // ----------------- + // type : 'date' + // ----------------- + + @Test // DATAMONGO-1877 + void dateShouldRenderCorrectly() { + + assertThat(date().generatedDescription().toDocument()) + .isEqualTo(new Document("bsonType", "date").append("description", "Must be a date")); + } + + // ----------------- + // type : 'timestamp' + // ----------------- + + @Test // DATAMONGO-1877 + void timestampShouldRenderCorrectly() { + + assertThat(timestamp().generatedDescription().toDocument()) + .isEqualTo(new Document("bsonType", "timestamp").append("description", "Must be a timestamp")); + } + + // ----------------- + // type : 'any' + // ----------------- + + @Test // DATAMONGO-1835 + void typedObjectShouldRenderEnumCorrectly() { + + assertThat(of(String.class).possibleValues(Arrays.asList("one", "two")).toDocument()) + .isEqualTo(new Document("type", "string").append("enum", Arrays.asList("one", "two"))); + } + + @Test // DATAMONGO-1835 + void typedObjectShouldRenderAllOfCorrectly() { + + assertThat(of(Object.class).allOf(Arrays.asList(string())).toDocument()) + .isEqualTo(new Document("type", "object").append("allOf", Arrays.asList(new Document("type", "string")))); + } + + @Test // DATAMONGO-1835 + void typedObjectShouldRenderAnyOfCorrectly() { + + assertThat(of(String.class).anyOf(Arrays.asList(string())).toDocument()) + .isEqualTo(new Document("type", "string").append("anyOf", Arrays.asList(new Document("type", "string")))); + } + + @Test // DATAMONGO-1835 + void typedObjectShouldRenderOneOfCorrectly() { + + 
assertThat(of(String.class).oneOf(Arrays.asList(string())).toDocument()) + .isEqualTo(new Document("type", "string").append("oneOf", Arrays.asList(new Document("type", "string")))); + } + + @Test // DATAMONGO-1835 + void typedObjectShouldRenderNotCorrectly() { + + assertThat(untyped().notMatch(string()).toDocument()) + .isEqualTo(new Document("not", new Document("type", "string"))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java new file mode 100644 index 0000000000..c9f6934d9b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java @@ -0,0 +1,71 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; + +/** + * Unit tests for {@link JsonSchemaProperty}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +public class JsonSchemaPropertyUnitTests { + + @Test // DATAMONGO-1835 + public void shouldRenderInt32Correctly() { + assertThat(JsonSchemaProperty.int32("foo").toDocument()).containsEntry("foo.bsonType", "int"); + } + + @Test // DATAMONGO-1835 + public void shouldRenderInt64Correctly() { + assertThat(JsonSchemaProperty.int64("foo").toDocument()).containsEntry("foo.bsonType", "long"); + } + + @Test // DATAMONGO-1835 + public void shouldRenderDecimal128Correctly() { + assertThat(JsonSchemaProperty.decimal128("foo").toDocument()).containsEntry("foo.bsonType", "decimal"); + } + + @Test // DATAMONGO-1835 + public void shouldRenderNullCorrectly() { + assertThat(JsonSchemaProperty.nil("foo").toDocument()).containsEntry("foo.type", "null"); + } + + @Test // DATAMONGO-1835 + public void shouldRenderUntypedCorrectly() { + assertThat(JsonSchemaProperty.named("foo").ofType(Type.binaryType()).toDocument()).containsEntry("foo.bsonType", + "binData"); + } + + @Test // DATAMONGO-1877 + public void shouldRenderDateCorrectly() { + assertThat(JsonSchemaProperty.date("foo").toDocument()).containsEntry("foo.bsonType", "date"); + } + + @Test // DATAMONGO-1877 + public void shouldRenderTimestampCorrectly() { + assertThat(JsonSchemaProperty.timestamp("foo").toDocument()).containsEntry("foo.bsonType", "timestamp"); + } + + @Test // DATAMONGO-2282 + public void objectIdShouldBeRenderedCorrectly() { + assertThat(JsonSchemaProperty.objectId("_id").toDocument()).containsEntry("_id.bsonType", "objectId"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java new file mode 100644 index 0000000000..a7cf75366f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java @@ -0,0 +1,226 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.ValidationAction; +import com.mongodb.client.model.ValidationLevel; +import com.mongodb.client.model.ValidationOptions; + +/** + * Integration tests for {@link MongoJsonSchema}. 
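+ * <p>
+ * Round-trip sketch (illustrative): a schema built via {@code MongoJsonSchema.builder()} is applied
+ * with {@code template.createCollection(Person.class, CollectionOptions.empty().schema(schema))} and
+ * verified by reading the validator back through the {@code listCollections} command.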
+ * + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@ContextConfiguration +public class MongoJsonSchemaTests { + + static @Client MongoClient mongoClient; + + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "json-schema-tests"; + } + + } + + @Autowired MongoTemplate template; + + @BeforeEach + public void setUp() { + + template.dropCollection(Person.class); + } + + @Test // DATAMONGO-1835 + public void writeSchemaViaTemplate() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname", "lastname") // + .properties( // + JsonSchemaProperty.string("firstname").possibleValues("luke", "han").maxLength(10), // + JsonSchemaProperty.object("address") // + .properties(JsonSchemaProperty.string("postCode").minLength(4).maxLength(5)) + + ).build(); + + template.createCollection(Person.class, CollectionOptions.empty().schema(schema)); + + Document $jsonSchema = new MongoJsonSchemaMapper(template.getConverter()).mapSchema(schema.toDocument(), + Person.class); + + Document fromDb = readSchemaFromDatabase("persons"); + assertThat(fromDb).isEqualTo($jsonSchema); + } + + @Test // DATAMONGO-1835 + public void writeSchemaInDocumentValidatorCorrectly() { + + Document unmappedSchema = new Document("$jsonSchema", + new Document("type", "object").append("required", Collections.singletonList("firstname"))); + + Document mappedSchema = new Document("$jsonSchema", + new Document("type", "object").append("required", Collections.singletonList("first_name"))); + + template.createCollection(Person.class, CollectionOptions.empty().validator(Validator.document(unmappedSchema))); + + assertThat(readSchemaFromDatabase("persons")).isEqualTo(mappedSchema); + } + + @Test // DATAMONGO-1835 + public void nonMappedSchema() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname", "lastname") // + .properties( // + JsonSchemaProperty.string("firstname").possibleValues("luke", "han").maxLength(10), // + JsonSchemaProperty.object("address") // + .properties(JsonSchemaProperty.string("postCode").minLength(4).maxLength(5)) + + ).build(); + + template.createCollection("persons", CollectionOptions.empty().schema(schema)); + + Document fromDb = readSchemaFromDatabase("persons"); + assertThat(fromDb) + .isNotEqualTo(new MongoJsonSchemaMapper(template.getConverter()).mapSchema(schema.toDocument(), Person.class)); + } + + @Test // DATAMONGO-1835 + public void writeSchemaManually() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname", "lastname") // + .properties( // + JsonSchemaProperty.string("firstname").possibleValues("luke", "han").maxLength(10), // + JsonSchemaProperty.object("address") // + .properties(JsonSchemaProperty.string("postCode").minLength(4).maxLength(5)) + + ).build(); + + Document $jsonSchema = new MongoJsonSchemaMapper(template.getConverter()).mapSchema(schema.toDocument(), + Person.class); + + ValidationOptions options = new ValidationOptions(); + options.validationLevel(ValidationLevel.MODERATE); + options.validationAction(ValidationAction.ERROR); + options.validator($jsonSchema); + + CreateCollectionOptions cco = new CreateCollectionOptions(); + cco.validationOptions(options); + + MongoDatabase db = template.getDb(); + db.createCollection("persons", cco); + + Document fromDb = readSchemaFromDatabase("persons"); + 
assertThat(fromDb).isEqualTo($jsonSchema); + } + + Document readSchemaFromDatabase(String collectionName) { + + Document collectionInfo = template + .executeCommand(new Document("listCollections", 1).append("filter", new Document("name", collectionName))); + + if (collectionInfo.containsKey("cursor")) { + collectionInfo = (Document) collectionInfo.get("cursor", Document.class).get("firstBatch", List.class).iterator() + .next(); + } + + if (!collectionInfo.containsKey("options")) { + return new Document(); + } + + return collectionInfo.get("options", Document.class).get("validator", Document.class); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "persons") + static class Person { + + @Field("first_name") String firstname; + String lastname; + Address address; + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Address getAddress() { + return this.address; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAddress(Address address) { + this.address = address; + } + + public String toString() { + return "MongoJsonSchemaTests.Person(firstname=" + this.getFirstname() + ", lastname=" + this.getLastname() + + ", address=" + this.getAddress() + ")"; + } + } + + static class Address { + + String city; + String street; + + @Field("post_code") String postCode; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java new file mode 100644 index 0000000000..1691305617 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java @@ -0,0 +1,151 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import static org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.encrypted; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.UUID; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +/** + * Unit tests for {@link MongoJsonSchema}. 
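+ * <p>
+ * Minimal usage sketch (illustrative):
+ * {@code MongoJsonSchema.builder().required("firstname", "lastname").build().toDocument()} renders
+ * the {@code $jsonSchema} document asserted in the tests below.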
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +class MongoJsonSchemaUnitTests { + + @Test // DATAMONGO-1835 + void toDocumentRendersSchemaCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname", "lastname") // + .build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")))); + } + + @Test // DATAMONGO-1835 + void rendersDocumentBasedSchemaCorrectly() { + + Document document = MongoJsonSchema.builder() // + .required("firstname", "lastname") // + .build().toDocument(); + + MongoJsonSchema jsonSchema = MongoJsonSchema.of(document.get("$jsonSchema", Document.class)); + + assertThat(jsonSchema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")))); + } + + @Test // DATAMONGO-1849 + void rendersRequiredPropertiesCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname") // + .properties( // + JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")) // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")).append("properties", + new Document("lastname", new Document("type", "string"))))); + } + + @Test // DATAMONGO-2306 + void rendersEncryptedPropertyCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + encrypted(string("ssn")) // + .aead_aes_256_cbc_hmac_sha_512_deterministic() // + .keyId("*key0_id") // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("properties", + new Document("ssn", new Document("encrypt", new Document("keyId", "*key0_id") + .append("algorithm", "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic").append("bsonType", "string")))))); + } + + @Test // DATAMONGO-2306 + void rendersEncryptedPropertyWithKeyIdCorrectly() { + + UUID uuid = UUID.randomUUID(); + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + encrypted(string("ssn")) // + .aead_aes_256_cbc_hmac_sha_512_deterministic() // + .keys(uuid) // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("properties", + new Document("ssn", new Document("encrypt", new Document("keyId", Collections.singletonList(uuid)) + .append("algorithm", "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic").append("bsonType", "string")))))); + } + + @Test // GH-4185 + void rendersQueryablePropertyCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + queryable(rangeEncrypted(number("ssn")), + List.of(QueryCharacteristics.range().contention(0).trimFactor(1).sparsity(1).min(0).max(200)))) + .build(); + + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(""" + { + "type": "object", + "properties": { + "ssn": { + "encrypt": { + "bsonType": "long", + "algorithm": "Range", + "queries": [{ + "queryType": "range", + "contention": {$numberLong: "0"}, + "trimFactor": 1, + "sparsity": {$numberLong: "1"}, + "min": 0, + "max": 200 + }] + } + } + } + } + """); + } + + @Test // DATAMONGO-1835 + void throwsExceptionOnNullRoot() { + assertThatIllegalArgumentException().isThrownBy(() -> MongoJsonSchema.of((JsonSchemaObject) null)); + } + + @Test // 
DATAMONGO-1835 + void throwsExceptionOnNullDocument() { + assertThatIllegalArgumentException().isThrownBy(() -> MongoJsonSchema.of((Document) null)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java new file mode 100644 index 0000000000..4615568d10 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java @@ -0,0 +1,171 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.DataRetrievalFailureException; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link MongoJsonSchema} using reactive infrastructure. 
+ *
+ * @author Mark Paluch
+ */
+@ExtendWith({ MongoClientExtension.class, SpringExtension.class })
+@ContextConfiguration
+public class ReactiveMongoJsonSchemaTests {
+
+	static @Client MongoClient mongoClient;
+
+	@Configuration
+	static class Config extends ReactiveMongoClientClosingTestConfiguration {
+
+		@Override
+		public MongoClient reactiveMongoClient() {
+			return mongoClient;
+		}
+
+		@Override
+		protected String getDatabaseName() {
+			return "json-schema-tests";
+		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() {
+			return Collections.emptySet();
+		}
+	}
+
+	@Autowired ReactiveMongoTemplate template;
+
+	@BeforeEach
+	public void setUp() {
+		template.dropCollection(Person.class).as(StepVerifier::create).verifyComplete();
+	}
+
+	@Test // DATAMONGO-1835
+	public void writeSchemaViaTemplate() {
+
+		MongoJsonSchema schema = MongoJsonSchema.builder() //
+				.required("firstname", "lastname") //
+				.properties( //
+						JsonSchemaProperty.string("firstname").possibleValues("luke", "han").maxLength(10), //
+						JsonSchemaProperty.object("address") //
+								.properties(JsonSchemaProperty.string("postCode").minLength(4).maxLength(5))
+
+				).build();
+
+		template.createCollection(Person.class, CollectionOptions.empty().schema(schema)).as(StepVerifier::create)
+				.expectNextCount(1).verifyComplete();
+
+		Document $jsonSchema = new MongoJsonSchemaMapper(template.getConverter()).mapSchema(schema.toDocument(),
+				Person.class);
+
+		Document fromDb = readSchemaFromDatabase("persons");
+		assertThat(fromDb).isEqualTo($jsonSchema);
+	}
+
+	Document readSchemaFromDatabase(String collectionName) {
+
+		Document collectionInfo = template
+				.executeCommand(new Document("listCollections", 1).append("filter", new Document("name", collectionName)))
+				.block(Duration.ofSeconds(5));
+
+		if (collectionInfo == null) {
+			throw new DataRetrievalFailureException(String.format("Collection %s was not found.", collectionName));
+		}
+
+		if (collectionInfo.containsKey("cursor")) {
+			collectionInfo = (Document) collectionInfo.get("cursor", Document.class).get("firstBatch", List.class).iterator()
+					.next();
+		}
+
+		if (!collectionInfo.containsKey("options")) {
+			return new Document();
+		}
+
+		return collectionInfo.get("options", Document.class).get("validator", Document.class);
+	}
+
+	@org.springframework.data.mongodb.core.mapping.Document(collection = "persons")
+	static class Person {
+
+		@Field("first_name") String firstname;
+		String lastname;
+		Address address;
+
+		public String getFirstname() {
+			return this.firstname;
+		}
+
+		public String getLastname() {
+			return this.lastname;
+		}
+
+		public Address getAddress() {
+			return this.address;
+		}
+
+		public void setFirstname(String firstname) {
+			this.firstname = firstname;
+		}
+
+		public void setLastname(String lastname) {
+			this.lastname = lastname;
+		}
+
+		public void setAddress(Address address) {
+			this.address = address;
+		}
+
+		public String toString() {
+			return "ReactiveMongoJsonSchemaTests.Person(firstname=" + this.getFirstname() + ", lastname=" + this.getLastname()
+					+ ", address=" + this.getAddress() + ")";
+		}
+	}
+
+	static class Address {
+
+		String city;
+		String street;
+
+		@Field("post_code") String postCode;
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunctionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunctionUnitTests.java
new file mode 100644
index 0000000000..6db26a1250
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunctionUnitTests.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2022-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.schema;
+
+import static org.mockito.Mockito.*;
+import static org.springframework.data.mongodb.test.util.Assertions.*;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.bson.Document;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction;
+import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution;
+
+/**
+ * Unit tests for {@link TypeUnifyingMergeFunction}.
+ *
+ * @author Christoph Strobl
+ */
+@ExtendWith(MockitoExtension.class)
+public class TypeUnifyingMergeFunctionUnitTests {
+
+	@Mock ConflictResolutionFunction crf;
+
+	TypeUnifyingMergeFunction mergeFunction;
+
+	@BeforeEach
+	void beforeEach() {
+		mergeFunction = new TypeUnifyingMergeFunction(crf);
+	}
+
+	@Test // GH-3870
+	void nonOverlapping() {
+
+		Map<String, Object> a = new LinkedHashMap<>();
+		a.put("a", "a-value");
+		Map<String, Object> b = new LinkedHashMap<>();
+		b.put("b", "b-value");
+
+		Document target = mergeFunction.apply(a, b);
+		assertThat(target).containsEntry("a", "a-value").containsEntry("b", "b-value");
+	}
+
+	@Test // GH-3870
+	void resolvesNonConflictingTypeKeys/* type vs bsonType */() {
+
+		Map<String, Object> a = new LinkedHashMap<>();
+		a.put("type", "string");
+		Map<String, Object> b = new LinkedHashMap<>();
+		b.put("bsonType", "string");
+
+		Document target = mergeFunction.apply(a, b);
+		assertThat(target).containsEntry("type", "string").doesNotContainKey("bsonType");
+	}
+
+	@Test // GH-3870
+	void nonOverlappingNestedMap() {
+
+		Map<String, Object> a = new LinkedHashMap<>();
+		a.put("a", Collections.singletonMap("nested", "value"));
+		Map<String, Object> b = new LinkedHashMap<>();
+		b.put("b", "b-value");
+
+		Document target = mergeFunction.apply(a, b);
+		assertThat(target).containsEntry("a", Collections.singletonMap("nested", "value")).containsEntry("b", "b-value");
+	}
+
+	@Test // GH-3870
+	void nonOverlappingNestedMaps() {
+
+		Map<String, Object> a = new LinkedHashMap<>();
+		a.put("nested", Collections.singletonMap("a", "a-value"));
+		Map<String, Object> b = new LinkedHashMap<>();
+		b.put("nested", Collections.singletonMap("b", "b-value"));
+
+		Document target = mergeFunction.apply(a, b);
+		assertThat(target).containsEntry("nested.a", "a-value").containsEntry("nested.b", "b-value");
+	}
+
+	@Test // GH-3870
+	void delegatesConflictToResolutionFunction() {
+
+		ArgumentCaptor<Object> aValueCaptor = ArgumentCaptor.forClass(Object.class);
+		ArgumentCaptor<Object> bValueCaptor = ArgumentCaptor.forClass(Object.class);
+
+		when(crf.resolveConflict(any(), aValueCaptor.capture(), bValueCaptor.capture()))
+				.thenReturn(Resolution.ofValue("nested", "from-function"));
+
+		Map<String, Object> a = new LinkedHashMap<>();
+		a.put("nested", Collections.singletonMap("a", "a-value"));
+		Map<String, Object> b = new LinkedHashMap<>();
+		b.put("nested", "b-value");
+
+		Document target = mergeFunction.apply(a, b);
+		assertThat(target).containsEntry("nested", "from-function") //
+				.doesNotContainKey("nested.a");
+
+		assertThat(aValueCaptor.getValue()).isEqualTo(a);
+		assertThat(bValueCaptor.getValue()).isEqualTo(b);
+	}
+
+	@Test // GH-3870
+	void skipsConflictItemsWhenAdvised() {
+
+		ArgumentCaptor<Object> aValueCaptor = ArgumentCaptor.forClass(Object.class);
+		ArgumentCaptor<Object> bValueCaptor = ArgumentCaptor.forClass(Object.class);
+
+		when(crf.resolveConflict(any(), aValueCaptor.capture(), bValueCaptor.capture())).thenReturn(Resolution.SKIP);
+
+		Map<String, Object> a = new LinkedHashMap<>();
+		a.put("nested", Collections.singletonMap("a", "a-value"));
+		a.put("some", "value");
+		Map<String, Object> b = new LinkedHashMap<>();
+		b.put("nested", "b-value");
+
+		Document target = mergeFunction.apply(a, b);
+		assertThat(target).hasSize(1).containsEntry("some", "value");
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java
index a8fda1fad1..c2f31b829f 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2014 the original author or authors.
+ * Copyright 2014-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,67 +15,34 @@
  */
 package org.springframework.data.mongodb.core.script;
 
-import static org.hamcrest.core.IsEqual.*;
-import static org.hamcrest.core.IsNull.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;
 
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.springframework.util.ObjectUtils;
+import org.junit.jupiter.api.Test;
 
 /**
  * @author Christoph Strobl
  */
-public class ExecutableMongoScriptUnitTests {
+class ExecutableMongoScriptUnitTests {
 
-	public @Rule ExpectedException expectedException = ExpectedException.none();
-
-	/**
-	 * @see DATAMONGO-479
-	 */
-	@Test
-	public void constructorShouldThrowExceptionWhenRawScriptIsNull() {
-
-		expectException(IllegalArgumentException.class, "must not be", "null");
-
-		new ExecutableMongoScript(null);
+	@Test // DATAMONGO-479
+	void constructorShouldThrowExceptionWhenRawScriptIsNull() {
+		assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMongoScript(null))
+				.withMessageContaining("must not be").withMessageContaining("null");
 	}
 
-	/**
-	 * @see DATAMONGO-479
-	 */
-	@Test
-	public void constructorShouldThrowExceptionWhenRawScriptIsEmpty() {
-
-		expectException(IllegalArgumentException.class, "must not be", "empty");
-
-		new ExecutableMongoScript("");
+	@Test // DATAMONGO-479
	void constructorShouldThrowExceptionWhenRawScriptIsEmpty() {
+		assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMongoScript(""))
+				.withMessageContaining("must not be").withMessageContaining("empty");
 	}
 
-	/**
-	 * @see DATAMONGO-479
-	 */
-	@Test
-	public void getCodeShouldReturnCodeRepresentationOfRawScript() {
+	@Test // DATAMONGO-479
+	void getCodeShouldReturnCodeRepresentationOfRawScript() {
 
 		String jsFunction = "function(x) { return x; }";
 
 		ExecutableMongoScript script = new ExecutableMongoScript(jsFunction);
 
-		assertThat(script.getCode(), notNullValue());
-		assertThat(script.getCode().toString(), equalTo(jsFunction));
+		assertThat(script.getCode()).isNotNull().hasToString(jsFunction);
 	}
-
-	private void expectException(Class<?> type, String... messageFragments) {
-
-		expectedException.expect(IllegalArgumentException.class);
-
-		if (!ObjectUtils.isEmpty(messageFragments)) {
-			for (String fragment : messageFragments) {
-				expectedException.expectMessage(fragment);
-			}
-		}
-	}
-
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java
index 32803881dc..3eca8e90b5 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2014-2015 the original author or authors.
+ * Copyright 2014-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,39 @@ */ package org.springframework.data.mongodb.core.script; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link NamedMongoScript}. - * + * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 */ public class NamedMongoScriptUnitTests { - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-479 public void shouldThrowExceptionWhenScriptNameIsNull() { - new NamedMongoScript(null, "return 1;"); + assertThatIllegalArgumentException().isThrownBy(() -> new NamedMongoScript(null, "return 1;")); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-479 public void shouldThrowExceptionWhenScriptNameIsEmptyString() { - new NamedMongoScript("", "return 1"); + assertThatIllegalArgumentException().isThrownBy(() -> new NamedMongoScript("", "return 1")); } - /** - * @see DATAMONGO-479 - */ - @Test(expected = IllegalArgumentException.class) + @Test // DATAMONGO-479 public void shouldThrowExceptionWhenRawScriptIsEmptyString() { - new NamedMongoScript("foo", ""); + assertThatIllegalArgumentException().isThrownBy(() -> new NamedMongoScript("foo", "")); } - /** - * @see DATAMONGO-479 - */ - @Test + @Test // DATAMONGO-479 public void getCodeShouldReturnCodeRepresentationOfRawScript() { String jsFunction = "function(x) { return x; }"; - assertThat(new NamedMongoScript("echo", jsFunction).getCode(), is(jsFunction)); + assertThat(new NamedMongoScript("echo", jsFunction).getCode()).isEqualTo(jsFunction); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java index 0f6d4b9b09..b9b2f18c73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,17 +15,17 @@
  */
 package org.springframework.data.mongodb.core.spel;
 
-import static org.hamcrest.CoreMatchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;
 
 import java.util.Arrays;
 import java.util.Collection;
 
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
+
 import org.springframework.expression.spel.ExpressionState;
 import org.springframework.expression.spel.SpelNode;
 import org.springframework.expression.spel.ast.OpDivide;
@@ -35,12 +35,11 @@
 /**
  * Unit tests for {@link ExpressionNode}.
- *
- * @see DATAMONGO-774
+ *
  * @author Oliver Gierke
  */
-@RunWith(MockitoJUnitRunner.class)
-public class ExpressionNodeUnitTests {
+@ExtendWith(MockitoExtension.class)
+class ExpressionNodeUnitTests {
 
 	@Mock ExpressionState state;
 
@@ -49,18 +48,18 @@ public class ExpressionNodeUnitTests {
 	@Mock OpDivide divide;
 	@Mock OpMultiply multiply;
 
-	Collection<SpelNode> operators;
+	private Collection<SpelNode> operators;
 
-	@Before
-	public void setUp() {
+	@BeforeEach
+	void setUp() {
 		this.operators = Arrays.asList(minus, plus, divide, multiply);
 	}
 
-	@Test
-	public void createsOperatorNodeForOperations() {
+	@Test // DATAMONGO-774
+	void createsOperatorNodeForOperations() {
 
 		for (SpelNode operator : operators) {
-			assertThat(ExpressionNode.from(operator, state), is(instanceOf(OperatorNode.class)));
+			assertThat(ExpressionNode.from(operator, state)).isInstanceOf(OperatorNode.class);
 		}
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/temp/QueryByExampleTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/temp/QueryByExampleTests.java
deleted file mode 100644
index 57a92e08c0..0000000000
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/temp/QueryByExampleTests.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2016 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core.temp;
-
-import java.net.UnknownHostException;
-import java.util.List;
-
-import org.hamcrest.core.Is;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.springframework.data.annotation.Id;
-import org.springframework.data.domain.Example;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.data.mongodb.core.mapping.Document;
-import org.springframework.data.mongodb.core.mapping.Field;
-import org.springframework.data.mongodb.core.query.Criteria;
-import org.springframework.data.mongodb.core.query.Query;
-
-import com.mongodb.MongoClient;
-
-/**
- * @author Christoph Strobl
- * @author Mark Paluch
- */
-public class QueryByExampleTests {
-
-	MongoTemplate template;
-
-	@Before
-	public void setUp() throws UnknownHostException {
-
-		template = new MongoTemplate(new MongoClient(), "query-by-example");
-		template.remove(new Query(), Person.class);
-	}
-
-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleShouldWorkForSimpleProperty() {
-
-		init();
-
-		Person sample = new Person();
-		sample.lastname = "stark";
-
-		Query query = new Query(new Criteria().alike(Example.of(sample)));
-
-		List<Person> result = template.find(query, Person.class);
-		Assert.assertThat(result.size(), Is.is(2));
-	}
-
-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleShouldWorkForMultipleProperties() {
-
-		init();
-
-		Person sample = new Person();
-		sample.lastname = "stark";
-		sample.firstname = "arya";
-
-		Query query = new Query(new Criteria().alike(Example.of(sample)));
-
-		List<Person> result = template.find(query, Person.class);
-		Assert.assertThat(result.size(), Is.is(1));
-	}
-
-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleShouldWorkForIdProperty() {
-
-		init();
-
-		Person p4 = new Person();
-		template.save(p4);
-
-		Person sample = new Person();
-		sample.id = p4.id;
-
-		Query query = new Query(new Criteria().alike(Example.of(sample)));
-
-		List<Person> result = template.find(query, Person.class);
-		Assert.assertThat(result.size(), Is.is(1));
-	}
-
-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleShouldReturnEmptyListIfNotMatching() {
-
-		init();
-
-		Person sample = new Person();
-		sample.firstname = "jon";
-		sample.firstname = "stark";
-
-
-		Query query = new Query(new Criteria().alike(Example.of(sample)));
-
-		List<Person> result = template.find(query, Person.class);
-		Assert.assertThat(result.size(), Is.is(0));
-	}
-
-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleShouldReturnEverythingWhenSampleIsEmpty() {
-
-		init();
-
-		Person sample = new Person();
-
-		Query query = new Query(new Criteria().alike(Example.of(sample)));
-
-		List<Person> result = template.find(query, Person.class);
-		Assert.assertThat(result.size(), Is.is(3));
-	}
-
-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleWithCriteria() {
-
-		init();
-
-		Person sample = new Person();
-		sample.lastname = "stark";
-
-		Query query = new Query(new Criteria().alike(Example.of(sample)).and("firstname").regex("^ary*"));
-
-		List<Person> result = template.find(query, Person.class);
-		Assert.assertThat(result.size(), Is.is(1));
-	}
-
-	public void init() {
-
-		Person p1 = new Person();
-		p1.firstname = "bran";
-		p1.lastname = "stark";
-
-		Person p2 = new Person();
-		p2.firstname = "jon";
-		p2.lastname = "snow";
-
-		Person p3 = new Person();
-		p3.firstname = "arya";
-		p3.lastname = "stark";
-
-		template.save(p1);
-		template.save(p2);
-		template.save(p3);
-	}
-
- @Document(collection = "dramatis-personae") - static class Person { - - @Id String id; - String firstname; - - @Field("last_name") String lastname; - - @Override - public String toString() { - return "Person [id=" + id + ", firstname=" + firstname + ", lastname=" + lastname + "]"; - } - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java new file mode 100644 index 0000000000..f044646037 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java @@ -0,0 +1,47 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.validation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.query.Criteria; + +/** + * Unit tests for {@link CriteriaValidator}. + * + * @author Andreas Zink + * @author Christoph Strobl + */ +public class CriteriaValidatorUnitTests { + + @Test // DATAMONGO-1322 + public void testSimpleCriteria() { + + Criteria criteria = Criteria.where("nonNullString").ne(null).type(2).and("rangedInteger").type(16).gte(0).lte(122); + Document validator = CriteriaValidator.of(criteria).toDocument(); + + assertThat(validator.get("nonNullString")).isEqualTo(new Document("$ne", null).append("$type", 2)); + assertThat(validator.get("rangedInteger")) + .isEqualTo(new Document("$type", 16).append("$gte", 0).append("$lte", 122)); + } + + @Test // DATAMONGO-1322 + public void testFailOnNull() { + assertThatIllegalArgumentException().isThrownBy(() -> CriteriaValidator.of(null)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/MongoApplication.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/MongoApplication.java new file mode 100644 index 0000000000..c68dc5018f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/MongoApplication.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +// tag::file[] +package org.springframework.data.mongodb.example; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; + +import com.mongodb.client.MongoClients; + +public class MongoApplication { + + public static void main(String[] args) throws Exception { + + MongoOperations mongoOps = new MongoTemplate(MongoClients.create(), "database"); + mongoOps.insert(new Person("Joe", 34)); + + System.out.println(mongoOps.query(Person.class).matching(where("name").is("Joe")).firstValue()); + + mongoOps.dropCollection("person"); + } +} +//end::file[] diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/Person.java new file mode 100644 index 0000000000..8c1e9e4d81 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/Person.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// tag::file[] +package org.springframework.data.mongodb.example; + +// tag::class[] +public class Person { + + private String id; + private String name; + private int age; + + public Person(String name, int age) { + this.name = name; + this.age = age; + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public int getAge() { + return age; + } + + @Override + public String toString() { + return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; + } +} +// end::class[] +// end::file[] diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/ReactiveMongoApplication.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/ReactiveMongoApplication.java new file mode 100644 index 0000000000..8022f45ab7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/ReactiveMongoApplication.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +// tag::file[] +package org.springframework.data.mongodb.example; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + +import com.mongodb.reactivestreams.client.MongoClients; + +public class ReactiveMongoApplication { + + public static void main(String[] args) throws Exception { + + ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database"); + + mongoOps.insert(new Person("Joe", 34)) + .then(mongoOps.query(Person.class).matching(where("name").is("Joe")).first()) + .doOnNext(System.out::println) + .block(); + + mongoOps.dropCollection("person").block(); + } +} +// end::file[] diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java index 942d6821ed..516a1890bb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,15 @@ */ package org.springframework.data.mongodb.gridfs; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.regex.Pattern; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link AntPath}. - * + * * @author Oliver Gierke */ public class AntPathUnitTests { @@ -35,8 +34,8 @@ public void buildRegexCorrectly() { AntPath path = new AntPath("**/foo/*-bar.xml"); String regex = path.toRegex(); - assertThat(Pattern.matches(regex, "foo/bar/foo/foo-bar.xml"), is(true)); - assertThat(Pattern.matches(regex, "foo/bar/foo/bar/foo-bar.xml"), is(false)); - assertThat(regex, is(".*\\Q/foo/\\E[^/]*\\Q-bar.xml\\E")); + assertThat(Pattern.matches(regex, "foo/bar/foo/foo-bar.xml")).isTrue(); + assertThat(Pattern.matches(regex, "foo/bar/foo/bar/foo-bar.xml")).isFalse(); + assertThat(regex).isEqualTo(".*\\Q/foo/\\E[^/]*\\Q-bar.xml\\E"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java new file mode 100644 index 0000000000..708ec4967c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; + +import java.io.FileNotFoundException; +import java.util.Date; + +import org.bson.BsonObjectId; +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import com.mongodb.MongoGridFSException; +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Unit tests for {@link GridFsResource}. + * + * @author Mark Paluch + * @auhtor Christoph Strobl + */ +public class GridFsResourceUnitTests { + + @Test // DATAMONGO-1850 + public void shouldReadContentTypeCorrectly() { + + Document metadata = new Document(GridFsResource.CONTENT_TYPE_FIELD, "text/plain"); + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), metadata); + GridFsResource resource = new GridFsResource(file); + + assertThat(resource.getContentType()).isEqualTo("text/plain"); + } + + @Test // DATAMONGO-2240 + public void shouldReturnGridFSFile() { + + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), new Document()); + GridFsResource resource = new GridFsResource(file); + + assertThat(resource.getGridFSFile()).isSameAs(file); + } + + @Test // DATAMONGO-1850 + public void shouldThrowExceptionOnEmptyContentType() { + + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), null); + GridFsResource resource = new GridFsResource(file); + + assertThatThrownBy(resource::getContentType).isInstanceOf(MongoGridFSException.class); + } + + @Test // DATAMONGO-1850 + public void shouldThrowExceptionOnEmptyContentTypeInMetadata() { + + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), new Document()); + GridFsResource resource = new GridFsResource(file); + + assertThatThrownBy(resource::getContentType).isInstanceOf(MongoGridFSException.class); + } + + @Test // DATAMONGO-1914 + public void gettersThrowExceptionForAbsentResource() { + + GridFsResource absent = GridFsResource.absent("foo"); + + assertThat(absent.exists()).isFalse(); + assertThat(absent.getDescription()).contains("GridFs resource [foo]"); + + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(absent::getContentType); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(absent::getId); + + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::contentLength); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::getInputStream); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::lastModified); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::getURI); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::getURL); + } + + @Test // DATAMONGO-1914 + public void shouldReturnFilenameForAbsentResource() { + + GridFsResource absent = GridFsResource.absent("foo"); + + assertThat(absent.exists()).isFalse(); + assertThat(absent.getDescription()).contains("GridFs resource [foo]"); + assertThat(absent.getFilename()).isEqualTo("foo"); + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java index 6803069c6e..f6c96e44a4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,218 +15,322 @@ */ package org.springframework.data.mongodb.gridfs; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*; +import java.io.ByteArrayInputStream; import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashMap; import java.util.List; - +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Stream; + +import org.bson.BsonObjectId; +import org.bson.BsonString; +import org.bson.Document; +import org.bson.types.ObjectId; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.query.Query; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.util.StreamUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.gridfs.GridFSDBFile; -import com.mongodb.gridfs.GridFSFile; +import com.mongodb.MongoGridFSException; +import com.mongodb.client.gridfs.GridFSFindIterable; +import com.mongodb.client.gridfs.model.GridFSFile; /** * Integration tests for {@link GridFsTemplate}. 
- * + * * @author Oliver Gierke * @author Philipp Schneider * @author Thomas Darimont * @author Martin Baumgartner + * @author Hartmut Lang + * @author Mark Paluch + * @author Denis Zavedeev */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:gridfs/gridfs.xml") public class GridFsTemplateIntegrationTests { Resource resource = new ClassPathResource("gridfs/gridfs.xml"); @Autowired GridFsOperations operations; + @Autowired SimpleMongoClientDatabaseFactory mongoClient; @Before public void setUp() { - operations.delete(null); + operations.delete(new Query()); } - /** - * @see DATAMONGO-6 - */ - @Test + @Test // DATAMONGO-6 public void storesAndFindsSimpleDocument() throws IOException { - GridFSFile reference = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId reference = operations.store(resource.getInputStream(), "foo.xml"); - List result = operations.find(null); - assertThat(result.size(), is(1)); - assertSame(result.get(0), reference); + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query(where("_id").is(reference))); + result.into(files); + assertThat(files).hasSize(1); + assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } - /** - * @see DATAMONGO-6 - */ - @Test + // @Test // DATAMONGO-2392 + // public void storesAndFindsByUUID() throws IOException { + // + // UUID uuid = UUID.randomUUID(); + // + // GridFSFile fs = new GridFSFile(new BsonObjectId(new ObjectId(uuid.to)) + // GridFSInputFile in = fs.createFile(resource.getInputStream(), "gridfs.xml"); + // + // in.put("_id", uuid); + // in.put("contentType", "application/octet-stream"); + // in.save(); + // + // GridFSFile file = operations.findOne(query(where("_id").is(uuid))); + // GridFsResource resource = operations.getResource(file); + // + // assertThat(resource.exists()).isTrue(); + // } + + @Test // DATAMONGO-6 public void writesMetadataCorrectly() throws IOException { - DBObject metadata = new BasicDBObject("key", "value"); - GridFSFile reference = operations.store(resource.getInputStream(), "foo.xml", metadata); + Document metadata = new Document("key", "value"); + ObjectId reference = operations.store(resource.getInputStream(), "foo.xml", metadata); + + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query(whereMetaData("key").is("value"))); + result.into(files); - List result = operations.find(query(whereMetaData("key").is("value"))); - assertThat(result.size(), is(1)); - assertSame(result.get(0), reference); + assertThat(files).hasSize(1); + assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } - /** - * @see DATAMONGO-6 - */ - @Test + @Test // DATAMONGO-6 public void marshalsComplexMetadata() throws IOException { Metadata metadata = new Metadata(); metadata.version = "1.0"; - GridFSFile reference = operations.store(resource.getInputStream(), "foo.xml", metadata); - List result = operations.find(query(whereFilename().is("foo.xml"))); - assertThat(result.size(), is(1)); - assertSame(result.get(0), reference); + ObjectId reference = operations.store(resource.getInputStream(), "foo.xml", metadata); + + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query(whereFilename().is("foo.xml"))); + result.into(files); + + assertThat(files).hasSize(1); + assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } - /** - * @see DATAMONGO-6 - */ - @Test + @Test // DATAMONGO-6 public void 
findsFilesByResourcePattern() throws IOException { - GridFSFile reference = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId reference = operations.store(resource.getInputStream(), "foo.xml"); GridFsResource[] resources = operations.getResources("*.xml"); - assertThat(resources.length, is(1)); - assertThat(resources[0].getId(), is(reference.getId())); - assertThat(resources[0].contentLength(), is(reference.getLength())); - assertThat(resources[0].getContentType(), is(reference.getContentType())); + + assertThat(resources).hasSize(1); + assertThat(((BsonObjectId) resources[0].getId()).getValue()).isEqualTo(reference); + assertThat(resources[0].contentLength()).isEqualTo(resource.contentLength()); } - /** - * @see DATAMONGO-6 - */ - @Test + @Test // DATAMONGO-6 public void findsFilesByResourceLocation() throws IOException { - GridFSFile reference = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId reference = operations.store(resource.getInputStream(), "foo.xml"); GridFsResource[] resources = operations.getResources("foo.xml"); - assertThat(resources.length, is(1)); - assertThat(resources[0].getId(), is(reference.getId())); - assertThat(resources[0].contentLength(), is(reference.getLength())); - assertThat(resources[0].getContentType(), is(reference.getContentType())); + assertThat(resources).hasSize(1); + assertThat(((BsonObjectId) resources[0].getId()).getValue()).isEqualTo(reference); + assertThat(resources[0].contentLength()).isEqualTo(resource.contentLength()); } - /** - * @see DATAMONGO-503 - */ - @Test + @Test // DATAMONGO-503 public void storesContentType() throws IOException { - GridFSFile reference = operations.store(resource.getInputStream(), "foo2.xml", "application/xml"); + ObjectId reference = operations.store(resource.getInputStream(), "foo2.xml", "application/xml"); - List result = operations.find(query(whereContentType().is("application/xml"))); - assertThat(result.size(), is(1)); - assertSame(result.get(0), reference); + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query(whereContentType().is("application/xml"))); + result.into(files); + + assertThat(files).hasSize(1); + assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } - /** - * @see DATAMONGO-534 - */ - @Test + @Test // DATAMONGO-534 public void considersSortWhenQueryingFiles() throws IOException { - GridFSFile second = operations.store(resource.getInputStream(), "foo.xml"); - GridFSFile third = operations.store(resource.getInputStream(), "foobar.xml"); - GridFSFile first = operations.store(resource.getInputStream(), "bar.xml"); + ObjectId second = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId third = operations.store(resource.getInputStream(), "foobar.xml"); + ObjectId first = operations.store(resource.getInputStream(), "bar.xml"); + + Query query = new Query().with(Sort.by(Direction.ASC, "filename")); - Query query = new Query().with(new Sort(Direction.ASC, "filename")); + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query); + result.into(files); - List result = operations.find(query); - assertThat(result, hasSize(3)); - assertSame(result.get(0), first); - assertSame(result.get(1), second); - assertSame(result.get(2), third); + assertThat(files).hasSize(3).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(first, second, + third); } - /** - * @see DATAMONGO-534 - */ - @Test - public void queryingWithNullQueryReturnsAllFiles() throws IOException 
{ + @Test // DATAMONGO-534, DATAMONGO-1762 + public void queryingWithEmptyQueryReturnsAllFiles() throws IOException { - GridFSFile reference = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId reference = operations.store(resource.getInputStream(), "foo.xml"); - List result = operations.find(null); + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(new Query()); + result.into(files); - assertThat(result, hasSize(1)); - assertSame(result.get(0), reference); + assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference); } - /** - * @see DATAMONGO-813 - */ - @Test - public void getResourceShouldReturnNullForNonExistingResource() { - assertThat(operations.getResource("doesnotexist"), is(nullValue())); + @Test // DATAMONGO-1762 + public void queryingWithNullQueryThrowsException() { + assertThatIllegalArgumentException().isThrownBy(() -> operations.find(null)); } - /** - * @see DATAMONGO-809 - */ - @Test - public void storesAndFindsSimpleDocumentWithMetadataDBObject() throws IOException { + @Test // DATAMONGO-813, DATAMONGO-1914 + public void getResourceShouldReturnAbsentResourceForNonExistingResource() { + assertThat(operations.getResource("doesnotexist")).isEqualTo(GridFsResource.absent("doesnotexist")); + } - DBObject metadata = new BasicDBObject("key", "value"); - GridFSFile reference = operations.store(resource.getInputStream(), metadata); + @Test // DATAMONGO-809 + public void storesAndFindsSimpleDocumentWithMetadataDocument() throws IOException { - List result = operations.find(query(whereMetaData("key").is("value"))); + Document metadata = new Document("key", "value"); + ObjectId reference = operations.store(resource.getInputStream(), "foobar", metadata); - assertThat(result.size(), is(1)); - assertSame(result.get(0), reference); + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query(whereMetaData("key").is("value"))); + result.into(files); + + assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference); } - /** - * @see DATAMONGO-809 - */ - @Test + @Test // DATAMONGO-809 public void storesAndFindsSimpleDocumentWithMetadataObject() throws IOException { Metadata metadata = new Metadata(); metadata.version = "1.0"; - GridFSFile reference = operations.store(resource.getInputStream(), metadata); + ObjectId reference = operations.store(resource.getInputStream(), "foobar", metadata); - List result = operations.find(query(whereMetaData("version").is("1.0"))); + List files = new ArrayList<>(); + GridFSFindIterable result = operations.find(query(whereMetaData("version").is("1.0"))); + result.into(files); - assertThat(result.size(), is(1)); - assertSame(result.get(0), reference); + assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference); } - private static void assertSame(GridFSFile left, GridFSFile right) { + @Test // DATAMONGO-1695 + public void readsContentTypeCorrectly() throws IOException { + + operations.store(resource.getInputStream(), "someName", "contentType"); - assertThat(left.getId(), is(right.getId())); - assertThat(left.getMD5(), is(right.getMD5())); - assertThat(left.getMetaData(), is(right.getMetaData())); + assertThat(operations.getResource("someName").getContentType()).isEqualTo("contentType"); } - class Metadata { + @Test // DATAMONGO-1850 + public void failsOnNonExistingContentTypeRetrieval() throws IOException { + + 
operations.store(resource.getInputStream(), "no-content-type", (String) null); + GridFsResource result = operations.getResource("no-content-type"); + + assertThatThrownBy(result::getContentType).isInstanceOf(MongoGridFSException.class); + } + + @Test // DATAMONGO-1813 + public void convertFileToResource() throws IOException { + + Document metadata = new Document("key", "value"); + ObjectId reference = operations.store(resource.getInputStream(), "foobar", metadata); + + GridFSFile file = operations.findOne(query(whereMetaData("key").is("value"))); + GridFsResource result = operations.getResource(file); + + assertThat(result.contentLength()).isEqualTo(resource.contentLength()); + assertThat(((BsonObjectId) result.getId()).getValue()).isEqualTo(reference); + } + + @Test // DATAMONGO-2021 + public void getResourceShouldRetrieveContentByIdentity() throws IOException { + ClassPathResource secondResource = new ClassPathResource("gridfs/another-resource.xml"); + + ObjectId reference1 = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId reference2 = operations.store(secondResource.getInputStream(), "foo.xml"); + + Map fixture = new LinkedHashMap<>(); + fixture.put(reference1, resource); + fixture.put(reference2, secondResource); + + for (Entry entry : fixture.entrySet()) { + + GridFsResource fsFile = operations.getResource(operations.findOne(query(where("_id").is(entry.getKey())))); + byte[] content = StreamUtils.copyToByteArray(fsFile.getInputStream()); + + assertThat(content).isEqualTo(StreamUtils.copyToByteArray(entry.getValue().getInputStream())); + } + } + + @Test // DATAMONGO-625 + public void storeSavesGridFsUploadWithGivenIdCorrectly() throws IOException { + + String id = "id-1"; + + GridFsUpload upload = GridFsUpload.fromStream(resource.getInputStream()) // + .id(id) // + .filename("gridFsUpload.xml") // + .contentType("xml") // + .build(); + + assertThat(operations.store(upload)).isEqualTo(id); + + GridFsResource fsFile = operations.getResource(operations.findOne(query(where("_id").is(id)))); + byte[] content = StreamUtils.copyToByteArray(fsFile.getInputStream()); + + assertThat(content).isEqualTo(StreamUtils.copyToByteArray(resource.getInputStream())); + assertThat(fsFile.getFilename()).isEqualTo("gridFsUpload.xml"); + assertThat(fsFile.getId()).isEqualTo(new BsonString(id)); + assertThat(fsFile.getFileId()).isEqualTo(id); + assertThat(fsFile.getContentType()).isEqualTo("xml"); + } + + @Test // DATAMONGO-765 + public void considersSkipLimitWhenQueryingFiles() { + + Stream.of("a", "aa", "aaa", // + "b", "bb", "bb", // + "c", "cc", "ccc", // + "d", "dd", "ddd") // + .forEach(filename -> operations.store(new ByteArrayInputStream(new byte[0]), filename)); + + PageRequest pageRequest = PageRequest.of(2, 3, Direction.ASC, "filename"); + List filenames = operations.find(new Query().with(pageRequest)) // + .map(GridFSFile::getFilename) // + .into(new ArrayList<>()); + + assertThat(filenames).containsExactly("c", "cc", "ccc"); + } + + class Metadata { String version; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateUnitTests.java new file mode 100644 index 0000000000..25693d10b7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateUnitTests.java @@ -0,0 +1,79 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; + +/** + * @author Christoph Strobl + */ +class GridFsTemplateUnitTests { + + private GridFsTemplateStub template; + + @BeforeEach + void beforeEach() { + template = new GridFsTemplateStub(); + } + + @Test // DATAMONGO-2574 + void contentMetadataDoesNotOverrideContentTypeIfSet() { + + template.onStoreReturn(new ObjectId()); + template.store(new ByteArrayInputStream(new byte[] {}), "filename", "json", new Document("meta", "data")); + + assertThat(template.capturedUpload().getOptions().getContentType()).isEqualTo("json"); + assertThat(template.capturedUpload().getOptions().getMetadata()).containsEntry("meta", "data"); + } + + private static class GridFsTemplateStub extends GridFsTemplate { + + private Object onStoreResult; + private GridFsObject<?, InputStream> capturedUpload; + + GridFsTemplateStub() { + super(mock(MongoDatabaseFactory.class), mock(MongoConverter.class)); + } + + @Override + public <T> T store(GridFsObject<T, InputStream> upload) { + + this.capturedUpload = upload; + return (T) onStoreResult; + } + + GridFsTemplateStub onStoreReturn(Object result) { + + this.onStoreResult = result; + return this; + } + + GridFsObject<?, InputStream> capturedUpload() { + return capturedUpload; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResourceUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResourceUnitTests.java new file mode 100644 index 0000000000..ca408bb502 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResourceUnitTests.java @@ -0,0 +1,98 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.nio.ByteBuffer; + +import org.junit.jupiter.api.Test; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.reactivestreams.client.gridfs.GridFSDownloadPublisher; + +/** + * Unit tests for {@link ReactiveGridFsResource}. + * + * @author Christoph Strobl + */ +class ReactiveGridFsResourceUnitTests { + + @Test // DATAMONGO-2427 + void streamCanOnlyBeConsumedOnce() { + + ReactiveGridFsResource resource = new ReactiveGridFsResource("file.name", new StubGridFSDownloadPublisher()); + + assertThat(resource.exists()).isTrue(); + + resource.getInputStream().as(StepVerifier::create).verifyComplete(); + resource.getInputStream().as(StepVerifier::create).verifyError(IllegalStateException.class); + resource.getDownloadStream().as(StepVerifier::create).verifyError(IllegalStateException.class); + } + + @Test // DATAMONGO-2427 + void existReturnsFalseForNullPublisher() { + + ReactiveGridFsResource resource = new ReactiveGridFsResource("file.name", null); + + assertThat(resource.exists()).isFalse(); + } + + @Test // DATAMONGO-2427 + void nonExistingResourceProducesEmptyDownloadStream() { + + ReactiveGridFsResource resource = new ReactiveGridFsResource("file.name", null); + + resource.getInputStream().as(StepVerifier::create).verifyComplete(); + resource.getInputStream().as(StepVerifier::create).verifyComplete(); + resource.getDownloadStream().as(StepVerifier::create).verifyComplete(); + } + + private static class StubGridFSDownloadPublisher implements GridFSDownloadPublisher { + + @Override + public Publisher<GridFSFile> getGridFSFile() { + return Mono.empty(); + } + + @Override + public GridFSDownloadPublisher bufferSizeBytes(int bufferSizeBytes) { + return null; + } + + @Override + public void subscribe(Subscriber<? super ByteBuffer> s) { + + s.onSubscribe(new Subscription() { + @Override + public void request(long n) { + s.onComplete(); + } + + @Override + public void cancel() { + + } + }); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateTests.java new file mode 100644 index 0000000000..d0b87cf5fd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateTests.java @@ -0,0 +1,336 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import org.bson.BsonObjectId; +import org.bson.BsonString; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.core.io.buffer.DataBufferUtils; +import org.springframework.core.io.buffer.DefaultDataBuffer; +import org.springframework.core.io.buffer.DefaultDataBufferFactory; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.util.FileCopyUtils; +import org.springframework.util.StreamUtils; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.internal.HexUtils; + +/** + * Integration tests for {@link ReactiveGridFsTemplate}. 
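+ * Assumes a running MongoDB instance, wired through the {@code gridfs/reactive-gridfs.xml} test context.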
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Nick Stolwijk + * @author Denis Zavedeev + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:gridfs/reactive-gridfs.xml") +public class ReactiveGridFsTemplateTests { + + Resource resource = new ClassPathResource("gridfs/gridfs.xml"); + + @Autowired ReactiveGridFsOperations operations; + @Autowired SimpleMongoClientDatabaseFactory mongoClient; + @Autowired ReactiveMongoDatabaseFactory dbFactory; + @Autowired MongoConverter mongoConverter; + + @Before + public void setUp() { + + operations.delete(new Query()) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void storesAndFindsSimpleDocument() { + + DefaultDataBufferFactory factory = new DefaultDataBufferFactory(); + DefaultDataBuffer first = factory.wrap("first".getBytes()); + DefaultDataBuffer second = factory.wrap("second".getBytes()); + + ObjectId reference = operations.store(Flux.just(first, second), "foo.xml").block(); + + operations.find(query(where("_id").is(reference))) // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(((BsonObjectId) actual.getId()).getValue()).isEqualTo(reference); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void storesAndLoadsLargeFileCorrectly() { + + ByteBuffer buffer = ByteBuffer.allocate(1000 * 1000); // 1 mb + int i = 0; + while (buffer.remaining() != 0) { + buffer.put(HexUtils.toHex(new byte[] { (byte) (i++ % 16) }).getBytes()); + } + buffer.flip(); + + DefaultDataBufferFactory factory = new DefaultDataBufferFactory(); + + ObjectId reference = operations.store(Flux.just(factory.wrap(buffer)), "large.txt").block(); + + buffer.clear(); + + // default chunk size + operations.findOne(query(where("_id").is(reference))).flatMap(operations::getResource) + .flatMapMany(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + assertThat(dataBuffer.readableByteCount()).isEqualTo(buffer.remaining()); + assertThat(dataBuffer.asByteBuffer()).isEqualTo(buffer); + }).verifyComplete(); + + // small chunk size + operations.findOne(query(where("_id").is(reference))).flatMap(operations::getResource) + .flatMapMany(reactiveGridFsResource -> reactiveGridFsResource.getDownloadStream(256)) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + assertThat(dataBuffer.readableByteCount()).isEqualTo(buffer.remaining()); + assertThat(dataBuffer.asByteBuffer()).isEqualTo(buffer); + }).verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void writesMetadataCorrectly() throws IOException { + + Document metadata = new Document("key", "value"); + + Flux<DataBuffer> source = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + ObjectId reference = operations.store(source, "foo.xml", "binary/octet-stream", metadata).block(); + + operations.find(query(whereMetaData("key").is("value"))) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getObjectId()).isEqualTo(reference); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void marshalsComplexMetadata() { + + Metadata metadata = new Metadata(); + metadata.version = "1.0"; + + Flux<DataBuffer> source = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + ObjectId reference = operations.store(source, "foo.xml", "binary/octet-stream", metadata).block(); + +
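+ // the Metadata POJO has been marshalled by the converter, so its fields are queryable as metadata entries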
operations.find(query(whereMetaData("version").is("1.0"))) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getObjectId()).isEqualTo(reference); + assertThat(actual.getMetadata()).containsEntry("version", "1.0"); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void getResourceShouldRetrieveContentByIdentity() throws IOException { + + byte[] content = StreamUtils.copyToByteArray(resource.getInputStream()); + Flux source = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + ObjectId reference = operations.store(source, "foo.xml", null, null).block(); + + operations.findOne(query(where("_id").is(reference))).flatMap(operations::getResource) + .flatMapMany(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + byte[] actual = new byte[dataBuffer.readableByteCount()]; + dataBuffer.read(actual); + + assertThat(actual).isEqualTo(content); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855, DATAMONGO-2240 + public void shouldEmitFirstEntryWhenFindFirstRetrievesMoreThanOneResult() throws IOException { + + Flux upload1 = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + Flux upload2 = DataBufferUtils.read(new ClassPathResource("gridfs/another-resource.xml"), + new DefaultDataBufferFactory(), 256); + + operations.store(upload1, "foo.xml", null, null).block(); + operations.store(upload2, "foo2.xml", null, null).block(); + + operations.findFirst(query(where("filename").regex("foo*"))) // + .flatMap(operations::getResource) // + .as(StepVerifier::create) // + .assertNext(actual -> { + + assertThat(actual.getGridFSFile()).isNotNull(); + }).verifyComplete(); + } + + @Test // DATAMONGO-2240 + public void shouldReturnNoGridFsFileWhenAbsent() { + + operations.getResource("absent") // + .as(StepVerifier::create) // + .assertNext(actual -> { + + assertThat(actual.exists()).isFalse(); + assertThat(actual.getGridFSFile()).isEqualTo(Mono.empty()); + }).verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void shouldEmitErrorWhenFindOneRetrievesMoreThanOneResult() throws IOException { + + Flux upload1 = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + Flux upload2 = DataBufferUtils.read(new ClassPathResource("gridfs/another-resource.xml"), + new DefaultDataBufferFactory(), 256); + + operations.store(upload1, "foo.xml", null, null).block(); + operations.store(upload2, "foo2.xml", null, null).block(); + + operations.findOne(query(where("filename").regex("foo*"))) // + .as(StepVerifier::create) // + .expectError(IncorrectResultSizeDataAccessException.class) // + .verify(); + } + + @Test // DATAMONGO-1855 + public void getResourcesByPattern() throws IOException { + + byte[] content = StreamUtils.copyToByteArray(resource.getInputStream()); + Flux upload = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + + operations.store(upload, "foo.xml", null, null).block(); + + operations.getResources("foo*") // + .flatMap(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + byte[] actual = new byte[dataBuffer.readableByteCount()]; + dataBuffer.read(actual); + + assertThat(actual).isEqualTo(content); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-625 + public void storeSavesGridFsUploadWithGivenIdCorrectly() throws IOException { + + String id = "id-1"; + byte[] content 
= StreamUtils.copyToByteArray(resource.getInputStream()); + Flux<DataBuffer> data = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + + ReactiveGridFsUpload<String> upload = ReactiveGridFsUpload.fromPublisher(data) // + .id(id) // + .filename("gridFsUpload.xml") // + .contentType("xml") // + .build(); + + operations.store(upload).as(StepVerifier::create).expectNext(id).verifyComplete(); + + operations.findOne(query(where("_id").is(id))).flatMap(operations::getResource) + .flatMapMany(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + byte[] actual = new byte[dataBuffer.readableByteCount()]; + dataBuffer.read(actual); + + assertThat(actual).isEqualTo(content); + }) // + .verifyComplete(); + + operations.findOne(query(where("_id").is(id))).as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getFilename()).isEqualTo("gridFsUpload.xml"); + assertThat(it.getId()).isEqualTo(new BsonString(id)); + assertThat(it.getMetadata()).containsValue("xml"); + }).verifyComplete(); + } + + @Test // DATAMONGO-765 + public void considersSkipLimitWhenQueryingFiles() { + + DataBufferFactory bufferFactory = new DefaultDataBufferFactory(); + DataBuffer buffer = bufferFactory.allocateBuffer(0); + Flux.just("a", "aa", "aaa", // + "b", "bb", "bbb", // + "c", "cc", "ccc", // + "d", "dd", "ddd") // + .flatMap(fileName -> operations.store(Mono.just(buffer), fileName)) // + .as(StepVerifier::create) // + .expectNextCount(12) // + .verifyComplete(); + + PageRequest pageRequest = PageRequest.of(2, 3, Sort.Direction.ASC, "filename"); + operations.find(new Query().with(pageRequest)) // + .map(GridFSFile::getFilename) // + .as(StepVerifier::create) // + .expectNext("c", "cc", "ccc") // + .verifyComplete(); + } + + static class Metadata { + String version; + } + + public static String readToString(DataBuffer dataBuffer) { + try { + return FileCopyUtils.copyToString(new InputStreamReader(dataBuffer.asInputStream())); + } catch (IOException e) { + return e.getMessage(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateUnitTests.java new file mode 100644 index 0000000000..b8f0ab98a4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateUnitTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; + +/** + * @author Christoph Strobl + */ +class ReactiveGridFsTemplateUnitTests { + + private ReactiveGridFsTemplateStub template; + + @BeforeEach + void beforeEach() { + template = new ReactiveGridFsTemplateStub(); + } + + @Test // DATAMONGO-2574 + void contentMetadataDoesNotOverrideContentTypeIfSet() { + + template.onStoreReturn(new ObjectId()); + template.store(Flux.empty(), "filename", "json", new Document("meta", "data")); + + assertThat(template.capturedUpload().getOptions().getContentType()).isEqualTo("json"); + assertThat(template.capturedUpload().getOptions().getMetadata()).containsEntry("meta", "data"); + } + + private static class ReactiveGridFsTemplateStub extends ReactiveGridFsTemplate { + + private Object onStoreResult; + private GridFsObject<?, Publisher<DataBuffer>> capturedUpload; + + ReactiveGridFsTemplateStub() { + super(mock(ReactiveMongoDatabaseFactory.class), mock(MongoConverter.class)); + } + + @Override + public <T> Mono<T> store(GridFsObject<T, Publisher<DataBuffer>> upload) { + + capturedUpload = upload; + return Mono.just((T) onStoreResult); + } + + ReactiveGridFsTemplateStub onStoreReturn(Object result) { + + this.onStoreResult = result; + return this; + } + + GridFsObject<?, Publisher<DataBuffer>> capturedUpload() { + return capturedUpload; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java index da9290058f..e70b398f7f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java @@ -1,62 +1,53 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.monitor; - -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import static org.assertj.core.api.Assertions.*; import java.net.UnknownHostException; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import com.mongodb.Mongo; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; -/** - * This test class assumes that you are already running the MongoDB server. - * - * @author Mark Pollack +/** + * This test class assumes that you are already running the MongoDB server. + * + * @author Mark Pollack * @author Thomas Darimont - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") -public class MongoMonitorIntegrationTests { - - @Autowired Mongo mongo; - - @Test - public void serverInfo() { - ServerInfo serverInfo = new ServerInfo(mongo); - serverInfo.getVersion(); - Assert.isTrue(StringUtils.hasText("1.")); - } - - /** - * @throws UnknownHostException - * @see DATAMONGO-685 - */ + * @author Mark Paluch + */ +@ExtendWith(MongoClientExtension.class) +public class MongoMonitorIntegrationTests { + + static @Client MongoClient mongoClient; + @Test + public void serverInfo() { + ServerInfo serverInfo = new ServerInfo(mongoClient); + serverInfo.getVersion(); + } + + @Test // DATAMONGO-685 public void getHostNameShouldReturnServerNameReportedByMongo() throws UnknownHostException { - ServerInfo serverInfo = new ServerInfo(mongo); + ServerInfo serverInfo = new ServerInfo(mongoClient); String hostName = null; try { @@ -65,13 +56,13 @@ public void getHostNameShouldReturnServerNameReportedByMongo() throws UnknownHostException { throw e; } - assertThat(hostName, is(notNullValue())); - assertThat(hostName, is("127.0.0.1")); + assertThat(hostName).isNotNull(); + assertThat(hostName).isEqualTo("127.0.0.1:27017"); } - @Test - public void operationCounters() { - OperationCounters operationCounters = new OperationCounters(mongo); - operationCounters.getInsertCount(); - } + @Test + public void operationCounters() { + OperationCounters operationCounters = new OperationCounters(mongoClient); + operationCounters.getInsertCount(); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java new file mode 100644 index 0000000000..1fdbb1f188 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java @@ -0,0 +1,27 @@ +/* + * Copyright 2018-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.monitor; + +import java.util.function.Supplier; + +/** + * @author Christoph Strobl + * @since 2018/01 + */ +interface Resumeable { + + void resumeAt(Supplier token); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ImperativeIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ImperativeIntegrationTests.java new file mode 100644 index 0000000000..c1b1b4851e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ImperativeIntegrationTests.java @@ -0,0 +1,96 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.exporter.FinishedSpan; +import io.micrometer.tracing.test.SampleTestRunner; + +import java.util.List; + +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.PersonRepository; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +/** + * Collection of tests that log metrics and tracing with an external tracing tool. 
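+ * Expects a MongoDB server at {@code 127.0.0.1:27017}, as configured by {@link TestConfig}.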
+ * + * @author Greg Turnquist + * @author Mark Paluch + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = TestConfig.class) +public class ImperativeIntegrationTests extends SampleTestRunner { + + @Autowired PersonRepository repository; + + ImperativeIntegrationTests() { + super(SampleRunnerConfig.builder().build()); + } + + @Override + protected MeterRegistry createMeterRegistry() { + return TestConfig.METER_REGISTRY; + } + + @Override + protected ObservationRegistry createObservationRegistry() { + return TestConfig.OBSERVATION_REGISTRY; + } + + @Override + public SampleTestRunnerConsumer yourCode() { + + return (tracer, meterRegistry) -> { + + repository.deleteAll(); + repository.save(new Person("Dave", "Matthews", 42)); + List<Person> people = repository.findByLastname("Matthews"); + + assertThat(people).hasSize(1); + assertThat(people.get(0)).extracting("firstname", "lastname").containsExactly("Dave", "Matthews"); + + repository.deleteAll(); + + System.out.println(((SimpleMeterRegistry) meterRegistry).getMetersAsString()); + + assertThat(tracer.getFinishedSpans()).hasSize(5).extracting(FinishedSpan::getName).contains("person.delete", + "person.update", "person.find"); + + for (FinishedSpan span : tracer.getFinishedSpans()) { + + assertThat(span.getTags()).containsEntry("db.system", "mongodb").containsEntry("net.transport", "IP.TCP"); + + if (MongoClientVersion.isVersion5orNewer()) { + assertThat(span.getTags()).containsKeys("db.connection_string", "db.name", "db.operation", + "db.mongodb.collection", "net.peer.name", "net.peer.port"); + } else { + assertThat(span.getTags()).containsKeys("db.connection_string", "db.name", "db.operation", + "db.mongodb.collection", "net.peer.name", "net.peer.port", "net.sock.peer.addr", "net.sock.peer.port"); + } + } + }; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/MongoObservationCommandListenerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/MongoObservationCommandListenerTests.java new file mode 100644 index 0000000000..5c2cb0b701 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/MongoObservationCommandListenerTests.java @@ -0,0 +1,251 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.observability; + +import static io.micrometer.core.tck.MeterRegistryAssert.*; +import static org.mockito.Mockito.*; + +import io.micrometer.common.KeyValues; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.observability.MongoObservation.LowCardinalityCommandKeyNames; + +import com.mongodb.ConnectionString; +import com.mongodb.RequestContext; +import com.mongodb.ServerAddress; +import com.mongodb.client.SynchronousContextProvider; +import com.mongodb.connection.ClusterId; +import com.mongodb.connection.ConnectionDescription; +import com.mongodb.connection.ServerId; +import com.mongodb.event.CommandFailedEvent; +import com.mongodb.event.CommandStartedEvent; +import com.mongodb.event.CommandSucceededEvent; + +/** + * Series of test cases exercising {@link MongoObservationCommandListener}. + * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @author Mark Paluch + * @author François Kha + */ +class MongoObservationCommandListenerTests { + + ObservationRegistry observationRegistry; + MeterRegistry meterRegistry; + + MongoObservationCommandListener listener; + + @BeforeEach + void setup() { + + this.meterRegistry = new SimpleMeterRegistry(); + this.observationRegistry = ObservationRegistry.create(); + this.observationRegistry.observationConfig().observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + + this.listener = new MongoObservationCommandListener(observationRegistry); + } + + @Test + void commandStartedShouldNotInstrumentWhenAdminDatabase() { + + // when + listener.commandStarted(new CommandStartedEvent(null, 0, 0, null, "admin", "", null)); + + // then + assertThat(meterRegistry).hasNoMetrics(); + } + + @Test + void commandStartedShouldNotInstrumentWhenNoRequestContext() { + + // when + listener.commandStarted(new CommandStartedEvent(null, 0, 0, null, "some name", "", null)); + + // then + assertThat(meterRegistry).hasNoMetrics(); + } + + @Test + void commandStartedShouldNotInstrumentWhenNoParentSampleInRequestContext() { + + // when + listener.commandStarted(new CommandStartedEvent(new MapRequestContext(), 0, 0, null, "some name", "", null)); + + // then + assertThat(meterRegistry).hasMeterWithName("spring.data.mongodb.command.active"); + } + + @Test + void successfullyCompletedCommandShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, // + new ConnectionDescription( // + new ServerId( // + new ClusterId("description"), // + new ServerAddress("localhost", 1234))), "database", "insert", // + new BsonDocument("collection", new BsonString("user")))); + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "insert", null, null, 0)); + + // then + assertThatTimerRegisteredWithTags(); + } + + @Test + void 
successfullyCompletedCommandWithCollectionHavingCommandNameShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, // + new ConnectionDescription( // + new ServerId( // + new ClusterId("description"), // + new ServerAddress("localhost", 1234))), // + "database", "aggregate", // + new BsonDocument("aggregate", new BsonString("user")))); + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "aggregate", null, null, 0)); + + // then + assertThatTimerRegisteredWithTags(); + } + + @Test + void successfullyCompletedCommandWithoutClusterInformationShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, null, "database", "insert", + new BsonDocument("collection", new BsonString("user")))); + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "insert", null, null, 0)); + + assertThat(meterRegistry).hasTimerWithNameAndTags(MongoObservation.MONGODB_COMMAND_OBSERVATION.getName(), + KeyValues.of(LowCardinalityCommandKeyNames.MONGODB_COLLECTION.withValue("user"), + LowCardinalityCommandKeyNames.DB_NAME.withValue("database"), + LowCardinalityCommandKeyNames.MONGODB_COMMAND.withValue("insert"), + LowCardinalityCommandKeyNames.DB_SYSTEM.withValue("mongodb")).and("error", "none")); + } + + @Test + void commandWithErrorShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, // + new ConnectionDescription( // + new ServerId( // + new ClusterId("description"), // + new ServerAddress("localhost", 1234))), // + "database", "insert", // + new BsonDocument("collection", new BsonString("user")))); + listener.commandFailed( // + new CommandFailedEvent(traceRequestContext, 0, 0, null, "db", "insert", 0, new IllegalAccessException())); + + // then + assertThatTimerRegisteredWithTags(); + } + + @Test // GH-4481 + void completionShouldIgnoreIncompatibleObservationContext() { + + // given + RequestContext traceRequestContext = getContext(); + + Observation observation = mock(Observation.class); + traceRequestContext.put(ObservationThreadLocalAccessor.KEY, observation); + + // when + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "insert", null, null, 0)); + + verify(observation).getContext(); + verifyNoMoreInteractions(observation); + } + + @Test // GH-4481 + void failureShouldIgnoreIncompatibleObservationContext() { + + // given + RequestContext traceRequestContext = getContext(); + + Observation observation = mock(Observation.class); + traceRequestContext.put(ObservationThreadLocalAccessor.KEY, observation); + + // when + listener.commandFailed(new CommandFailedEvent(traceRequestContext, 0, 0, null, "db", "insert", 0, null)); + + verify(observation).getContext(); + verifyNoMoreInteractions(observation); + } + + @Test // GH-4321 + void shouldUseObservationConvention() { + + // given + MongoHandlerObservationConvention customObservationConvention = new 
MongoHandlerObservationConvention() { + @Override + public boolean supportsContext(Observation.Context context) { + return MongoHandlerObservationConvention.super.supportsContext(context); + } + + @Override + public String getName() { + return "custom.name"; + } + }; + this.listener = new MongoObservationCommandListener(observationRegistry, mock(ConnectionString.class), + customObservationConvention); + + // when + listener.commandStarted(new CommandStartedEvent(new MapRequestContext(), 0, 0, null, "some name", "", null)); + + // then + assertThat(meterRegistry).hasMeterWithName("custom.name.active"); + } + + private RequestContext getContext() { + return ((SynchronousContextProvider) ContextProviderFactory.create(observationRegistry)).getContext(); + } + + private void assertThatTimerRegisteredWithTags() { + + assertThat(meterRegistry) // + .hasTimerWithNameAndTags(MongoObservation.MONGODB_COMMAND_OBSERVATION.getName(), + KeyValues.of(LowCardinalityCommandKeyNames.MONGODB_COLLECTION.withValue("user"))); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ReactiveIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ReactiveIntegrationTests.java new file mode 100644 index 0000000000..9bfe5a8ce2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ReactiveIntegrationTests.java @@ -0,0 +1,89 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.ReactivePersonRepository; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; +import io.micrometer.tracing.exporter.FinishedSpan; +import io.micrometer.tracing.test.SampleTestRunner; +import reactor.test.StepVerifier; +import reactor.util.context.Context; + +/** + * Collection of tests that log metrics and tracing with an external tracing tool. 
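+ * Reactive counterpart of {@link ImperativeIntegrationTests}, sharing the {@link TestConfig} setup.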
+ * + * @author Mark Paluch + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = TestConfig.class) +public class ReactiveIntegrationTests extends SampleTestRunner { + + @Autowired ReactivePersonRepository repository; + + ReactiveIntegrationTests() { + super(SampleRunnerConfig.builder().build()); + } + + @Override + protected MeterRegistry createMeterRegistry() { + return TestConfig.METER_REGISTRY; + } + + @Override + protected ObservationRegistry createObservationRegistry() { + return TestConfig.OBSERVATION_REGISTRY; + } + + @Override + public SampleTestRunnerConsumer yourCode() { + + return (tracer, meterRegistry) -> { + + Observation intermediate = Observation.start("intermediate", createObservationRegistry()); + + repository.deleteAll() // + .then(repository.save(new Person("Dave", "Matthews", 42))) // + .contextWrite(Context.of(ObservationThreadLocalAccessor.KEY, intermediate)) // + .as(StepVerifier::create).expectNextCount(1)// + .verifyComplete(); + + repository.findByLastname("Matthews") // + .contextWrite(Context.of(ObservationThreadLocalAccessor.KEY, intermediate)) // + .as(StepVerifier::create).assertNext(actual -> { + + assertThat(actual).extracting("firstname", "lastname").containsExactly("Dave", "Matthews"); + }).verifyComplete(); + + intermediate.stop(); + System.out.println(((SimpleMeterRegistry) meterRegistry).getMetersAsString()); + + assertThat(tracer.getFinishedSpans()).hasSize(5).extracting(FinishedSpan::getName).contains("person.delete", + "person.update", "person.find"); + }; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/TestConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/TestConfig.java new file mode 100644 index 0000000000..7e7e2c636c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/TestConfig.java @@ -0,0 +1,165 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.observability; + +import java.util.Properties; + +import org.springframework.beans.factory.config.PropertiesFactoryBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.PersonRepository; +import org.springframework.data.mongodb.repository.ReactivePersonRepository; +import org.springframework.data.mongodb.repository.SampleEvaluationContextExtension; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean; +import org.springframework.data.repository.core.support.PropertiesBasedNamedQueries; + +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.MongoClients; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.Tracer; +import io.micrometer.tracing.test.simple.SimpleTracer; + +/** + * @author Mark Paluch + */ +@Configuration +class TestConfig { + + static final MeterRegistry METER_REGISTRY = new SimpleMeterRegistry(); + static final ObservationRegistry OBSERVATION_REGISTRY = ObservationRegistry.create(); + + static { + OBSERVATION_REGISTRY.observationConfig().observationHandler(new DefaultMeterObservationHandler(METER_REGISTRY)); + } + + @Bean + MongoDatabaseFactory mongoDatabaseFactory(MongoClientSettings settings) { + return new SimpleMongoClientDatabaseFactory(MongoClients.create(settings), "observable"); + } + + @Bean + ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory(MongoClientSettings settings) { + return new SimpleReactiveMongoDatabaseFactory(com.mongodb.reactivestreams.client.MongoClients.create(settings), + "observable"); + } + + @Bean + MongoClientSettings mongoClientSettings(ObservationRegistry observationRegistry) { + + ConnectionString connectionString = new ConnectionString( + String.format("mongodb://%s:%s/?w=majority&uuidrepresentation=javaLegacy", "127.0.0.1", 27017)); + + MongoClientSettings settings = MongoClientSettings.builder() // + .addCommandListener(new MongoObservationCommandListener(observationRegistry, connectionString)) // + .contextProvider(ContextProviderFactory.create(observationRegistry)) // + 
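+ // the context provider carries the current observation into the driver's RequestContext for the command listener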
.applyConnectionString(connectionString) // + .build(); + + return settings; + } + + @Bean + MappingMongoConverter mongoConverter(MongoMappingContext mappingContext, MongoDatabaseFactory factory) { + return new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext); + } + + @Bean + MongoMappingContext mappingContext() { + return new MongoMappingContext(); + } + + @Bean + MongoTemplate mongoTemplate(MongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter) { + + MongoTemplate template = new MongoTemplate(mongoDatabaseFactory, mongoConverter); + return template; + } + + @Bean + ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, + MongoConverter mongoConverter) { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoDatabaseFactory, mongoConverter); + return template; + } + + @Bean + public PropertiesFactoryBean namedQueriesProperties() { + + PropertiesFactoryBean bean = new PropertiesFactoryBean(); + bean.setLocation(new ClassPathResource("META-INF/mongo-named-queries.properties")); + return bean; + } + + @Bean + MongoRepositoryFactoryBean<PersonRepository, Person, String> personRepositoryFactoryBean(MongoOperations operations, + Properties namedQueriesProperties) { + + MongoRepositoryFactoryBean<PersonRepository, Person, String> factoryBean = new MongoRepositoryFactoryBean<>( + PersonRepository.class); + factoryBean.setNamedQueries(new PropertiesBasedNamedQueries(namedQueriesProperties)); + factoryBean.setMongoOperations(operations); + factoryBean.setCreateIndexesForQueryMethods(true); + return factoryBean; + } + + @Bean + ReactiveMongoRepositoryFactoryBean<ReactivePersonRepository, Person, String> reactivePersonRepositoryFactoryBean( + ReactiveMongoOperations operations, Properties namedQueriesProperties) { + + ReactiveMongoRepositoryFactoryBean<ReactivePersonRepository, Person, String> factoryBean = new ReactiveMongoRepositoryFactoryBean<>( + ReactivePersonRepository.class); + factoryBean.setNamedQueries(new PropertiesBasedNamedQueries(namedQueriesProperties)); + factoryBean.setReactiveMongoOperations(operations); + factoryBean.setCreateIndexesForQueryMethods(true); + return factoryBean; + } + + @Bean + SampleEvaluationContextExtension contextExtension() { + return new SampleEvaluationContextExtension(); + } + + @Bean + ObservationRegistry registry() { + return OBSERVATION_REGISTRY; + } + + @Bean + Tracer tracer() { + return new SimpleTracer(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java index 47cf1eec96..e815cc6e7c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2015 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,55 +17,45 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import static org.springframework.util.Assert.*; import java.text.DecimalFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; +import java.util.*; import java.util.regex.Pattern; +import java.util.stream.Collectors; +import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.core.Constants; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; +import org.springframework.util.Assert; import org.springframework.util.StopWatch; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.WriteConcern; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; /** * Test class to execute performance tests for plain MongoDB driver usage, {@link MongoTemplate} and the repositories * abstraction. 
- * + * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ public class PerformanceTests { @@ -74,36 +64,39 @@ public class PerformanceTests { private static final int ITERATIONS = 50; private static final StopWatch watch = new StopWatch(); private static final Collection<String> IGNORED_WRITE_CONCERNS = Arrays.asList("MAJORITY", "REPLICAS_SAFE", - "FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED"); - private static final int COLLECTION_SIZE = 1024 * 1024 * 256; // 256 MB + "FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED", "W2", "W3"); + private static final int COLLECTION_SIZE = 1024 * 1024 * 256; // 256 MB private static final Collection<String> COLLECTION_NAMES = Arrays.asList("template", "driver", "person"); - Mongo mongo; + MongoClient mongo; MongoTemplate operations; PersonRepository repository; MongoConverter converter; - @Before + @BeforeEach public void setUp() throws Exception { - this.mongo = new MongoClient(); + this.mongo = MongoClients.create(); - SimpleMongoDbFactory mongoDbFactory = new SimpleMongoDbFactory(this.mongo, DATABASE_NAME); + SimpleMongoClientDatabaseFactory mongoDbFactory = new SimpleMongoClientDatabaseFactory(this.mongo, DATABASE_NAME); MongoMappingContext context = new MongoMappingContext(); context.setInitialEntitySet(Collections.singleton(Person.class)); context.afterPropertiesSet(); this.converter = new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), context); - this.operations = new MongoTemplate(new SimpleMongoDbFactory(this.mongo, DATABASE_NAME), converter); + this.operations = new MongoTemplate(new SimpleMongoClientDatabaseFactory(this.mongo, DATABASE_NAME), converter); - MongoRepositoryFactoryBean factory = new MongoRepositoryFactoryBean(); + MongoRepositoryFactoryBean<PersonRepository, Person, ObjectId> factory = new MongoRepositoryFactoryBean<>( + PersonRepository.class); factory.setMongoOperations(operations); - factory.setRepositoryInterface(PersonRepository.class); factory.afterPropertiesSet(); this.repository = factory.getObject(); + } + void afterEach() { + mongo.close(); } @Test @@ -112,11 +105,11 @@ public void writeWithWriteConcerns() { public void doWithWriteConcern(String constantName, WriteConcern concern) { writeHeadline("WriteConcern: " + constantName); System.out.println(String.format("Writing %s objects using plain driver took %sms", NUMBER_OF_PERSONS, - writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS))); + writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern))); System.out.println(String.format("Writing %s objects using template took %sms", NUMBER_OF_PERSONS, - writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS))); + writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern))); System.out.println(String.format("Writing %s objects using repository took %sms", NUMBER_OF_PERSONS, - writingObjectsUsingRepositories(NUMBER_OF_PERSONS))); + writingObjectsUsingRepositories(NUMBER_OF_PERSONS, concern))); writeFooter(); } }); @@ -125,20 +118,20 @@ public void doWithWriteConcern(String constantName, WriteConcern concern) { @Test public void plainConversion() throws InterruptedException { - Statistics statistics = new Statistics("Plain conversion of " + NUMBER_OF_PERSONS * 100 - + " persons - After %s iterations"); + Statistics statistics = new Statistics( + "Plain conversion of " + NUMBER_OF_PERSONS * 100 + " persons - After %s iterations"); - List<DBObject> dbObjects = getPersonDBObjects(NUMBER_OF_PERSONS * 100); + List<Document> documents = getPersonDocuments(NUMBER_OF_PERSONS * 100); for (int i = 
@@ -125,20 +118,20 @@ public void doWithWriteConcern(String constantName, WriteConcern concern) { @Test public void plainConversion() throws InterruptedException { - Statistics statistics = new Statistics("Plain conversion of " + NUMBER_OF_PERSONS * 100 - + " persons - After %s iterations"); + Statistics statistics = new Statistics( + "Plain conversion of " + NUMBER_OF_PERSONS * 100 + " persons - After %s iterations"); - List dbObjects = getPersonDBObjects(NUMBER_OF_PERSONS * 100); + List documents = getPersonDocuments(NUMBER_OF_PERSONS * 100); for (int i = 0; i < ITERATIONS; i++) { - statistics.registerTime(Api.DIRECT, Mode.READ, convertDirectly(dbObjects)); - statistics.registerTime(Api.CONVERTER, Mode.READ, convertUsingConverter(dbObjects)); + statistics.registerTime(Api.DIRECT, Mode.READ, convertDirectly(documents)); + statistics.registerTime(Api.CONVERTER, Mode.READ, convertUsingConverter(documents)); } statistics.printResults(ITERATIONS); } - private long convertDirectly(final List dbObjects) { + private long convertDirectly(final List documents) { executeWatched(new WatchCallback>() { @@ -147,8 +140,8 @@ public List doInWatch() { List persons = new ArrayList(); - for (DBObject dbObject : dbObjects) { - persons.add(Person.from(dbObject)); + for (Document document : documents) { + persons.add(Person.from(document)); } return persons; @@ -158,7 +151,7 @@ public List doInWatch() { return watch.getLastTaskTimeMillis(); } - private long convertUsingConverter(final List dbObjects) { + private long convertUsingConverter(final List documents) { executeWatched(new WatchCallback>() { @@ -167,8 +160,8 @@ public List doInWatch() { List persons = new ArrayList(); - for (DBObject dbObject : dbObjects) { - persons.add(converter.read(Person.class, dbObject)); + for (Document document : documents) { + persons.add(converter.read(Person.class, document)); } return persons; @@ -180,13 +173,10 @@ public List doInWatch() { @Test public void writeAndRead() throws Exception { - - mongo.setWriteConcern(WriteConcern.SAFE); - - readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS); + readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.ACKNOWLEDGED); } - private void readsAndWrites(int numberOfPersons, int iterations) { + private void readsAndWrites(int numberOfPersons, int iterations, WriteConcern writeConcern) { Statistics statistics = new Statistics("Reading " + numberOfPersons + " - After %s iterations"); @@ -194,9 +184,11 @@ private void readsAndWrites(int numberOfPersons, int iterations) { setupCollections(); - statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons)); - statistics.registerTime(Api.TEMPLATE, Mode.WRITE, writingObjectsUsingMongoTemplate(numberOfPersons)); - statistics.registerTime(Api.REPOSITORY, Mode.WRITE, writingObjectsUsingRepositories(numberOfPersons)); + statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons, writeConcern)); + statistics.registerTime(Api.TEMPLATE, Mode.WRITE, + writingObjectsUsingMongoTemplate(numberOfPersons, writeConcern)); + statistics.registerTime(Api.REPOSITORY, Mode.WRITE, + writingObjectsUsingRepositories(numberOfPersons, writeConcern)); statistics.registerTime(Api.DRIVER, Mode.READ, readingUsingPlainDriver()); statistics.registerTime(Api.TEMPLATE, Mode.READ, readingUsingTemplate()); @@ -224,22 +216,13 @@ private void writeFooter() { } private long queryUsingTemplate() { - executeWatched(new WatchCallback>() { - public List doInWatch() { - Query query = query(where("addresses.zipCode").regex(".*1.*")); - return operations.find(query, Person.class, "template"); - } - }); + executeWatched(() -> operations.find(query(where("addresses.zipCode").regex(".*1.*")), Person.class, "template")); return watch.getLastTaskTimeMillis(); } private long queryUsingRepository() { - executeWatched(new WatchCallback>() { - public List doInWatch() { - return repository.findByAddressesZipCodeContaining("1"); - } - }); + executeWatched(() -> repository.findByAddressesZipCodeContaining("1")); return watch.getLastTaskTimeMillis(); } @@ -255,8 +238,6 @@ private
void executeWithWriteConcerns(WriteConcernCallback callback) { } WriteConcern writeConcern = (WriteConcern) constants.asObject(constantName); - mongo.setWriteConcern(writeConcern); - setupCollections(); callback.doWithWriteConcern(constantName, writeConcern); @@ -265,114 +246,97 @@ private void executeWithWriteConcerns(WriteConcernCallback callback) { private void setupCollections() { - DB db = this.mongo.getDB(DATABASE_NAME); + MongoDatabase db = this.mongo.getDatabase(DATABASE_NAME); for (String collectionName : COLLECTION_NAMES) { - DBCollection collection = db.getCollection(collectionName); + + MongoCollection collection = db.getCollection(collectionName); collection.drop(); - collection.getDB().command(getCreateCollectionCommand(collectionName)); - collection.createIndex(new BasicDBObject("firstname", -1)); - collection.createIndex(new BasicDBObject("lastname", -1)); + + CreateCollectionOptions collectionOptions = new CreateCollectionOptions(); + collectionOptions.capped(false); + collectionOptions.sizeInBytes(COLLECTION_SIZE); + + db.createCollection(collectionName, collectionOptions); + + collection.createIndex(new Document("firstname", -1)); + collection.createIndex(new Document("lastname", -1)); } } - private DBObject getCreateCollectionCommand(String name) { - BasicDBObject dbObject = new BasicDBObject(); - dbObject.put("createCollection", name); - dbObject.put("capped", false); - dbObject.put("size", COLLECTION_SIZE); - return dbObject; + private Document getCreateCollectionCommand(String name) { + Document document = new Document(); + document.put("createCollection", name); + document.put("capped", false); + document.put("size", COLLECTION_SIZE); + return document; } - private long writingObjectsUsingPlainDriver(int numberOfPersons) { + private long writingObjectsUsingPlainDriver(int numberOfPersons, WriteConcern writeConcern) { - final DBCollection collection = mongo.getDB(DATABASE_NAME).getCollection("driver"); - final List persons = getPersonObjects(numberOfPersons); + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver") + .withWriteConcern(writeConcern); + List persons = getPersonObjects(numberOfPersons); - executeWatched(new WatchCallback() { - public Void doInWatch() { - for (Person person : persons) { - collection.save(person.toDBObject()); - } - return null; - } - }); + executeWatched(() -> persons.stream().map(Person::toDocument).map(it -> { + + collection.insertOne(it); + return true; + })); return watch.getLastTaskTimeMillis(); } - private long writingObjectsUsingRepositories(int numberOfPersons) { + private long writingObjectsUsingRepositories(int numberOfPersons, WriteConcern writeConcern) { - final List persons = getPersonObjects(numberOfPersons); - executeWatched(new WatchCallback() { - public Void doInWatch() { - repository.save(persons); - return null; - } - }); + List persons = getPersonObjects(numberOfPersons); + executeWatched(() -> repository.saveAll(persons)); return watch.getLastTaskTimeMillis(); } - private long writingObjectsUsingMongoTemplate(int numberOfPersons) { + private long writingObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern writeConcern) { - final List persons = getPersonObjects(numberOfPersons); + List persons = getPersonObjects(numberOfPersons); + operations.setWriteConcern(writeConcern); - executeWatched(new WatchCallback() { - public Void doInWatch() { - for (Person person : persons) { - operations.save(person, "template"); - } - return null; - } - }); + executeWatched(() ->
persons.stream()// + .peek(it -> operations.save(it, "template"))// + .collect(Collectors.toList())); return watch.getLastTaskTimeMillis(); } private long readingUsingPlainDriver() { - executeWatched(new WatchCallback>() { - public List doInWatch() { - return toPersons(mongo.getDB(DATABASE_NAME).getCollection("driver").find()); - } - }); + executeWatched(() -> toPersons(mongo.getDatabase(DATABASE_NAME).getCollection("driver").find())); return watch.getLastTaskTimeMillis(); } private long readingUsingTemplate() { - executeWatched(new WatchCallback>() { - public List doInWatch() { - return operations.findAll(Person.class, "template"); - } - }); + executeWatched(() -> operations.findAll(Person.class, "template")); return watch.getLastTaskTimeMillis(); } private long readingUsingRepository() { - executeWatched(new WatchCallback>() { - public List doInWatch() { - return repository.findAll(); - } - }); + executeWatched(repository::findAll); return watch.getLastTaskTimeMillis(); } private long queryUsingPlainDriver() { - executeWatched(new WatchCallback>() { - public List doInWatch() { + executeWatched(() -> { - DBCollection collection = mongo.getDB(DATABASE_NAME).getCollection("driver"); + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver"); - DBObject regex = new BasicDBObject("$regex", Pattern.compile(".*1.*")); - DBObject query = new BasicDBObject("addresses.zipCode", regex); - return toPersons(collection.find(query)); - } + Document regex = new Document("$regex", Pattern.compile(".*1.*")); + Document query = new Document("addresses.zipCode", regex); + return toPersons(collection.find(query)); }); return watch.getLastTaskTimeMillis(); @@ -402,15 +366,15 @@ private List getPersonObjects(int numberOfPersons) { return result; } - private List getPersonDBObjects(int numberOfPersons) { + private List getPersonDocuments(int numberOfPersons) { - List dbObjects = new ArrayList(numberOfPersons); + List documents = new ArrayList(numberOfPersons); for (Person person : getPersonObjects(numberOfPersons)) { - dbObjects.add(person.toDBObject()); + documents.add(person.toDocument()); } - return dbObjects; + return documents; } private T executeWatched(WatchCallback callback) { @@ -424,12 +388,13 @@ private T executeWatched(WatchCallback callback) { } } - private static List toPersons(DBCursor cursor) { + private static List toPersons(FindIterable cursor) { List persons = new ArrayList(); - while (cursor.hasNext()) { - persons.add(Person.from(cursor.next())); + Iterator it = cursor.iterator(); + while (it.hasNext()) { + persons.add(Person.from(it.next())); } return persons; @@ -449,18 +414,18 @@ public Person(String firstname, String lastname, List
<Address> addresses) { this.orders = new HashSet(); } - public static Person from(DBObject source) { + public static Person from(Document source) { - BasicDBList addressesSource = (BasicDBList) source.get("addresses"); + List addressesSource = (List) source.get("addresses"); List<Address> addresses = new ArrayList<Address>(addressesSource.size()); for (Object addressSource : addressesSource) { - addresses.add(Address.from((DBObject) addressSource)); + addresses.add(Address.from((Document) addressSource)); } - BasicDBList ordersSource = (BasicDBList) source.get("orders"); + List ordersSource = (List) source.get("orders"); Set orders = new HashSet(ordersSource.size()); for (Object orderSource : ordersSource) { - orders.add(Order.from((DBObject) orderSource)); + orders.add(Order.from((Document) orderSource)); } Person person = new Person((String) source.get("firstname"), (String) source.get("lastname"), addresses); @@ -468,14 +433,14 @@ public static Person from(DBObject source) { return person; } - public DBObject toDBObject() { + public Document toDocument() { - DBObject dbObject = new BasicDBObject(); - dbObject.put("firstname", firstname); - dbObject.put("lastname", lastname); - dbObject.put("addresses", writeAll(addresses)); - dbObject.put("orders", writeAll(orders)); - return dbObject; + Document document = new Document(); + document.put("firstname", firstname); + document.put("lastname", lastname); + document.put("addresses", writeAll(addresses)); + document.put("orders", writeAll(orders)); + return document; } } @@ -496,24 +461,24 @@ public Address(String zipCode, String city, Set types) { this.types = types; } - public static Address from(DBObject source) { + public static Address from(Document source) { String zipCode = (String) source.get("zipCode"); String city = (String) source.get("city"); - BasicDBList types = (BasicDBList) source.get("types"); + List types = (List) source.get("types"); return new Address(zipCode, city, new HashSet(readFromBasicDBList(types, AddressType.class))); } - public DBObject toDBObject() { - BasicDBObject dbObject = new BasicDBObject(); - dbObject.put("zipCode", zipCode); - dbObject.put("city", city); - dbObject.put("types", toBasicDBList(types)); - return dbObject; + public Document toDocument() { + Document document = new Document(); + document.put("zipCode", zipCode); + document.put("city", city); + document.put("types", toBasicDBList(types)); + return document; } } - private static > List readFromBasicDBList(BasicDBList source, Class type) { + private static > List readFromBasicDBList(List source, Class type) { List result = new ArrayList(source.size()); for (Object object : source) { @@ -522,8 +487,8 @@ private static > List readFromBasicDBList(BasicDBList sourc return result; } - private static > BasicDBList toBasicDBList(Collection enums) { - BasicDBList result = new BasicDBList(); + private static > List toBasicDBList(Collection enums) { + List result = new ArrayList<>(); for (T element : enums) { result.add(element.toString()); } @@ -554,12 +519,12 @@ public Order(List lineItems, Date createdAt, Status status) { this.status = status; } - public static Order from(DBObject source) { + public static Order from(Document source) { - BasicDBList lineItemsSource = (BasicDBList) source.get("lineItems"); + List lineItemsSource = (List) source.get("lineItems"); List lineItems = new ArrayList(lineItemsSource.size()); for (Object lineItemSource : lineItemsSource) { - lineItems.add(LineItem.from((DBObject) lineItemSource)); + lineItems.add(LineItem.from((Document) lineItemSource)); } Date date = (Date) source.get("createdAt"); @@ -571,8 +536,8 @@ public Order(List lineItems) { this(lineItems, new Date()); } - public DBObject toDBObject() { - DBObject result = new BasicDBObject(); + public Document toDocument() { + Document result = new Document();
result.put("createdAt", createdAt); result.put("lineItems", writeAll(lineItems)); result.put("status", status.toString()); @@ -601,7 +566,7 @@ public static List generate() { return pickRandomNumerOfItemsFrom(Arrays.asList(iPad, iPhone, macBook)); } - public static LineItem from(DBObject source) { + public static LineItem from(Document source) { String description = (String) source.get("description"); double price = (Double) source.get("price"); @@ -610,19 +575,19 @@ public static LineItem from(DBObject source) { return new LineItem(description, amount, price); } - public DBObject toDBObject() { + public Document toDocument() { - BasicDBObject dbObject = new BasicDBObject(); - dbObject.put("description", description); - dbObject.put("price", price); - dbObject.put("amount", amount); - return dbObject; + Document document = new Document(); + document.put("description", description); + document.put("price", price); + document.put("amount", amount); + return document; } } private static List pickRandomNumerOfItemsFrom(List source) { - isTrue(!source.isEmpty()); + Assert.isTrue(!source.isEmpty(), "Source must not be empty"); Random random = new Random(); int numberOfItems = random.nextInt(source.size()); @@ -659,13 +624,13 @@ private interface PersonRepository extends MongoRepository { private interface Convertible { - DBObject toDBObject(); + Document toDocument(); } - private static BasicDBList writeAll(Collection convertibles) { - BasicDBList result = new BasicDBList(); + private static List writeAll(Collection convertibles) { + List result = new ArrayList<>(); for (Convertible convertible : convertibles) { - result.add(convertible.toDBObject()); + result.add(convertible.toDocument()); } return result; } @@ -836,14 +801,10 @@ public String print(double referenceAverage, double referenceMedian) { String.format(" %s%%", DEVIATION_FORMAT.format(getMediaDeviationFrom(referenceMedian)))) + '\n'; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { - return times.isEmpty() ? "" : String.format("%s, %s: %s", api, mode, - StringUtils.collectionToCommaDelimitedString(times)) + '\n'; + return times.isEmpty() ? "" + : String.format("%s, %s: %s", api, mode, StringUtils.collectionToCommaDelimitedString(times)) + '\n'; } } @@ -895,10 +856,6 @@ public String print() { return builder.toString(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() {
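Before the reactive counterpart below, one note on the reporting code the last hunks touch: ApiTimes prints an average and a median per API, plus a percentage deviation relative to the first API that recorded times. The arithmetic, extracted into a runnable sketch that mirrors getMedian() and getDeviationFrom() from the test (the sample numbers are made up):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

class StatisticsSketch {

	// Median as in ApiTimes.getMedian(): sort a copy, then take the middle
	// element, or the mean of the two middle elements for even-sized samples.
	static double median(List<Double> times) {

		List<Double> sorted = new ArrayList<>(times);
		Collections.sort(sorted);

		int size = sorted.size();
		return size % 2 == 0 //
				? (sorted.get(size / 2 - 1) + sorted.get(size / 2)) / 2 //
				: sorted.get(size / 2);
	}

	// Relative deviation as in ApiTimes.getDeviationFrom(...): +15.00 means
	// 15% slower than the reference value.
	static double deviationPercent(double value, double reference) {
		return value * 100 / reference - 100;
	}

	public static void main(String[] args) {

		List<Double> times = Arrays.asList(10.0, 11.0, 12.0, 50.0);
		System.out.printf("median=%.2f, deviation=%+.2f%%%n", median(times), deviationPercent(median(times), 10.0));
	}
}
```

A positive deviation therefore reads as "this API took that much longer than the reference API".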
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java new file mode 100644 index 0000000000..edda1aad01 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -0,0 +1,930 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.performance; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.text.DecimalFormat; +import java.util.*; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.core.Constants; +import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DbRefProxyHandler; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StopWatch; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBList; +import com.mongodb.BasicDBObject; +import com.mongodb.DBRef; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Test class to execute performance tests for plain Reactive Streams MongoDB driver usage, + * {@link ReactiveMongoOperations} and the repositories abstraction.
+ * + * @author Mark Paluch + */ +public class ReactivePerformanceTests { + + private static final String DATABASE_NAME = "performance"; + private static final int NUMBER_OF_PERSONS = 300; + private static final int ITERATIONS = 50; + private static final StopWatch watch = new StopWatch(); + private static final Collection IGNORED_WRITE_CONCERNS = Arrays.asList("MAJORITY", "REPLICAS_SAFE", + "FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED"); + private static final int COLLECTION_SIZE = 1024 * 1024 * 256; // 256 MB + private static final Collection COLLECTION_NAMES = Arrays.asList("template", "driver", "person"); + + MongoClient mongo; + ReactiveMongoTemplate operations; + ReactivePersonRepository repository; + MongoConverter converter; + + @BeforeEach + public void setUp() throws Exception { + + mongo = MongoClients.create(); + + SimpleReactiveMongoDatabaseFactory mongoDbFactory = new SimpleReactiveMongoDatabaseFactory(this.mongo, + DATABASE_NAME); + + MongoMappingContext context = new MongoMappingContext(); + context.setInitialEntitySet(Collections.singleton(Person.class)); + context.afterPropertiesSet(); + + converter = new MappingMongoConverter(new DbRefResolver() { + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + return null; + } + + @Override + public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + DbRefProxyHandler proxyHandler) { + return null; + } + + @Override + public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, + MongoPersistentEntity entity, Object id) { + return null; + } + + @Override + public Document fetch(DBRef dbRef) { + return null; + } + + @Override + public List bulkFetch(List dbRefs) { + return null; + } + + }, context); + operations = new ReactiveMongoTemplate(mongoDbFactory, converter); + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(operations); + repository = factory.getRepository(ReactivePersonRepository.class); + } + + @AfterEach + void afterEach() { + mongo.close(); + } + + @Test // DATAMONGO-1444 + public void writeWithWriteConcerns() { + executeWithWriteConcerns((constantName, concern) -> { + + writeHeadline("WriteConcern: " + constantName); + System.out.println(String.format("Writing %s objects using plain driver took %sms", NUMBER_OF_PERSONS, + writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects using template took %sms", NUMBER_OF_PERSONS, + writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects using repository took %sms", NUMBER_OF_PERSONS, + writingObjectsUsingRepositories(NUMBER_OF_PERSONS, concern))); + + System.out.println(String.format("Writing %s objects async using plain driver took %sms", NUMBER_OF_PERSONS, + writingAsyncObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects async using template took %sms", NUMBER_OF_PERSONS, + writingAsyncObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects async using repository took %sms", NUMBER_OF_PERSONS, + writingAsyncObjectsUsingRepositories(NUMBER_OF_PERSONS, concern))); + writeFooter(); + }); + }
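The write and read helpers later in this class all await the driver's Publishers before the stopwatch is stopped, so each benchmarked step measures a complete round trip rather than just the subscription. A minimal sketch of that await pattern with the Reactive Streams driver, assuming a MongoDB instance on the default host and port (names are illustrative):

```java
import org.bson.Document;

import reactor.core.publisher.Mono;

import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;

class ReactiveAwaitSketch {

	public static void main(String[] args) {

		MongoClient client = MongoClients.create();

		// The Reactive Streams driver returns cold Publishers; nothing runs until
		// they are subscribed. Mono.from(...).block() subscribes and waits for the
		// acknowledgement, turning the insert into a synchronous, measurable step.
		Mono.from(client.getDatabase("performance").getCollection("driver")
				.insertOne(new Document("firstname", "Dave"))).block();

		client.close();
	}
}
```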
+ + @Test + public void plainConversion() throws InterruptedException { + + Statistics statistics = new Statistics( + "Plain conversion of " + NUMBER_OF_PERSONS * 100 + " persons - After %s iterations"); + + List dbObjects = getPersonDocuments(NUMBER_OF_PERSONS * 100); + + for (int i = 0; i < ITERATIONS; i++) { + statistics.registerTime(Api.DIRECT, Mode.READ, convertDirectly(dbObjects)); + statistics.registerTime(Api.CONVERTER, Mode.READ, convertUsingConverter(dbObjects)); + } + + statistics.printResults(ITERATIONS); + } + + private long convertDirectly(final List dbObjects) { + + executeWatched(() -> { + + List persons = new ArrayList(); + + for (Document dbObject : dbObjects) { + persons.add(Person.from(new Document(dbObject))); + } + + return persons; + }); + + return watch.getLastTaskTimeMillis(); + } + + private long convertUsingConverter(final List dbObjects) { + + executeWatched(() -> { + + List persons = new ArrayList(); + + for (Document dbObject : dbObjects) { + persons.add(converter.read(Person.class, dbObject)); + } + + return persons; + }); + + return watch.getLastTaskTimeMillis(); + } + + @Test // DATAMONGO-1444 + public void writeAndRead() throws Exception { + + readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.ACKNOWLEDGED); + } + + private void readsAndWrites(int numberOfPersons, int iterations, WriteConcern concern) { + + Statistics statistics = new Statistics("Reading " + numberOfPersons + " - After %s iterations"); + + for (int i = 0; i < iterations; i++) { + + setupCollections(); + + statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons, concern)); + statistics.registerTime(Api.TEMPLATE, Mode.WRITE, writingObjectsUsingMongoTemplate(numberOfPersons, concern)); + statistics.registerTime(Api.REPOSITORY, Mode.WRITE, writingObjectsUsingRepositories(numberOfPersons, concern)); + + statistics.registerTime(Api.DRIVER, Mode.WRITE_ASYNC, + writingAsyncObjectsUsingPlainDriver(numberOfPersons, concern)); + statistics.registerTime(Api.TEMPLATE, Mode.WRITE_ASYNC, + writingAsyncObjectsUsingMongoTemplate(numberOfPersons, concern)); + statistics.registerTime(Api.REPOSITORY, Mode.WRITE_ASYNC, + writingAsyncObjectsUsingRepositories(numberOfPersons, concern)); + + statistics.registerTime(Api.DRIVER, Mode.READ, readingUsingPlainDriver()); + statistics.registerTime(Api.TEMPLATE, Mode.READ, readingUsingTemplate()); + statistics.registerTime(Api.REPOSITORY, Mode.READ, readingUsingRepository()); + + statistics.registerTime(Api.DRIVER, Mode.QUERY, queryUsingPlainDriver()); + statistics.registerTime(Api.TEMPLATE, Mode.QUERY, queryUsingTemplate()); + statistics.registerTime(Api.REPOSITORY, Mode.QUERY, queryUsingRepository()); + + if (i > 0 && i % (iterations / 10) == 0) { + statistics.printResults(i); + } + } + + statistics.printResults(iterations); + } + + private void writeHeadline(String headline) { + System.out.println(headline); + System.out.println(createUnderline(headline)); + } + + private void writeFooter() { + System.out.println(); + } + + private long queryUsingTemplate() { + executeWatched(() -> { + Query query = query(where("addresses.zipCode").regex(".*1.*")); + return operations.find(query, Person.class, "template").collectList().block(); + }); + + return watch.getLastTaskTimeMillis(); + } + + private long queryUsingRepository() { + executeWatched(() -> repository.findByAddressesZipCodeContaining("1").collectList().block()); + + return watch.getLastTaskTimeMillis(); + } + + private void executeWithWriteConcerns(WriteConcernCallback callback) { + + Constants constants = new
Constants(WriteConcern.class); + + for (String constantName : constants.getNames(null)) { + + if (IGNORED_WRITE_CONCERNS.contains(constantName)) { + continue; + } + + WriteConcern writeConcern = (WriteConcern) constants.asObject(constantName); + + setupCollections(); + + callback.doWithWriteConcern(constantName, writeConcern); + } + } + + private void setupCollections() { + + MongoDatabase db = this.mongo.getDatabase(DATABASE_NAME); + + for (String collectionName : COLLECTION_NAMES) { + MongoCollection collection = db.getCollection(collectionName); + Mono.from(collection.drop()).block(); + Mono.from(db.createCollection(collectionName, getCreateCollectionOptions())).block(); + collection.createIndex(new BasicDBObject("firstname", -1)); + collection.createIndex(new BasicDBObject("lastname", -1)); + } + } + + private CreateCollectionOptions getCreateCollectionOptions() { + CreateCollectionOptions options = new CreateCollectionOptions(); + return options.sizeInBytes(COLLECTION_SIZE).capped(false); + } + + private long writingObjectsUsingPlainDriver(int numberOfPersons, WriteConcern concern) { + + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver") + .withWriteConcern(concern); + List persons = getPersonObjects(numberOfPersons); + + executeWatched( + () -> persons.stream().map(it -> Mono.from(collection.insertOne(new Document(it.toDocument()))).block())); + + return watch.getLastTaskTimeMillis(); + } + + private long writingObjectsUsingRepositories(int numberOfPersons, WriteConcern concern) { + + final List persons = getPersonObjects(numberOfPersons); + operations.setWriteConcern(concern); + executeWatched(() -> persons.stream().map(it -> repository.save(it).block())); + + return watch.getLastTaskTimeMillis(); + } + + private long writingObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern concern) { + + final List persons = getPersonObjects(numberOfPersons); + + executeWatched(() -> { + operations.setWriteConcern(concern); + return persons.stream().map(it -> operations.save(it, "template").block()); + }); + + return watch.getLastTaskTimeMillis(); + } + + private long writingAsyncObjectsUsingPlainDriver(int numberOfPersons, WriteConcern concern) { + + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver") + .withWriteConcern(concern); + List persons = getPersonObjects(numberOfPersons); + + executeWatched(() -> Flux + .from(collection + .insertMany(persons.stream().map(person -> new Document(person.toDocument())).collect(Collectors.toList()))) + .then().block()); + + return watch.getLastTaskTimeMillis(); + } + + private long writingAsyncObjectsUsingRepositories(int numberOfPersons, WriteConcern concern) { + + List persons = getPersonObjects(numberOfPersons); + operations.setWriteConcern(concern); + executeWatched(() -> repository.saveAll(persons).then().block()); + + return watch.getLastTaskTimeMillis(); + } + + private long writingAsyncObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern concern) { + + List persons = getPersonObjects(numberOfPersons); + + executeWatched(() -> { + operations.setWriteConcern(concern); + return Flux.from(operations.insertAll(persons)).then().block(); + }); + + return watch.getLastTaskTimeMillis(); + } + + private long readingUsingPlainDriver() { + + executeWatched(() -> Flux.from(mongo.getDatabase(DATABASE_NAME).getCollection("driver").find()).map(Person::from) + .collectList().block()); + + return watch.getLastTaskTimeMillis(); + } + + private long readingUsingTemplate() { + 
executeWatched(() -> operations.findAll(Person.class, "template").collectList().block()); + + return watch.getLastTaskTimeMillis(); + } + + private long readingUsingRepository() { + executeWatched(() -> repository.findAll().collectList().block()); + + return watch.getLastTaskTimeMillis(); + } + + private long queryUsingPlainDriver() { + + executeWatched(() -> { + + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver"); + + Document regex = new Document("$regex", Pattern.compile(".*1.*")); + Document query = new Document("addresses.zipCode", regex); + return Flux.from(collection.find(query)).map(Person::from).collectList().block(); + }); + + return watch.getLastTaskTimeMillis(); + } + + private List getPersonObjects(int numberOfPersons) { + + List result = new ArrayList(); + + for (int i = 0; i < numberOfPersons; i++) { + + List
<Address> addresses = new ArrayList<Address>(); + + for (int a = 0; a < 5; a++) { + addresses.add(new Address("zip" + a, "city" + a)); + } + + Person person = new Person("Firstname" + i, "Lastname" + i, addresses); + + for (int o = 0; o < 10; o++) { + person.orders.add(new Order(LineItem.generate())); + } + + result.add(person); + } + + return result; + } + + private List getPersonDocuments(int numberOfPersons) { + + List dbObjects = new ArrayList(numberOfPersons); + + for (Person person : getPersonObjects(numberOfPersons)) { + dbObjects.add(person.toDocument()); + } + + return dbObjects; + } + + private T executeWatched(WatchCallback callback) { + + watch.start(); + + try { + return callback.doInWatch(); + } finally { + watch.stop(); + } + } + + static class Person { + + ObjectId id; + String firstname, lastname; + List<Address> addresses; + Set orders; + + public Person(String firstname, String lastname, List<Address> addresses) { + this.firstname = firstname; + this.lastname = lastname; + this.addresses = addresses; + this.orders = new HashSet(); + } + + public static Person from(Document source) { + + List addressesSource = (List) source.get("addresses"); + List<Address> addresses = new ArrayList<Address>(addressesSource.size()); + for (Object addressSource : addressesSource) { + addresses.add(Address.from((Document) addressSource)); + } + + List ordersSource = (List) source.get("orders"); + Set orders = new HashSet(ordersSource.size()); + for (Object orderSource : ordersSource) { + orders.add(Order.from((Document) orderSource)); + } + + Person person = new Person((String) source.get("firstname"), (String) source.get("lastname"), addresses); + person.orders.addAll(orders); + return person; + } + + public Document toDocument() { + + Document dbObject = new Document(); + dbObject.put("firstname", firstname); + dbObject.put("lastname", lastname); + dbObject.put("addresses", writeAll(addresses)); + dbObject.put("orders", writeAll(orders)); + return dbObject; + } + } + + static class Address implements Convertible { + + final String zipCode; + final String city; + final Set types; + + public Address(String zipCode, String city) { + this(zipCode, city, new HashSet(pickRandomNumerOfItemsFrom(Arrays.asList(AddressType.values())))); + } + + @PersistenceConstructor + public Address(String zipCode, String city, Set types) { + this.zipCode = zipCode; + this.city = city; + this.types = types; + } + + public static Address from(Document source) { + String zipCode = (String) source.get("zipCode"); + String city = (String) source.get("city"); + List types = (List) source.get("types"); + + return new Address(zipCode, city, new HashSet(fromList(types, AddressType.class))); + } + + public Document toDocument() { + Document dbObject = new Document(); + dbObject.put("zipCode", zipCode); + dbObject.put("city", city); + dbObject.put("types", toList(types)); + return dbObject; + } + } + + private static > List fromList(List source, Class type) { + + List result = new ArrayList(source.size()); + for (Object object : source) { + result.add(Enum.valueOf(type, object.toString())); + } + return result; + } + + private static > List toList(Collection enums) { + List result = new ArrayList<>(); + for (T element : enums) { + result.add(element.toString()); + } + + return result; + } + + static class Order implements Convertible { + + enum Status { + ORDERED, PAYED, SHIPPED + } + + Date createdAt; + List lineItems; + Status status; + + public Order(List lineItems, Date createdAt) { + this.lineItems = lineItems; + this.createdAt = createdAt; + this.status = Status.ORDERED; + } + + @PersistenceConstructor + public Order(List lineItems, Date createdAt, Status status) { + this.lineItems = lineItems; + this.createdAt = createdAt; + this.status = status; + } + + public static Order from(Document source) { + + List lineItemsSource = (List) source.get("lineItems"); + List lineItems = new ArrayList(lineItemsSource.size()); + for (Object lineItemSource : lineItemsSource) { + lineItems.add(LineItem.from((Document) lineItemSource)); + } + + Date date = (Date) source.get("createdAt"); + Status status = Status.valueOf((String) source.get("status")); + return new Order(lineItems, date, status); + } + + public Order(List lineItems) { + this(lineItems, new Date()); + } + + public Document toDocument() { + Document result = new Document(); + result.put("createdAt", createdAt); + result.put("lineItems", writeAll(lineItems)); + result.put("status", status.toString()); + return result; + } + } + + static class LineItem implements Convertible { + + String description; + double price; + int amount; + + public LineItem(String description, int amount, double price) { + this.description = description; + this.amount = amount; +
this.price = price; + } + + public static List generate() { + + LineItem iPad = new LineItem("iPad", 1, 649); + LineItem iPhone = new LineItem("iPhone", 1, 499); + LineItem macBook = new LineItem("MacBook", 2, 1299); + + return pickRandomNumerOfItemsFrom(Arrays.asList(iPad, iPhone, macBook)); + } + + public static LineItem from(Document source) { + + String description = (String) source.get("description"); + double price = (Double) source.get("price"); + int amount = (Integer) source.get("amount"); + + return new LineItem(description, amount, price); + } + + public Document toDocument() { + + Document dbObject = new Document(); + dbObject.put("description", description); + dbObject.put("price", price); + dbObject.put("amount", amount); + return dbObject; + } + } + + private static List pickRandomNumerOfItemsFrom(List source) { + + Assert.isTrue(!source.isEmpty(), "Source must not be empty"); + + Random random = new Random(); + int numberOfItems = random.nextInt(source.size()); + numberOfItems = numberOfItems == 0 ? 1 : numberOfItems; + + List result = new ArrayList(numberOfItems); + while (result.size() < numberOfItems) { + int index = random.nextInt(source.size()); + T candidate = source.get(index); + if (!result.contains(candidate)) { + result.add(candidate); + } + } + + return result; + } + + enum AddressType { + SHIPPING, BILLING + } + + private interface WriteConcernCallback { + void doWithWriteConcern(String constantName, WriteConcern concern); + } + + private interface WatchCallback { + T doInWatch(); + } + + private interface ReactivePersonRepository extends ReactiveMongoRepository { + + Flux findByAddressesZipCodeContaining(String parameter); + } + + private interface Convertible { + + Document toDocument(); + } + + private static BasicDBList writeAll(Collection convertibles) { + BasicDBList result = new BasicDBList(); + for (Convertible convertible : convertibles) { + result.add(convertible.toDocument()); + } + return result; + } + + enum Api { + DRIVER, TEMPLATE, REPOSITORY, DIRECT, CONVERTER + } + + enum Mode { + WRITE, READ, QUERY, WRITE_ASYNC + } + + private static class Statistics { + + private final String headline; + private final Map times; + + public Statistics(String headline) { + + this.headline = headline; + this.times = new HashMap(); + + for (Mode mode : Mode.values()) { + times.put(mode, new ModeTimes(mode)); + } + } + + public void registerTime(Api api, Mode mode, double time) { + times.get(mode).add(api, time); + } + + public void printResults(int iterations) { + + String title = String.format(headline, iterations); + + System.out.println(title); + System.out.println(createUnderline(title)); + + StringBuilder builder = new StringBuilder(); + for (Mode mode : Mode.values()) { + String print = times.get(mode).print(); + if (!print.isEmpty()) { + builder.append(print).append('\n'); + } + } + + System.out.println(builder.toString()); + } + + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(times.size()); + + for (ModeTimes times : this.times.values()) { + builder.append(times.toString()); + } + + return builder.toString(); + } + } + + private static String createUnderline(String input) { + + StringBuilder builder = new StringBuilder(input.length()); + + for (int i = 0; i < input.length(); i++) { + builder.append("-"); + } + + return builder.toString(); + } + + static class ApiTimes { + + private static final String TIME_TEMPLATE = "%s %s time -\tAverage: %sms%s,%sMedian: %sms%s"; + + private static final DecimalFormat TIME_FORMAT; 
+ private static final DecimalFormat DEVIATION_FORMAT; + + static { + + TIME_FORMAT = new DecimalFormat("0.00"); + + DEVIATION_FORMAT = new DecimalFormat("0.00"); + DEVIATION_FORMAT.setPositivePrefix("+"); + } + + private final Api api; + private final Mode mode; + private final List times; + + public ApiTimes(Api api, Mode mode) { + this.api = api; + this.mode = mode; + this.times = new ArrayList(); + } + + public void add(double time) { + this.times.add(time); + } + + public boolean hasTimes() { + return !times.isEmpty(); + } + + public double getAverage() { + + double result = 0; + + for (Double time : times) { + result += time; + } + + return result == 0.0 ? 0.0 : result / times.size(); + } + + public double getMedian() { + + if (times.isEmpty()) { + return 0.0; + } + + ArrayList list = new ArrayList(times); + Collections.sort(list); + + int size = list.size(); + + if (size % 2 == 0) { + return (list.get(size / 2 - 1) + list.get(size / 2)) / 2; + } else { + return list.get(size / 2); + } + } + + private double getDeviationFrom(double otherAverage) { + + double average = getAverage(); + return average * 100 / otherAverage - 100; + } + + private double getMediaDeviationFrom(double otherMedian) { + double median = getMedian(); + return median * 100 / otherMedian - 100; + } + + public String print() { + + if (times.isEmpty()) { + return ""; + } + + return basicPrint("", "\t\t", "") + '\n'; + } + + private String basicPrint(String extension, String middle, String foo) { + return String.format(TIME_TEMPLATE, api, mode, TIME_FORMAT.format(getAverage()), extension, middle, + TIME_FORMAT.format(getMedian()), foo); + } + + public String print(double referenceAverage, double referenceMedian) { + + if (times.isEmpty()) { + return ""; + } + + return basicPrint(String.format(" %s%%", DEVIATION_FORMAT.format(getDeviationFrom(referenceAverage))), "\t", + String.format(" %s%%", DEVIATION_FORMAT.format(getMediaDeviationFrom(referenceMedian)))) + '\n'; + } + + @Override + public String toString() { + return times.isEmpty() ? 
"" + : String.format("%s, %s: %s", api, mode, StringUtils.collectionToCommaDelimitedString(times)) + '\n'; + } + } + + static class ModeTimes { + + private final Map times; + + public ModeTimes(Mode mode) { + + this.times = new HashMap(); + + for (Api api : Api.values()) { + this.times.put(api, new ApiTimes(api, mode)); + } + } + + public void add(Api api, double time) { + times.get(api).add(time); + } + + @SuppressWarnings("null") + public String print() { + + if (times.isEmpty()) { + return ""; + } + + Double previousTime = null; + Double previousMedian = null; + StringBuilder builder = new StringBuilder(); + + for (Api api : Api.values()) { + + ApiTimes apiTimes = times.get(api); + + if (!apiTimes.hasTimes()) { + continue; + } + + if (previousTime == null) { + builder.append(apiTimes.print()); + previousTime = apiTimes.getAverage(); + previousMedian = apiTimes.getMedian(); + } else { + builder.append(apiTimes.print(previousTime, previousMedian)); + } + } + + return builder.toString(); + } + + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(times.size()); + + for (ApiTimes times : this.times.values()) { + builder.append(times.toString()); + } + + return builder.toString(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index b924d81883..3f2e60f4c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,30 +16,33 @@ package org.springframework.data.mongodb.repository; import static java.util.Arrays.*; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assumptions.*; import static org.springframework.data.geo.Metrics.*; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.Date; import java.util.HashSet; import java.util.List; +import java.util.Set; +import java.util.UUID; +import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; -import org.springframework.data.domain.Example; -import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Range; -import org.springframework.data.domain.Slice; -import org.springframework.data.domain.Sort; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.*; +import org.springframework.data.domain.ExampleMatcher.GenericPropertyMatcher; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; @@ -50,24 +53,37 @@ import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.SampleEvaluationContextExtension.SampleSecurityContextHolder; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.DirtiesState; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.ProvidesState; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; import org.springframework.data.querydsl.QSort; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.test.util.ReflectionTestUtils; /** * Base class for tests for {@link PersonRepository}. 
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Fırat KÜÇÜK + * @author Edward Prentice */ -@RunWith(SpringJUnit4ClassRunner.class) -public abstract class AbstractPersonRepositoryIntegrationTests { +@ExtendWith({ SpringExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public abstract class AbstractPersonRepositoryIntegrationTests implements DirtiesStateExtension.StateFunctions { @Autowired protected PersonRepository repository; @@ -78,211 +94,268 @@ public abstract class AbstractPersonRepositoryIntegrationTests { List all; - @Before - public void setUp() throws InterruptedException { + public void clear() { + repository.deleteAll(); + } + + public void setupState() { repository.deleteAll(); dave = new Person("Dave", "Matthews", 42); oliver = new Person("Oliver August", "Matthews", 4); carter = new Person("Carter", "Beauford", 49); - Thread.sleep(10); + carter.setSkills(Arrays.asList("Drums", "percussion", "vocals")); + boyd = new Person("Boyd", "Tinsley", 45); + boyd.setSkills(Arrays.asList("Violin", "Electric Violin", "Viola", "Mandolin", "Vocals", "Guitar")); stefan = new Person("Stefan", "Lessard", 34); leroi = new Person("Leroi", "Moore", 41); - alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); - person = new QPerson("person"); - all = repository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); + Arrays.asList(boyd, stefan, leroi, alicia).forEach(it -> { + it.createdAt = new Date(dave.createdAt.getTime() + 1000L); + }); + + List toSave = asList(oliver, dave, carter, boyd, stefan, leroi, alicia); + toSave.forEach(it -> it.setId(null)); + + all = repository.saveAll(toSave); } @Test - public void findsPersonById() throws Exception { + void findsPersonById() { - assertThat(repository.findOne(dave.getId().toString()), is(dave)); + assertThat(repository.findById(dave.getId())).contains(dave); } @Test - public void findsAllMusicians() throws Exception { + void findsAllMusicians() { List result = repository.findAll(); - assertThat(result.size(), is(all.size())); - assertThat(result.containsAll(all), is(true)); + assertThat(result).hasSameSizeAs(all).containsAll(all); } @Test - public void findsAllWithGivenIds() { + void findsAllWithGivenIds() { - Iterable result = repository.findAll(Arrays.asList(dave.id, boyd.id)); - assertThat(result, hasItems(dave, boyd)); - assertThat(result, not(hasItems(oliver, carter, stefan, leroi, alicia))); + Iterable result = repository.findAllById(asList(dave.id, boyd.id)); + assertThat(result).contains(dave, boyd).doesNotContain(oliver, carter, stefan, leroi, alicia); } @Test - public void deletesPersonCorrectly() throws Exception { + @DirtiesState + void deletesPersonCorrectly() { repository.delete(dave); List result = repository.findAll(); - assertThat(result.size(), is(all.size() - 1)); - assertThat(result, not(hasItem(dave))); + assertThat(result).hasSize(all.size() - 1).doesNotContain(dave); } @Test - public void deletesPersonByIdCorrectly() { + @DirtiesState + void deletesPersonByIdCorrectly() { - repository.delete(dave.getId().toString()); + repository.deleteById(dave.getId()); List result = repository.findAll(); - assertThat(result.size(), is(all.size() - 1)); - assertThat(result, not(hasItem(dave))); + assertThat(result).hasSize(all.size() - 1).doesNotContain(dave); } @Test - public void findsPersonsByLastname() throws Exception { + void findsPersonsByLastname() { List result = 
repository.findByLastname("Beauford"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(carter)); + assertThat(result).hasSize(1).contains(carter); } @Test - public void findsPersonsByFirstname() { + void findsPersonsByFirstname() { List result = repository.findByThePersonsFirstname("Leroi"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(leroi)); - assertThat(result.get(0).getAge(), is(nullValue())); + assertThat(result).hasSize(1).contains(leroi); + assertThat(result.get(0).getAge()).isNull(); } @Test - public void findsPersonsByFirstnameLike() throws Exception { + void findsPersonsByFirstnameLike() { List result = repository.findByFirstnameLike("Bo*"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(boyd)); + assertThat(result).hasSize(1).contains(boyd); + } + + @Test // DATAMONGO-1608 + void findByFirstnameLikeWithNull() { + + assertThatIllegalArgumentException().isThrownBy(() -> repository.findByFirstnameLike(null)); } @Test - public void findsPagedPersons() throws Exception { + void findsPagedPersons() { + + Page result = repository.findAll(PageRequest.of(1, 2, Direction.ASC, "lastname", "firstname")); + assertThat(result.isFirst()).isFalse(); + assertThat(result.isLast()).isFalse(); + assertThat(result).contains(dave, stefan); + } + + @Test // GH-4308 + void appliesScrollPositionCorrectly() { + + Window page = repository.findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc("*a*", + ScrollPosition.keyset()); + + assertThat(page.isLast()).isFalse(); + assertThat(page.size()).isEqualTo(2); + assertThat(page).contains(carter); + } + + @Test // GH-4397 + void appliesLimitToScrollingCorrectly() { + + Window page = repository.findByLastnameLikeOrderByLastnameAscFirstnameAsc("*a*", + ScrollPosition.keyset(), Limit.of(2)); - Page result = repository.findAll(new PageRequest(1, 2, Direction.ASC, "lastname", "firstname")); - assertThat(result.isFirst(), is(false)); - assertThat(result.isLast(), is(false)); - assertThat(result, hasItems(dave, stefan)); + assertThat(page.isLast()).isFalse(); + assertThat(page.size()).isEqualTo(2); + assertThat(page).contains(carter); + } + + @Test // GH-4308 + void appliesScrollPositionWithProjectionCorrectly() { + + Window page = repository.findCursorProjectionByLastnameLike("*a*", + PageRequest.of(0, 2, Sort.by(Direction.ASC, "lastname", "firstname"))); + + assertThat(page.isLast()).isFalse(); + assertThat(page.size()).isEqualTo(2); + + assertThat(page).element(0).isEqualTo(new PersonSummaryDto(carter.getFirstname(), carter.getLastname())); } @Test - public void executesPagedFinderCorrectly() throws Exception { + void executesPagedFinderCorrectly() { Page page = repository.findByLastnameLike("*a*", - new PageRequest(0, 2, Direction.ASC, "lastname", "firstname")); - assertThat(page.isFirst(), is(true)); - assertThat(page.isLast(), is(false)); - assertThat(page.getNumberOfElements(), is(2)); - assertThat(page, hasItems(carter, stefan)); + PageRequest.of(0, 2, Direction.ASC, "lastname", "firstname")); + assertThat(page.isFirst()).isTrue(); + assertThat(page.isLast()).isFalse(); + assertThat(page.getNumberOfElements()).isEqualTo(2); + assertThat(page).contains(carter, stefan); + } + + @Test // GH-4397 + void executesFinderCorrectlyWithSortAndLimit() { + + List page = repository.findByLastnameLike("*a*", Sort.by(Direction.ASC, "lastname", "firstname"), Limit.of(2)); + + assertThat(page).containsExactly(carter, stefan); } @Test - public void executesPagedFinderWithAnnotatedQueryCorrectly() throws Exception { + void 
executesPagedFinderWithAnnotatedQueryCorrectly() { Page page = repository.findByLastnameLikeWithPageable(".*a.*", - new PageRequest(0, 2, Direction.ASC, "lastname", "firstname")); - assertThat(page.isFirst(), is(true)); - assertThat(page.isLast(), is(false)); - assertThat(page.getNumberOfElements(), is(2)); - assertThat(page, hasItems(carter, stefan)); + PageRequest.of(0, 2, Direction.ASC, "lastname", "firstname")); + assertThat(page.isFirst()).isTrue(); + assertThat(page.isLast()).isFalse(); + assertThat(page.getNumberOfElements()).isEqualTo(2); + assertThat(page).contains(carter, stefan); } @Test - public void findsPersonInAgeRangeCorrectly() throws Exception { + void findsPersonInAgeRangeCorrectly() { List result = repository.findByAgeBetween(40, 45); - assertThat(result.size(), is(2)); - assertThat(result, hasItems(dave, leroi)); + assertThat(result).hasSize(2).contains(dave, leroi); } @Test - public void findsPersonByShippingAddressesCorrectly() throws Exception { + void findsPersonByShippingAddressesCorrectly() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setShippingAddresses(new HashSet
<Address>(asList(address))); repository.save(dave); - assertThat(repository.findByShippingAddresses(address), is(dave)); + assertThat(repository.findByShippingAddresses(address)).isEqualTo(dave); } @Test - public void findsPersonByAddressCorrectly() throws Exception { + void findsPersonByAddressCorrectly() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setAddress(address); repository.save(dave); List result = repository.findByAddress(address); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByZipCode() throws Exception { + void findsPeopleByZipCode() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setAddress(address); repository.save(dave); List result = repository.findByAddressZipCode(address.getZipCode()); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByQueryDslLastnameSpec() throws Exception { + void findsPeopleByQueryDslLastnameSpec() { Iterable result = repository.findAll(person.lastname.eq("Matthews")); - assertThat(result, hasItem(dave)); - assertThat(result, not(hasItems(carter, boyd, stefan, leroi, alicia))); + assertThat(result).contains(dave).doesNotContain(carter, boyd, stefan, leroi, alicia); } @Test - public void findsPeopleByzipCodePredicate() throws Exception { + void findsPeopleByzipCodePredicate() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setAddress(address); repository.save(dave); Iterable result = repository.findAll(person.address.zipCode.eq("C0123")); - assertThat(result, hasItem(dave)); - assertThat(result, not(hasItems(carter, boyd, stefan, leroi, alicia))); + assertThat(result).contains(dave).doesNotContain(carter, boyd, stefan, leroi, alicia); } @Test - public void findsPeopleByLocationNear() { + void findsPeopleByLocationNear() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); List result = repository.findByLocationNear(point); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); + } + + @Test // DATAMONGO-1588 + void findsPeopleByLocationNearUsingGeoJsonType() { + + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave); + + List result = repository.findByLocationNear(point); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByLocationWithinCircle() { + void findsPeopleByLocationWithinCircle() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); List result = repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170)); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByLocationWithinBox() { + void findsPeopleByLocationWithinBox() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); @@ -290,12 +363,11 @@ public void findsPeopleByLocationWithinBox() { Box box = new Box(new Point(-78.99171, 35.738868), new Point(-68.99171, 45.738868)); List result = repository.findByLocationWithin(box); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByLocationWithinPolygon() { + void
findsPeopleByLocationWithinPolygon() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); @@ -307,95 +379,79 @@ public void findsPeopleByLocationWithinPolygon() { Point fourth = new Point(-68.99171, 35.738868); List result = repository.findByLocationWithin(new Polygon(first, second, third, fourth)); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPagedPeopleByPredicate() throws Exception { + void findsPagedPeopleByPredicate() { Page page = repository.findAll(person.lastname.contains("a"), - new PageRequest(0, 2, Direction.ASC, "lastname")); - assertThat(page.isFirst(), is(true)); - assertThat(page.isLast(), is(false)); - assertThat(page.getNumberOfElements(), is(2)); - assertThat(page, hasItems(carter, stefan)); + PageRequest.of(0, 2, Direction.ASC, "lastname")); + assertThat(page.isFirst()).isTrue(); + assertThat(page.isLast()).isFalse(); + assertThat(page.getNumberOfElements()).isEqualTo(2); + assertThat(page.getTotalElements()).isEqualTo(4L); + assertThat(page).contains(carter, stefan); } - /** - * @see DATADOC-136 - */ - @Test - public void findsPeopleBySexCorrectly() { + @Test // DATADOC-136 + void findsPeopleBySexCorrectly() { List females = repository.findBySex(Sex.FEMALE); - assertThat(females.size(), is(1)); - assertThat(females.get(0), is(alicia)); + assertThat(females).hasSize(1); + assertThat(females.get(0)).isEqualTo(alicia); } - /** - * @see DATAMONGO-446 - */ - @Test - public void findsPeopleBySexPaginated() { + @Test // DATAMONGO-446 + void findsPeopleBySexPaginated() { - List males = repository.findBySex(Sex.MALE, new PageRequest(0, 2)); - assertThat(males.size(), is(2)); + List males = repository.findBySex(Sex.MALE, PageRequest.of(0, 2)); + assertThat(males).hasSize(2); } @Test - public void findsPeopleByNamedQuery() { + void findsPeopleByNamedQuery() { List result = repository.findByNamedQuery("Dave"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } - /** - * @see DATADOC-190 - */ - @Test - public void existsWorksCorrectly() { - assertThat(repository.exists(dave.getId()), is(true)); + @Test // DATADOC-190 + void existsWorksCorrectly() { + assertThat(repository.existsById(dave.getId())).isTrue(); } - @Test(expected = DuplicateKeyException.class) - public void rejectsDuplicateEmailAddressOnSave() { + @Test + void rejectsDuplicateEmailAddressOnSave() { - assertThat(dave.getEmail(), is("dave@dmband.com")); + assumeThat(repository.findById(dave.getId()).map(Person::getEmail)).contains("dave@dmband.com"); Person daveSyer = new Person("Dave", "Syer"); - assertThat(daveSyer.getEmail(), is("dave@dmband.com")); + assertThat(daveSyer.getEmail()).isEqualTo("dave@dmband.com"); - repository.save(daveSyer); + Assertions.assertThatExceptionOfType(DuplicateKeyException.class).isThrownBy(() -> repository.save(daveSyer)); } - /** - * @see DATADOC-236 - */ - @Test - public void findsPeopleByLastnameAndOrdersCorrectly() { + @Test // DATADOC-236 + void findsPeopleByLastnameAndOrdersCorrectly() { List result = repository.findByLastnameOrderByFirstnameAsc("Matthews"); - assertThat(result.size(), is(2)); - assertThat(result.get(0), is(dave)); - assertThat(result.get(1), is(oliver)); + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(dave); + assertThat(result.get(1)).isEqualTo(oliver); } - /** - * @see DATADOC-236 - */ - @Test - public void appliesStaticAndDynamicSorting() { - List 
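The `@Test(expected = DuplicateKeyException.class)` removal above is the recurring JUnit 4 → JUnit 5/AssertJ pattern throughout this patch: the expected exception moves out of the annotation and into an `assertThatExceptionOfType(...).isThrownBy(...)` statement. A minimal, self-contained sketch of that pattern (the class and helper method below are illustrative, not part of the patch):

    import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

    import org.junit.jupiter.api.Test;
    import org.springframework.dao.DuplicateKeyException;

    class ExceptionAssertionSketch {

        @Test
        void rejectsDuplicateKey() {

            // JUnit 4's @Test(expected = ...) passed if the exception was thrown anywhere
            // in the method; the AssertJ form pins it to the exact statement and allows
            // follow-up assertions on the thrown exception.
            assertThatExceptionOfType(DuplicateKeyException.class)
                    .isThrownBy(this::saveDuplicate)
                    .withMessageContaining("duplicate");
        }

        private void saveDuplicate() {
            throw new DuplicateKeyException("duplicate key: email");
        }
    }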
-	/**
-	 * @see DATADOC-236
-	 */
-	@Test
-	public void findsPeopleByLastnameAndOrdersCorrectly() {
+	@Test // DATADOC-236
+	void findsPeopleByLastnameAndOrdersCorrectly() {

 		List<Person> result = repository.findByLastnameOrderByFirstnameAsc("Matthews");
-		assertThat(result.size(), is(2));
-		assertThat(result.get(0), is(dave));
-		assertThat(result.get(1), is(oliver));
+		assertThat(result).hasSize(2);
+		assertThat(result.get(0)).isEqualTo(dave);
+		assertThat(result.get(1)).isEqualTo(oliver);
 	}

-	/**
-	 * @see DATADOC-236
-	 */
-	@Test
-	public void appliesStaticAndDynamicSorting() {
-
-		List<Person> result = repository.findByFirstnameLikeOrderByLastnameAsc("*e*", new Sort("age"));
-		assertThat(result.size(), is(5));
-		assertThat(result.get(0), is(carter));
-		assertThat(result.get(1), is(stefan));
-		assertThat(result.get(2), is(oliver));
-		assertThat(result.get(3), is(dave));
-		assertThat(result.get(4), is(leroi));
+	@Test // DATADOC-236
+	void appliesStaticAndDynamicSorting() {
+
+		List<Person> result = repository.findByFirstnameLikeOrderByLastnameAsc("*e*", Sort.by("age"));
+		assertThat(result).hasSize(5);
+		assertThat(result.get(0)).isEqualTo(carter);
+		assertThat(result.get(1)).isEqualTo(stefan);
+		assertThat(result.get(2)).isEqualTo(oliver);
+		assertThat(result.get(3)).isEqualTo(dave);
+		assertThat(result.get(4)).isEqualTo(leroi);
 	}

 	@Test
-	public void executesGeoNearQueryForResultsCorrectly() {
+	void executesGeoNearQueryForResultsCorrectly() {

 		Point point = new Point(-73.99171, 40.738868);
 		dave.setLocation(point);
@@ -403,49 +459,43 @@ public void executesGeoNearQueryForResultsCorrectly() {

 		GeoResults<Person> results = repository.findByLocationNear(new Point(-73.99, 40.73),
 				new Distance(2000, Metrics.KILOMETERS));
-		assertThat(results.getContent().isEmpty(), is(false));
+		assertThat(results.getContent()).isNotEmpty();
 	}

 	@Test
-	public void executesGeoPageQueryForResultsCorrectly() {
+	void executesGeoPageQueryForResultsCorrectly() {

 		Point point = new Point(-73.99171, 40.738868);
 		dave.setLocation(point);
 		repository.save(dave);

 		GeoPage<Person> results = repository.findByLocationNear(new Point(-73.99, 40.73),
-				new Distance(2000, Metrics.KILOMETERS), new PageRequest(0, 20));
-		assertThat(results.getContent().isEmpty(), is(false));
+				new Distance(2000, Metrics.KILOMETERS), PageRequest.of(0, 20));
+		assertThat(results.getContent()).isNotEmpty();

 		// DATAMONGO-607
-		assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
+		assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS);
 	}

-	/**
-	 * @see DATAMONGO-323
-	 */
-	@Test
-	public void considersSortForAnnotatedQuery() {
+	@Test // DATAMONGO-323
+	void considersSortForAnnotatedQuery() {

-		List<Person> result = repository.findByAgeLessThan(60, new Sort("firstname"));
+		List<Person> result = repository.findByAgeLessThan(60, Sort.by("firstname"));

-		assertThat(result.size(), is(7));
-		assertThat(result.get(0), is(alicia));
-		assertThat(result.get(1), is(boyd));
-		assertThat(result.get(2), is(carter));
-		assertThat(result.get(3), is(dave));
-		assertThat(result.get(4), is(leroi));
-		assertThat(result.get(5), is(oliver));
-		assertThat(result.get(6), is(stefan));
+		assertThat(result).hasSize(7);
+		assertThat(result.get(0)).isEqualTo(alicia);
+		assertThat(result.get(1)).isEqualTo(boyd);
+		assertThat(result.get(2)).isEqualTo(carter);
+		assertThat(result.get(3)).isEqualTo(dave);
+		assertThat(result.get(4)).isEqualTo(leroi);
+		assertThat(result.get(5)).isEqualTo(oliver);
+		assertThat(result.get(6)).isEqualTo(stefan);
 	}

-	/**
-	 * @see DATAMONGO-347
-	 */
-	@Test
-	public void executesQueryWithDBRefReferenceCorrectly() {
+	@Test // DATAMONGO-347
+	void executesQueryWithDBRefReferenceCorrectly() {

-		operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class);
+		operations.remove(new Query(), User.class);

 		User user = new User();
 		user.username = "Oliver";
@@ -456,97 +506,65 @@ public void executesQueryWithDBRefReferenceCorrectly() {
 		repository.save(dave);

 		List<Person> result = repository.findByCreator(user);
-		assertThat(result.size(), is(1));
-		assertThat(result, hasItem(dave));
+		assertThat(result).hasSize(1).contains(dave);
 	}

-	/**
-	 * @see DATAMONGO-425
-	 */
-	@Test
-	public void bindsDateParameterForLessThanPredicateCorrectly() {
+	@Test // DATAMONGO-425
+	void bindsDateParameterForLessThanPredicateCorrectly() {

 		List<Person> result = repository.findByCreatedAtLessThan(boyd.createdAt);
-		assertThat(result.size(), is(3));
-		assertThat(result, hasItems(dave, oliver, carter));
+		assertThat(result).hasSize(3).contains(dave, oliver, carter);
 	}

-	/**
-	 * @see DATAMONGO-425
-	 */
-	@Test
-	public void bindsDateParameterForGreaterThanPredicateCorrectly() {
+	@Test // DATAMONGO-425
+	void bindsDateParameterForGreaterThanPredicateCorrectly() {

 		List<Person> result = repository.findByCreatedAtGreaterThan(carter.createdAt);
-		assertThat(result.size(), is(4));
-		assertThat(result, hasItems(boyd, stefan, leroi, alicia));
+		assertThat(result).hasSize(4).contains(boyd, stefan, leroi, alicia);
 	}

-	/**
-	 * @see DATAMONGO-427
-	 */
-	@Test
-	public void bindsDateParameterToBeforePredicateCorrectly() {
+	@Test // DATAMONGO-427
+	void bindsDateParameterToBeforePredicateCorrectly() {

 		List<Person> result = repository.findByCreatedAtBefore(boyd.createdAt);
-		assertThat(result.size(), is(3));
-		assertThat(result, hasItems(dave, oliver, carter));
+		assertThat(result).hasSize(3).contains(dave, oliver, carter);
 	}

-	/**
-	 * @see DATAMONGO-427
-	 */
-	@Test
-	public void bindsDateParameterForAfterPredicateCorrectly() {
+	@Test // DATAMONGO-427
+	void bindsDateParameterForAfterPredicateCorrectly() {

 		List<Person> result = repository.findByCreatedAtAfter(carter.createdAt);
-		assertThat(result.size(), is(4));
-		assertThat(result, hasItems(boyd, stefan, leroi, alicia));
+		assertThat(result).hasSize(4).contains(boyd, stefan, leroi, alicia);
 	}

-	/**
-	 * @see DATAMONGO-425
-	 */
-	@Test
-	public void bindsDateParameterForManuallyDefinedQueryCorrectly() {
+	@Test // DATAMONGO-425
+	void bindsDateParameterForManuallyDefinedQueryCorrectly() {

 		List<Person> result = repository.findByCreatedAtLessThanManually(boyd.createdAt);
-		assertThat(result.isEmpty(), is(false));
+		assertThat(result).isNotEmpty();
 	}

-	/**
-	 * @see DATAMONGO-472
-	 */
-	@Test
-	public void findsPeopleUsingNotPredicate() {
+	@Test // DATAMONGO-472
+	void findsPeopleUsingNotPredicate() {

 		List<Person> result = repository.findByLastnameNot("Matthews");
-		assertThat(result, not(hasItem(dave)));
-		assertThat(result, hasSize(5));
+		assertThat(result).doesNotContain(dave).hasSize(5);
 	}

-	/**
-	 * @see DATAMONGO-521
-	 */
-	@Test
-	public void executesAndQueryCorrectly() {
+	@Test // DATAMONGO-521
+	void executesAndQueryCorrectly() {

 		List<Person> result = repository.findByFirstnameAndLastname("Dave", "Matthews");
-		assertThat(result, hasSize(1));
-		assertThat(result, hasItem(dave));
+		assertThat(result).hasSize(1).contains(dave);

 		result = repository.findByFirstnameAndLastname("Oliver August", "Matthews");
-		assertThat(result, hasSize(1));
-		assertThat(result, hasItem(oliver));
+		assertThat(result).hasSize(1).contains(oliver);
 	}

-	/**
-	 * @see DATAMONGO-600
-	 */
-	@Test
-	public void readsDocumentsWithNestedPolymorphismCorrectly() {
+	@Test // DATAMONGO-600
+	void readsDocumentsWithNestedPolymorphismCorrectly() {

 		UsernameAndPassword usernameAndPassword = new UsernameAndPassword();
 		usernameAndPassword.username = "dave";
@@ -557,61 +575,52 @@ public void readsDocumentsWithNestedPolymorphismCorrectly() {
 		repository.save(dave);

 		List<Person> result = repository.findByCredentials(usernameAndPassword);
-		assertThat(result, hasSize(1));
-		assertThat(result, hasItem(dave));
+		assertThat(result).hasSize(1).contains(dave);
 	}

-	/**
-	 * @see DATAMONGO-636
-	 */
-	@Test
-	public void executesDerivedCountProjection() {
-		assertThat(repository.countByLastname("Matthews"), is(2L));
+	@Test // DATAMONGO-636
+	void executesDerivedCountProjection() {
+		assertThat(repository.countByLastname("Matthews")).isEqualTo(2L);
 	}

-	/**
-	 * @see DATAMONGO-636
-	 */
-	@Test
-	public void executesDerivedCountProjectionToInt() {
-		assertThat(repository.countByFirstname("Oliver August"), is(1));
+	@Test // DATAMONGO-636
+	void executesDerivedCountProjectionToInt() {
+		assertThat(repository.countByFirstname("Oliver August")).isEqualTo(1);
 	}

-	/**
-	 * @see DATAMONGO-636
-	 */
-	@Test
-	public void executesAnnotatedCountProjection() {
-		assertThat(repository.someCountQuery("Matthews"), is(2L));
+	@Test // DATAMONGO-636
+	void executesAnnotatedCountProjection() {
+		assertThat(repository.someCountQuery("Matthews")).isEqualTo(2L);
 	}

-	/**
-	 * @see DATAMONGO-701
-	 */
-	@Test
-	public void executesDerivedStartsWithQueryCorrectly() {
+	@Test // DATAMONGO-1454
+	void executesDerivedExistsProjectionToBoolean() {
+
+		assertThat(repository.existsByFirstname("Oliver August")).isTrue();
+		assertThat(repository.existsByFirstname("Hans Peter")).isFalse();
+	}
+
+	@Test // DATAMONGO-1454
+	void executesAnnotatedExistProjection() {
+		assertThat(repository.someExistQuery("Matthews")).isTrue();
+	}
+
+	@Test // DATAMONGO-701
+	void executesDerivedStartsWithQueryCorrectly() {

 		List<Person> result = repository.findByLastnameStartsWith("Matt");
-		assertThat(result, hasSize(2));
-		assertThat(result, hasItems(dave, oliver));
+		assertThat(result).hasSize(2).contains(dave, oliver);
 	}

-	/**
-	 * @see DATAMONGO-701
-	 */
-	@Test
-	public void executesDerivedEndsWithQueryCorrectly() {
+	@Test // DATAMONGO-701
+	void executesDerivedEndsWithQueryCorrectly() {

 		List<Person> result = repository.findByLastnameEndsWith("thews");
-		assertThat(result, hasSize(2));
-		assertThat(result, hasItems(dave, oliver));
+		assertThat(result).hasSize(2).contains(dave, oliver);
 	}

-	/**
-	 * @see DATAMONGO-445
-	 */
-	@Test
-	public void executesGeoPageQueryForWithPageRequestForPageInBetween() {
+	@Test // DATAMONGO-445
+	void executesGeoPageQueryForWithPageRequestForPageInBetween() {

 		Point farAway = new Point(-73.9, 40.7);
 		Point here = new Point(-73.99, 40.73);
@@ -622,24 +631,21 @@ public void executesGeoPageQueryForWithPageRequestForPageInBetween() {
 		boyd.setLocation(here);
 		leroi.setLocation(here);

-		repository.save(Arrays.asList(dave, oliver, carter, boyd, leroi));
+		repository.saveAll(Arrays.asList(dave, oliver, carter, boyd, leroi));

 		GeoPage<Person> results = repository.findByLocationNear(new Point(-73.99, 40.73),
-				new Distance(2000, Metrics.KILOMETERS), new PageRequest(1, 2));
+				new Distance(2000, Metrics.KILOMETERS), PageRequest.of(1, 2));

-		assertThat(results.getContent().isEmpty(), is(false));
-		assertThat(results.getNumberOfElements(), is(2));
-		assertThat(results.isFirst(), is(false));
-		assertThat(results.isLast(), is(false));
-		assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
-		assertThat(results.getAverageDistance().getNormalizedValue(), is(0.0));
+		assertThat(results.getContent()).isNotEmpty();
+		assertThat(results.getNumberOfElements()).isEqualTo(2);
+		assertThat(results.isFirst()).isFalse();
+		assertThat(results.isLast()).isFalse();
+		assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS);
+		assertThat(results.getAverageDistance().getNormalizedValue()).isEqualTo(0.0);
 	}

-	/**
-	 * @see DATAMONGO-445
-	 */
-	@Test
-	public void executesGeoPageQueryForWithPageRequestForPageAtTheEnd() {
+	@Test // DATAMONGO-445
+	void executesGeoPageQueryForWithPageRequestForPageAtTheEnd() {

 		Point point = new Point(-73.99171, 40.738868);
@@ -647,141 +653,116 @@ public void executesGeoPageQueryForWithPageRequestForPageAtTheEnd() {
 		oliver.setLocation(point);
 		carter.setLocation(point);

-		repository.save(Arrays.asList(dave, oliver, carter));
+		repository.saveAll(Arrays.asList(dave, oliver, carter));

 		GeoPage<Person> results = repository.findByLocationNear(new Point(-73.99, 40.73),
-				new Distance(2000, Metrics.KILOMETERS), new PageRequest(1, 2));
-		assertThat(results.getContent().isEmpty(), is(false));
-		assertThat(results.getNumberOfElements(), is(1));
-		assertThat(results.isFirst(), is(false));
-		assertThat(results.isLast(), is(true));
-		assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
+				new Distance(2000, Metrics.KILOMETERS), PageRequest.of(1, 2));
+		assertThat(results.getContent()).isNotEmpty();
+		assertThat(results.getNumberOfElements()).isEqualTo(1);
+		assertThat(results.isFirst()).isFalse();
+		assertThat(results.isLast()).isTrue();
+		assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS);
 	}

-	/**
-	 * @see DATAMONGO-445
-	 */
-	@Test
-	public void executesGeoPageQueryForWithPageRequestForJustOneElement() {
+	@Test // DATAMONGO-445
+	void executesGeoPageQueryForWithPageRequestForJustOneElement() {

 		Point point = new Point(-73.99171, 40.738868);
 		dave.setLocation(point);
 		repository.save(dave);

 		GeoPage<Person> results = repository.findByLocationNear(new Point(-73.99, 40.73),
-				new Distance(2000, Metrics.KILOMETERS), new PageRequest(0, 2));
+				new Distance(2000, Metrics.KILOMETERS), PageRequest.of(0, 2));

-		assertThat(results.getContent().isEmpty(), is(false));
-		assertThat(results.getNumberOfElements(), is(1));
-		assertThat(results.isFirst(), is(true));
-		assertThat(results.isLast(), is(true));
-		assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
+		assertThat(results.getContent()).isNotEmpty();
+		assertThat(results.getNumberOfElements()).isEqualTo(1);
+		assertThat(results.isFirst()).isTrue();
+		assertThat(results.isLast()).isTrue();
+		assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS);
 	}

-	/**
-	 * @see DATAMONGO-445
-	 */
-	@Test
-	public void executesGeoPageQueryForWithPageRequestForJustOneElementEmptyPage() {
+	@Test // DATAMONGO-445
+	void executesGeoPageQueryForWithPageRequestForJustOneElementEmptyPage() {

 		dave.setLocation(new Point(-73.99171, 40.738868));
 		repository.save(dave);

 		GeoPage<Person> results = repository.findByLocationNear(new Point(-73.99, 40.73),
-				new Distance(2000, Metrics.KILOMETERS), new PageRequest(1, 2));
+				new Distance(2000, Metrics.KILOMETERS), PageRequest.of(1, 2));

-		assertThat(results.getContent().isEmpty(), is(true));
-		assertThat(results.getNumberOfElements(), is(0));
-		assertThat(results.isFirst(), is(false));
-		assertThat(results.isLast(), is(true));
-		assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
+		assertThat(results.getContent()).isEmpty();
+		assertThat(results.getNumberOfElements()).isEqualTo(0);
+		assertThat(results.isFirst()).isFalse();
+		assertThat(results.isLast()).isTrue();
+		assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS);
 	}
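Most of the mechanical churn in these hunks comes from Spring Data Commons 2.0 replacing the public constructors of `PageRequest` and `Sort` with static factory methods. The two equivalences, side by side (a sketch only, not part of the patch):

    import org.springframework.data.domain.PageRequest;
    import org.springframework.data.domain.Sort;
    import org.springframework.data.domain.Sort.Direction;

    class PagingFactorySketch {

        PageRequest secondPageByLastname() {
            // pre-2.0: new PageRequest(1, 2, Direction.ASC, "lastname")
            return PageRequest.of(1, 2, Direction.ASC, "lastname");
        }

        Sort byAgeThenFirstname() {
            // pre-2.0: new Sort("age", "firstname")
            return Sort.by("age", "firstname");
        }
    }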
-	/**
-	 * @see DATAMONGO-770
-	 */
-	@Test
-	public void findByFirstNameIgnoreCase() {
+	@Test // DATAMONGO-1608
+	void findByFirstNameIgnoreCaseWithNull() {
+
+		assertThatIllegalArgumentException().isThrownBy(() -> repository.findByFirstnameIgnoreCase(null));
+	}
+
+	@Test // DATAMONGO-770
+	void findByFirstNameIgnoreCase() {

 		List<Person> result = repository.findByFirstnameIgnoreCase("dave");
-		assertThat(result.size(), is(1));
-		assertThat(result.get(0), is(dave));
+		assertThat(result).hasSize(1);
+		assertThat(result.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-770
-	 */
-	@Test
-	public void findByFirstnameNotIgnoreCase() {
+	@Test // DATAMONGO-770
+	void findByFirstnameNotIgnoreCase() {

 		List<Person> result = repository.findByFirstnameNotIgnoreCase("dave");
-		assertThat(result.size(), is(6));
-		assertThat(result, not(hasItem(dave)));
+		assertThat(result).hasSize(6).doesNotContain(dave);
 	}

-	/**
-	 * @see DATAMONGO-770
-	 */
-	@Test
-	public void findByFirstnameStartingWithIgnoreCase() {
+	@Test // DATAMONGO-770
+	void findByFirstnameStartingWithIgnoreCase() {

 		List<Person> result = repository.findByFirstnameStartingWithIgnoreCase("da");
-		assertThat(result.size(), is(1));
-		assertThat(result.get(0), is(dave));
+		assertThat(result).hasSize(1);
+		assertThat(result.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-770
-	 */
-	@Test
-	public void findByFirstnameEndingWithIgnoreCase() {
+	@Test // DATAMONGO-770
+	void findByFirstnameEndingWithIgnoreCase() {

 		List<Person> result = repository.findByFirstnameEndingWithIgnoreCase("VE");
-		assertThat(result.size(), is(1));
-		assertThat(result.get(0), is(dave));
+		assertThat(result).hasSize(1);
+		assertThat(result.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-770
-	 */
-	@Test
-	public void findByFirstnameContainingIgnoreCase() {
+	@Test // DATAMONGO-770
+	void findByFirstnameContainingIgnoreCase() {

 		List<Person> result = repository.findByFirstnameContainingIgnoreCase("AV");
-		assertThat(result.size(), is(1));
-		assertThat(result.get(0), is(dave));
+		assertThat(result).hasSize(1);
+		assertThat(result.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-870
-	 */
-	@Test
-	public void findsSliceOfPersons() {
+	@Test // DATAMONGO-870
+	void findsSliceOfPersons() {

-		Slice<Person> result = repository.findByAgeGreaterThan(40, new PageRequest(0, 2, Direction.DESC, "firstname"));
+		Slice<Person> result = repository.findByAgeGreaterThan(40, PageRequest.of(0, 2, Direction.DESC, "firstname"));

-		assertThat(result.hasNext(), is(true));
+		assertThat(result.hasNext()).isTrue();
 	}

-	/**
-	 * @see DATAMONGO-871
-	 */
-	@Test
-	public void findsPersonsByFirstnameAsArray() {
+	@Test // DATAMONGO-871
+	void findsPersonsByFirstnameAsArray() {

 		Person[] result = repository.findByThePersonsFirstnameAsArray("Leroi");

-		assertThat(result, is(arrayWithSize(1)));
-		assertThat(result, is(arrayContaining(leroi)));
+		assertThat(result).hasSize(1).containsExactly(leroi);
 	}

-	/**
-	 * @see DATAMONGO-821
-	 */
-	@Test
-	public void findUsingAnnotatedQueryOnDBRef() {
+	@Test // DATAMONGO-821
+	@DirtiesState
+	void findUsingAnnotatedQueryOnDBRef() {

 		operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class);
@@ -792,95 +773,83 @@ public void findUsingAnnotatedQueryOnDBRef() {
 		alicia.creator = user;
 		repository.save(alicia);

-		Page<Person> result = repository.findByHavingCreator(new PageRequest(0, 100));
+		Page<Person> result = repository.findByHavingCreator(PageRequest.of(0, 100));

-		assertThat(result.getNumberOfElements(), is(1));
-		assertThat(result.getContent().get(0), is(alicia));
+		assertThat(result.getNumberOfElements()).isEqualTo(1);
+		assertThat(result.getContent().get(0)).isEqualTo(alicia);
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() {
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() {

 		List<Person> result = repository.deleteByLastname("Beauford");
-		assertThat(result, hasItem(carter));
-		assertThat(result, hasSize(1));
+		assertThat(result).contains(carter).hasSize(1);
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByShouldRemoveElementsMatchingDerivedQuery() {
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByShouldRemoveElementsMatchingDerivedQuery() {

 		repository.deleteByLastname("Beauford");
-		assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class), is(0L));
+		assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class)).isEqualTo(0L);
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() {
-		assertThat(repository.deletePersonByLastname("Beauford"), is(1L));
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() {
+		assertThat(repository.deletePersonByLastname("Beauford")).isEqualTo(1L);
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByShouldReturnZeroInCaseNoDocumentHasBeenRemovedAndReturnTypeIsNumber() {
-		assertThat(repository.deletePersonByLastname("dorfuaeB"), is(0L));
+	@Test // DATAMONGO-1997
+	@DirtiesState
+	void deleteByShouldResultWrappedInOptionalCorrectly() {
+
+		assertThat(repository.deleteOptionalByLastname("Beauford")).isPresent();
+		assertThat(repository.deleteOptionalByLastname("dorfuaeB")).isNotPresent();
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByShouldReturnEmptyListInCaseNoDocumentHasBeenRemovedAndReturnTypeIsCollectionLike() {
-		assertThat(repository.deleteByLastname("dorfuaeB"), empty());
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByShouldReturnZeroInCaseNoDocumentHasBeenRemovedAndReturnTypeIsNumber() {
+		assertThat(repository.deletePersonByLastname("dorfuaeB")).isEqualTo(0L);
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByUsingAnnotatedQueryShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() {
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByShouldReturnEmptyListInCaseNoDocumentHasBeenRemovedAndReturnTypeIsCollectionLike() {
+		assertThat(repository.deleteByLastname("dorfuaeB")).isEmpty();
+	}
+
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByUsingAnnotatedQueryShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() {

 		List<Person> result = repository.removeByLastnameUsingAnnotatedQuery("Beauford");
-		assertThat(result, hasItem(carter));
-		assertThat(result, hasSize(1));
+		assertThat(result).contains(carter).hasSize(1);
 	}

-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByUsingAnnotatedQueryShouldRemoveElementsMatchingDerivedQuery() {
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByUsingAnnotatedQueryShouldRemoveElementsMatchingDerivedQuery() {

 		repository.removeByLastnameUsingAnnotatedQuery("Beauford");
-		assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class), is(0L));
+		assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class)).isEqualTo(0L);
 	}
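The delete tests above encode the contract that the return type of a derived `deleteBy…` method selects the execution mode: a collection return materializes and returns the removed documents, a numeric return reports only the count, and (new with DATAMONGO-1997) an `Optional` return yields the removed document, if any. A sketch of the repository side these tests exercise — the real `PersonRepository` interface is declared elsewhere in this patch; only the method names below are taken from the tests:

    import java.util.List;
    import java.util.Optional;

    import org.springframework.data.repository.CrudRepository;

    interface PersonDeleteSketchRepository extends CrudRepository<Person, String> {

        // collection-like return type: removed documents are fetched and returned
        List<Person> deleteByLastname(String lastname);

        // numeric return type: only the number of removed documents is reported
        long deletePersonByLastname(String lastname);

        // Optional return type (DATAMONGO-1997): the removed document, if one matched
        Optional<Person> deleteOptionalByLastname(String lastname);
    }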
-	/**
-	 * @see DATAMONGO-566
-	 */
-	@Test
-	public void deleteByUsingAnnotatedQueryShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() {
-		assertThat(repository.removePersonByLastnameUsingAnnotatedQuery("Beauford"), is(1L));
+	@Test // DATAMONGO-566
+	@DirtiesState
+	void deleteByUsingAnnotatedQueryShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() {
+		assertThat(repository.removePersonByLastnameUsingAnnotatedQuery("Beauford")).isEqualTo(1L);
 	}

-	/**
-	 * @see DATAMONGO-893
-	 */
-	@Test
-	public void findByNestedPropertyInCollectionShouldFindMatchingDocuments() {
+	@Test // DATAMONGO-893
+	@ProvidesState
+	void findByNestedPropertyInCollectionShouldFindMatchingDocuments() {

 		Person p = new Person("Mary", "Poppins");
 		Address adr = new Address("some", "2", "where");
@@ -888,16 +857,14 @@ public void findByNestedPropertyInCollectionShouldFindMatchingDocuments() {

 		repository.save(p);

-		Page<Person> result = repository.findByAddressIn(Arrays.asList(adr), new PageRequest(0, 10));
+		Page<Person> result = repository.findByAddressIn(Arrays.asList(adr), PageRequest.of(0, 10));

-		assertThat(result.getContent(), hasSize(1));
+		assertThat(result.getContent()).hasSize(1);
 	}

-	/**
-	 * @see DATAMONGO-745
-	 */
-	@Test
-	public void findByCustomQueryFirstnamesInListAndLastname() {
+	@Test // DATAMONGO-745
+	@ProvidesState
+	void findByCustomQueryFirstnamesInListAndLastname() {

 		repository.save(new Person("foo", "bar"));
 		repository.save(new Person("bar", "bar"));
@@ -905,18 +872,16 @@ public void findByCustomQueryFirstnamesInListAndLastname() {
 		repository.save(new Person("notfound", "bar"));

 		Page<Person> result = repository.findByCustomQueryFirstnamesAndLastname(Arrays.asList("bar", "foo", "fuu"), "bar",
-				new PageRequest(0, 2));
+				PageRequest.of(0, 2));

-		assertThat(result.getContent(), hasSize(2));
-		assertThat(result.getTotalPages(), is(2));
-		assertThat(result.getTotalElements(), is(3L));
+		assertThat(result.getContent()).hasSize(2);
+		assertThat(result.getTotalPages()).isEqualTo(2);
+		assertThat(result.getTotalElements()).isEqualTo(3L);
 	}

-	/**
-	 * @see DATAMONGO-745
-	 */
-	@Test
-	public void findByCustomQueryLastnameAndStreetInList() {
+	@Test // DATAMONGO-745
+	@ProvidesState
+	void findByCustomQueryLastnameAndStreetInList() {

 		repository.save(new Person("foo", "bar").withAddress(new Address("street1", "1", "SB")));
 		repository.save(new Person("bar", "bar").withAddress(new Address("street2", "1", "SB")));
@@ -924,83 +889,70 @@ public void findByCustomQueryLastnameAndStreetInList() {
 		repository.save(new Person("notfound", "notfound"));

 		Page<Person> result = repository.findByCustomQueryLastnameAndAddressStreetInList("bar",
-				Arrays.asList("street1", "street2"), new PageRequest(0, 2));
+				Arrays.asList("street1", "street2"), PageRequest.of(0, 2));

-		assertThat(result.getContent(), hasSize(2));
-		assertThat(result.getTotalPages(), is(2));
-		assertThat(result.getTotalElements(), is(3L));
+		assertThat(result.getContent()).hasSize(2);
+		assertThat(result.getTotalPages()).isEqualTo(2);
+		assertThat(result.getTotalElements()).isEqualTo(3L);
 	}

-	/**
-	 * @see DATAMONGO-950
-	 */
-	@Test
-	public void shouldLimitCollectionQueryToMaxResultsWhenPresent() {
+	@Test // DATAMONGO-950
+	@ProvidesState
+	void shouldLimitCollectionQueryToMaxResultsWhenPresent() {

-		repository.save(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
+		repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
 				new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan")));
 		List<Person> result = repository.findTop3ByLastnameStartingWith("Dylan");
-		assertThat(result.size(), is(3));
+		assertThat(result).hasSize(3);
 	}

-	/**
-	 * @see DATAMONGO-950
-	 */
-	@Test
-	public void shouldNotLimitPagedQueryWhenPageRequestWithinBounds() {
+	@Test // DATAMONGO-950, DATAMONGO-1464
+	@ProvidesState
+	void shouldNotLimitPagedQueryWhenPageRequestWithinBounds() {

-		repository.save(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
+		repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
 				new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan")));

-		Page<Person> result = repository.findTop3ByLastnameStartingWith("Dylan", new PageRequest(0, 2));
-		assertThat(result.getContent().size(), is(2));
+		Page<Person> result = repository.findTop3ByLastnameStartingWith("Dylan", PageRequest.of(0, 2));
+		assertThat(result.getContent()).hasSize(2);
+		assertThat(result.getTotalElements()).isEqualTo(3L);
 	}

-	/**
-	 * @see DATAMONGO-950
-	 */
-	@Test
-	public void shouldLimitPagedQueryWhenPageRequestExceedsUpperBoundary() {
+	@Test // DATAMONGO-950
+	@ProvidesState
+	void shouldLimitPagedQueryWhenPageRequestExceedsUpperBoundary() {

-		repository.save(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
+		repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
 				new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan")));

-		Page<Person> result = repository.findTop3ByLastnameStartingWith("Dylan", new PageRequest(1, 2));
-		assertThat(result.getContent().size(), is(1));
+		Page<Person> result = repository.findTop3ByLastnameStartingWith("Dylan", PageRequest.of(1, 2));
+		assertThat(result.getContent()).hasSize(1);
 	}

-	/**
-	 * @see DATAMONGO-950
-	 */
-	@Test
-	public void shouldReturnEmptyWhenPageRequestedPageIsTotallyOutOfScopeForLimit() {
+	@Test // DATAMONGO-950, DATAMONGO-1464
+	@ProvidesState
+	void shouldReturnEmptyWhenPageRequestedPageIsTotallyOutOfScopeForLimit() {

-		repository.save(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
+		repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"),
 				new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan")));

-		Page<Person> result = repository.findTop3ByLastnameStartingWith("Dylan", new PageRequest(2, 2));
-		assertThat(result.getContent().size(), is(0));
+		Page<Person> result = repository.findTop3ByLastnameStartingWith("Dylan", PageRequest.of(100, 2));
+		assertThat(result.getContent()).isEmpty();
+		assertThat(result.getTotalElements()).isEqualTo(3L);
 	}

-	/**
-	 * @see DATAMONGO-996, DATAMONGO-950
-	 */
-	@Test
-	public void gettingNonFirstPageWorksWithoutLimitBeingSet() {
+	@Test // DATAMONGO-996, DATAMONGO-950, DATAMONGO-1464
+	void gettingNonFirstPageWorksWithoutLimitBeingSet() {

-		Page<Person> slice = repository.findByLastnameLike("Matthews", new PageRequest(1, 1));
+		Page<Person> slice = repository.findByLastnameLike("Matthews", PageRequest.of(1, 1));

-		assertThat(slice.getContent(), hasSize(1));
-		assertThat(slice.hasPrevious(), is(true));
-		assertThat(slice.hasNext(), is(false));
+		assertThat(slice.getContent()).hasSize(1);
+		assertThat(slice.hasPrevious()).isTrue();
+		assertThat(slice.hasNext()).isFalse();
+		assertThat(slice.getTotalElements()).isEqualTo(2L);
 	}

-	/**
-	 * Ignored for now as this requires Querydsl 3.4.1 to succeed.
-	 *
-	 * @see DATAMONGO-972
-	 */
-	@Test
-	@Ignore
-	public void shouldExecuteFindOnDbRefCorrectly() {
+	@Test // DATAMONGO-972
+	@DirtiesState
+	void shouldExecuteFindOnDbRefCorrectly() {

 		operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class);
@@ -1012,37 +964,42 @@ public void shouldExecuteFindOnDbRefCorrectly() {
 		dave.setCreator(user);
 		operations.save(dave);

-		assertThat(repository.findOne(QPerson.person.creator.eq(user)), is(dave));
+		assertThat(repository.findOne(QPerson.person.creator.eq(user)).get()).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-969
-	 */
-	@Test
-	public void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
-		assertThat(repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))), containsInAnyOrder(dave, carter));
+	@Test // DATAMONGO-969
+	void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
+		assertThat(repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id)))).contains(dave, carter);
 	}

-	/**
-	 * @see DATAMONGO-1030
-	 */
-	@Test
-	public void executesSingleEntityQueryWithProjectionCorrectly() {
+	@Test // DATAMONGO-969
+	void shouldScrollPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
+
+		Window<Person> scroll = repository.findBy(person.id.in(asList(dave.id, carter.id, boyd.id)), //
+				q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(ScrollPosition.keyset()));
+
+		assertThat(scroll).containsExactly(boyd, carter);

-		PersonSummary result = repository.findSummaryByLastname("Beauford");
+		ScrollPosition resumeFrom = scroll.positionAt(scroll.size() - 1);
+		scroll = repository.findBy(person.id.in(asList(dave.id, carter.id, boyd.id)), //
+				q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(resumeFrom));

-		assertThat(result, is(notNullValue()));
-		assertThat(result.firstname, is("Carter"));
-		assertThat(result.lastname, is("Beauford"));
+		assertThat(scroll).containsOnly(dave);
 	}
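The new `shouldScrollPersons…` test above exercises the keyset-scrolling API: `findBy(predicate, query -> …scroll(position))` returns a `Window` that can be resumed from the position of its last element. A sketch of draining such a window in a loop, assuming a repository exposing the same fluent `findBy(...)` the test uses (the loop itself is illustrative, not part of the patch):

    import org.springframework.data.domain.ScrollPosition;
    import org.springframework.data.domain.Sort;
    import org.springframework.data.domain.Window;

    class ScrollLoopSketch {

        void scrollAll(PersonRepository repository) {

            ScrollPosition position = ScrollPosition.keyset();
            Window<Person> window;

            do {
                // each round trip fetches the next chunk after the last seen keyset position
                ScrollPosition current = position;
                window = repository.findBy(QPerson.person.id.isNotNull(),
                        q -> q.limit(100).sortBy(Sort.by("firstname")).scroll(current));

                window.forEach(this::process);

                if (window.hasNext()) {
                    position = window.positionAt(window.size() - 1);
                }
            } while (window.hasNext());
        }

        void process(Person person) { /* consume */ }
    }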
-	/**
-	 * @see DATAMONGO-1057
-	 */
-	@Test
-	public void sliceShouldTraverseElementsWithoutSkippingOnes() {
+	@Test // DATAMONGO-1030
+	void executesSingleEntityQueryWithProjectionCorrectly() {

-		repository.deleteAll();
+		PersonSummaryDto result = repository.findSummaryByLastname("Beauford");
+
+		assertThat(result).isNotNull();
+		assertThat(result.firstname).isEqualTo("Carter");
+		assertThat(result.lastname).isEqualTo("Beauford");
+	}
+
+	@Test // DATAMONGO-1057
+	@ProvidesState
+	void sliceShouldTraverseElementsWithoutSkippingOnes() {

 		List<Person> persons = new ArrayList<>(100);

 		for (int i = 0; i < 100; i++) {
@@ -1050,43 +1007,34 @@ public void sliceShouldTraverseElementsWithoutSkippingOnes() {
 			persons.add(new Person(String.format("%03d", i), "ln" + 1, 100));
 		}

-		repository.save(persons);
+		operations.bulkOps(BulkMode.UNORDERED, Person.class).insert(persons).execute();

-		Slice<Person> slice = repository.findByAgeGreaterThan(50, new PageRequest(0, 20, Direction.ASC, "firstname"));
-		assertThat(slice, contains(persons.subList(0, 20).toArray()));
+		Slice<Person> slice = repository.findByAgeGreaterThan(50, PageRequest.of(0, 20, Direction.ASC, "firstname"));
+		assertThat(slice).containsExactlyElementsOf(persons.subList(0, 20));

 		slice = repository.findByAgeGreaterThan(50, slice.nextPageable());
-		assertThat(slice, contains(persons.subList(20, 40).toArray()));
+		assertThat(slice).containsExactlyElementsOf(persons.subList(20, 40));
 	}

-	/**
-	 * @see DATAMONGO-1072
-	 */
-	@Test
-	public void shouldBindPlaceholdersUsedAsKeysCorrectly() {
+	@Test // DATAMONGO-1072
	void shouldBindPlaceholdersUsedAsKeysCorrectly() {

 		List<Person> persons = repository.findByKeyValue("firstname", alicia.getFirstname());

-		assertThat(persons, hasSize(1));
-		assertThat(persons, hasItem(alicia));
+		assertThat(persons).hasSize(1).contains(alicia);
 	}

-	/**
-	 * @see DATAMONGO-1105
-	 */
-	@Test
-	public void returnsOrderedResultsForQuerydslOrderSpecifier() {
+	@Test // DATAMONGO-1105
+	void returnsOrderedResultsForQuerydslOrderSpecifier() {

 		Iterable<Person> result = repository.findAll(person.firstname.asc());

-		assertThat(result, contains(alicia, boyd, carter, dave, leroi, oliver, stefan));
+		assertThat(result).containsExactly(alicia, boyd, carter, dave, leroi, oliver, stefan);
 	}

-	/**
-	 * @see DATAMONGO-1085
-	 */
-	@Test
-	public void shouldSupportSortingByQueryDslOrderSpecifier() {
+	@Test // DATAMONGO-1085
+	@ProvidesState
+	void shouldSupportSortingByQueryDslOrderSpecifier() {

 		repository.deleteAll();
@@ -1098,23 +1046,19 @@ public void shouldSupportSortingByQueryDslOrderSpecifier() {
 			persons.add(person);
 		}

-		repository.save(persons);
+		repository.saveAll(persons);

 		QPerson person = QPerson.person;

 		Iterable<Person> result = repository.findAll(person.firstname.isNotNull(), person.address.street.desc());

-		assertThat(result, is(Matchers.<Person> iterableWithSize(persons.size())));
-		assertThat(result.iterator().next().getFirstname(), is(persons.get(2).getFirstname()));
+		assertThat(result).hasSize(persons.size());
+		assertThat(result.iterator().next().getFirstname()).isEqualTo(persons.get(2).getFirstname());
 	}

-	/**
-	 * @see DATAMONGO-1085
-	 */
-	@Test
-	public void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() throws Exception {
-
-		repository.deleteAll();
+	@Test // DATAMONGO-1085
+	@ProvidesState
+	void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() {

 		List<Person> persons = new ArrayList<>();
@@ -1124,22 +1068,18 @@ public void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() throws Excep
 			persons.add(person);
 		}

-		repository.save(persons);
+		repository.saveAll(persons);

-		PageRequest pageRequest = new PageRequest(0, 2, new QSort(person.address.street.desc()));
+		PageRequest pageRequest = PageRequest.of(0, 2, new QSort(person.address.street.desc()));
 		Iterable<Person> result = repository.findAll(pageRequest);

-		assertThat(result, is(Matchers.<Person> iterableWithSize(2)));
-		assertThat(result.iterator().next().getFirstname(), is("Siggi 2"));
+		assertThat(result).hasSize(2);
+		assertThat(result.iterator().next().getFirstname()).isEqualTo("Siggi 2");
 	}

-	/**
-	 * @see DATAMONGO-1085
-	 */
-	@Test
-	public void shouldSupportSortingWithQSort() throws Exception {
-
-		repository.deleteAll();
+	@Test // DATAMONGO-1085
+	@ProvidesState
+	void shouldSupportSortingWithQSort() {

 		List<Person> persons = new ArrayList<>();
@@ -1149,34 +1089,29 @@ public void shouldSupportSortingWithQSort() throws Exception {
 			persons.add(person);
 		}

-		repository.save(persons);
+		repository.saveAll(persons);

 		Iterable<Person> result = repository.findAll(new QSort(person.address.street.desc()));

-		assertThat(result, is(Matchers.<Person> iterableWithSize(persons.size())));
-		assertThat(result.iterator().next().getFirstname(), is("Siggi 2"));
+		assertThat(result).hasSize(persons.size());
+		assertThat(result.iterator().next().getFirstname()).isEqualTo("Siggi 2");
 	}

-	/**
-	 * @see DATAMONGO-1165
-	 */
-	@Test
-	public void shouldAllowReturningJava8StreamInCustomQuery() throws Exception {
+	@Test // DATAMONGO-1165
+	void shouldAllowReturningJava8StreamInCustomQuery() {

 		Stream<Person> result = repository.findByCustomQueryWithStreamingCursorByFirstnames(Arrays.asList("Dave"));

 		try {
-			assertThat(result.collect(Collectors.<Person> toList()), hasItems(dave));
+			assertThat(result.collect(Collectors.<Person> toList())).contains(dave);
 		} finally {
 			result.close();
 		}
 	}

-	/**
-	 * @see DATAMONGO-1110
-	 */
-	@Test
-	public void executesGeoNearQueryForResultsCorrectlyWhenGivenMinAndMaxDistance() {
+	@Test // DATAMONGO-1110
+	@DirtiesState
+	void executesGeoNearQueryForResultsCorrectlyWhenGivenMinAndMaxDistance() {

 		Point point = new Point(-73.99171, 40.738868);
 		dave.setLocation(point);
@@ -1185,51 +1120,51 @@ public void executesGeoNearQueryForResultsCorrectlyWhenGivenMinAndMaxDistance()

 		Range<Distance> range = Distance.between(new Distance(0.01, KILOMETERS), new Distance(2000, KILOMETERS));

 		GeoResults<Person> results = repository.findPersonByLocationNear(new Point(-73.99, 40.73), range);
-		assertThat(results.getContent().isEmpty(), is(false));
+		assertThat(results.getContent()).isNotEmpty();
 	}

-	/**
-	 * @see DATAMONGO-990
-	 */
-	@Test
-	public void shouldFindByFirstnameForSpELExpressionWithParameterIndexOnly() {
+	@Test // DATAMONGO-990
+	void shouldFindByFirstnameForSpELExpressionWithParameterIndexOnly() {

 		List<Person> users = repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Dave");

-		assertThat(users, hasSize(1));
-		assertThat(users.get(0), is(dave));
+		assertThat(users).hasSize(1);
+		assertThat(users.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-990
-	 */
-	@Test
-	public void shouldFindByFirstnameAndCurrentUserWithCustomQuery() {
+	@Test // DATAMONGO-990
	void shouldFindByFirstnameAndCurrentUserWithCustomQuery() {

 		SampleSecurityContextHolder.getCurrent().setPrincipal(dave);
 		List<Person> users = repository.findWithSpelByFirstnameAndCurrentUserWithCustomQuery("Dave");

-		assertThat(users, hasSize(1));
-		assertThat(users.get(0), is(dave));
+		assertThat(users).hasSize(1);
+		assertThat(users.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-990
-	 */
-	@Test
-	public void shouldFindByFirstnameForSpELExpressionWithParameterVariableOnly() {
+	@Test // DATAMONGO-990
+	void shouldFindByFirstnameForSpELExpressionWithParameterVariableOnly() {

 		List<Person> users = repository.findWithSpelByFirstnameForSpELExpressionWithParameterVariableOnly("Dave");

-		assertThat(users, hasSize(1));
-		assertThat(users.get(0), is(dave));
+		assertThat(users).hasSize(1);
+		assertThat(users.get(0)).isEqualTo(dave);
 	}

-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findByExampleShouldResolveStuffCorrectly() {
+	@Test // DATAMONGO-1911
+	@DirtiesState
+	void findByUUIDShouldReturnCorrectResult() {
+
+		dave.setUniqueId(UUID.randomUUID());
+		repository.save(dave);
+
+		Person loaded = repository.findByUniqueId(this.dave.getUniqueId());
+
+		assertThat(loaded).isEqualTo(dave);
+	}
+
+	@Test // DATAMONGO-1245
+	void findByExampleShouldResolveStuffCorrectly() {

 		Person sample = new Person();
 		sample.setLastname("Matthews");
@@ -1239,15 +1174,12 @@ public void findByExampleShouldResolveStuffCorrectly() {
 		ReflectionTestUtils.setField(sample, "createdAt", null);
 		ReflectionTestUtils.setField(sample, "email", null);

-		Page<Person> result = repository.findAll(Example.of(sample), new PageRequest(0, 10));
-		assertThat(result.getNumberOfElements(), is(2));
+		Page<Person> result = repository.findAll(Example.of(sample), PageRequest.of(0, 10));
+		assertThat(result.getNumberOfElements()).isEqualTo(2);
 	}

-	/**
-	 * @see DATAMONGO-1245
-	 */
-	@Test
-	public void findAllByExampleShouldResolveStuffCorrectly() {
+	@Test // DATAMONGO-1245
+	void findAllByExampleShouldResolveStuffCorrectly() {

 		Person sample = new Person();
 		sample.setLastname("Matthews");
@@ -1258,7 +1190,498 @@ public void findAllByExampleShouldResolveStuffCorrectly() {
 		ReflectionTestUtils.setField(sample, "email", null);

 		List<Person> result = repository.findAll(Example.of(sample));
-		assertThat(result.size(), is(2));
+		assertThat(result).hasSize(2);
+	}
+
+	@Test // GH-4308
+	void scrollByExampleShouldReturnCorrectResult() {
+
+		Person sample = new Person();
+		sample.setLastname("M");
+
+		// needed to tweak stuff a bit since some fields are automatically set - so we need to undo this
+		ReflectionTestUtils.setField(sample, "id", null);
+		ReflectionTestUtils.setField(sample, "createdAt", null);
+		ReflectionTestUtils.setField(sample, "email", null);
+
+		Window<Person> result = repository.findBy(
+				Example.of(sample, ExampleMatcher.matching().withMatcher("lastname", GenericPropertyMatcher::startsWith)),
+				q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(ScrollPosition.keyset()));
+
+		assertThat(result).containsOnly(dave, leroi);
+		assertThat(result.hasNext()).isTrue();
+
+		ScrollPosition position = result.positionAt(result.size() - 1);
+		result = repository.findBy(
+				Example.of(sample, ExampleMatcher.matching().withMatcher("lastname", GenericPropertyMatcher::startsWith)),
+				q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(position));
+
+		assertThat(result).containsOnly(oliver);
+		assertThat(result.hasNext()).isFalse();
+	}
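Both Example-based tests build their probe the same way: instantiate the entity, set the matched properties, and null out whatever the constructor pre-populates so it does not leak into the query. A condensed sketch of that probe construction (field names mirror the tests; nothing here is new API):

    import org.springframework.data.domain.Example;
    import org.springframework.data.domain.ExampleMatcher;
    import org.springframework.data.domain.ExampleMatcher.GenericPropertyMatcher;
    import org.springframework.test.util.ReflectionTestUtils;

    class ExampleProbeSketch {

        Example<Person> lastnameStartingWith(String prefix) {

            Person probe = new Person();
            probe.setLastname(prefix);

            // undo fields the Person constructor fills in automatically; otherwise
            // they would become part of the match (id, createdAt, email in these tests)
            ReflectionTestUtils.setField(probe, "id", null);
            ReflectionTestUtils.setField(probe, "createdAt", null);
            ReflectionTestUtils.setField(probe, "email", null);

            return Example.of(probe,
                    ExampleMatcher.matching().withMatcher("lastname", GenericPropertyMatcher::startsWith));
        }
    }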
+
+	@Test // DATAMONGO-1425
+	void findsPersonsByFirstnameNotContains() {
+
+		List<Person> result = repository.findByFirstnameNotContains("Boyd");
+		assertThat(result).hasSize((int) (repository.count() - 1));
+		assertThat(result).doesNotContain(boyd);
+	}
+
+	@Test // DATAMONGO-1425
+	void findBySkillsContains() {
+
+		List<Person> result = repository.findBySkillsContains(asList("Drums"));
+		assertThat(result).hasSize(1).contains(carter);
+	}
+
+	@Test // DATAMONGO-1425
+	void findBySkillsNotContains() {
+
+		List<Person> result = repository.findBySkillsNotContains(Arrays.asList("Drums"));
+		assertThat(result).hasSize((int) (repository.count() - 1));
+		assertThat(result).doesNotContain(carter);
+	}
+
+	@Test // DATAMONGO-1424
+	void findsPersonsByFirstnameNotLike() {
+
+		List<Person> result = repository.findByFirstnameNotLike("Bo*");
+		assertThat(result).hasSize((int) (repository.count() - 1));
+		assertThat(result).doesNotContain(boyd);
+	}
+
+	@Test // DATAMONGO-1539
+	void countsPersonsByFirstname() {
+		assertThat(repository.countByThePersonsFirstname("Dave")).isEqualTo(1L);
+	}
+
+	@Test // DATAMONGO-1539
+	@DirtiesState
+	void deletesPersonsByFirstname() {
+
+		repository.deleteByThePersonsFirstname("Dave");
+
+		assertThat(repository.countByThePersonsFirstname("Dave")).isEqualTo(0L);
+	}
+
+	@Test // DATAMONGO-1752
+	void readsOpenProjection() {
+		assertThat(repository.findOpenProjectionBy()).isNotEmpty();
+	}
+
+	@Test // DATAMONGO-1752
+	void readsClosedProjection() {
+		assertThat(repository.findClosedProjectionBy()).isNotEmpty();
+	}
+
+	@Test // DATAMONGO-1865
+	void findFirstEntityReturnsFirstResultEvenForNonUniqueMatches() {
+		assertThat(repository.findFirstBy()).isNotNull();
+	}
+
+	@Test // DATAMONGO-1865
+	void findSingleEntityThrowsErrorWhenNotUnique() {
+		assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class)
+				.isThrownBy(() -> repository.findPersonByLastnameLike(dave.getLastname()));
+	}
+
+	@Test // DATAMONGO-1865
+	void findOptionalSingleEntityThrowsErrorWhenNotUnique() {
+		assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class)
+				.isThrownBy(() -> repository.findOptionalPersonByLastnameLike(dave.getLastname()));
+	}
+
+	@Test // DATAMONGO-1979
+	void findAppliesAnnotatedSort() {
+		assertThat(repository.findByAgeGreaterThan(40)).containsExactly(carter, boyd, dave, leroi);
+	}
+
+	@Test // DATAMONGO-1979
+	void findWithSortOverwritesAnnotatedSort() {
+		assertThat(repository.findByAgeGreaterThan(40, Sort.by(Direction.ASC, "age"))).containsExactly(leroi, dave, boyd,
+				carter);
+	}
+
+	@Test // DATAMONGO-2003
+	void findByRegexWithPattern() {
+		assertThat(repository.findByFirstnameRegex(Pattern.compile(alicia.getFirstname()))).hasSize(1);
+	}
+
+	@Test // DATAMONGO-2003
+	void findByRegexWithPatternAndOptions() {
+
+		String fn = alicia.getFirstname().toUpperCase();
+
+		assertThat(repository.findByFirstnameRegex(Pattern.compile(fn))).hasSize(0);
+		assertThat(repository.findByFirstnameRegex(Pattern.compile(fn, Pattern.CASE_INSENSITIVE))).hasSize(1);
+	}
+
+	@Test // DATAMONGO-2149
+	@DirtiesState
+	void annotatedQueryShouldAllowSliceInFieldsProjectionWithDbRef() {
+
+		operations.remove(new Query(), User.class);
+
+		List<User> users = IntStream.range(0, 10).mapToObj(it -> {
+
+			User user = new User();
+			user.id = "id" + it;
+			user.username = "user" + it;
+
+			return user;
+		}).collect(Collectors.toList());
+
+		users.forEach(operations::save);
+
+		alicia.fans = new ArrayList<>(users);
+		operations.save(alicia);
+
+		Person target = repository.findWithSliceInProjection(alicia.getId(), 0, 5);
+		assertThat(target.getFans()).hasSize(5);
+	}
+
+	@Test // DATAMONGO-2149
+	@DirtiesState
+	void annotatedQueryShouldAllowPositionalParameterInFieldsProjection() {
+
+		Set<Address> addressList = IntStream.range(0, 10).mapToObj(it -> new Address("street-" + it, "zip", "lnz"))
+				.collect(Collectors.toSet());
+
+		alicia.setShippingAddresses(addressList);
+		operations.save(alicia);
+
+		Person target = repository.findWithArrayPositionInProjection(1);
+
+		assertThat(target).isNotNull();
+		assertThat(target.getShippingAddresses()).hasSize(1);
 	}

+	@Test // DATAMONGO-2149, DATAMONGO-2154, DATAMONGO-2199
+	@DirtiesState
+	void annotatedQueryShouldAllowPositionalParameterInFieldsProjectionWithDbRef() {
+
+		List<User> userList = IntStream.range(0, 10).mapToObj(it -> {
+
+			User user = new User();
+			user.id = "" + it;
+			user.username = "user" + it;
+
+			return user;
+		}).collect(Collectors.toList());
+
+		userList.forEach(operations::save);
+
+		alicia.setFans(userList);
+		operations.save(alicia);
+
+		Person target = repository.findWithArrayPositionInProjectionWithDbRef(1);
+
+		assertThat(target).isNotNull();
+		assertThat(target.getFans()).hasSize(1);
+	}
+
+	@Test // DATAMONGO-2153
+	void findListOfSingleValue() {
+
+		assertThat(repository.findAllLastnames()).contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews");
+	}
+
+	@Test // GH-3543
+	void findStreamOfSingleValue() {
+
+		try (Stream<String> lastnames = repository.findAllLastnamesAsStream()) {
+			assertThat(lastnames) //
+					.contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews");
+		}
+	}
+
+	@Test // GH-4841
+	void annotatedAggregationStreamWithPlaceholderValue() {
+
+		assertThat(repository.groupStreamByLastnameAnd("firstname"))
+				.contains(new PersonAggregate("Lessard", Collections.singletonList("Stefan"))) //
+				.contains(new PersonAggregate("Keys", Collections.singletonList("Alicia"))) //
+				.contains(new PersonAggregate("Tinsley", Collections.singletonList("Boyd"))) //
+				.contains(new PersonAggregate("Beauford", Collections.singletonList("Carter"))) //
+				.contains(new PersonAggregate("Moore", Collections.singletonList("Leroi"))) //
+				.contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August")));
+	}
+
+	@Test // DATAMONGO-2153
+	void annotatedAggregationWithPlaceholderValue() {
+
+		assertThat(repository.groupByLastnameAnd("firstname"))
+				.contains(new PersonAggregate("Lessard", Collections.singletonList("Stefan"))) //
+				.contains(new PersonAggregate("Keys", Collections.singletonList("Alicia"))) //
+				.contains(new PersonAggregate("Tinsley", Collections.singletonList("Boyd"))) //
+				.contains(new PersonAggregate("Beauford", Collections.singletonList("Carter"))) //
+				.contains(new PersonAggregate("Moore", Collections.singletonList("Leroi"))) //
+				.contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August")));
+	}
+
+	@Test // GH-3543
+	void annotatedAggregationWithPlaceholderAsSlice() {
+
+		Slice<PersonAggregate> slice = repository.groupByLastnameAndAsSlice("firstname", Pageable.ofSize(5));
+		assertThat(slice).hasSize(5);
+		assertThat(slice.hasNext()).isTrue();
+	}
+
+	@Test // DATAMONGO-2153
+	void annotatedAggregationWithSort() {
+
+		assertThat(repository.groupByLastnameAnd("firstname", Sort.by("lastname"))) //
+				.containsSequence( //
+						new PersonAggregate("Beauford", Collections.singletonList("Carter")), //
+						new PersonAggregate("Keys", Collections.singletonList("Alicia")), //
+						new PersonAggregate("Lessard", Collections.singletonList("Stefan")), //
+						new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August")), //
+						new PersonAggregate("Moore", Collections.singletonList("Leroi")), //
+						new PersonAggregate("Tinsley", Collections.singletonList("Boyd")));
+	}
+
+	@Test // DATAMONGO-2153
+	void annotatedAggregationWithPageable() {
+
+		assertThat(repository.groupByLastnameAnd("firstname", PageRequest.of(1, 2, Sort.by("lastname")))) //
+				.containsExactly( //
+						new PersonAggregate("Lessard", Collections.singletonList("Stefan")), //
+						new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August")));
+	}
+
+	@Test // DATAMONGO-2153
+	void annotatedAggregationWithSingleSimpleResult() {
+		assertThat(repository.sumAge()).isEqualTo(245);
+	}
+
+	@Test // DATAMONGO-2153
+	void annotatedAggregationWithAggregationResultAsReturnType() {
+
+		assertThat(repository.sumAgeAndReturnAggregationResultWrapper()) //
+				.isInstanceOf(AggregationResults.class) //
+				.containsExactly(new Document("_id", null).append("total", 245));
+	}
+
+	@Test // DATAMONGO-2153
+	void annotatedAggregationWithAggregationResultAsReturnTypeAndProjection() {
+
+		assertThat(repository.sumAgeAndReturnAggregationResultWrapperWithConcreteType()) //
+				.isInstanceOf(AggregationResults.class) //
+				.containsExactly(new SumAge(245L));
+	}
+
+	@Test // GH-4839
+	void annotatedAggregationWithAggregationResultAsClosedInterfaceProjection() {
+
+		assertThat(repository.findAggregatedClosedInterfaceProjectionBy()).allSatisfy(it -> {
+			assertThat(it.getFirstname()).isIn(dave.getFirstname(), oliver.getFirstname());
+			assertThat(it.getLastname()).isEqualTo(dave.getLastname());
+		});
+	}
+
+	@Test // DATAMONGO-2374
+	void findsWithNativeProjection() {
+
+		assertThat(repository.findDocumentById(dave.getId()).get()).containsEntry("firstname", dave.getFirstname())
+				.containsEntry("lastname", dave.getLastname());
+	}
+
+	@Test // DATAMONGO-1677
+	@DirtiesState
+	void findWithMoreThan10Arguments() {
+
+		alicia.setSkills(Arrays.asList("musician", "singer", "composer", "actress", "pianist"));
+		alicia.setAddress(new Address("street", "zipCode", "city"));
+		alicia.setUniqueId(UUID.randomUUID());
+		UsernameAndPassword credentials = new UsernameAndPassword();
+		credentials.password = "keys";
+		credentials.username = "alicia";
+		alicia.credentials = credentials;
+
+		alicia = repository.save(this.alicia);
+
+		assertThat(repository.findPersonByManyArguments(this.alicia.getFirstname(), this.alicia.getLastname(),
+				this.alicia.getEmail(), this.alicia.getAge(), Sex.FEMALE, this.alicia.createdAt, alicia.getSkills(), "street",
+				"zipCode", "city", alicia.getUniqueId(), credentials.username, credentials.password)).isNotNull();
+	}
+
+	@Test // DATAMONGO-1894
+	void spelExpressionArgumentsGetReevaluatedOnEveryInvocation() {
+
+		assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Dave")).containsExactly(dave);
+		assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Carter"))
+				.containsExactly(carter);
+	}
+
+	@Test // DATAMONGO-1902
+	@DirtiesState
+	void findByValueInsideUnwrapped() {
+
+		Person bart = new Person("bart", "simpson");
+		User user = new User();
+		user.setUsername("bartman");
+		user.setId("84r1m4n");
+		bart.setUnwrappedUser(user);
+
+		operations.save(bart);
+
+		List<Person> result = repository.findByUnwrappedUserUsername(user.getUsername());
+
+		assertThat(result).hasSize(1);
+		assertThat(result.get(0).getId()).isEqualTo(bart.getId());
+	}
+
+	@Test // DATAMONGO-1902
+	@DirtiesState
+	void findByUnwrapped() {
+
+		Person bart = new Person("bart", "simpson");
+		User user = new User();
+		user.setUsername("bartman");
+		user.setId("84r1m4n");
+		bart.setUnwrappedUser(user);
+
+		operations.save(bart);
+
+		List<Person> result = repository.findByUnwrappedUser(user);
+
+		assertThat(result).hasSize(1);
+		assertThat(result.get(0).getId()).isEqualTo(bart.getId());
+	}
+
+	@Test // GH-3395, GH-4404
+	void caseInSensitiveInClause() {
+
+		assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3);
+
+		repository.save(new Person("the-first", "The First"));
+		repository.save(new Person("the-first-one", "The First One"));
+		repository.save(new Person("the-second", "The Second"));
+
+		assertThat(repository.findByLastnameIgnoreCaseIn("tHE fIRsT")).hasSize(1);
+	}
+
+	@Test // GH-3395
+	void caseInSensitiveInClauseQuotesExpressions() {
+		assertThat(repository.findByLastnameIgnoreCaseIn(".*")).isEmpty();
+	}
+
+	@Test // GH-3395
+	void caseSensitiveInClauseIgnoresExpressions() {
+		assertThat(repository.findByFirstnameIn(".*")).isEmpty();
+	}
+
+	@Test // GH-3583
+	@EnableIfMongoServerVersion(isGreaterThanEqual = "4.4")
+	void annotatedQueryShouldAllowAggregationInProjection() {
+
+		Person target = repository.findWithAggregationInProjection(alicia.getId());
+		assertThat(target.getFirstname()).isEqualTo(alicia.getFirstname().toUpperCase());
+	}
+
+	@Test // GH-3633
+	@DirtiesState
+	void annotatedQueryWithNullEqualityCheckShouldWork() {
+
+		operations.updateFirst(Query.query(Criteria.where("id").is(dave.getId())), Update.update("age", null),
+				Person.class);
+
+		Person byQueryWithNullEqualityCheck = repository.findByQueryWithNullEqualityCheck();
+		assertThat(byQueryWithNullEqualityCheck.getId()).isEqualTo(dave.getId());
+	}
+
+	@Test // GH-3602
+	@DirtiesState
+	void executesQueryWithDocumentReferenceCorrectly() {
+
+		Person josh = new Person("Josh", "Long");
+		User dave = new User();
+		dave.id = "dave";
+
+		josh.setSpiritAnimal(dave);
+
+		operations.save(josh);
+
+		List<Person> result = repository.findBySpiritAnimal(dave);
+		assertThat(result).map(Person::getId).containsExactly(josh.getId());
+	}
+
+	@Test // GH-3656
+	@DirtiesState
+	void resultProjectionWithOptionalIsExcecutedCorrectly() {
+
+		carter.setAddress(new Address("batman", "robin", "gotham"));
+		repository.save(carter);
+
+		PersonSummaryWithOptional result = repository.findSummaryWithOptionalByLastname("Beauford");
+
+		assertThat(result).isNotNull();
+		assertThat(result.getAddress()).isPresent();
+		assertThat(result.getFirstname()).contains("Carter");
+	}
+
+	@Test // GH-2107
+	@DirtiesState
+	void shouldAllowToUpdateAllElements() {
+		assertThat(repository.findAndUpdateViaMethodArgAllByLastname("Matthews", new Update().inc("visits", 1337)))
+				.isEqualTo(2);
+	}
+
+	@Test // GH-2107
+	@DirtiesState
+	void annotatedUpdateIsAppliedCorrectly() {
+
+		assertThat(repository.findAndIncrementVisitsByLastname("Matthews", 1337)).isEqualTo(2);
+
+		assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337));
+	}
+
+	@Test // GH-2107
+	@DirtiesState
+	void mixAnnotatedUpdateWithAnnotatedQuery() {
+
+		assertThat(repository.updateAllByLastname("Matthews", 1337)).isEqualTo(2);
+
+		assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337));
+	}
+
+	@Test // GH-2107
+	@DirtiesState
+	void annotatedUpdateWithSpELIsAppliedCorrectly() {
+
+		assertThat(repository.findAndIncrementVisitsUsingSpELByLastname("Matthews", 1337)).isEqualTo(2);
+
+		assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337));
+	}
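The GH-2107 tests above pair a derived or annotated query with an annotated update. A sketch of the corresponding repository declarations — the actual interface ships elsewhere in this patch, and the signatures below are inferred from the test invocations:

    import org.springframework.data.mongodb.repository.Query;
    import org.springframework.data.mongodb.repository.Update;
    import org.springframework.data.repository.CrudRepository;

    interface PersonUpdateSketchRepository extends CrudRepository<Person, String> {

        // derived query selects by lastname, the annotated update increments `visits`;
        // the int return value reports the number of modified documents
        @Update("{ '$inc' : { 'visits' : ?1 } }")
        int findAndIncrementVisitsByLastname(String lastname, int increment);

        // annotated query and annotated update combined
        @Query("{ 'lastname' : ?0 }")
        @Update("{ '$inc' : { 'visits' : ?1 } }")
        int updateAllByLastname(String lastname, int increment);
    }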
annotatedAggregationUpdateIsAppliedCorrectly() { + + repository.findAndIncrementVisitsViaPipelineByLastname("Matthews", 1337); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337)); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElementsWithVoidReturn() { + + repository.findAndUpdateViaMethodArgAllByLastname("Matthews", new Update().inc("visits", 1337)); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(visits -> visits == 1337); + } + + @Test // GH-2107 + @DirtiesState + void allowsToUseComplexTypesInUpdate() { + + Address address = new Address("1007 Mountain Drive", "53540", "Gotham"); + + assertThat(repository.findAndPushShippingAddressByEmail(dave.getEmail(), address)).isEqualTo(1); + assertThat(repository.findById(dave.getId()).map(Person::getShippingAddresses)) + .contains(Collections.singleton(address)); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java index a03698b142..534f44c8fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,9 @@ */ package org.springframework.data.mongodb.repository; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + import com.querydsl.core.annotations.QueryEmbeddable; /** @@ -32,9 +35,9 @@ protected Address() { } /** - * @param string - * @param string2 - * @param string3 + * @param street + * @param zipcode + * @param city */ public Address(String street, String zipcode, String city) { this.street = street; @@ -83,4 +86,28 @@ public String getCity() { public void setCity(String city) { this.city = city; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Address address = (Address) o; + + if (!ObjectUtils.nullSafeEquals(street, address.street)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(zipCode, address.zipCode)) { + return false; + } + return ObjectUtils.nullSafeEquals(city, address.city); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(street); + result = 31 * result + ObjectUtils.nullSafeHashCode(zipCode); + result = 31 * result + ObjectUtils.nullSafeHashCode(city); + return result; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java index c788e4524f..a4f533f0be 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2016 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,43 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Optional; +import java.util.Set; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * @author Christoph Strobl * @author Oliver Gierke * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) public class ComplexIdRepositoryIntegrationTests { + static @Client MongoClient mongoClient; + @Configuration - @EnableMongoRepositories - static class Config extends AbstractMongoConfiguration { + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = UserWithComplexIdRepository.class)) + static class Config extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -56,10 +59,14 @@ protected String getDatabaseName() { } @Override - public Mongo mongo() throws Exception { - return new MongoClient(); + public MongoClient mongoClient() { + return mongoClient; } + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } } @Autowired UserWithComplexIdRepository repo; @@ -68,7 +75,7 @@ public Mongo mongo() throws Exception { MyId id; UserWithComplexId userWithId; - @Before + @BeforeEach public void setUp() { repo.deleteAll(); @@ -82,76 
+89,58 @@ public void setUp() { userWithId.id = id; } - /** - * @see DATAMONGO-1078 - */ - @Test + @Test // DATAMONGO-1078 public void annotatedFindQueryShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.getUserByComplexId(id), is(userWithId)); + assertThat(repo.getUserByComplexId(id)).isEqualTo(userWithId); } - /** - * @see DATAMONGO-1078 - */ - @Test + @Test // DATAMONGO-1078 public void annotatedFindQueryShouldWorkWhenUsingComplexIdWithinCollection() { repo.save(userWithId); List loaded = repo.findByUserIds(Collections.singleton(id)); - assertThat(loaded, hasSize(1)); - assertThat(loaded, contains(userWithId)); + assertThat(loaded).hasSize(1); + assertThat(loaded).containsExactly(userWithId); } - /** - * @see DATAMONGO-1078 - */ - @Test + @Test // DATAMONGO-1078 public void findOneShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.findOne(id), is(userWithId)); + assertThat(repo.findById(id)).isEqualTo(Optional.of(userWithId)); } - /** - * @see DATAMONGO-1078 - */ - @Test + @Test // DATAMONGO-1078 public void findAllShouldWorkWhenUsingComplexId() { repo.save(userWithId); - Iterable loaded = repo.findAll(Collections.singleton(id)); + Iterable loaded = repo.findAllById(Collections.singleton(id)); - assertThat(loaded, is(Matchers. iterableWithSize(1))); - assertThat(loaded, contains(userWithId)); + assertThat(loaded).hasSize(1); + assertThat(loaded).containsExactly(userWithId); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void composedAnnotationFindQueryShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.getUserUsingComposedAnnotationByComplexId(id), is(userWithId)); + assertThat(repo.getUserUsingComposedAnnotationByComplexId(id)).isEqualTo(userWithId); } - /** - * @see DATAMONGO-1373 - */ - @Test + @Test // DATAMONGO-1373 public void composedAnnotationFindMetaShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.findUsersUsingComposedMetaAnnotationByUserIds(Arrays.asList(id)), hasSize(0)); + assertThat(repo.findUsersUsingComposedMetaAnnotationByUserIds(Arrays.asList(id))).hasSize(1); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java index 1d15730afc..a3dbae74c7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,14 +21,13 @@ /** * Sample contact domain class. 
- * + * * @author Oliver Gierke */ @Document public abstract class Contact { - @Id - protected final String id; + @Id protected String id; public Contact() { this.id = new ObjectId().toString(); @@ -37,4 +36,8 @@ public Contact() { public String getId() { return id; } + + public void setId(String id) { + this.id = id; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java index ed00a830ac..b9a0652d01 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2011 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ package org.springframework.data.mongodb.repository; -import org.springframework.data.mongodb.repository.MongoRepository; - /** * Simple repository interface managing {@link Contact}s. - * + * * @author Oliver Gierke */ public interface ContactRepository extends MongoRepository { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java index 34722d7091..5f502a22e5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2016 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Before; import org.junit.Test; @@ -24,15 +23,16 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Example; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link ContactRepository}. Mostly related to mapping inheritance. 
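 * The typed-example test below relies on query-by-example: non-null probe properties become
 * criteria and the probe type narrows the match. A minimal usage sketch (probe values are
 * illustrative):
 *
 * <pre>
 * Person probe = new Person("Oliver", "Gierke");
 * Optional<Person> match = repository.findOne(Example.of(probe));
 * </pre>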
- * + * * @author Oliver Gierke * @author Mark Paluch + * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("config/MongoNamespaceIntegrationTests-context.xml") public class ContactRepositoryIntegrationTests { @@ -49,17 +49,14 @@ public void readsAndWritesContactCorrectly() { Person person = new Person("Oliver", "Gierke"); Contact result = repository.save(person); - assertTrue(repository.findOne(result.getId().toString()) instanceof Person); + assertThat(repository.findById(result.getId().toString())).containsInstanceOf(Person.class); } - /** - * @see DATAMONGO-1245 - */ - @Test + @Test // DATAMONGO-1245 public void findsContactByTypedExample() { Person person = repository.save(new Person("Oliver", "Gierke")); - assertThat(repository.findOne(Example.of(person)), instanceOf(Person.class)); + assertThat(repository.findOne(Example.of(person))).containsInstanceOf(Person.class); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java new file mode 100644 index 0000000000..94a77f003a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java @@ -0,0 +1,337 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import static org.assertj.core.api.Assertions.*; + +import io.reactivex.rxjava3.core.Observable; +import io.reactivex.rxjava3.core.Single; +import io.reactivex.rxjava3.observers.TestObserver; +import io.reactivex.rxjava3.subscribers.TestSubscriber; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.Objects; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.reactivestreams.Publisher; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.ImportResource; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.data.repository.reactive.ReactiveSortingRepository; +import org.springframework.data.repository.reactive.RxJava3CrudRepository; +import org.springframework.data.repository.reactive.RxJava3SortingRepository; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +/** + * Test for {@link ReactiveMongoRepository} using reactive wrapper type conversion. + * + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration(classes = ConvertingReactiveMongoRepositoryTests.Config.class) +public class ConvertingReactiveMongoRepositoryTests { + + @EnableReactiveMongoRepositories( + includeFilters = { @Filter(value = ReactivePersonRepostitory.class, type = FilterType.ASSIGNABLE_TYPE), + @Filter(value = RxJava3PersonRepostitory.class, type = FilterType.ASSIGNABLE_TYPE), + @Filter(value = MixedReactivePersonRepostitory.class, type = FilterType.ASSIGNABLE_TYPE) }, + considerNestedRepositories = true) + @ImportResource("classpath:reactive-infrastructure.xml") + static class Config {} + + @Autowired MixedReactivePersonRepostitory reactiveRepository; + @Autowired ReactivePersonRepostitory reactivePersonRepostitory; + @Autowired RxJava3PersonRepostitory rxJava3PersonRepostitory; + + ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia; + + @Before + public void setUp() { + + reactiveRepository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave = new ReactivePerson("Dave", "Matthews", 42); + oliver = new ReactivePerson("Oliver August", "Matthews", 4); + carter = new ReactivePerson("Carter", "Beauford", 49); + boyd = new ReactivePerson("Boyd", "Tinsley", 45); + stefan = new ReactivePerson("Stefan", "Lessard", 34); + leroi = new ReactivePerson("Leroi", "Moore", 41); + alicia = new ReactivePerson("Alicia", "Keys", 30); + + reactiveRepository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)) + .as(StepVerifier::create) // + .expectNextCount(7) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void reactiveStreamsMethodsShouldWork() { + reactivePersonRepostitory.existsById(dave.getId()).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void reactiveStreamsQueryMethodsShouldWork() { + 
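// Both assertion styles used in this class follow the same shape: Reactor pipelines are
// verified with StepVerifier (as in the statement below), while the RxJava 3 methods turn the
// returned Single/Maybe/Flowable into a test consumer first. A compact sketch of the RxJava 3
// idiom, using the fixtures set up above:
//
//   TestObserver<Boolean> observer = rxJava3PersonRepostitory.existsById(dave.getId()).test();
//   observer.await();                               // block until the stream terminates
//   observer.assertComplete().assertNoErrors().assertValue(true);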
StepVerifier.create(reactivePersonRepostitory.findByLastname(boyd.getLastname())).expectNext(boyd).verifyComplete(); + } + + @Test // DATAMONGO-2558 + public void simpleRxJava3MethodsShouldWork() throws InterruptedException { + + TestObserver testObserver = rxJava3PersonRepostitory.existsById(dave.getId()).test(); + + testObserver.await(); + testObserver.assertComplete(); + testObserver.assertNoErrors(); + testObserver.assertValue(true); + } + + @Test // DATAMONGO-2558 + public void existsWithSingleRxJava3IdMethodsShouldWork() throws InterruptedException { + + TestObserver testObserver = rxJava3PersonRepostitory.existsById(Single.just(dave.getId())) + .test(); + + testObserver.await(); + testObserver.assertComplete(); + testObserver.assertNoErrors(); + testObserver.assertValue(true); + } + + @Test // DATAMONGO-2558 + public void flowableRxJava3QueryMethodShouldWork() throws InterruptedException { + + TestSubscriber testSubscriber = rxJava3PersonRepostitory + .findByFirstnameAndLastname(dave.getFirstname(), dave.getLastname()).test(); + + testSubscriber.await(); + testSubscriber.assertComplete(); + testSubscriber.assertNoErrors(); + testSubscriber.assertValue(dave); + } + + @Test // DATAMONGO-2558 + public void singleProjectedRxJava3QueryMethodShouldWork() throws InterruptedException { + + io.reactivex.rxjava3.observers.TestObserver testObserver = rxJava3PersonRepostitory + .findProjectedByLastname(io.reactivex.rxjava3.core.Maybe.just(carter.getLastname())).test(); + + testObserver.await(); + testObserver.assertComplete(); + testObserver.assertNoErrors(); + + testObserver.assertValue(actual -> { + assertThat(actual.getFirstname()).isEqualTo(carter.getFirstname()); + return true; + }); + } + + @Test // DATAMONGO-2558 + public void observableProjectedRxJava3QueryMethodShouldWork() throws InterruptedException { + + io.reactivex.rxjava3.observers.TestObserver testObserver = rxJava3PersonRepostitory + .findProjectedByLastname(io.reactivex.rxjava3.core.Single.just(carter.getLastname())).test(); + + testObserver.await(); + testObserver.assertComplete(); + testObserver.assertNoErrors(); + + testObserver.assertValue(actual -> { + assertThat(actual.getFirstname()).isEqualTo(carter.getFirstname()); + return true; + }); + } + + @Test // DATAMONGO-2558 + public void maybeRxJava3QueryMethodShouldWork() throws InterruptedException { + + io.reactivex.rxjava3.observers.TestObserver testObserver = rxJava3PersonRepostitory + .findByLastname(boyd.getLastname()).test(); + + testObserver.await(); + testObserver.assertComplete(); + testObserver.assertNoErrors(); + testObserver.assertValue(boyd); + } + +// @Test // DATAMONGO-1444 +// public void mixedRepositoryShouldWork() { +// +// reactiveRepository.findByLastname(boyd.getLastname()) // +// .test() // +// .awaitTerminalEvent() // +// .assertValue(boyd) // +// .assertNoErrors() // +// .assertCompleted() // +// .getOnNextEvents(); +// } + + @Test // DATAMONGO-1444 + public void shouldFindOneBySingleOfLastName() { + + reactiveRepository.findByLastname(Single.just(carter.getLastname())).as(StepVerifier::create) // + .expectNext(carter) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + public void shouldFindByObservableOfLastNameIn() { + + reactiveRepository.findByLastnameIn(Observable.just(carter.getLastname(), dave.getLastname())) + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + +// @Test // DATAMONGO-1444 +// public void shouldFindByPublisherOfLastNameInAndAgeGreater() { +// +// List people = reactiveRepository +// 
.findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), dave.getLastname()), 41).test() // +// .awaitTerminalEvent() // +// .assertValueCount(2) // +// .assertNoErrors() // +// .assertCompleted() // +// .getOnNextEvents(); +// +// assertThat(people).contains(carter, dave); +// } + +interface ReactivePersonRepostitory + extends ReactiveCrudRepository, ReactiveSortingRepository { + + Publisher findByLastname(String lastname); + } + + interface RxJava3PersonRepostitory + extends RxJava3CrudRepository, RxJava3SortingRepository { + + io.reactivex.rxjava3.core.Flowable findByFirstnameAndLastname(String firstname, String lastname); + + io.reactivex.rxjava3.core.Maybe findByLastname(String lastname); + + io.reactivex.rxjava3.core.Single findProjectedByLastname( + io.reactivex.rxjava3.core.Maybe lastname); + + io.reactivex.rxjava3.core.Observable findProjectedByLastname( + io.reactivex.rxjava3.core.Single lastname); + } + + interface MixedReactivePersonRepostitory extends ReactiveMongoRepository { + + Single findByLastname(String lastname); + + Mono findByLastname(Single lastname); + + Flux findByLastnameIn(Observable lastname); + + Flux findByLastname(String lastname, Sort sort); + + Observable findByLastnameInAndAgeGreaterThan(Flux lastname, int age); + } + + @Document + static class ReactivePerson { + + @Id String id; + + String firstname; + String lastname; + int age; + + public ReactivePerson() {} + + public ReactivePerson(String firstname, String lastname, int age) { + + this.firstname = firstname; + this.lastname = lastname; + this.age = age; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public int getAge() { + return this.age; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ReactivePerson that = (ReactivePerson) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstname, that.firstname) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, age); + } + + public String toString() { + return "ConvertingReactiveMongoRepositoryTests.ReactivePerson(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", age=" + this.getAge() + ")"; + } + } + + interface ProjectedPerson { + + String getId(); + + String getFirstname(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Credentials.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Credentials.java index 2f58f1027a..24d894625c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Credentials.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Credentials.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ package org.springframework.data.mongodb.repository; /** - * * @author Oliver Gierke */ public interface Credentials { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MongoRepositoryTextSearchIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MongoRepositoryTextSearchIntegrationTests.java index 05305d9a90..c41abf4aa1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MongoRepositoryTextSearchIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MongoRepositoryTextSearchIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,55 +15,41 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.collection.IsCollectionWithSize.*; -import static org.hamcrest.core.Is.*; -import static org.hamcrest.core.IsCollectionContaining.*; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; -import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder; import org.springframework.data.mongodb.core.index.TextIndexed; import org.springframework.data.mongodb.core.mapping.TextScore; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.ClassUtils; +import 
org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; - /** * Integration tests for text searches on repository. - * + * * @author Christoph Strobl * @author Oliver Gierke + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class MongoRepositoryTextSearchIntegrationTests { - - public static @ClassRule MongoVersionRule versionRule = MongoVersionRule.atLeast(new Version(2, 6, 0)); +@ExtendWith(MongoTemplateExtension.class) +class MongoRepositoryTextSearchIntegrationTests { private static final FullTextDocument PASSENGER_57 = new FullTextDocument("1", "Passenger 57", "Passenger 57 is an action film that stars Wesley Snipes and Bruce Payne."); @@ -72,169 +58,130 @@ public class MongoRepositoryTextSearchIntegrationTests { private static final FullTextDocument DROP_ZONE = new FullTextDocument("3", "Drop Zone", "Drop Zone is an action film featuring Wesley Snipes and Gary Busey."); - @Autowired MongoTemplate template; - FullTextRepository repo; + @Template(initialEntitySet = FullTextDocument.class) // + private static MongoTestTemplate template; + + private FullTextRepository repo = new MongoRepositoryFactory(this.template).getRepository(FullTextRepository.class); - @Before - public void setUp() { + @BeforeEach + void setUp() { - template.indexOps(FullTextDocument.class).ensureIndex( - new TextIndexDefinitionBuilder().onField("title").onField("content").build()); - this.repo = new MongoRepositoryFactory(this.template).getRepository(FullTextRepository.class); + template.indexOps(FullTextDocument.class) + .ensureIndex(new TextIndexDefinitionBuilder().onField("title").onField("content").build()); } - @After - public void tearDown() { - template.dropCollection(FullTextDocument.class); + @AfterEach + void tearDown() { + template.flush(); } - /** - * @see DATAMONGO-973 - */ - @Test - public void findAllByTextCriteriaShouldReturnMatchingDocuments() { + @Test // DATAMONGO-973 + void findAllByTextCriteriaShouldReturnMatchingDocuments() { initRepoWithDefaultDocuments(); List result = repo.findAllBy(TextCriteria.forDefaultLanguage().matchingAny("stallone", "payne")); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(PASSENGER_57, DEMOLITION_MAN)); + assertThat(result).hasSize(2); + assertThat(result).contains(PASSENGER_57, DEMOLITION_MAN); } - /** - * @see DATAMONGO-973 - */ - @Test - public void derivedFinderWithTextCriteriaReturnsCorrectResult() { + @Test // DATAMONGO-973 + void derivedFinderWithTextCriteriaReturnsCorrectResult() { initRepoWithDefaultDocuments(); - FullTextDocument blade = new FullTextDocument( - "4", - "Blade", - "Blade is a 1998 American vampire-superhero-vigilante action film starring Wesley Snipes and Stephen Dorff, loosely based on the Marvel Comics character Blade"); + FullTextDocument blade = new FullTextDocument("4", "Blade", + "Blade is a 1998-2018 American vampire-superhero-vigilante action film starring Wesley Snipes and Stephen Dorff, loosely based on the Marvel Comics character Blade"); blade.nonTextIndexProperty = "foo"; repo.save(blade); List result = repo.findByNonTextIndexProperty("foo", TextCriteria.forDefaultLanguage().matching("snipes")); - assertThat(result, hasSize(1)); - assertThat(result, hasItems(blade)); + 
assertThat(result).hasSize(1); + assertThat(result).contains(blade); } - /** - * @see DATAMONGO-973 - */ - @Test - public void findByWithPaginationWorksCorrectlyWhenUsingTextCriteria() { + @Test // DATAMONGO-973 + void findByWithPaginationWorksCorrectlyWhenUsingTextCriteria() { initRepoWithDefaultDocuments(); - Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("film"), new PageRequest(1, - 1, Direction.ASC, "id")); + Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("film"), + PageRequest.of(1, 1, Direction.ASC, "id")); - assertThat(page.hasNext(), is(true)); - assertThat(page.hasPrevious(), is(true)); - assertThat(page.getTotalElements(), is(3L)); - assertThat(page.getContent().get(0), equalTo(DEMOLITION_MAN)); + assertThat(page.hasNext()).isTrue(); + assertThat(page.hasPrevious()).isTrue(); + assertThat(page.getTotalElements()).isEqualTo(3L); + assertThat(page.getContent().get(0)).isEqualTo(DEMOLITION_MAN); } - /** - * @see DATAMONGO-973 - */ - @Test - public void findAllByTextCriteriaWithSortWorksCorrectly() { + @Test // DATAMONGO-973 + void findAllByTextCriteriaWithSortWorksCorrectly() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); repo.save(snipes); - List result = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), new Sort( - "score")); + List result = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), + Sort.by("score")); - assertThat(result.size(), is(4)); - assertThat(result.get(0), equalTo(snipes)); + assertThat(result.size()).isEqualTo(4); + assertThat(result.get(0)).isEqualTo(snipes); } - /** - * @see DATAMONGO-973 - */ - @Test - public void findByWithSortByScoreViaPageRequestTriggersSortingCorrectly() { + @Test // DATAMONGO-973 + void findByWithSortByScoreViaPageRequestTriggersSortingCorrectly() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); repo.save(snipes); - Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), new PageRequest( - 0, 10, Direction.ASC, "score")); + Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), + PageRequest.of(0, 10, Direction.ASC, "score")); - assertThat(page.getTotalElements(), is(4L)); - assertThat(page.getContent().get(0), equalTo(snipes)); + assertThat(page.getTotalElements()).isEqualTo(4L); + assertThat(page.getContent().get(0)).isEqualTo(snipes); } - /** - * @see DATAMONGO-973 - */ - @Test - public void findByWithSortViaPageRequestIgnoresTextScoreWhenSortedByOtherProperty() { + @Test // DATAMONGO-973 + void findByWithSortViaPageRequestIgnoresTextScoreWhenSortedByOtherProperty() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); repo.save(snipes); - Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), new PageRequest( - 0, 10, Direction.ASC, "id")); + Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), + PageRequest.of(0, 10, Direction.ASC, "id")); - assertThat(page.getTotalElements(), is(4L)); - assertThat(page.getContent().get(0), equalTo(PASSENGER_57)); + assertThat(page.getTotalElements()).isEqualTo(4L); + assertThat(page.getContent().get(0)).isEqualTo(PASSENGER_57); } - /** - * @see DATAMONGO-973 - */ - @Test - public void 
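// All of these tests hand a TextCriteria into derived finder methods; a sketch of the
// repository side as used throughout this class (the base interface is assumed, not spelled
// out in this hunk):
//
//   interface FullTextRepository extends CrudRepository<FullTextDocument, String> {
//
//       List<FullTextDocument> findAllBy(TextCriteria criteria);
//
//       List<FullTextDocument> findAllBy(TextCriteria criteria, Sort sort);
//
//       Page<FullTextDocument> findAllBy(TextCriteria criteria, Pageable pageable);
//   }
//
// Relevance ordering works because FullTextDocument carries a @TextScore property named
// "score", so sorting by "score" translates into the $meta textScore sort.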
derivedSortForTextScorePropertyWorksCorrectly() { + @Test // DATAMONGO-973 + void derivedSortForTextScorePropertyWorksCorrectly() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); repo.save(snipes); - List result = repo.findByNonTextIndexPropertyIsNullOrderByScoreDesc(TextCriteria - .forDefaultLanguage().matching("snipes")); - assertThat(result.get(0), equalTo(snipes)); + List result = repo + .findByNonTextIndexPropertyIsNullOrderByScoreDesc(TextCriteria.forDefaultLanguage().matching("snipes")); + assertThat(result.get(0)).isEqualTo(snipes); } - /** - * @see DATAMONGO-973 - */ - @Test - public void derivedFinderMethodWithoutFullTextShouldNoCauseTroubleWhenHavingEntityWithTextScoreProperty() { + @Test // DATAMONGO-973, DATAMONGO-2516 + void derivedFinderMethodWithoutFullTextShouldNoCauseTroubleWhenHavingEntityWithTextScoreProperty() { initRepoWithDefaultDocuments(); List result = repo.findByTitle(DROP_ZONE.getTitle()); - assertThat(result.get(0), equalTo(DROP_ZONE)); - assertThat(result.get(0).score, equalTo(0.0F)); - } - private void initRepoWithDefaultDocuments() { - repo.save(Arrays.asList(PASSENGER_57, DEMOLITION_MAN, DROP_ZONE)); + assertThat(result.get(0)).isEqualTo(DROP_ZONE); + assertThat(result.get(0).score).isNull(); } - @org.springframework.context.annotation.Configuration - public static class Configuration extends AbstractMongoConfiguration { - - @Override - protected String getDatabaseName() { - return ClassUtils.getShortNameAsProperty(MongoRepositoryTextSearchIntegrationTests.class); - } - - @Override - public Mongo mongo() throws Exception { - return new MongoClient(); - } - + private void initRepoWithDefaultDocuments() { + repo.saveAll(Arrays.asList(PASSENGER_57, DEMOLITION_MAN, DROP_ZONE)); } static class FullTextDocument { @@ -286,7 +233,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java index 086065b21f..3dace8928b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import java.io.Serializable; +import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; /** @@ -42,7 +43,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java index 4c165f7392..664b5279c8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import java.util.Date; import java.util.List; import java.util.Set; +import java.util.UUID; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; @@ -26,14 +27,18 @@ import org.springframework.data.mongodb.core.index.Indexed; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.lang.Nullable; /** * Sample domain class. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ @Document public class Person extends Contact { @@ -44,7 +49,7 @@ public enum Sex { private String firstname; private String lastname; - @Indexed(unique = true, dropDups = true) private String email; + @Indexed(unique = true) private String email; private Integer age; @SuppressWarnings("unused") private Sex sex; Date createdAt; @@ -56,6 +61,8 @@ public enum Sex { private @Field("add") Address address; private Set
                    shippingAddresses; + private UUID uniqueId; + @DBRef User creator; @DBRef(lazy = true) User coworker; @@ -66,6 +73,13 @@ public enum Sex { Credentials credentials; + @Unwrapped.Nullable(prefix = "u") // + User unwrappedUser; + + @DocumentReference User spiritAnimal; + + int visits; + public Person() { this(null, null); @@ -196,6 +210,14 @@ public void setShippingAddresses(Set
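// Two of the new fields above change the persisted shape of Person: the @DocumentReference
// field stores only the referenced User's id and is resolved when the Person is read back,
// while @Unwrapped.Nullable(prefix = "u") inlines the User properties into the Person document
// under prefixed field names (e.g. the username property would end up as "uusername"; the
// concrete names are inferred from the prefix, not spelled out in this patch).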
                    addresses) { this.shippingAddresses = addresses; } + public UUID getUniqueId() { + return uniqueId; + } + + public void setUniqueId(UUID uniqueId) { + this.uniqueId = uniqueId; + } + /* (non-Javadoc) * @see org.springframework.data.mongodb.repository.Contact#getName() */ @@ -245,13 +267,16 @@ public void setCoworker(User coworker) { this.coworker = coworker; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ + public int getVisits() { + return visits; + } + + public void setVisits(int visits) { + this.visits = visits; + } + @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -284,11 +309,22 @@ public List getSkills() { return skills; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ + public User getUnwrappedUser() { + return unwrappedUser; + } + + public void setUnwrappedUser(User unwrappedUser) { + this.unwrappedUser = unwrappedUser; + } + + public User getSpiritAnimal() { + return spiritAnimal; + } + + public void setSpiritAnimal(User spiritAnimal) { + this.spiritAnimal = spiritAnimal; + } + @Override public int hashCode() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonAggregate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonAggregate.java new file mode 100644 index 0000000000..16b2157bc8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonAggregate.java @@ -0,0 +1,75 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceConstructor; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +final class PersonAggregate { + + @Id private final String lastname; + private final Set names; + + public PersonAggregate(String lastname, String name) { + this(lastname, Collections.singletonList(name)); + } + + @PersistenceConstructor + public PersonAggregate(String lastname, Collection names) { + + this.lastname = lastname; + this.names = new HashSet<>(names); + } + + public String getLastname() { + return this.lastname; + } + + public Set getNames() { + return this.names; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonAggregate that = (PersonAggregate) o; + return Objects.equals(lastname, that.lastname) && Objects.equals(names, that.names); + } + + @Override + public int hashCode() { + return Objects.hash(lastname, names); + } + + public String toString() { + return "PersonAggregate(lastname=" + this.getLastname() + ", names=" + this.getNames() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java new file mode 100644 index 0000000000..e531af2212 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java @@ -0,0 +1,27 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import org.springframework.beans.factory.annotation.Value; + +/** + * @author Oliver Gierke + */ +public interface PersonExcerpt { + + @Value("#{target.firstname + ' ' + target.lastname}") + String getFullName(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index eae2c02e10..c66b554078 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +18,19 @@ import java.util.Collection; import java.util.Date; import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.regex.Pattern; import java.util.stream.Stream; +import org.springframework.data.domain.Limit; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; @@ -32,22 +38,27 @@ import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Person.Sex; -import org.springframework.data.querydsl.QueryDslPredicateExecutor; +import org.springframework.data.querydsl.QuerydslPredicateExecutor; import org.springframework.data.repository.query.Param; +import org.springframework.lang.Nullable; /** * Sample repository managing {@link Person} entities. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Fırat KÜÇÜK + * @author Mark Paluch */ -public interface PersonRepository extends MongoRepository, QueryDslPredicateExecutor { +public interface PersonRepository extends MongoRepository, QuerydslPredicateExecutor { /** * Returns all {@link Person}s with the given lastname. - * + * * @param lastname * @return */ @@ -59,7 +70,7 @@ public interface PersonRepository extends MongoRepository, Query /** * Returns all {@link Person}s with the given lastname ordered by their firstname. - * + * * @param lastname * @return */ @@ -68,47 +79,86 @@ public interface PersonRepository extends MongoRepository, Query /** * Returns the {@link Person}s with the given firstname. Uses {@link Query} annotation to define the query to be * executed. - * + * * @param firstname * @return */ @Query(value = "{ 'firstname' : ?0 }", fields = "{ 'firstname': 1, 'lastname': 1}") List findByThePersonsFirstname(String firstname); - /** - * @see DATAMONGO-871 - */ + // DATAMONGO-871 @Query(value = "{ 'firstname' : ?0 }") Person[] findByThePersonsFirstnameAsArray(String firstname); /** * Returns all {@link Person}s with a firstname matching the given one (*-wildcard supported). - * + * + * @param firstname + * @return + */ + List findByFirstnameLike(@Nullable String firstname); + + List findByFirstnameNotContains(String firstname); + + /** + * Returns all {@link Person}s with a firstname not matching the given one (*-wildcard supported). 
+ * * @param firstname * @return */ - List findByFirstnameLike(String firstname); + List findByFirstnameNotLike(String firstname); List findByFirstnameLikeOrderByLastnameAsc(String firstname, Sort sort); + List findBySkillsContains(List skills); + + List findBySkillsNotContains(List skills); + @Query("{'age' : { '$lt' : ?0 } }") List findByAgeLessThan(int age, Sort sort); /** - * Returns a page of {@link Person}s with a lastname mathing the given one (*-wildcards supported). - * + * Returns a scroll of {@link Person}s with a lastname matching the given one (*-wildcards supported). + * + * @param lastname + * @param scrollPosition + * @return + */ + Window findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc(String lastname, + ScrollPosition scrollPosition); + + Window findByLastnameLikeOrderByLastnameAscFirstnameAsc(String lastname, + ScrollPosition scrollPosition, Limit limit); + + /** + * Returns a scroll of {@link Person}s applying projections with a lastname matching the given one (*-wildcards + * supported). + * + * @param lastname + * @param pageable + * @return + */ + Window findCursorProjectionByLastnameLike(String lastname, Pageable pageable); + + /** + * Returns a page of {@link Person}s with a lastname matching the given one (*-wildcards supported). + * * @param lastname * @param pageable * @return */ Page findByLastnameLike(String lastname, Pageable pageable); - @Query("{ 'lastname' : { '$regex' : ?0, '$options' : ''}}") + List findByLastnameLike(String lastname, Sort sort, Limit limit); + + @Query("{ 'lastname' : { '$regex' : '?0', '$options' : 'i'}}") Page findByLastnameLikeWithPageable(String lastname, Pageable pageable); + List findByLastnameIgnoreCaseIn(String... lastname); + /** * Returns all {@link Person}s with a firstname contained in the given varargs. - * + * * @param firstnames * @return */ @@ -116,7 +166,7 @@ public interface PersonRepository extends MongoRepository, Query /** * Returns all {@link Person}s with a firstname not contained in the given collection. - * + * * @param firstnames * @return */ @@ -126,7 +176,7 @@ public interface PersonRepository extends MongoRepository, Query /** * Returns all {@link Person}s with an age between the two given values. - * + * * @param from * @param to * @return @@ -135,7 +185,7 @@ public interface PersonRepository extends MongoRepository, Query /** * Returns the {@link Person} with the given {@link Address} as shipping address. - * + * * @param address * @return */ @@ -143,7 +193,7 @@ public interface PersonRepository extends MongoRepository, Query /** * Returns all {@link Person}s with the given {@link Address}. 
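 * The Window-returning methods above implement scroll queries; a usage sketch (the wildcard
 * value is illustrative):
 *
 * <pre>
 * Window<Person> first = repository.findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc(
 *     "*a*", ScrollPosition.offset());
 * if (first.hasNext()) {
 *   Window<Person> next = repository.findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc(
 *       "*a*", first.positionAt(first.size() - 1));
 * }
 * </pre>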
- * + * * @param address * @return */ @@ -171,185 +221,263 @@ public interface PersonRepository extends MongoRepository, Query GeoResults findByLocationNear(Point point, Distance maxDistance); - /** - * @see DATAMONGO-1110 - */ + // DATAMONGO-1110 GeoResults findPersonByLocationNear(Point point, Range distance); GeoPage findByLocationNear(Point point, Distance maxDistance, Pageable pageable); List findByCreator(User user); - /** - * @see DATAMONGO-425 - */ + // DATAMONGO-425 List findByCreatedAtLessThan(Date date); - /** - * @see DATAMONGO-425 - */ + // DATAMONGO-425 List findByCreatedAtGreaterThan(Date date); - /** - * @see DATAMONGO-425 - */ + // DATAMONGO-425 @Query("{ 'createdAt' : { '$lt' : ?0 }}") List findByCreatedAtLessThanManually(Date date); - /** - * @see DATAMONGO-427 - */ + // DATAMONGO-427 List findByCreatedAtBefore(Date date); - /** - * @see DATAMONGO-427 - */ + // DATAMONGO-427 List findByCreatedAtAfter(Date date); - /** - * @see DATAMONGO-472 - * @param lastname - * @return - */ + // DATAMONGO-472 List findByLastnameNot(String lastname); - /** - * @see DATAMONGO-600 - * @param credentials - * @return - */ + // DATAMONGO-600 List findByCredentials(Credentials credentials); - /** - * @see DATAMONGO-636 - */ + // DATAMONGO-636 long countByLastname(String lastname); - /** - * @see DATAMONGO-636 - */ + // DATAMONGO-636 int countByFirstname(String firstname); - /** - * @see DATAMONGO-636 - */ + // DATAMONGO-636 @Query(value = "{ 'lastname' : ?0 }", count = true) long someCountQuery(String lastname); - /** - * @see DATAMONGO-770 - */ - List findByFirstnameIgnoreCase(String firstName); + // DATAMONGO-1454 + boolean existsByFirstname(String firstname); - /** - * @see DATAMONGO-770 - */ + // DATAMONGO-1454 + @ExistsQuery(value = "{ 'lastname' : ?0 }") + boolean someExistQuery(String lastname); + + // DATAMONGO-770 + List findByFirstnameIgnoreCase(@Nullable String firstName); + + // DATAMONGO-770 List findByFirstnameNotIgnoreCase(String firstName); - /** - * @see DATAMONGO-770 - */ + // DATAMONGO-770 List findByFirstnameStartingWithIgnoreCase(String firstName); - /** - * @see DATAMONGO-770 - */ + // DATAMONGO-770 List findByFirstnameEndingWithIgnoreCase(String firstName); - /** - * @see DATAMONGO-770 - */ + // DATAMONGO-770 List findByFirstnameContainingIgnoreCase(String firstName); - /** - * @see DATAMONGO-870 - */ + // DATAMONGO-870 Slice findByAgeGreaterThan(int age, Pageable pageable); - /** - * @see DATAMONGO-821 - */ + // DATAMONGO-821 @Query("{ creator : { $exists : true } }") Page findByHavingCreator(Pageable page); - /** - * @see DATAMONGO-566 - */ + // DATAMONGO-566 List deleteByLastname(String lastname); - /** - * @see DATAMONGO-566 - */ + // DATAMONGO-566 Long deletePersonByLastname(String lastname); - /** - * @see DATAMONGO-566 - */ + // DATAMONGO-1997 + Optional deleteOptionalByLastname(String lastname); + + // DATAMONGO-566 @Query(value = "{ 'lastname' : ?0 }", delete = true) List removeByLastnameUsingAnnotatedQuery(String lastname); - /** - * @see DATAMONGO-566 - */ + // DATAMONGO-566 @Query(value = "{ 'lastname' : ?0 }", delete = true) Long removePersonByLastnameUsingAnnotatedQuery(String lastname); - /** - * @see DATAMONGO-893 - */ + // DATAMONGO-893 Page findByAddressIn(List
                    address, Pageable page); - /** - * @see DATAMONGO-745 - */ + // DATAMONGO-745 @Query("{firstname:{$in:?0}, lastname:?1}") Page findByCustomQueryFirstnamesAndLastname(List firstnames, String lastname, Pageable page); - /** - * @see DATAMONGO-745 - */ - @Query("{lastname:?0, address.street:{$in:?1}}") - Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, List streetNames, Pageable page); + // DATAMONGO-745 + @Query("{lastname:?0, 'address.street':{$in:?1}}") + Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, List streetNames, + Pageable page); - /** - * @see DATAMONGO-950 - */ + // DATAMONGO-950 List findTop3ByLastnameStartingWith(String lastname); - /** - * @see DATAMONGO-950 - */ + // DATAMONGO-950 Page findTop3ByLastnameStartingWith(String lastname, Pageable pageRequest); - /** - * @see DATAMONGO-1030 - */ - PersonSummary findSummaryByLastname(String lastname); + // DATAMONGO-1865 + Person findFirstBy(); // limits to 1 result if more, just return the first one + + // DATAMONGO-1865 + Person findPersonByLastnameLike(String firstname); // single person, error if more than one + + // DATAMONGO-1865 + Optional findOptionalPersonByLastnameLike(String firstname); // optional still, error when more than one + + // DATAMONGO-1030 + PersonSummaryDto findSummaryByLastname(String lastname); + + PersonSummaryWithOptional findSummaryWithOptionalByLastname(String lastname); @Query("{ ?0 : ?1 }") List findByKeyValue(String key, String value); - /** - * @see DATAMONGO-1165 - */ + // DATAMONGO-1165 @Query("{ firstname : { $in : ?0 }}") Stream findByCustomQueryWithStreamingCursorByFirstnames(List firstnames); - - /** - * @see DATAMONGO-990 - */ + + // DATAMONGO-990 @Query("{ firstname : ?#{[0]}}") List findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly(String firstname); - - /** - * @see DATAMONGO-990 - */ + + // DATAMONGO-990 @Query("{ firstname : ?#{[0]}, email: ?#{principal.email} }") List findWithSpelByFirstnameAndCurrentUserWithCustomQuery(String firstname); - - /** - * @see DATAMONGO-990 - */ + + // DATAMONGO-990 @Query("{ firstname : :#{#firstname}}") List findWithSpelByFirstnameForSpELExpressionWithParameterVariableOnly(@Param("firstname") String firstname); + + // DATAMONGO-1911 + @Query("{ uniqueId: ?0}") + Person findByUniqueId(UUID uniqueId); + + /** + * Returns the count of {@link Person} with the given firstname. Uses {@link CountQuery} annotation to define the + * query to be executed. + * + * @param firstname + * @return + */ + @CountQuery("{ 'firstname' : ?0 }") // DATAMONGO-1539 + long countByThePersonsFirstname(String firstname); + + /** + * Deletes {@link Person} entities with the given firstname. Uses {@link DeleteQuery} annotation to define the query + * to be executed. 
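As a caller-side illustration of the @CountQuery/@DeleteQuery style documented above (sketch only, not part of this patch; test data assumed):

	// The MongoDB operation comes from the annotation, not from the method-name prefix.
	long daves = repository.countByThePersonsFirstname("Dave"); // executes { 'firstname' : 'Dave' } as a count
	repository.deleteByThePersonsFirstname("Dave");             // executes the same filter as a remove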
+ * + * @param firstname + */ + @DeleteQuery("{ 'firstname' : ?0 }") // DATAMONGO-1539 + void deleteByThePersonsFirstname(String firstname); + + // DATAMONGO-1752 + Iterable findOpenProjectionBy(); + + // DATAMONGO-1752 + Iterable findClosedProjectionBy(); + + @Query(sort = "{ age : -1 }") + List findByAgeGreaterThan(int age); + + @Query(sort = "{ age : -1 }") + List findByAgeGreaterThan(int age, Sort sort); + + List findByFirstnameRegex(Pattern pattern); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'fans': { '$slice': [ ?1, ?2 ] } }") + Person findWithSliceInProjection(String id, int skip, int limit); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'firstname': { '$toUpper': '$firstname' } }") + Person findWithAggregationInProjection(String id); + + @Query(value = "{ 'shippingAddresses' : { '$elemMatch' : { 'city' : { '$eq' : 'lnz' } } } }", + fields = "{ 'shippingAddresses.$': ?0 }") + Person findWithArrayPositionInProjection(int position); + + @Query(value = "{ 'fans' : { '$elemMatch' : { '$ref' : 'user' } } }", fields = "{ 'fans.$': ?0 }") + Person findWithArrayPositionInProjectionWithDbRef(int position); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + List findAllLastnames(); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + Stream findAllLastnamesAsStream(); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Stream groupStreamByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Slice groupByLastnameAndAsSlice(String property, Pageable pageable); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property, Sort sort); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property, Pageable page); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + int sumAge(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + AggregationResults sumAgeAndReturnAggregationResultWrapper(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + AggregationResults sumAgeAndReturnAggregationResultWrapperWithConcreteType(); + + @Aggregation({ + "{ '$match' : { 'lastname' : 'Matthews'} }", + "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }" + }) + Iterable findAggregatedClosedInterfaceProjectionBy(); + + @Query(value = "{_id:?0}") + Optional findDocumentById(String id); + + @Query(value = "{ 'firstname' : ?0, 'lastname' : ?1, 'email' : ?2 , 'age' : ?3, 'sex' : ?4, " + + "'createdAt' : ?5, 'skills' : ?6, 'address.street' : ?7, 'address.zipCode' : ?8, " // + + "'address.city' : ?9, 'uniqueId' : ?10, 'credentials.username' : ?11, 'credentials.password' : ?12 }") + Person findPersonByManyArguments(String firstname, String lastname, String email, Integer age, Sex sex, + Date createdAt, List skills, String street, String zipCode, // + String city, UUID uniqueId, String username, String password); + + List findByUnwrappedUserUsername(String username); + + List findByUnwrappedUser(User user); + + int findAndUpdateViaMethodArgAllByLastname(String lastname, UpdateDefinition update); + + @Update("{ '$inc' : { 'visits' : ?1 } }") + int findAndIncrementVisitsByLastname(String lastname, int 
increment); + + @Query("{ 'lastname' : ?0 }") + @Update("{ '$inc' : { 'visits' : ?1 } }") + int updateAllByLastname(String lastname, int increment); + + @Update(pipeline = { "{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }" }) + void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); + + @Update("{ '$inc' : { 'visits' : ?#{[1]} } }") + int findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); + + @Update("{ '$push' : { 'shippingAddresses' : ?1 } }") + int findAndPushShippingAddressByEmail(String email, Address address); + + @Query("{ 'age' : null }") + Person findByQueryWithNullEqualityCheck(); + + List findBySpiritAnimal(User user); + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java index 2c5a72a03b..c407d76276 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2015 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,7 @@ /** * Integration test for {@link PersonRepository}. - * + * * @author Oliver Gierke * @author Thomas Darimont */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java index 5bba542d85..f94a52e916 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2013 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
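For context on the @Update methods declared above: the filter comes from the derived method name or the @Query annotation, the modification from @Update, and an int return value carries the modified-document count. A minimal sketch (illustrative, not part of this patch):

	// Derived filter { 'lastname' : 'Matthews' } combined with { '$inc' : { 'visits' : 5 } }.
	int modified = repository.findAndIncrementVisitsByLastname("Matthews", 5);
	System.out.println(modified + " matching persons were updated");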
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,44 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Integration test for {@link PersonRepository} for lazy loading support. - * + * * @author Thomas Darimont * @author Oliver Gierke */ @ContextConfiguration(locations = "PersonRepositoryIntegrationTests-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) public class PersonRepositoryLazyLoadingIntegrationTests { @Autowired PersonRepository repository; @Autowired MongoOperations operations; - @Before + @BeforeEach public void setUp() throws InterruptedException { repository.deleteAll(); operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class); } - /** - * @see DATAMONGO-348 - */ - @Test + @Test // DATAMONGO-348 public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() throws Exception { User thomas = new User(); @@ -64,24 +62,21 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr Person person = new Person(); person.setFirstname("Oliver"); person.setFans(Arrays.asList(thomas)); - person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); - Person oliver = repository.findOne(person.id); + Person oliver = repository.findById(person.id).get(); List fans = oliver.getFans(); assertProxyIsResolved(fans, false); User user = fans.get(0); assertProxyIsResolved(fans, true); - assertThat(user.getUsername(), is(thomas.getUsername())); + assertThat(user.getUsername()).isEqualTo(thomas.getUsername()); } - /** - * @see DATAMONGO-348 - */ - @Test - public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() throws Exception { + @Test // DATAMONGO-348 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() { User thomas = new User(); thomas.username = "Thomas"; @@ -89,29 +84,25 @@ public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnab Person person = new Person(); person.setFirstname("Oliver"); - person.setFans(Arrays.asList(thomas)); person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); - Person oliver = repository.findOne(person.id); + Person oliver = 
repository.findById(person.id).get(); List realFans = oliver.getRealFans(); assertProxyIsResolved(realFans, false); User realFan = realFans.get(0); assertProxyIsResolved(realFans, true); - assertThat(realFan.getUsername(), is(thomas.getUsername())); + assertThat(realFan.getUsername()).isEqualTo(thomas.getUsername()); realFans = oliver.getRealFans(); assertProxyIsResolved(realFans, true); realFan = realFans.get(0); - assertThat(realFan.getUsername(), is(thomas.getUsername())); + assertThat(realFan.getUsername()).isEqualTo(thomas.getUsername()); } - /** - * @see DATAMONGO-348 - */ - @Test + @Test // DATAMONGO-348 public void shouldLoadAssociationWithDbRefOnConcreteDomainClassAndLazyLoadingEnabled() throws Exception { User thomas = new User(); @@ -123,13 +114,13 @@ public void shouldLoadAssociationWithDbRefOnConcreteDomainClassAndLazyLoadingEna person.setCoworker(thomas); repository.save(person); - Person oliver = repository.findOne(person.id); + Person oliver = repository.findById(person.id).get(); User coworker = oliver.getCoworker(); assertProxyIsResolved(coworker, false); - assertThat(coworker.getUsername(), is(thomas.getUsername())); + assertThat(coworker.getUsername()).isEqualTo(thomas.getUsername()); assertProxyIsResolved(coworker, true); - assertThat(coworker.getUsername(), is(thomas.getUsername())); + assertThat(coworker.getUsername()).isEqualTo(thomas.getUsername()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryTransactionalTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryTransactionalTests.java new file mode 100644 index 0000000000..0af684b9c1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryTransactionalTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
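The transactional test that follows drives the repository through a MongoTransactionManager (MongoDB 4.0+ on a replica set is required for multi-document transactions). A minimal programmatic equivalent of the declarative setup below, assuming a MongoDatabaseFactory named dbFactory and a repository are at hand (sketch only, not part of this patch):

	MongoTransactionManager txManager = new MongoTransactionManager(dbFactory);
	TransactionTemplate txTemplate = new TransactionTemplate(txManager);

	// All repository calls inside the callback share one MongoDB transaction.
	txTemplate.executeWithoutResult(status -> repository.save(new Person("Hu", "Gibbet", 43)));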
+ */ +package org.springframework.data.mongodb.repository; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.MongoTestUtils.*; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.domain.Persistable; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; +import org.springframework.data.mongodb.test.util.AfterTransactionAssertion; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.lang.Nullable; +import org.springframework.test.annotation.Rollback; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.transaction.AfterTransaction; +import org.springframework.test.context.transaction.BeforeTransaction; +import org.springframework.transaction.annotation.Transactional; + +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; + +/** + * @author Christoph Strobl + * @currentRead Shadow's Edge - Brent Weeks + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +@Transactional(transactionManager = "txManager") +public class PersonRepositoryTransactionalTests { + + static final String DB_NAME = "repository-tx-tests"; + static @ReplSetClient MongoClient mongoClient; + + @Configuration + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = PersonRepository.class)) + static class Config extends AbstractMongoClientConfiguration { + + @Bean + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DB_NAME; + } + + @Bean + MongoTransactionManager txManager(MongoDatabaseFactory dbFactory) { + return new MongoTransactionManager(dbFactory); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.singleton(Person.class); + } + } + + @Autowired MongoClient client; + @Autowired PersonRepository repository; + @Autowired MongoTemplate template; + + Person durzo, kylar, vi; + + List all; + + List>> assertionList; + + @BeforeEach + public void setUp() { + assertionList = new 
CopyOnWriteArrayList<>(); + } + + @BeforeTransaction + public void beforeTransaction() { + + createOrReplaceCollection(DB_NAME, template.getCollectionName(Person.class), client); + createOrReplaceCollection(DB_NAME, template.getCollectionName(User.class), client); + + durzo = new Person("Durzo", "Blint", 700); + kylar = new Person("Kylar", "Stern", 21); + vi = new Person("Viridiana", "Sovari", 20); + + all = repository.saveAll(Arrays.asList(durzo, kylar, vi)); + } + + @AfterTransaction + public void verifyDbState() throws InterruptedException { + + Thread.sleep(100); + + MongoCollection collection = client.getDatabase(DB_NAME) // + .withWriteConcern(WriteConcern.MAJORITY) // + .withReadPreference(ReadPreference.primary()) // + .getCollection(template.getCollectionName(Person.class)); + + try { + assertionList.forEach(it -> { + + boolean isPresent = collection.find(Filters.eq("_id", new ObjectId(it.getId().toString()))).iterator() + .hasNext(); + + assertThat(isPresent) // + .withFailMessage(String.format("After transaction entity %s should %s.", it.getPersistable(), + it.shouldBePresent() ? "be present" : "NOT be present")) + .isEqualTo(it.shouldBePresent()); + + }); + } finally { + assertionList.clear(); + } + } + + @Rollback(false) + @Test // DATAMONGO-1920 + public void shouldHonorCommitForDerivedQuery() { + + repository.removePersonByLastnameUsingAnnotatedQuery(durzo.getLastname()); + + assertAfterTransaction(durzo).isNotPresent(); + } + + @Rollback(false) + @Test // DATAMONGO-1920 + public void shouldHonorCommit() { + + Person hu = new Person("Hu", "Gibbet", 43); + + repository.save(hu); + + assertAfterTransaction(hu).isPresent(); + } + + @Test // DATAMONGO-1920 + public void shouldHonorRollback() { + + Person hu = new Person("Hu", "Gibbet", 43); + + repository.save(hu); + + assertAfterTransaction(hu).isNotPresent(); + } + + @Test // DATAMONGO-2490 + public void shouldBeAbleToReadDbRefDuringTransaction() { + + User rat = new User(); + rat.setUsername("rat"); + + template.save(rat); + + Person elene = new Person("Elene", "Cromwyll", 18); + elene.setCoworker(rat); + + repository.save(elene); + + Optional loaded = repository.findById(elene.getId()); + assertThat(loaded).isPresent(); + assertThat(loaded.get().getCoworker()).isNotNull(); + assertThat(loaded.get().getCoworker().getUsername()).isEqualTo(rat.getUsername()); + } + + private AfterTransactionAssertion assertAfterTransaction(Person person) { + + AfterTransactionAssertion assertion = new AfterTransactionAssertion<>(new Persistable() { + + @Nullable + @Override + public Object getId() { + return person.id; + } + + @Override + public boolean isNew() { + return person.id != null; + } + + @Override + public String toString() { + return getId() + " - " + person.toString(); + } + }); + + assertionList.add(assertion); + return assertion; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java index d7caf60693..dd6378fc90 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +18,9 @@ /** * @author Oliver Gierke */ -public class PersonSummary { +public interface PersonSummary { - String firstname; - String lastname; + String getFirstname(); + + String getLastname(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java new file mode 100644 index 0000000000..621eb3e647 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java @@ -0,0 +1,55 @@ +/* + * Copyright 2014-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Objects; + +/** + * @author Oliver Gierke + */ +public class PersonSummaryDto { + + String firstname; + String lastname; + + public PersonSummaryDto() {} + + public PersonSummaryDto(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonSummaryDto that = (PersonSummaryDto) o; + return Objects.equals(firstname, that.firstname) && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(firstname, lastname); + } + + public String toString() { + return "PersonSummaryDto(firstname=" + this.firstname + ", lastname=" + this.lastname + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java new file mode 100644 index 0000000000..317aea81bd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java @@ -0,0 +1,24 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
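PersonSummary is now a closed interface projection, so Spring Data only fetches the properties the interface exposes; PersonSummaryWithOptional (declared just below) additionally wraps them in Optional. Illustrative use, not part of this patch:

	PersonSummaryWithOptional summary = repository.findSummaryWithOptionalByLastname("Matthews");
	summary.getFirstname().ifPresent(System.out::println); // Optional.empty() if the document lacks the field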
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository;
+
+import java.util.Optional;
+
+public interface PersonSummaryWithOptional {
+
+	Optional<Address>
                    getAddress(); + Optional getFirstname(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java new file mode 100644 index 0000000000..e89dec21bd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java @@ -0,0 +1,925 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.data.domain.Sort.Direction.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; +import org.reactivestreams.Publisher; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.Circle; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Person.Sex; +import 
org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.DirtiesState; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.ProvidesState; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +/** + * Test for {@link ReactiveMongoRepository} query methods. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Jens Schauder + */ +@ExtendWith({ SpringExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class ReactiveMongoRepositoryTests implements DirtiesStateExtension.StateFunctions { + + private static final int PERSON_COUNT = 7; + @Autowired ReactiveMongoTemplate template; + + @Autowired ReactivePersonRepository repository; + @Autowired ReactiveContactRepository contactRepository; + @Autowired ReactiveCappedCollectionRepository cappedRepository; + + private Person dave, oliver, carter, boyd, stefan, leroi, alicia; + private QPerson person = QPerson.person; + + @Configuration + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + protected String getDatabaseName() { + return "reactive"; + } + + @Bean + ReactiveMongoRepositoryFactory factory(ReactiveMongoOperations template, BeanFactory beanFactory) { + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(template); + factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); + factory.setBeanClassLoader(beanFactory.getClass().getClassLoader()); + factory.setBeanFactory(beanFactory); + factory.setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + + return factory; + } + + @Bean + ReactivePersonRepository reactivePersonRepository(ReactiveMongoRepositoryFactory factory) { + return factory.getRepository(ReactivePersonRepository.class); + } + + @Bean + ReactiveContactRepository reactiveContactRepository(ReactiveMongoRepositoryFactory factory) { + return factory.getRepository(ReactiveContactRepository.class); + } + + @Bean + ReactiveCappedCollectionRepository reactiveCappedCollectionRepository(ReactiveMongoRepositoryFactory factory) { + return factory.getRepository(ReactiveCappedCollectionRepository.class); + } + + @Override + protected boolean autoIndexCreation() { + return true; + } + } + + @Override + public void clear() { + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + } + + @Override + public void setupState() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave = new Person("Dave", "Matthews", 42); + oliver = new Person("Oliver August", "Matthews", 4); + carter = new Person("Carter", "Beauford", 49); + carter.setSkills(Arrays.asList("Drums", "percussion", "vocals")); + try { + Thread.sleep(10); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + boyd = new Person("Boyd", "Tinsley", 45); + 
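		// Aside (illustrative, not part of this patch): the .as(StepVerifier::create) idiom used
		// throughout this class subscribes to the Publisher and asserts its signals, e.g.
		//   Flux.just(1, 2).as(StepVerifier::create).expectNext(1, 2).verifyComplete();
		// Nothing executes against MongoDB until StepVerifier subscribes.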
boyd.setSkills(Arrays.asList("Violin", "Electric Violin", "Viola", "Mandolin", "Vocals", "Guitar")); + stefan = new Person("Stefan", "Lessard", 34); + leroi = new Person("Leroi", "Moore", 41); + + alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); + + repository.saveAll(Arrays.asList(oliver, carter, boyd, stefan, leroi, alicia, dave)).as(StepVerifier::create) // + .expectNextCount(PERSON_COUNT) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindByLastName() { + repository.findByLastname(dave.getLastname()).as(StepVerifier::create).expectNextCount(2).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindOneByLastName() { + repository.findOneByLastname(carter.getLastname()).as(StepVerifier::create).expectNext(carter).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindOneByPublisherOfLastName() { + repository.findByLastname(Mono.just(carter.getLastname())).as(StepVerifier::create).expectNext(carter) + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindByPublisherOfLastNameIn() { + repository.findByLastnameIn(Flux.just(carter.getLastname(), dave.getLastname())).as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindByPublisherOfLastNameInAndAgeGreater() { + + repository.findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), dave.getLastname()), 41) + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindUsingPublishersInStringQuery() { + + repository.findStringQuery(Flux.just("Beauford", "Matthews"), Mono.just(41)).as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldFindByLastNameAndSort() { + + repository.findByLastname("Matthews", Sort.by(ASC, "age")).as(StepVerifier::create) // + .expectNext(oliver, dave) // + .verifyComplete(); + + repository.findByLastname("Matthews", Sort.by(DESC, "age")).as(StepVerifier::create) // + .expectNext(dave, oliver) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void shouldUseTailableCursor() throws Exception { + + template.dropCollection(Capped.class) // + .then(template.createCollection(Capped.class, // + CollectionOptions.empty().size(1000).maxDocuments(100).capped())) + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue documents = new LinkedBlockingDeque<>(100); + + Disposable disposable = cappedRepository.findByKey("value").doOnNext(documents::add).subscribe(); + + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + assertThat(documents).isEmpty(); + + disposable.dispose(); + } + + @Test // DATAMONGO-1444 + void shouldUseTailableCursorWithProjection() throws Exception { + + template.dropCollection(Capped.class) // + .then(template.createCollection(Capped.class, // + CollectionOptions.empty().size(1000).maxDocuments(100).capped())) + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue documents = new LinkedBlockingDeque<>(100); + + Disposable disposable = 
cappedRepository.findProjectionByKey("value").doOnNext(documents::add).subscribe(); + + CappedProjection projection1 = documents.poll(5, TimeUnit.SECONDS); + assertThat(projection1).isNotNull(); + assertThat(projection1.getRandom()).isNotEqualTo(0); + + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + CappedProjection projection2 = documents.poll(5, TimeUnit.SECONDS); + assertThat(projection2).isNotNull(); + assertThat(projection2.getRandom()).isNotEqualTo(0); + + assertThat(documents).isEmpty(); + + disposable.dispose(); + } + + @Test // DATAMONGO-2080 + void shouldUseTailableCursorWithDtoProjection() { + + template.dropCollection(Capped.class) // + .then(template.createCollection(Capped.class, // + CollectionOptions.empty().size(1000).maxDocuments(100).capped())) // + .as(StepVerifier::create).expectNextCount(1) // + .verifyComplete(); + + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + cappedRepository.findDtoProjectionByKey("value").as(StepVerifier::create).expectNextCount(1).thenCancel().verify(); + } + + @Test // GH-4308 + void appliesScrollingCorrectly() { + + Window scroll = repository + .findTop2ByLastnameLikeOrderByFirstnameAscLastnameAsc("*", ScrollPosition.keyset()).block(); + + assertThat(scroll).hasSize(2); + assertThat(scroll).containsSequence(alicia, boyd); + assertThat(scroll.isLast()).isFalse(); + + Window nextScroll = repository + .findTop2ByLastnameLikeOrderByFirstnameAscLastnameAsc("*", scroll.positionAt(scroll.size() - 1)).block(); + + assertThat(nextScroll).hasSize(2); + assertThat(nextScroll).containsSequence(carter, dave); + assertThat(nextScroll.isLast()).isFalse(); + } + + @Test // GH-4308 + void appliesScrollingWithProjectionCorrectly() { + + repository + .findCursorProjectionByLastnameLike("*", PageRequest.of(0, 2, Sort.by(Direction.ASC, "firstname", "lastname"))) // + .flatMapIterable(Function.identity()) // + .as(StepVerifier::create) // + .expectNext(new PersonSummaryDto(alicia.getFirstname(), alicia.getLastname())) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + @DirtiesState + void findsPeopleByLocationWithinCircle() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170)).as(StepVerifier::create) // + .expectNext(dave) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + @DirtiesState + void findsPeopleByPageableLocationWithinCircle() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170), // + PageRequest.of(0, 10)).as(StepVerifier::create) // + .expectNext(dave) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + @DirtiesState + void findsPeopleGeoresultByLocationWithinBox() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + repository.findByLocationNear(new Point(-73.99, 40.73), // + new Distance(2000, Metrics.KILOMETERS)).as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getDistance().getValue()).isCloseTo(1, offset(1d)); + 
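					// Aside (illustrative, not part of this patch): findByLocationNear yields GeoResult<Person>
					// wrappers whose getDistance() is computed by MongoDB in the metric requested above
					// (Metrics.KILOMETERS), hence the tolerance-based assertion.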
assertThat(actual.getContent()).isEqualTo(dave); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + @DirtiesState + void findsPeoplePageableGeoresultByLocationWithinBox() throws InterruptedException { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + // Allow for index creation + Thread.sleep(500); + + repository.findByLocationNear(new Point(-73.99, 40.73), // + new Distance(2000, Metrics.KILOMETERS), // + PageRequest.of(0, 10)).as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.getDistance().getValue()).isCloseTo(1, offset(1d)); + assertThat(actual.getContent()).isEqualTo(dave); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + @DirtiesState + void findsPeopleByLocationWithinBox() throws InterruptedException { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + // Allow for index creation + Thread.sleep(500); + + repository.findPersonByLocationNear(new Point(-73.99, 40.73), // + new Distance(2000, Metrics.KILOMETERS)).as(StepVerifier::create) // + .expectNext(dave) // + .verifyComplete(); + } + + @Test // DATAMONGO-1865 + void shouldErrorOnFindOneWithNonUniqueResult() { + repository.findOneByLastname(dave.getLastname()).as(StepVerifier::create) + .expectError(IncorrectResultSizeDataAccessException.class).verify(); + } + + @Test // DATAMONGO-1865 + void shouldReturnFirstFindFirstWithMoreResults() { + repository.findFirstByLastname(dave.getLastname()).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-2030 + void shouldReturnExistsBy() { + repository.existsByLastname(dave.getLastname()).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1979 + void findAppliesAnnotatedSort() { + + repository.findByAgeGreaterThan(40).collectList().as(StepVerifier::create).consumeNextWith(result -> { + assertThat(result).containsSequence(carter, boyd, dave, leroi); + }).verifyComplete(); + } + + @Test // DATAMONGO-1979 + void findWithSortOverwritesAnnotatedSort() { + + repository.findByAgeGreaterThan(40, Sort.by(Direction.ASC, "age")).collectList().as(StepVerifier::create) + .consumeNextWith(result -> { + assertThat(result).containsSequence(leroi, dave, boyd, carter); + }).verifyComplete(); + } + + @Test // DATAMONGO-2181 + @ProvidesState + void considersRepositoryCollectionName() { + + contactRepository.deleteAll() // + .as(StepVerifier::create) // + .verifyComplete(); + + leroi.id = null; + boyd.id = null; + contactRepository.saveAll(Arrays.asList(leroi, boyd)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + repository.count() // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + + contactRepository.count() // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() { + + repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))) // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual).containsExactlyInAnyOrder(dave, carter); + }).verifyComplete(); + } + + @Test // GH-4308 + void shouldScrollWithId() { + + List> capture = new ArrayList<>(); + repository.findBy(person.id.in(Arrays.asList(dave.id, carter.id, boyd.id)), // + q -> 
q.limit(2).sortBy(Sort.by("firstname")).scroll(ScrollPosition.keyset())) // + .as(StepVerifier::create) // + .recordWith(() -> capture).assertNext(actual -> { + assertThat(actual).hasSize(2).containsExactly(boyd, carter); + }).verifyComplete(); + + Window scroll = capture.get(0); + + repository.findBy(person.id.in(Arrays.asList(dave.id, carter.id, boyd.id)), // + q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(scroll.positionAt(scroll.size() - 1))) // + .as(StepVerifier::create) // + .recordWith(() -> capture).assertNext(actual -> { + assertThat(actual).containsOnly(dave); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void findListOfSingleValue() { + + repository.findAllLastnames() // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual).contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews"); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithPlaceholderValue() { + + repository.groupByLastnameAnd("firstname") // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual) // + .contains(new PersonAggregate("Lessard", "Stefan")) // + .contains(new PersonAggregate("Keys", "Alicia")) // + .contains(new PersonAggregate("Tinsley", "Boyd")) // + .contains(new PersonAggregate("Beauford", "Carter")) // + .contains(new PersonAggregate("Moore", "Leroi")) // + .contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithSort() { + + repository.groupByLastnameAnd("firstname", Sort.by("lastname")) // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual) // + .containsSequence( // + new PersonAggregate("Beauford", "Carter"), // + new PersonAggregate("Keys", "Alicia"), // + new PersonAggregate("Lessard", "Stefan"), // + new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August")), // + new PersonAggregate("Moore", "Leroi"), // + new PersonAggregate("Tinsley", "Boyd")); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithPageable() { + + repository.groupByLastnameAnd("firstname", PageRequest.of(1, 2, Sort.by("lastname"))) // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual) // + .containsExactly( // + new PersonAggregate("Lessard", "Stefan"), // + new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithSingleSimpleResult() { + + repository.sumAge() // + .as(StepVerifier::create) // + .expectNext(245L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsReturnType() { + + repository.sumAgeAndReturnRawResult() // + .as(StepVerifier::create) // + .expectNext(new org.bson.Document("_id", null).append("total", 245)) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsReturnTypeAndProjection() { + + repository.sumAgeAndReturnSumWrapper() // + .as(StepVerifier::create) // + .expectNext(new SumAge(245L)) // + .verifyComplete(); + } + + @Test // DATAMONGO-2374 + void findsWithNativeProjection() { + + repository.findDocumentById(dave.getId()) // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + assertThat(it).containsEntry("firstname", dave.getFirstname()).containsEntry("lastname", 
dave.getLastname()); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsMap() { + + repository.sumAgeAndReturnSumAsMap() // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + assertThat(it).isInstanceOf(Map.class); + }).verifyComplete(); + } + + @Test // GH-4839 + void annotatedAggregationWithAggregationResultAsClosedInterfaceProjection() { + + repository.findAggregatedClosedInterfaceProjectionBy() // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + assertThat(it.getFirstname()).isIn(dave.getFirstname(), oliver.getFirstname()); + assertThat(it.getLastname()).isEqualTo(dave.getLastname()); + }).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-2403 + @DirtiesState + void annotatedAggregationExtractingSimpleValueIsEmptyForEmptyDocument() { + + Person p = new Person("project-on-lastanme", null); + repository.save(p).then().as(StepVerifier::create).verifyComplete(); + + repository.projectToLastnameAndRemoveId(p.getFirstname()) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-2403 + @DirtiesState + void annotatedAggregationSkipsEmptyDocumentsWhenExtractingSimpleValue() { + + String firstname = "project-on-lastanme"; + + Person p1 = new Person(firstname, null); + p1.setEmail("p1@example.com"); + Person p2 = new Person(firstname, "lastname"); + p2.setEmail("p2@example.com"); + Person p3 = new Person(firstname, null); + p3.setEmail("p3@example.com"); + + repository.saveAll(Arrays.asList(p1, p2, p3)).then().as(StepVerifier::create).verifyComplete(); + + repository.projectToLastnameAndRemoveId(firstname) // + .as(StepVerifier::create) // + .expectNext("lastname").verifyComplete(); + } + + @Test // DATAMONGO-2406 + @DirtiesState + void deleteByShouldHandleVoidResultTypeCorrectly() { + + repository.deleteByLastname(dave.getLastname()) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.find(query(where("lastname").is(dave.getLastname())), Person.class) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1997 + @DirtiesState + void deleteByShouldAllowDeletedCountAsResult() { + + repository.deleteCountByLastname(dave.getLastname()) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1997 + @DirtiesState + void deleteByShouldAllowSingleDocumentRemovalCorrectly() { + + repository.deleteSinglePersonByLastname(carter.getLastname()) // + .as(StepVerifier::create) // + .expectNext(carter) // + .verifyComplete(); + + repository.deleteSinglePersonByLastname("dorfuaeB") // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-2652 + @DirtiesState + void deleteAllById() { + + repository.deleteAllById(Arrays.asList(carter.id, dave.id)) // + .as(StepVerifier::create) // + .verifyComplete(); + + repository.count().as(StepVerifier::create) // + .expectNext(PERSON_COUNT - 2L) // + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElements() { + repository.findAndUpdateViaMethodArgAllByLastname("Matthews", new Update().inc("visits", 1337)) + .as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void mixAnnotatedUpdateWithAnnotatedQuery() { + + repository.updateAllByLastname("Matthews", 1337).as(StepVerifier::create).expectNext(2L).verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test 
// GH-2107 + @DirtiesState + void annotatedUpdateWithSpELIsAppliedCorrectly() { + + repository.findAndIncrementVisitsUsingSpELByLastname("Matthews", 1337).as(StepVerifier::create).expectNext(2L) + .verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void annotatedAggregationUpdateIsAppliedCorrectly() { + + repository.findAndIncrementVisitsViaPipelineByLastname("Matthews", 1337).as(StepVerifier::create).verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElementsWithVoidReturn() { + + repository.findAndIncrementVisitsByLastname("Matthews", 1337).as(StepVerifier::create).expectNext(2L) + .verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void allowsToUseComplexTypesInUpdate() { + + Address address = new Address("1007 Mountain Drive", "53540", "Gotham"); + + repository.findAndPushShippingAddressByEmail(dave.getEmail(), address) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + repository.findById(dave.getId()).map(Person::getShippingAddresses).as(StepVerifier::create) + .consumeNextWith(it -> assertThat(it).containsExactly(address)).verifyComplete(); + } + + interface ReactivePersonRepository + extends ReactiveMongoRepository, ReactiveQuerydslPredicateExecutor { + + Flux findByLastname(String lastname); + + Mono findOneByLastname(String lastname); + + Mono findOneProjectedByLastname(String lastname); + + Mono findByLastname(Publisher lastname); + + Flux findByLastnameIn(Publisher lastname); + + Flux findByLastname(String lastname, Sort sort); + + Flux findByLastnameInAndAgeGreaterThan(Flux lastname, int age); + + @Query("{ lastname: { $in: ?0 }, age: { $gt : ?1 } }") + Flux findStringQuery(Flux lastname, Mono age); + + Mono> findTop2ByLastnameLikeOrderByFirstnameAscLastnameAsc(String lastname, + ScrollPosition scrollPosition); + + Mono> findCursorProjectionByLastnameLike(String lastname, Pageable pageable); + + Flux findByLocationWithin(Circle circle); + + Flux findByLocationWithin(Circle circle, Pageable pageable); + + Flux> findByLocationNear(Point point, Distance maxDistance); + + Flux> findByLocationNear(Point point, Distance maxDistance, Pageable pageable); + + Flux findPersonByLocationNear(Point point, Distance maxDistance); + + Mono existsByLastname(String lastname); + + Mono findFirstByLastname(String lastname); + + @Query(sort = "{ age : -1 }") + Flux findByAgeGreaterThan(int age); + + @Query(sort = "{ age : -1 }") + Flux findByAgeGreaterThan(int age, Sort sort); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + Flux findAllLastnames(); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Flux groupByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Flux groupByLastnameAnd(String property, Sort sort); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Flux groupByLastnameAnd(String property, Pageable page); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } 
} }") + Mono sumAge(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAgeAndReturnRawResult(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAgeAndReturnSumWrapper(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAgeAndReturnSumAsMap(); + + @Aggregation({ "{ '$match' : { 'lastname' : 'Matthews'} }", + "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }" }) + Flux findAggregatedClosedInterfaceProjectionBy(); + + @Aggregation( + pipeline = { "{ '$match' : { 'firstname' : '?0' } }", "{ '$project' : { '_id' : 0, 'lastname' : 1 } }" }) + Mono projectToLastnameAndRemoveId(String firstname); + + @Query(value = "{_id:?0}") + Mono findDocumentById(String id); + + Mono deleteByLastname(String lastname); + + Mono deleteCountByLastname(String lastname); + + Mono deleteSinglePersonByLastname(String lastname); + + Mono findAndUpdateViaMethodArgAllByLastname(String lastname, UpdateDefinition update); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : ?1 } }") + Mono findAndIncrementVisitsByLastname(String lastname, int increment); + + @Query("{ 'lastname' : ?0 }") + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : ?1 } }") + Mono updateAllByLastname(String lastname, int increment); + + @org.springframework.data.mongodb.repository.Update( + pipeline = { "{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }" }) + Mono findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : ?#{[1]} } }") + Mono findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); + + @org.springframework.data.mongodb.repository.Update("{ '$push' : { 'shippingAddresses' : ?1 } }") + Mono findAndPushShippingAddressByEmail(String email, Address address); + } + + interface ReactiveContactRepository extends ReactiveMongoRepository {} + + interface ReactiveCappedCollectionRepository extends Repository { + + @Tailable + Flux findByKey(String key); + + @Tailable + Flux findProjectionByKey(String key); + + @Tailable + Flux findDtoProjectionByKey(String key); + } + + @Document + static class Capped { + + String id; + String key; + double random; + + public Capped() {} + + Capped(String key, double random) { + this.key = key; + this.random = random; + } + } + + interface CappedProjection { + double getRandom(); + } + + static class DtoProjection { + + String id; + double unknown; + + public String getId() { + return this.id; + } + + public double getUnknown() { + return this.unknown; + } + + public void setId(String id) { + this.id = id; + } + + public void setUnknown(double unknown) { + this.unknown = unknown; + } + + public String toString() { + return "ReactiveMongoRepositoryTests.DtoProjection(id=" + this.getId() + ", unknown=" + this.getUnknown() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java new file mode 100644 index 0000000000..878d3974c0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java @@ -0,0 +1,36 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository; + +import reactor.core.publisher.Flux; + +/** + * Sample reactive repository managing {@link Person} entities. + * + * @author Mark Paluch + */ +public interface ReactivePersonRepository extends ReactiveMongoRepository<Person, String> { + + /** + * Returns all {@link Person}s with the given lastname. + * + * @param lastname + * @return + */ + Flux<Person> findByLastname(String lastname); + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java index 44ca49ad5e..47594aa985 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,7 +33,7 @@ public interface RedeclaringRepositoryMethodsRepository extends MongoRepository< /** * Should only find users with the firstname 'Oliver'. - + * * @param page * @return */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java index 3447d7d6a7..837b6801ea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,11 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; @@ -28,32 +27,27 @@ /** * @author Thomas Darimont + * @author Mark Paluch */ @ContextConfiguration("config/MongoNamespaceIntegrationTests-context.xml") -public class RedeclaringRepositoryMethodsTests extends AbstractPersonRepositoryIntegrationTests { +class RedeclaringRepositoryMethodsTests extends AbstractPersonRepositoryIntegrationTests { @Autowired RedeclaringRepositoryMethodsRepository repository; - /** - * @see DATAMONGO-760 - */ - @Test - public void adjustedWellKnownPagedFindAllMethodShouldReturnOnlyTheUserWithFirstnameOliverAugust() { + @Test // DATAMONGO-760 + void adjustedWellKnownPagedFindAllMethodShouldReturnOnlyTheUserWithFirstnameOliverAugust() { - Page<Person> page = repository.findAll(new PageRequest(0, 2)); + Page<Person> page = repository.findAll(PageRequest.of(0, 2)); - assertThat(page.getNumberOfElements(), is(1)); - assertThat(page.getContent().get(0).getFirstname(), is(oliver.getFirstname())); + assertThat(page.getNumberOfElements()).isEqualTo(1); + assertThat(page.getContent().get(0).getFirstname()).isEqualTo(oliver.getFirstname()); } - /** - * @see DATAMONGO-760 - */ - @Test - public void adjustedWllKnownFindAllMethodShouldReturnAnEmptyList() { + @Test // DATAMONGO-760 + void adjustedWllKnownFindAllMethodShouldReturnAnEmptyList() { List<Person> result = repository.findAll(); - assertThat(result.isEmpty(), is(true)); + assertThat(result.isEmpty()).isTrue(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java index bd2f523ef8..320f2206b9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,48 +15,51 @@ */ package org.springframework.data.mongodb.repository; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.bson.Document; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; import com.mongodb.MongoException; +import com.mongodb.client.MongoCollection; /** * Integration test for index creation for query methods. - * + * * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class RepositoryIndexCreationIntegrationTests { - @Autowired - MongoOperations operations; + @Autowired MongoOperations operations; - @Autowired - PersonRepository repository; + @Autowired PersonRepository repository; @After public void tearDown() { operations.execute(Person.class, new CollectionCallback<Void>() { - public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException { + public Void doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException { + + List<Document> indexes = new ArrayList<>(); + collection.listIndexes(Document.class).into(indexes); - for (DBObject index : collection.getIndexInfo()) { + for (Document index : indexes) { String indexName = index.get("name").toString(); if (indexName.startsWith("find")) { collection.dropIndex(indexName); @@ -75,6 +78,7 @@ public void testname() { assertHasIndexForField(indexInfo, "lastname"); assertHasIndexForField(indexInfo, "firstname"); + assertHasIndexForField(indexInfo, "add"); } private static void assertHasIndexForField(List<IndexInfo> indexInfo, String... fields) { @@ -85,6 +89,6 @@ private static void assertHasIndexForField(List<IndexInfo> indexInfo, String... } } - fail(String.format("Did not find index for field(s) %s in %s!", fields, indexInfo)); + fail(String.format("Did not find index for field(s) %s in %s", fields, indexInfo)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java index 70becf6abe..4f28d2efb9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors.
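The tear-down above migrates from the legacy DBCollection API to the modern driver's index listing. A condensed sketch of that idiom, assuming a MongoCollection<Document> named 'collection' is at hand:

// Materialize the index metadata, then drop indexes created for derived finders.
List<Document> indexes = new ArrayList<>();
collection.listIndexes(Document.class).into(indexes);

for (Document index : indexes) {
	String indexName = index.get("name").toString();
	if (indexName.startsWith("find")) {
		collection.dropIndex(indexName);
	}
}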
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,15 +18,14 @@ import java.util.Collections; import java.util.Map; -import org.springframework.data.repository.query.spi.EvaluationContextExtension; -import org.springframework.data.repository.query.spi.EvaluationContextExtensionSupport; +import org.springframework.data.spel.spi.EvaluationContextExtension; /** * A sample implementation of a custom {@link EvaluationContextExtension}. - * + * * @author Thomas Darimont */ -public class SampleEvaluationContextExtension extends EvaluationContextExtensionSupport { +public class SampleEvaluationContextExtension implements EvaluationContextExtension { @Override public String getExtensionId() { @@ -45,6 +44,7 @@ public static class SampleSecurityContextHolder { private static ThreadLocal auth = new ThreadLocal() { + @Override protected SampleAuthentication initialValue() { return new SampleAuthentication(new SampleUser(-1, "anonymous")); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java new file mode 100644 index 0000000000..44235c54ef --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java @@ -0,0 +1,834 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
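The tests that follow lean on one reactor-test idiom throughout: as(StepVerifier::create) wraps the sequence in a StepVerifier, which subscribes when a verify* method is called and asserts the emitted signals. A minimal self-contained sketch:

// Equivalent to StepVerifier.create(Flux.just(...)).
Flux.just("Dave", "Oliver August")
		.as(StepVerifier::create)
		.expectNext("Dave", "Oliver August") // emitted values, in order
		.verifyComplete(); // completion signal; this call triggers the subscription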
+ */ +package org.springframework.data.mongodb.repository; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.domain.ExampleMatcher.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.Objects; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.RepeatedTest; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanClassLoaderAware; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.ReactiveMongoTransactionManager; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.lang.Nullable; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.util.ClassUtils; + +/** + * Tests for {@link ReactiveMongoRepository}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Ruben J Garcia + * @author Clément Petit + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware { + + @Autowired private ReactiveMongoTemplate template; + + private ReactiveMongoRepositoryFactory factory; + private ClassLoader classLoader; + private BeanFactory beanFactory; + private ReactivePersonRepository repository; + private ReactiveImmutablePersonRepository immutableRepository; + + private ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia; + private ImmutableReactivePerson keith, james, mariah; + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader == null ? 
ClassUtils.getDefaultClassLoader() : classLoader; + } + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = beanFactory; + } + + @BeforeEach + void setUp() { + + factory = new ReactiveMongoRepositoryFactory(template); + factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); + factory.setBeanClassLoader(classLoader); + factory.setBeanFactory(beanFactory); + factory.setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + + repository = factory.getRepository(ReactivePersonRepository.class); + immutableRepository = factory.getRepository(ReactiveImmutablePersonRepository.class); + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + immutableRepository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave = new ReactivePerson("Dave", "Matthews", 42); + oliver = new ReactivePerson("Oliver August", "Matthews", 4); + carter = new ReactivePerson("Carter", "Beauford", 49); + boyd = new ReactivePerson("Boyd", "Tinsley", 45); + stefan = new ReactivePerson("Stefan", "Lessard", 34); + leroi = new ReactivePerson("Leroi", "Moore", 41); + alicia = new ReactivePerson("Alicia", "Keys", 30); + keith = new ImmutableReactivePerson(null, "Keith", "Urban", 53); + james = new ImmutableReactivePerson(null, "James", "Arthur", 33); + mariah = new ImmutableReactivePerson(null, "Mariah", "Carey", 51); + + repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).as(StepVerifier::create) // + .expectNextCount(7) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void existsByIdShouldReturnTrueForExistingObject() { + repository.existsById(dave.id).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void existsByIdShouldReturnFalseForAbsentObject() { + repository.existsById("unknown").as(StepVerifier::create).expectNext(false).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void existsByMonoOfIdShouldReturnTrueForExistingObject() { + repository.existsById(Mono.just(dave.id)).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1712 + void existsByFluxOfIdShouldReturnTrueForExistingObject() { + repository.existsById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void existsByEmptyMonoOfIdShouldReturnEmptyMono() { + repository.existsById(Mono.empty()).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findByIdShouldReturnObject() { + repository.findById(dave.id).as(StepVerifier::create).expectNext(dave).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findByIdShouldCompleteWithoutValueForAbsentObject() { + repository.findById("unknown").as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findByIdByMonoOfIdShouldReturnTrueForExistingObject() { + repository.findById(Mono.just(dave.id)).as(StepVerifier::create).expectNext(dave).verifyComplete(); + } + + @Test // DATAMONGO-1712 + void findByIdByFluxOfIdShouldReturnTrueForExistingObject() { + repository.findById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).expectNext(dave).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findByIdByEmptyMonoOfIdShouldReturnEmptyMono() { + repository.findById(Mono.empty()).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findAllShouldReturnAllResults() { + 
repository.findAll().as(StepVerifier::create).expectNextCount(7).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findAllByIterableOfIdShouldReturnResults() { + repository.findAllById(Arrays.asList(dave.id, boyd.id)).as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findAllByPublisherOfIdShouldReturnResults() { + repository.findAllById(Flux.just(dave.id, boyd.id)).as(StepVerifier::create).expectNextCount(2).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findAllByEmptyPublisherOfIdShouldReturnResults() { + repository.findAllById(Flux.empty()).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void findAllWithSortShouldReturnResults() { + + repository.findAll(Sort.by(new Order(Direction.ASC, "age"))).as(StepVerifier::create) // + .expectNextCount(7) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void countShouldReturnNumberOfRecords() { + repository.count().as(StepVerifier::create).expectNext(7L).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void insertEntityShouldInsertEntity() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + repository.insert(person).as(StepVerifier::create).expectNext(person).verifyComplete(); + + assertThat(person.getId()).isNotNull(); + } + + @Test // DATAMONGO-1444 + void insertShouldDeferredWrite() { + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + repository.insert(person); + + assertThat(person.getId()).isNull(); + } + + @Test // DATAMONGO-1444 + void insertIterableOfEntitiesShouldInsertEntity() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + repository.insert(Arrays.asList(dave, oliver, boyd)).as(StepVerifier::create) // + .expectNext(dave, oliver, boyd) // + .verifyComplete(); + + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); + } + + @Test // DATAMONGO-1444 + void insertPublisherOfEntitiesShouldInsertEntity() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + repository.insert(Flux.just(dave, oliver, boyd)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); + } + + @Test // DATAMONGO-1444 + void saveEntityShouldUpdateExistingEntity() { + + dave.setFirstname("Hello, Dave"); + dave.setLastname("Bowman"); + + repository.save(dave).as(StepVerifier::create).expectNext(dave).verifyComplete(); + + repository.findByLastname("Matthews").as(StepVerifier::create).expectNext(oliver).verifyComplete(); + + repository.findById(dave.id).as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getFirstname()).isEqualTo(dave.getFirstname()); + assertThat(actual.getLastname()).isEqualTo(dave.getLastname()); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void saveEntityShouldInsertNewEntity() { + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + repository.save(person).as(StepVerifier::create).expectNext(person).verifyComplete(); + + repository.findById(person.id).as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getFirstname()).isEqualTo(person.getFirstname()); + 
assertThat(actual.getLastname()).isEqualTo(person.getLastname()); + }).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void saveIterableOfNewEntitiesShouldInsertEntity() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + repository.saveAll(Arrays.asList(dave, oliver, boyd)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); + } + + @Test // DATAMONGO-1444 + void saveIterableOfMixedEntitiesShouldInsertEntity() { + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + dave.setFirstname("Hello, Dave"); + dave.setLastname("Bowman"); + + repository.saveAll(Arrays.asList(person, dave)).as(StepVerifier::create).expectNextCount(2).verifyComplete(); + + repository.findById(dave.id).as(StepVerifier::create).expectNext(dave).verifyComplete(); + + assertThat(person.id).isNotNull(); + repository.findById(person.id).as(StepVerifier::create).expectNext(person).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void savePublisherOfEntitiesShouldInsertEntity() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + repository.saveAll(Flux.just(dave, oliver, boyd)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); + } + + @RepeatedTest(10) // GH-4838 + @EnableIfReplicaSetAvailable + void transactionalSaveAllForStuffThatIsConsideredAnUpdateOfExistingData() { + + ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionalOperator.create(txmgr, TransactionDefinition.withDefaults()).execute(callback -> { + return repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); + }).as(StepVerifier::create) // + .expectNext(oliver, dave, carter, boyd, stefan, leroi, alicia).verifyComplete(); + } + + @RepeatedTest(10) // GH-4838 + @EnableIfReplicaSetAvailable + void transactionalSaveAllWithPublisherForStuffThatIsConsideredAnUpdateOfExistingData() { + + ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory()); + Flux personFlux = Flux.fromStream(Stream.of(oliver, dave, carter, boyd, stefan, leroi, alicia)); + + TransactionalOperator.create(txmgr, TransactionDefinition.withDefaults()).execute(callback -> { + return repository.saveAll(personFlux); + }).as(StepVerifier::create) // + .expectNextCount(7).verifyComplete(); + } + + @Test // GH-3609 + void savePublisherOfImmutableEntitiesShouldInsertEntity() { + + immutableRepository.deleteAll().as(StepVerifier::create).verifyComplete(); + + immutableRepository.saveAll(Flux.just(keith)).as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.id).isNotNull(); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1444 + void deleteAllShouldRemoveEntities() { + + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + repository.findAll().as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void deleteByIdShouldRemoveEntity() { + + repository.deleteById(dave.id).as(StepVerifier::create).verifyComplete(); + + repository.findById(dave.id).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1712 
+ void deleteByIdUsingMonoShouldRemoveEntity() { + + repository.deleteById(Mono.just(dave.id)).as(StepVerifier::create).verifyComplete(); + + repository.existsById(dave.id).as(StepVerifier::create).expectNext(false).verifyComplete(); + } + + @Test // DATAMONGO-1712 + void deleteByIdUsingFluxShouldRemoveEntity() { + + repository.deleteById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).verifyComplete(); + + repository.existsById(dave.id).as(StepVerifier::create).expectNext(false).verifyComplete(); + repository.existsById(oliver.id).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void deleteShouldRemoveEntity() { + + repository.delete(dave).as(StepVerifier::create).verifyComplete(); + + repository.findById(dave.id).as(StepVerifier::create).verifyComplete(); + + } + + @Test // DATAMONGO-1444 + void deleteIterableOfEntitiesShouldRemoveEntities() { + + repository.deleteAll(Arrays.asList(dave, boyd)).as(StepVerifier::create).verifyComplete(); + + repository.findById(boyd.id).as(StepVerifier::create).verifyComplete(); + + repository.findByLastname("Matthews").as(StepVerifier::create).expectNext(oliver).verifyComplete(); + } + + @Test // DATAMONGO-1444 + void deletePublisherOfEntitiesShouldRemoveEntities() { + + repository.deleteAll(Flux.just(dave, boyd)).as(StepVerifier::create).verifyComplete(); + + repository.findById(boyd.id).as(StepVerifier::create).verifyComplete(); + + repository.findByLastname("Matthews").as(StepVerifier::create).expectNext(oliver).verifyComplete(); + } + + @Test // DATAMONGO-1619 + void findOneByExampleShouldReturnObject() { + + Example<ReactivePerson> example = Example.of(dave); + + repository.findOne(example).as(StepVerifier::create).expectNext(dave).verifyComplete(); + } + + @Test // DATAMONGO-1619 + void findAllByExampleShouldReturnObjects() { + + Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); + + repository.findAll(example).as(StepVerifier::create).expectNextCount(2).verifyComplete(); + } + + @Test // DATAMONGO-1619 + void findAllByExampleAndSortShouldReturnObjects() { + + Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); + + repository.findAll(example, Sort.by("firstname")).as(StepVerifier::create).expectNext(dave, oliver) + .verifyComplete(); + } + + @Test // DATAMONGO-1619 + void countByExampleShouldCountObjects() { + + Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); + + repository.count(example).as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // DATAMONGO-1619 + void existsByExampleShouldReturnExisting() { + + Example<ReactivePerson> example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); + + repository.exists(example).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1619 + void existsByExampleShouldReturnNonExisting() { + + Example<ReactivePerson> example = Example.of(new ReactivePerson("foo", "bar", -1)); + + repository.exists(example).as(StepVerifier::create).expectNext(false).verifyComplete(); + } + + @Test // DATAMONGO-1619 + void findOneShouldEmitIncorrectResultSizeDataAccessExceptionWhenMoreThanOneElementFound() { + + Example<ReactivePerson> example = Example.of(new ReactivePerson(null, "Matthews", -1), + matching().withIgnorePaths("age")); + + repository.findOne(example).as(StepVerifier::create).expectError(IncorrectResultSizeDataAccessException.class); + } + + @Test // DATAMONGO-1907 + void findOneByExampleWithoutResultShouldCompleteEmpty() { + + Example<ReactivePerson> example = Example.of(new ReactivePerson("foo", "bar", -1)); + + repository.findOne(example).as(StepVerifier::create).verifyComplete(); + } + + @Test // GH-3757 + void findByShouldReturnFirstResult() { + + ReactivePerson probe = new ReactivePerson(); + probe.setFirstname(oliver.getFirstname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::first) // + .as(StepVerifier::create) // + .expectNext(oliver) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldReturnOneResult() { + + ReactivePerson probe = new ReactivePerson(); + probe.setFirstname(oliver.getFirstname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::one) // + .as(StepVerifier::create) // + .expectNext(oliver) // + .verifyComplete(); + + probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::one) // + .as(StepVerifier::create) // + .verifyError(IncorrectResultSizeDataAccessException.class); + } + + @Test // GH-3757 + void findByShouldReturnAll() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::all) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldApplySortAll() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), it -> it.sortBy(Sort.by("firstname")).all()) // + .as(StepVerifier::create) // + .expectNext(dave, oliver) // + .verifyComplete(); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.sortBy(Sort.by(Direction.DESC, "firstname")).all()) // + .as(StepVerifier::create) // + .expectNext(oliver, dave) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldApplyProjection() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), it -> it.project("firstname").first()) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getFirstname()).isNotNull(); + assertThat(it.getLastname()).isNull(); + }).verifyComplete(); + } + + @Test // GH-3757 + void findByShouldApplyPagination() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getTotalElements()).isEqualTo(2); + assertThat(it.getContent()).contains(dave); + }).verifyComplete(); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getTotalElements()).isEqualTo(2); + assertThat(it.getContent()).contains(oliver); + }).verifyComplete(); + } + + @Test // GH-4889 + void findByShouldApplySlice() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it ->
it.slice(PageRequest.of(0, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.hasNext()).isTrue(); + assertThat(it.getContent()).contains(dave); + }).verifyComplete(); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.hasNext()).isFalse(); + assertThat(it.getContent()).contains(oliver); + }).verifyComplete(); + } + + @Test // GH-3757 + void findByShouldCount() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::count) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + + probe = new ReactivePerson(); + probe.setLastname("foo"); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::count) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldReportExists() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::exists) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + probe = new ReactivePerson(); + probe.setLastname("foo"); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::exists) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + interface ReactivePersonRepository extends ReactiveMongoRepository<ReactivePerson, String> { + + Flux<ReactivePerson> findByLastname(String lastname); + + } + + interface ReactiveImmutablePersonRepository extends ReactiveMongoRepository<ImmutableReactivePerson, String> { + + } + + static class ReactivePerson { + + @Id String id; + + String firstname; + String lastname; + int age; + + public ReactivePerson() {} + + ReactivePerson(String firstname, String lastname, int age) { + + this.firstname = firstname; + this.lastname = lastname; + this.age = age; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public int getAge() { + return this.age; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ReactivePerson that = (ReactivePerson) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstname, that.firstname) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, age); + } + + public String toString() { + return "SimpleReactiveMongoRepositoryTests.ReactivePerson(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", age=" + this.getAge() + ")"; + } + } + + static final class ImmutableReactivePerson { + + @Id private final String id; + + private final String firstname; + private final String lastname; +
private final int age; + + ImmutableReactivePerson(@Nullable String id, String firstname, String lastname, int age) { + + this.id = id; + this.firstname = firstname; + this.lastname = lastname; + this.age = age; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public int getAge() { + return this.age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ImmutableReactivePerson that = (ImmutableReactivePerson) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstname, that.firstname) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, age); + } + + public String toString() { + return "SimpleReactiveMongoRepositoryTests.ImmutableReactivePerson(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", age=" + this.getAge() + ")"; + } + + public ImmutableReactivePerson withId(String id) { + return this.id == id ? this : new ImmutableReactivePerson(id, this.firstname, this.lastname, this.age); + } + + public ImmutableReactivePerson withFirstname(String firstname) { + return this.firstname == firstname ? this + : new ImmutableReactivePerson(this.id, firstname, this.lastname, this.age); + } + + public ImmutableReactivePerson withLastname(String lastname) { + return this.lastname == lastname ? this + : new ImmutableReactivePerson(this.id, this.firstname, lastname, this.age); + } + + public ImmutableReactivePerson withAge(int age) { + return this.age == age ? this : new ImmutableReactivePerson(this.id, this.firstname, this.lastname, age); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SumAge.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SumAge.java new file mode 100644 index 0000000000..abbfac5943 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SumAge.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import java.util.Objects; + +/** + * @author Christoph Strobl + */ +final class SumAge { + + private final Long total; + + public SumAge(Long total) { + this.total = total; + } + + public Long getTotal() { + return this.total; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SumAge sumAge = (SumAge) o; + return Objects.equals(total, sumAge.total); + } + + @Override + public int hashCode() { + return Objects.hash(total); + } + + public String toString() { + return "SumAge(total=" + this.getTotal() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java index 2f6b4b1f69..123f7a4889 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java index d2f49cdb89..606cca8647 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
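The SumAge value type above is the result wrapper paired with the summing aggregation declared earlier in this diff: its single 'total' property binds by name to the 'total' field emitted by the $group stage. A sketch of that pairing (the exact signature is an inference from the test fixture, not something this hunk shows):

@Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }")
Mono<SumAge> sumAgeAndReturnSumWrapper();

// Usage: unwrap the bound value.
repository.sumAgeAndReturnSumWrapper().map(SumAge::getTotal);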
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; /** @@ -40,7 +41,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java index 33aec81980..332eeff8ea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2016 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -60,7 +60,7 @@ public interface UserWithComplexIdRepository extends CrudRepository { + return collection.find(new Document("_id", new ObjectId(person.getId()))).first(); + }); + + assertThat(document).containsEntry("firstname", "Duckling").containsEntry("version", 1L); + } + + @Test // GH-4918 + void updatesVersionedTypeCorrectlyWhenUpdateIsUsingInc() { + + VersionedPerson person = template.insert(VersionedPersonWithCounter.class) + .one(new VersionedPersonWithCounter("Donald", "Duckling")); + + int updateCount = versionedPersonRepository.findAndIncCounterByLastname(person.getLastname()); + + assertThat(updateCount).isOne(); + + Document document = template.execute(VersionedPersonWithCounter.class, collection -> { + return collection.find(new Document("_id", new ObjectId(person.getId()))).first(); + }); + + assertThat(document).containsEntry("lastname", "Duckling").containsEntry("version", 1L).containsEntry("counter", + 42); + } + + @Test // GH-4918 + void updatesVersionedTypeCorrectlyWhenUpdateCoversVersionBump() { + + VersionedPerson person = template.insert(VersionedPersonWithCounter.class) + .one(new VersionedPersonWithCounter("Donald", "Duckling")); + + int updateCount = versionedPersonRepository.findAndSetFirstnameToLastnameIncVersionByLastname(person.getLastname(), + 10); + + assertThat(updateCount).isOne(); + + Document document = template.execute(VersionedPersonWithCounter.class, collection -> { + return collection.find(new Document("_id", new ObjectId(person.getId()))).first(); + }); + + assertThat(document).containsEntry("firstname", "Duckling").containsEntry("version", 10L); + } + + interface VersionedPersonRepository extends CrudRepository { + + @Update("{ '$set': { 'firstname' : ?0 } }") + int findAndSetFirstnameToLastnameByLastname(String lastname); + + @Update("{ '$inc': { 
'counter' : 42 } }") + int findAndIncCounterByLastname(String lastname); + + @Update(""" + { + '$set': { 'firstname' : ?0 }, + '$inc': { 'version' : ?1 } + }""") + int findAndSetFirstnameToLastnameIncVersionByLastname(String lastname, int incVersion); + + } + + @org.springframework.data.mongodb.core.mapping.Document("versioned-person") + static class VersionedPersonWithCounter extends VersionedPerson { + + int counter; + + public VersionedPersonWithCounter(String firstname, @Nullable String lastname) { + super(firstname, lastname); + } + + public int getCounter() { + return counter; + } + + public void setCounter(int counter) { + this.counter = counter; + } + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHintsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHintsUnitTests.java new file mode 100644 index 0000000000..2978c07b64 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHintsUnitTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.aot; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.predicate.RuntimeHintsPredicates; +import org.springframework.data.mongodb.classloading.HidingClassLoader; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadata; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.repository.support.ReactiveQuerydslMongoPredicateExecutor; + +import com.mongodb.client.MongoClient; + +/** + * Unit tests for {@link RepositoryRuntimeHints}. 
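The unit tests that follow assert AOT metadata rather than runtime behavior. The pattern: register hints into a RuntimeHints instance, then check them with a RuntimeHintsPredicates predicate, which is a Predicate<RuntimeHints> that AssertJ's matches(...) can apply. A minimal sketch:

RuntimeHints hints = new RuntimeHints();
hints.reflection().registerType(QuerydslMongoPredicateExecutor.class);

// True once a reflection hint for the type has been registered.
assertThat(hints).matches(
		RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class));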
+ * + * @author Christoph Strobl + */ +class RepositoryRuntimeHintsUnitTests { + + @Test // GH-4244 + void registersTypesForQuerydslIntegration() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, null); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class) + .and(RuntimeHintsPredicates.reflection().onType(ReactiveQuerydslMongoPredicateExecutor.class))); + } + + @Test // GH-4244 + void onlyRegistersReactiveTypesForQuerydslIntegrationWhenNoSyncClientPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, HidingClassLoader.hide(MongoClient.class)); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class) + .negate().and(RuntimeHintsPredicates.reflection().onType(ReactiveQuerydslMongoPredicateExecutor.class))); + } + + @Test // GH-4244 + @Disabled("TODO: ReactiveWrappers does not support ClassLoader") + void doesNotRegistersReactiveTypesForQuerydslIntegrationWhenReactorNotPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, new HidingClassLoader("reactor.core")); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class) + .and(RuntimeHintsPredicates.reflection().onType(ReactiveQuerydslMongoPredicateExecutor.class).negate())); + } + + @Test // GH-2971, GH-4534 + void registersProxyForCrudMethodMetadata() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, null); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.proxies().forInterfaces(CrudMethodMetadata.class, // + org.springframework.aop.SpringProxy.class, // + org.springframework.aop.framework.Advised.class, // + org.springframework.core.DecoratingProxy.class)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java index eef0448479..90886d7760 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
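The CDI integration test below boots a Jakarta SE container with bean discovery disabled and only the test package added; see the migrated setUp further down. A condensed sketch of that bootstrap, together with the lookup it enables:

try (SeContainer container = SeContainerInitializer.newInstance()
		.disableDiscovery()
		.addPackages(CdiExtensionIntegrationTests.class)
		.initialize()) {

	// select(...) replaces the old OpenWebBeans getInstance(...) lookup.
	RepositoryClient client = container.select(RepositoryClient.class).get();
}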
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,62 +15,62 @@ */ package org.springframework.data.mongodb.repository.cdi; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import jakarta.enterprise.inject.se.SeContainer; +import jakarta.enterprise.inject.se.SeContainerInitializer; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; -import org.apache.webbeans.cditest.CdiTestContainer; -import org.apache.webbeans.cditest.CdiTestContainerLoader; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; import org.springframework.data.mongodb.repository.Person; /** * Integration tests for {@link MongoRepositoryExtension}. - * + * * @author Oliver Gierke * @author Mark Paluch */ public class CdiExtensionIntegrationTests { - static CdiTestContainer container; + static SeContainer container; - @BeforeClass - public static void setUp() throws Exception { - container = CdiTestContainerLoader.getCdiContainer(); - container.bootContainer(); + @BeforeAll + public static void setUp() { + + container = SeContainerInitializer.newInstance() // + .disableDiscovery() // + .addPackages(CdiExtensionIntegrationTests.class) // + .initialize(); } - @AfterClass - public static void tearDown() throws Exception { - container.shutdownContainer(); + @AfterAll + public static void tearDown() { + container.close(); } - @Test + @Test // DATAMONGO-356, DATAMONGO-1785 public void bootstrapsRepositoryCorrectly() { - RepositoryClient client = container.getInstance(RepositoryClient.class); + RepositoryClient client = container.select(RepositoryClient.class).get(); CdiPersonRepository repository = client.getRepository(); - assertThat(repository, is(notNullValue())); + assertThat(repository).isNotNull(); repository.deleteAll(); Person person = new Person("Dave", "Matthews"); Person result = repository.save(person); - assertThat(result, is(notNullValue())); - assertThat(repository.findOne(person.getId()).getId(), is(result.getId())); + assertThat(result).isNotNull(); + assertThat(repository.findById(person.getId()).get().getId()).isEqualTo(result.getId()); } - /** - * @see DATAMONGO-1017 - */ - @Test + @Test // DATAMONGO-1017, DATAMONGO-1785 public void returnOneFromCustomImpl() { - RepositoryClient repositoryConsumer = container.getInstance(RepositoryClient.class); - assertThat(repositoryConsumer.getSamplePersonRepository().returnOne(), is(1)); + RepositoryClient repositoryConsumer = container.select(RepositoryClient.class).get(); + assertThat(repositoryConsumer.getSamplePersonRepository().returnOne()).isEqualTo(1); } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java index 48b1cebb11..0c79530dca 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. 
+ * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.repository.cdi; +import java.util.Optional; + import org.springframework.data.mongodb.repository.Person; import org.springframework.data.repository.Repository; @@ -24,5 +26,5 @@ public interface CdiPersonRepository extends Repository<Person, String> { Person save(Person person); - Person findOne(String id); + Optional<Person> findById(String id); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java index ddc1bcbe1a..ca094b9b8e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,27 @@ */ package org.springframework.data.mongodb.repository.cdi; -import java.net.UnknownHostException; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.inject.Produces; -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; - -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; - -import com.mongodb.MongoClient; -import com.mongodb.MongoException; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.test.util.MongoTestUtils; /** * Simple component exposing a {@link MongoOperations} instance as CDI bean.
- * + * * @author Oliver Gierke */ class MongoTemplateProducer { @Produces @ApplicationScoped - public MongoOperations createMongoTemplate() throws UnknownHostException, MongoException { + public MongoOperations createMongoTemplate() { - MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database"); + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(MongoTestUtils.client(), "database"); return new MongoTemplate(factory); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java index cb93f59c2c..514ed3e01a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.repository.cdi; -import javax.inject.Inject; +import jakarta.inject.Inject; /** * @author Oliver Gierke diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryCustom.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragment.java similarity index 78% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryCustom.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragment.java index a545d35919..12b59d86a4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryCustom.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragment.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,14 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
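On the consuming side of the producer shown above, Spring Data's CDI extension builds the repository around the produced MongoOperations, so a client only injects the interface. A sketch with an illustrative class name (not part of this diff):

@ApplicationScoped
class PersonLookupClient {

	@Inject CdiPersonRepository repository;

	Optional<Person> findPerson(String id) {
		return repository.findById(id); // repository bean provided by the CDI extension
	}
}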
*/ - package org.springframework.data.mongodb.repository.cdi; /** - * @see DATAMONGO-1017 * @author Mark Paluch */ -interface SamplePersonRepositoryCustom { +interface SamplePersonFragment { int returnOne(); -} \ No newline at end of file +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragmentImpl.java similarity index 76% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryImpl.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragmentImpl.java index 46a22cb6e8..09e20b9bf2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryImpl.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragmentImpl.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.repository.cdi; /** - * @see DATAMONGO-1017 * @author Mark Paluch */ -class SamplePersonRepositoryImpl implements SamplePersonRepositoryCustom { +class SamplePersonFragmentImpl implements SamplePersonFragment { @Override public int returnOne() { return 1; } -} \ No newline at end of file +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java index 224a7ed3b4..140cb6ca36 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,6 +20,5 @@ /** * @author Mark Paluch - * @see DATAMONGO-1017 */ -public interface SamplePersonRepository extends Repository<Person, String>, SamplePersonRepositoryCustom {} +public interface SamplePersonRepository extends Repository<Person, String>, SamplePersonFragment {} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java index 8847e0abd0..5480fe3e1b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.repository.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionReader; @@ -36,42 +35,37 @@ /** * Test class using the namespace configuration to set up the repository instance.
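The rename earlier in this hunk, from `SamplePersonRepositoryCustom`/`SamplePersonRepositoryImpl` to `SamplePersonFragment`/`SamplePersonFragmentImpl`, reflects Spring Data's fragment-based composition model. A condensed sketch of how the three renamed pieces compose, assembled from the hunks in this patch:

```java
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.repository.Repository;

// The fragment declares the extra operations ...
interface SamplePersonFragment {
	int returnOne();
}

// ... the Impl-suffixed class next to it supplies them ...
class SamplePersonFragmentImpl implements SamplePersonFragment {

	@Override
	public int returnOne() {
		return 1;
	}
}

// ... and the repository interface composes them into the store proxy.
interface SamplePersonRepository extends Repository<Person, String>, SamplePersonFragment {}
```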
- * + * @author Oliver Gierke */ @ContextConfiguration -public class MongoNamespaceIntegrationTests extends AbstractPersonRepositoryIntegrationTests { +class MongoNamespaceIntegrationTests extends AbstractPersonRepositoryIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; @Autowired ApplicationContext context; - @Before - @Override + @BeforeEach public void setUp() throws InterruptedException { - super.setUp(); factory = new DefaultListableBeanFactory(); reader = new XmlBeanDefinitionReader(factory); } @Test - public void assertDefaultMappingContextIsWired() { + void assertDefaultMappingContextIsWired() { reader.loadBeanDefinitions(new ClassPathResource("MongoNamespaceIntegrationTests-context.xml", getClass())); BeanDefinition definition = factory.getBeanDefinition("personRepository"); - assertThat(definition, is(notNullValue())); + assertThat(definition).isNotNull(); } - /** - * @see DATAMONGO-581 - */ - @Test - public void exposesPersistentEntity() { + @Test // DATAMONGO-581 + void exposesPersistentEntity() { Repositories repositories = new Repositories(context); PersistentEntity<?, ?> entity = repositories.getPersistentEntity(Person.class); - assertThat(entity, is(notNullValue())); - assertThat(entity, is(instanceOf(MongoPersistentEntity.class))); + assertThat(entity).isNotNull(); + assertThat(entity).isInstanceOf(MongoPersistentEntity.class); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java index 32d49c7b8a..7d116e25e9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2016 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,42 +15,38 @@ */ package org.springframework.data.mongodb.repository.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; - -import java.util.Arrays; - import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.repository.PersonRepository; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import com.mongodb.MongoClient; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link MongoRepositoriesRegistrar}. - * + * * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoRepositoriesRegistrarIntegrationTests { @Configuration - @EnableMongoRepositories(basePackages = "org.springframework.data.mongodb.repository") + @EnableMongoRepositories(basePackages = "org.springframework.data.mongodb.repository", includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = PersonRepository.class)) static class Config { @Bean public MongoOperations mongoTemplate() throws Exception { - return new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database")); + return new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoTestUtils.client(), "database")); } } @@ -59,15 +55,4 @@ public MongoOperations mongoTemplate() throws Exception { @Test public void testConfiguration() {} - - /** - * @see DATAMONGO-901 - */ - @Test - public void registersTypePredictingPostProcessor() { - - Iterable<String> beanNames = Arrays.asList(context.getBeanDefinitionNames()); - - assertThat(beanNames, hasItem(containsString("RepositoryFactoryBeanSupport_Predictor"))); - } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarUnitTests.java new file mode 100644 index 0000000000..778ac61db1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.context.annotation.AnnotationBeanNameGenerator; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.data.mongodb.repository.PersonRepository; + +/** + * @author Christoph Strobl + */ +class MongoRepositoriesRegistrarUnitTests { + + private BeanDefinitionRegistry registry; + + @BeforeEach + void setUp() { + registry = new DefaultListableBeanFactory(); + } + + @ParameterizedTest // GH-499, GH-3440 + @MethodSource(value = { "args" }) + void configuresRepositoriesCorrectly(AnnotationMetadata metadata, String[] beanNames) { + + MongoRepositoriesRegistrar registrar = new MongoRepositoriesRegistrar(); + registrar.setResourceLoader(new DefaultResourceLoader()); + registrar.setEnvironment(new StandardEnvironment()); + registrar.registerBeanDefinitions(metadata, registry); + + Iterable<String> names = Arrays.asList(registry.getBeanDefinitionNames()); + assertThat(names).contains(beanNames); + } + + static Stream<Arguments> args() { + return Stream.of( + Arguments.of(AnnotationMetadata.introspect(Config.class), + new String[] { "personRepository", "samplePersonRepository", "contactRepository" }), + Arguments.of(AnnotationMetadata.introspect(ConfigWithBeanNameGenerator.class), + new String[] { "personREPO", "samplePersonREPO", "contactREPO" })); + } + + @EnableMongoRepositories(basePackageClasses = PersonRepository.class) + private class Config { + + } + + @EnableMongoRepositories(basePackageClasses = PersonRepository.class, nameGenerator = MyBeanNameGenerator.class) + private class ConfigWithBeanNameGenerator { + + } + + static class MyBeanNameGenerator extends AnnotationBeanNameGenerator { + + @Override + public String generateBeanName(BeanDefinition definition, BeanDefinitionRegistry registry) { + return super.generateBeanName(definition, registry).replaceAll("Repository", "REPO"); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java index eb6fd4f40f..f613beb6d5 100644 +++
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,14 @@ */ package org.springframework.data.mongodb.repository.config; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collection; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.core.env.Environment; import org.springframework.core.env.StandardEnvironment; import org.springframework.core.io.ResourceLoader; @@ -34,7 +37,7 @@ /** * Unit tests for {@link MongoRepositoryConfigurationExtension}. - * + * * @author Oliver Gierke * @since 1.6 */ @@ -43,33 +46,26 @@ public class MongoRepositoryConfigurationExtensionUnitTests { StandardAnnotationMetadata metadata = new StandardAnnotationMetadata(Config.class, true); ResourceLoader loader = new PathMatchingResourcePatternResolver(); Environment environment = new StandardEnvironment(); + BeanDefinitionRegistry registry = new DefaultListableBeanFactory(); + RepositoryConfigurationSource configurationSource = new AnnotationRepositoryConfigurationSource(metadata, - EnableMongoRepositories.class, loader, environment); + EnableMongoRepositories.class, loader, environment, registry); - /** - * @see DATAMONGO-1009 - */ - @Test + @Test // DATAMONGO-1009 public void isStrictMatchIfDomainTypeIsAnnotatedWithDocument() { MongoRepositoryConfigurationExtension extension = new MongoRepositoryConfigurationExtension(); assertHasRepo(SampleRepository.class, extension.getRepositoryConfigurations(configurationSource, loader, true)); } - /** - * @see DATAMONGO-1009 - */ - @Test + @Test // DATAMONGO-1009 public void isStrictMatchIfRepositoryExtendsStoreSpecificBase() { MongoRepositoryConfigurationExtension extension = new MongoRepositoryConfigurationExtension(); assertHasRepo(StoreRepository.class, extension.getRepositoryConfigurations(configurationSource, loader, true)); } - /** - * @see DATAMONGO-1009 - */ - @Test + @Test // DATAMONGO-1009 public void isNotStrictMatchIfDomainTypeIsNotAnnotatedWithDocument() { MongoRepositoryConfigurationExtension extension = new MongoRepositoryConfigurationExtension(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java new file mode 100644 index 0000000000..402f13a47b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java @@ -0,0 +1,59 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository.config; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.repository.ReactivePersonRepository; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link ReactiveMongoRepositoriesRegistrar}. + * + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class ReactiveMongoRepositoriesRegistrarIntegrationTests { + + @Configuration + @EnableReactiveMongoRepositories(basePackages = "org.springframework.data.mongodb.repository") + static class Config { + + @Bean + public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception { + return new ReactiveMongoTemplate( + new SimpleReactiveMongoDatabaseFactory(Mockito.mock(MongoClient.class), "database")); + } + } + + @Autowired ReactivePersonRepository personRepository; + @Autowired ApplicationContext context; + + @Test // DATAMONGO-1444 + public void testConfiguration() {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarUnitTests.java new file mode 100644 index 0000000000..332ff06ee4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.context.annotation.AnnotationBeanNameGenerator; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.data.mongodb.repository.PersonRepository; + +/** + * @author Christoph Strobl + */ +class ReactiveMongoRepositoriesRegistrarUnitTests { + + private BeanDefinitionRegistry registry; + + @BeforeEach + void setUp() { + registry = new DefaultListableBeanFactory(); + } + + @ParameterizedTest // GH-499, GH-3440 + @MethodSource(value = { "args" }) + void configuresRepositoriesCorrectly(AnnotationMetadata metadata, String[] beanNames) { + + ReactiveMongoRepositoriesRegistrar registrar = new ReactiveMongoRepositoriesRegistrar(); + registrar.setResourceLoader(new DefaultResourceLoader()); + registrar.setEnvironment(new StandardEnvironment()); + registrar.registerBeanDefinitions(metadata, registry); + + Iterable<String> names = Arrays.asList(registry.getBeanDefinitionNames()); + assertThat(names).contains(beanNames); + } + + static Stream<Arguments> args() { + return Stream.of( + Arguments.of(AnnotationMetadata.introspect(Config.class), new String[] { "reactivePersonRepository" }), + Arguments.of(AnnotationMetadata.introspect(ConfigWithBeanNameGenerator.class), + new String[] { "reactivePersonREPO" })); + } + + @EnableReactiveMongoRepositories(basePackageClasses = PersonRepository.class) + private class Config { + + } + + @EnableReactiveMongoRepositories(basePackageClasses = PersonRepository.class, + nameGenerator = MyBeanNameGenerator.class) + private class ConfigWithBeanNameGenerator { + + } + + static class MyBeanNameGenerator extends AnnotationBeanNameGenerator { + + @Override + public String generateBeanName(BeanDefinition definition, BeanDefinitionRegistry registry) { + return super.generateBeanName(definition, registry).replaceAll("Repository", "REPO"); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java new file mode 100644 index 0000000000..45ecba992f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.config; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Collection; + +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.ResourceLoader; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.type.StandardAnnotationMetadata; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; +import org.springframework.data.repository.config.RepositoryConfiguration; +import org.springframework.data.repository.config.RepositoryConfigurationSource; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.data.repository.reactive.RxJava3CrudRepository; + +/** + * Unit tests for {@link ReactiveMongoRepositoryConfigurationExtension}. + * + * @author Mark Paluch + */ +public class ReactiveMongoRepositoryConfigurationExtensionUnitTests { + + StandardAnnotationMetadata metadata = new StandardAnnotationMetadata(Config.class, true); + ResourceLoader loader = new PathMatchingResourcePatternResolver(); + Environment environment = new StandardEnvironment(); + BeanDefinitionRegistry registry = new DefaultListableBeanFactory(); + + RepositoryConfigurationSource configurationSource = new AnnotationRepositoryConfigurationSource(metadata, + EnableReactiveMongoRepositories.class, loader, environment, registry); + + @Test // DATAMONGO-1444 + public void isStrictMatchIfDomainTypeIsAnnotatedWithDocument() { + + ReactiveMongoRepositoryConfigurationExtension extension = new ReactiveMongoRepositoryConfigurationExtension(); + assertHasRepo(SampleRepository.class, extension.getRepositoryConfigurations(configurationSource, loader, true)); + } + + @Test // DATAMONGO-1444 + public void isStrictMatchIfRepositoryExtendsStoreSpecificBase() { + + ReactiveMongoRepositoryConfigurationExtension extension = new ReactiveMongoRepositoryConfigurationExtension(); + assertHasRepo(StoreRepository.class, extension.getRepositoryConfigurations(configurationSource, loader, true)); + } + + @Test // DATAMONGO-1444 + public void isNotStrictMatchIfDomainTypeIsNotAnnotatedWithDocument() { + + ReactiveMongoRepositoryConfigurationExtension extension = new ReactiveMongoRepositoryConfigurationExtension(); + assertDoesNotHaveRepo(UnannotatedRepository.class, + extension.getRepositoryConfigurations(configurationSource, loader, true)); + } + + private static void assertHasRepo(Class<?> repositoryInterface, + Collection<RepositoryConfiguration<?>> configs) { + + for (RepositoryConfiguration<?> config : configs) { + if (config.getRepositoryInterface().equals(repositoryInterface.getName())) { + return; + } + } + + fail("Expected to find config for repository interface ".concat(repositoryInterface.getName()).concat(" but got ") + .concat(configs.toString())); + } + + private static void assertDoesNotHaveRepo(Class<?> repositoryInterface, + Collection<RepositoryConfiguration<?>> configs) { + + for (RepositoryConfiguration<?> config : configs) { + if (config.getRepositoryInterface().equals(repositoryInterface.getName())) { + fail("Expected not to find config for repository interface ".concat(repositoryInterface.getName())); + } + } + } + + @EnableReactiveMongoRepositories(considerNestedRepositories = true) + static class Config { + + } + + @Document + static class Sample {} + + static class Store {} + + interface SampleRepository extends ReactiveCrudRepository<Sample, Long> {} + + interface UnannotatedRepository extends RxJava3CrudRepository<Object, Long> {} + + interface StoreRepository extends ReactiveMongoRepository<Store, Long> {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java index bb4f61d8e9..cc36c6cafb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
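The three DATAMONGO-1444 tests above encode the strict-matching rule: in strict mode a repository is only picked up when its domain type carries `@Document` or when the interface extends a MongoDB-specific base interface. A sketch of the rule using the same fixtures as the test; the type parameters are inferred, not spelled out in the patch:

```java
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
import org.springframework.data.repository.reactive.ReactiveCrudRepository;
import org.springframework.data.repository.reactive.RxJava3CrudRepository;

@Document
class Sample {}

class Store {}

// Picked up in strict mode: the domain type carries @Document.
interface SampleRepository extends ReactiveCrudRepository<Sample, Long> {}

// Skipped in strict mode: generic base interface, no @Document on the domain type.
interface UnannotatedRepository extends RxJava3CrudRepository<Object, Long> {}

// Picked up in strict mode: extends the MongoDB-specific base interface.
interface StoreRepository extends ReactiveMongoRepository<Store, Long> {}
```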
- * + * * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml") public class AllowNestedMongoRepositoriesRepositoryConfigTests { @Autowired NestedUserRepository fooRepository; - /** - * @see DATAMONGO-780 - */ - @Test + @Test // DATAMONGO-780 public void shouldFindNestedRepository() { - assertThat(fooRepository, is(notNullValue())); + assertThat(fooRepository).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java index fe9b547783..1827559ffd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,6 @@ import org.springframework.data.mongodb.repository.User; /** - * @see DATAMONGO-780 * @author Thomas Darimont */ public class ClassWithNestedRepository { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java index e9713a3839..c64b831270 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,11 @@ */ package org.springframework.data.mongodb.repository.config.lazy; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.ImportResource; @@ -27,14 +27,14 @@ import org.springframework.data.mongodb.repository.config.lazy.ClassWithNestedRepository.NestedUserRepository; import org.springframework.data.repository.support.Repositories; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration test for the combination of JavaConfig and an {@link Repositories} wrapper. - * + * * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class NestedMongoRepositoriesJavaConfigTests { @@ -45,11 +45,8 @@ static class Config {} @Autowired NestedUserRepository nestedUserRepository; - /** - * @see DATAMONGO-780 - */ - @Test + @Test // DATAMONGO-780 public void shouldSupportNestedRepositories() { - assertThat(nestedUserRepository, is(notNullValue())); + assertThat(nestedUserRepository).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java new file mode 100644 index 0000000000..3693f60780 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java @@ -0,0 +1,26 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.custom; + +import org.springframework.data.mongodb.core.User; +import org.springframework.data.repository.Repository; + +/** + * @author Mark Paluch + */ +public interface ComposedRepository extends Repository<User, String>, RepositoryMixin { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java new file mode 100644 index 0000000000..4361be2420 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.custom; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.ImportResource; +import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +/** + * Integration tests for composed Repository implementations. + * + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class ComposedRepositoryImplementationTests { + + @Configuration + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ComposedRepository.class)) + @ImportResource("classpath:infrastructure.xml") + static class Config {} + + @Autowired ComposedRepository composedRepository; + + @Test // DATAMONGO-1702 + public void shouldExecuteMethodOnCustomRepositoryImplementation() { + assertThat(composedRepository.getFoo()).isEqualTo("foo"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java index 19a45b93ac..21e0cb3914 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
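`ComposedRepository` above mixes `RepositoryMixin` into a Mongo `Repository`; `RepositoryMixinImpl`, added later in this patch, supplies `getFoo()`. A hypothetical caller (not part of the patch) showing what the composed proxy looks like from the outside:

```java
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

// Hypothetical client class: the injected bean is a single proxy that routes
// getFoo() to RepositoryMixinImpl and store methods to the Mongo implementation.
@Component
class ComposedRepositoryClient {

	@Autowired ComposedRepository composedRepository;

	String readFoo() {
		return composedRepository.getFoo(); // returns "foo"
	}
}
```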
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java index 9adfb7701d..69654bc7b7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,10 +25,6 @@ */ public class CustomMongoRepositoryImpl implements CustomMongoRepository { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.custom.CustomMongoRepository#findByFullName() - */ @Override public List<User> findByUsernameCustom(String username) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java new file mode 100644 index 0000000000..d83b53c62a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ */ +package org.springframework.data.mongodb.repository.custom; + +import org.springframework.data.mongodb.repository.User; +import org.springframework.data.repository.reactive.RxJava3CrudRepository; + +/** + * @author Mark Paluch + */ +public interface CustomReactiveMongoRepository + extends RxJava3CrudRepository<User, String>, CustomReactiveMongoRepositoryCustom { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java new file mode 100644 index 0000000000..6b650ecaac --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java @@ -0,0 +1,30 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository.custom; + +import java.util.List; + +import org.springframework.data.mongodb.repository.User; + +/** + * @author Mark Paluch + */ +public interface CustomReactiveMongoRepositoryCustom { + + List<User> findByUsernameCustom(String username); + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java new file mode 100644 index 0000000000..4c0a7b78e8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java @@ -0,0 +1,37 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.springframework.data.mongodb.repository.custom; + +import java.util.Collections; +import java.util.List; + +import org.springframework.data.mongodb.repository.User; + +/** + * @author Mark Paluch + */ +public class CustomReactiveMongoRepositoryImpl implements CustomReactiveMongoRepositoryCustom { + + @Override + public List<User> findByUsernameCustom(String username) { + + User user = new User(); + user.setUsername(username); + + return Collections.singletonList(user); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java new file mode 100644 index 0000000000..c7de6e72bf --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.custom; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.context.annotation.ImportResource; +import org.springframework.data.mongodb.repository.User; +import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +/** + * Integration tests for custom reactive Repository implementations.
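The `...Custom`/`...Impl` pair above is the classic custom-implementation convention: any method declared on the `Custom` interface bypasses derived-query execution and is routed to the `Impl` class. A hypothetical caller (not part of the patch) illustrating the effect:

```java
import java.util.List;

import org.springframework.data.mongodb.repository.User;

// Hypothetical client class: findByUsernameCustom(..) is served by
// CustomReactiveMongoRepositoryImpl rather than by a derived query.
class CustomRepositoryClient {

	private final CustomReactiveMongoRepository repository;

	CustomRepositoryClient(CustomReactiveMongoRepository repository) {
		this.repository = repository;
	}

	User firstByUsername(String username) {
		List<User> users = repository.findByUsernameCustom(username);
		return users.isEmpty() ? null : users.get(0);
	}
}
```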
+ * + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class CustomReactiveRepositoryImplementationTests { + + @Configuration + @EnableReactiveMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = CustomReactiveMongoRepository.class)) + @ImportResource("classpath:reactive-infrastructure.xml") + static class Config {} + + @Autowired CustomReactiveMongoRepository customMongoRepository; + + @Test // DATAMONGO-1444 + public void shouldExecuteMethodOnCustomRepositoryImplementation() { + + String username = "bubu"; + List<User> users = customMongoRepository.findByUsernameCustom(username); + + assertThat(users.size()).isEqualTo(1); + assertThat(users.get(0)).isNotNull(); + assertThat(users.get(0).getUsername()).isEqualTo(username); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java index d375bd0e14..697799d3e8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,48 +15,47 @@ */ package org.springframework.data.mongodb.repository.custom; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.ImportResource; import org.springframework.data.mongodb.repository.User; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for custom Repository implementations.
- * + * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class CustomRepositoryImplementationTests { @Configuration - @EnableMongoRepositories + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = CustomMongoRepository.class)) @ImportResource("classpath:infrastructure.xml") static class Config {} @Autowired CustomMongoRepository customMongoRepository; - /** - * @see DATAMONGO-804 - */ - @Test + @Test // DATAMONGO-804 public void shouldExecuteMethodOnCustomRepositoryImplementation() { String username = "bubu"; List<User> users = customMongoRepository.findByUsernameCustom(username); - assertThat(users.size(), is(1)); - assertThat(users.get(0), is(notNullValue())); - assertThat(users.get(0).getUsername(), is(username)); + assertThat(users.size()).isEqualTo(1); + assertThat(users.get(0)).isNotNull(); + assertThat(users.get(0).getUsername()).isEqualTo(username); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java new file mode 100644 index 0000000000..191e6a6b68 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java @@ -0,0 +1,24 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.custom; + +/** + * @author Mark Paluch + */ +public interface RepositoryMixin { + + String getFoo(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java new file mode 100644 index 0000000000..be7ec72a91 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java @@ -0,0 +1,27 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.repository.custom; + +/** + * @author Mark Paluch + */ +public class RepositoryMixinImpl implements RepositoryMixin { + + @Override + public String getFoo() { + return "foo"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java index 34e1f1793b..ea3c9ad023 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,47 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.mockito.Matchers.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import java.lang.reflect.Method; -import java.util.Arrays; -import java.util.Date; +import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.Optional; +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; import org.bson.types.ObjectId; -import org.hamcrest.core.Is; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; -import org.mockito.Matchers; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.domain.Limit; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.ExecutableUpdate; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate; +import 
org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithQuery; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.convert.DbRefResolver; @@ -50,175 +64,178 @@ import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Hint; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.MongoRepository; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.Update; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.spel.standard.SpelExpressionParser; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; -import com.mongodb.WriteResult; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; /** * Unit tests for {@link AbstractMongoQuery}. 
- * + * * @author Christoph Strobl * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch + * @author Jorge Rodríguez */ -@RunWith(MockitoJUnitRunner.class) -public class AbstractMongoQueryUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class AbstractMongoQueryUnitTests { @Mock MongoOperations mongoOperationsMock; + @Mock ExecutableFind executableFind; + @Mock FindWithQuery withQueryMock; + @Mock ExecutableUpdate executableUpdate; + @Mock UpdateWithQuery updateWithQuery; + @Mock UpdateWithUpdate updateWithUpdate; + @Mock TerminatingUpdate terminatingUpdate; @Mock BasicMongoPersistentEntity persitentEntityMock; @Mock MongoMappingContext mappingContextMock; - @Mock WriteResult writeResultMock; + @Mock DeleteResult deleteResultMock; + @Mock UpdateResult updateResultMock; - @Before - public void setUp() { + @BeforeEach + void setUp() { doReturn("persons").when(persitentEntityMock).getCollection(); - doReturn(persitentEntityMock).when(mappingContextMock).getPersistentEntity(Matchers.any(Class.class)); + doReturn(persitentEntityMock).when(mappingContextMock).getPersistentEntity(Mockito.any(Class.class)); + doReturn(persitentEntityMock).when(mappingContextMock).getRequiredPersistentEntity(Mockito.any(Class.class)); doReturn(Person.class).when(persitentEntityMock).getType(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class)); + MongoDatabaseFactory mongoDbFactory = mock(MongoDatabaseFactory.class); + when(mongoDbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContextMock); converter.afterPropertiesSet(); doReturn(converter).when(mongoOperationsMock).getConverter(); + doReturn(executableFind).when(mongoOperationsMock).query(any()); + doReturn(withQueryMock).when(executableFind).as(any()); + doReturn(withQueryMock).when(withQueryMock).matching(any(Query.class)); + doReturn(executableUpdate).when(mongoOperationsMock).update(any()); + doReturn(updateWithQuery).when(executableUpdate).matching(any(Query.class)); + doReturn(terminatingUpdate).when(updateWithQuery).apply(any(UpdateDefinition.class)); + + when(mongoOperationsMock.remove(any(), any(), anyString())).thenReturn(deleteResultMock); + when(mongoOperationsMock.updateMulti(any(), any(), any(), anyString())).thenReturn(updateResultMock); } - /** - * @see DATAMONGO-566 - */ - @SuppressWarnings("unchecked") - @Test - public void testDeleteExecutionCallsRemoveCorreclty() { + @Test // DATAMONGO-566 + void testDeleteExecutionCallsRemoveCorrectly() { createQueryForMethod("deletePersonByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" }); - verify(mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons")); - verify(mongoOperationsMock, times(0)).find(Matchers.any(Query.class), Matchers.any(Class.class), - Matchers.anyString()); + verify(mongoOperationsMock, times(1)).remove(any(), eq(Person.class), eq("persons")); + verify(mongoOperationsMock, times(0)).find(any(), any(), any()); } - /** - * @see DATAMONGO-566 - * @see DATAMONGO-1040 - */ - @SuppressWarnings("unchecked") - @Test - public void testDeleteExecutionLoadsListOfRemovedDocumentsWhenReturnTypeIsCollectionLike() { - - when(mongoOperationsMock.find(Matchers.any(Query.class), Matchers.any(Class.class), Matchers.anyString())) - 
.thenReturn(Arrays.asList(new Person(new ObjectId(new Date()), "bar"))); + @Test // DATAMONGO-566, DATAMONGO-1040 + void testDeleteExecutionLoadsListOfRemovedDocumentsWhenReturnTypeIsCollectionLike() { createQueryForMethod("deleteByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" }); - verify(mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), eq(Person.class), eq("persons")); + verify(mongoOperationsMock, times(1)).findAllAndRemove(any(), eq(Person.class), eq("persons")); } - /** - * @see DATAMONGO-566 - */ - @Test - public void testDeleteExecutionReturnsZeroWhenWriteResultIsNull() { + @Test // DATAMONGO-566 + void testDeleteExecutionReturnsZeroWhenWriteResultIsNull() { MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class); query.setDeleteQuery(true); - assertThat(query.execute(new Object[] { "fake" }), Is.<Object> is(0L)); + assertThat(query.execute(new Object[] { "fake" })).isEqualTo(0L); } - /** - * @see DATAMONGO-566 - * @see DATAMONGO-978 - */ - @Test - public void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() { + @Test // DATAMONGO-566, DATAMONGO-978 + void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() { - when(writeResultMock.getN()).thenReturn(100); - when(mongoOperationsMock.remove(Matchers.any(Query.class), eq(Person.class), eq("persons"))) - .thenReturn(writeResultMock); + when(deleteResultMock.getDeletedCount()).thenReturn(100L); + when(deleteResultMock.wasAcknowledged()).thenReturn(true); MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class); query.setDeleteQuery(true); - assertThat(query.execute(new Object[] { "fake" }), is((Object) 100L)); - verify(mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons")); + assertThat(query.execute(new Object[] { "fake" })).isEqualTo(100L); + verify(mongoOperationsMock, times(1)).remove(any(), eq(Person.class), eq("persons")); } - /** - * @see DATAMONGO-957 - */ - @Test - public void metadataShouldNotBeAddedToQueryWhenNotPresent() { + @Test // DATAMONGO-957 + void metadataShouldNotBeAddedToQueryWhenNotPresent() { MongoQueryFake query = createQueryForMethod("findByFirstname", String.class); query.execute(new Object[] { "fake" }); ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons")); + verify(executableFind).as(Person.class); + verify(withQueryMock).matching(captor.capture()); - assertThat(captor.getValue().getMeta().getComment(), nullValue()); + assertThat(captor.getValue().getMeta().getComment()).isNull(); } - /** - * @see DATAMONGO-957 - */ - @Test - public void metadataShouldBeAddedToQueryCorrectly() { + @Test // DATAMONGO-957 + void metadataShouldBeAddedToQueryCorrectly() { MongoQueryFake query = createQueryForMethod("findByFirstname", String.class, Pageable.class); - query.execute(new Object[] { "fake", new PageRequest(0, 10) }); + query.execute(new Object[] { "fake", PageRequest.of(0, 10) }); ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(this.mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons")); - assertThat(captor.getValue().getMeta().getComment(), is("comment")); + verify(executableFind).as(Person.class); + verify(withQueryMock).matching(captor.capture()); + + assertThat(captor.getValue().getMeta().getComment()).isEqualTo("comment"); } - /** - * @see DATAMONGO-957 - */ - @Test - public void
metadataShouldBeAddedToCountQueryCorrectly() { + @Test // DATAMONGO-957 + void metadataShouldBeAddedToCountQueryCorrectly() { MongoQueryFake query = createQueryForMethod("findByFirstname", String.class, Pageable.class); - query.execute(new Object[] { "fake", new PageRequest(0, 10) }); + query.execute(new Object[] { "fake", PageRequest.of(1, 10) }); ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(mongoOperationsMock, times(1)).count(captor.capture(), eq(Person.class), eq("persons")); - assertThat(captor.getValue().getMeta().getComment(), is("comment")); + verify(executableFind).as(Person.class); + verify(withQueryMock, atLeast(1)).matching(captor.capture()); + + assertThat(captor.getValue().getMeta().getComment()).isEqualTo("comment"); } - /** - * @see DATAMONGO-957 - */ - @Test - public void metadataShouldBeAddedToStringBasedQueryCorrectly() { + @Test // DATAMONGO-957, DATAMONGO-1783 + void metadataShouldBeAddedToStringBasedQueryCorrectly() { MongoQueryFake query = createQueryForMethod("findByAnnotatedQuery", String.class, Pageable.class); - query.execute(new Object[] { "fake", new PageRequest(0, 10) }); + query.execute(new Object[] { "fake", PageRequest.of(0, 10) }); ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(this.mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons")); - assertThat(captor.getValue().getMeta().getComment(), is("comment")); + verify(executableFind).as(Person.class); + verify(withQueryMock).matching(captor.capture()); + + assertThat(captor.getValue().getMeta().getComment()).isEqualTo("comment"); } - /** - * @see DATAMONGO-1057 - */ - @Test - public void slicedExecutionShouldRetainNrOfElementsToSkip() { + @Test // DATAMONGO-1057 + void slicedExecutionShouldRetainNrOfElementsToSkip() { MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class); - Pageable page1 = new PageRequest(0, 10); + Pageable page1 = PageRequest.of(0, 10); Pageable page2 = page1.next(); query.execute(new Object[] { "fake", page1 }); @@ -226,20 +243,18 @@ public void slicedExecutionShouldRetainNrOfElementsToSkip() { ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons")); + verify(executableFind, times(2)).as(Person.class); + verify(withQueryMock, times(2)).matching(captor.capture()); - assertThat(captor.getAllValues().get(0).getSkip(), is(0)); - assertThat(captor.getAllValues().get(1).getSkip(), is(10)); + assertThat(captor.getAllValues().get(0).getSkip()).isZero(); + assertThat(captor.getAllValues().get(1).getSkip()).isEqualTo(10); } - /** - * @see DATAMONGO-1057 - */ - @Test - public void slicedExecutionShouldIncrementLimitByOne() { + @Test // DATAMONGO-1057 + void slicedExecutionShouldIncrementLimitByOne() { MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class); - Pageable page1 = new PageRequest(0, 10); + Pageable page1 = PageRequest.of(0, 10); Pageable page2 = page1.next(); query.execute(new Object[] { "fake", page1 }); @@ -247,20 +262,18 @@ public void slicedExecutionShouldIncrementLimitByOne() { ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons")); + verify(executableFind, times(2)).as(Person.class); + verify(withQueryMock, times(2)).matching(captor.capture()); - assertThat(captor.getAllValues().get(0).getLimit(), is(11)); - 
assertThat(captor.getAllValues().get(1).getLimit(), is(11)); + assertThat(captor.getAllValues().get(0).getLimit()).isEqualTo(11); + assertThat(captor.getAllValues().get(1).getLimit()).isEqualTo(11); } - /** - * @see DATAMONGO-1057 - */ - @Test - public void slicedExecutionShouldRetainSort() { + @Test // DATAMONGO-1057 + void slicedExecutionShouldRetainSort() { MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class); - Pageable page1 = new PageRequest(0, 10, Sort.Direction.DESC, "bar"); + Pageable page1 = PageRequest.of(0, 10, Sort.Direction.DESC, "bar"); Pageable page2 = page1.next(); query.execute(new Object[] { "fake", page1 }); @@ -268,53 +281,302 @@ public void slicedExecutionShouldRetainSort() { ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); - verify(mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons")); + verify(executableFind, times(2)).as(Person.class); + verify(withQueryMock, times(2)).matching(captor.capture()); - DBObject expectedSortObject = new BasicDBObjectBuilder().add("bar", -1).get(); - assertThat(captor.getAllValues().get(0).getSortObject(), is(expectedSortObject)); - assertThat(captor.getAllValues().get(1).getSortObject(), is(expectedSortObject)); + Document expectedSortObject = new Document().append("bar", -1); + assertThat(captor.getAllValues().get(0).getSortObject()).isEqualTo(expectedSortObject); + assertThat(captor.getAllValues().get(1).getSortObject()).isEqualTo(expectedSortObject); } - /** - * @see DATAMONGO-1080 - */ - @Test - public void doesNotTryToPostProcessQueryResultIntoWrapperType() { + @Test // DATAMONGO-1080 + void doesNotTryToPostProcessQueryResultIntoWrapperType() { Person reference = new Person(); - when(mongoOperationsMock.findOne(Mockito.any(Query.class), eq(Person.class), eq("persons"))).// - thenReturn(reference); + + doReturn(reference).when(withQueryMock).oneValue(); AbstractMongoQuery query = createQueryForMethod("findByLastname", String.class); - assertThat(query.execute(new Object[] { "lastname" }), is((Object) reference)); + assertThat(query.execute(new Object[] { "lastname" })).isEqualTo(reference); + } + + @Test // DATAMONGO-1865 + void limitingSingleEntityQueryCallsFirst() { + + Person reference = new Person(); + + doReturn(reference).when(withQueryMock).firstValue(); + + AbstractMongoQuery query = createQueryForMethod("findFirstByLastname", String.class).setLimitingQuery(true); + + assertThat(query.execute(new Object[] { "lastname" })).isEqualTo(reference); + } + + @Test // DATAMONGO-1872 + void doesNotFixCollectionOnPreparation() { + + AbstractMongoQuery query = createQueryForMethod(DynamicallyMappedRepository.class, "findBy"); + + query.execute(new Object[0]); + + verify(executableFind, never()).inCollection(anyString()); + verify(executableFind).as(DynamicallyMapped.class); + } + + @Test // DATAMONGO-1979 + void usesAnnotatedSortWhenPresent() { + + createQueryForMethod("findByAge", Integer.class) // + .execute(new Object[] { 1000 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getSortObject()).isEqualTo(new Document("age", 1)); + } + + @Test // DATAMONGO-1979 + void usesExplicitSortOverridesAnnotatedSortWhenPresent() { + + createQueryForMethod("findByAge", Integer.class, Sort.class) // + .execute(new Object[] { 1000, Sort.by(Direction.DESC, "age") }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + 
verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getSortObject()).isEqualTo(new Document("age", -1)); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollation() { + + createQueryForMethod("findWithCollationUsingSpimpleStringValueByFirstName", String.class) // + .execute(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingDocumentByFirstName", String.class) // + .execute(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsString() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", new Document("locale", "en_US") }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsLocale() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", Locale.US }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldThrowExceptionOnNonParsableCollation() { + + assertThatIllegalArgumentException().isThrownBy(() -> { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", 100 }); + }); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationIn() { + + createQueryForMethod("findWithCollationUsingPlaceholderInDocumentByFirstName", String.class, String.class) // + .execute(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyCollationParameter() { + + Collation collation = Collation.of("en_US"); + createQueryForMethod("findWithCollationParameterByFirstName", String.class, Collation.class) // + .execute(new Object[] { 
"dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldOverrideAnnotation() { + + Collation collation = Collation.of("de_AT"); + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .execute(new Object[] { "dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldNotBeAppliedWhenNullOverrideAnnotation() { + + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .execute(new Object[] { "dalinar", null }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); } + @Test // GH-2107 + void updateExecutionCallsUpdateAllCorrectly() { + + when(terminatingUpdate.all()).thenReturn(updateResultMock); + + createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) // + .execute(new Object[] { "dalinar", 100 }); + + ArgumentCaptor update = ArgumentCaptor.forClass(UpdateDefinition.class); + verify(updateWithQuery).apply(update.capture()); + verify(terminatingUpdate).all(); + + assertThat(update.getValue().getUpdateObject()).isEqualTo(Document.parse("{ '$inc' : { 'visits' : 100 } }")); + } + + @Test // GH-3230 + void findShouldApplyHint() { + + createQueryForMethod("findWithHintByFirstname", String.class).execute(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-fn"); + } + + @Test // GH-3230 + void updateShouldApplyHint() { + + when(terminatingUpdate.all()).thenReturn(updateResultMock); + + createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) // + .execute(new Object[] { "dalinar", 100 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(executableUpdate).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-ln"); + } + + @Test // GH-4397 + void limitShouldBeAppliedToQuery() { + + createQueryForMethod("findWithLimit", String.class, Limit.class).execute(new Object[] { "dalinar", Limit.of(42) }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + + assertThat(captor.getValue().getLimit()).isEqualTo(42); + } + + @Test // GH-4397 + void sortAndLimitShouldBeAppliedToQuery() { + + createQueryForMethod("findWithSortAndLimit", String.class, Sort.class, Limit.class) + .execute(new Object[] { "dalinar", Sort.by("fn"), Limit.of(42) }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + + assertThat(captor.getValue().getLimit()).isEqualTo(42); + assertThat(captor.getValue().getSortObject()).isEqualTo(new Document("fn", 1)); + } + + @Test // GH-2971 + void findShouldApplyReadPreference() { + + createQueryForMethod("findWithReadPreferenceByFirstname", String.class).execute(new Object[] { "Jasna" 
}); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void findShouldApplyReadPreferenceAtRepository() { + + createQueryForMethod("findWithLimit", String.class, Limit.class).execute(new Object[] { "dalinar", Limit.of(42) }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.primaryPreferred()); + } + + private MongoQueryFake createQueryForMethod(String methodName, Class... paramTypes) { + return createQueryForMethod(Repo.class, methodName, paramTypes); + } + + private MongoQueryFake createQueryForMethod(Class repository, String methodName, Class... paramTypes) { try { - Method method = Repo.class.getMethod(methodName, paramTypes); + Method method = repository.getMethod(methodName, paramTypes); ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); - MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(Repo.class), factory, + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, mappingContextMock); return new MongoQueryFake(queryMethod, mongoOperationsMock); - - } catch (NoSuchMethodException e) { - throw new IllegalArgumentException(e.getMessage(), e); - } catch (SecurityException e) { + } catch (Exception e) { throw new IllegalArgumentException(e.getMessage(), e); } } private static class MongoQueryFake extends AbstractMongoQuery { - private boolean isCountQuery; private boolean isDeleteQuery; - - public MongoQueryFake(MongoQueryMethod method, MongoOperations operations) { - super(method, operations); + private boolean isLimitingQuery; + + MongoQueryFake(MongoQueryMethod method, MongoOperations operations) { + super(method, operations, + new ValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), + Collections.emptySet()), + ValueExpressionParser.create(SpelExpressionParser::new))); } @Override @@ -324,7 +586,12 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { @Override protected boolean isCountQuery() { - return isCountQuery; + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; } @Override @@ -332,12 +599,29 @@ protected boolean isDeleteQuery() { return isDeleteQuery; } - public MongoQueryFake setDeleteQuery(boolean isDeleteQuery) { + @Override + protected boolean isLimiting() { + return isLimitingQuery; + } + + MongoQueryFake setDeleteQuery(boolean isDeleteQuery) { this.isDeleteQuery = isDeleteQuery; return this; } + + MongoQueryFake setLimitingQuery(boolean limitingQuery) { + + isLimitingQuery = limitingQuery; + return this; + } + + @Override + protected CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } } + @ReadPreference(value = "primaryPreferred") private interface Repo extends MongoRepository { List deleteByLastname(String lastname); @@ -346,16 +630,64 @@ private interface Repo extends MongoRepository { List findByFirstname(String firstname); - @Meta(comment = "comment") + @Meta(comment = "comment", flags = { org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT }) Page findByFirstname(String firstnanme, Pageable pageable); @Meta(comment = "comment") 
@org.springframework.data.mongodb.repository.Query("{}") Page findByAnnotatedQuery(String firstnanme, Pageable pageable); - /** @see DATAMONGO-1057 */ + // DATAMONGO-1057 Slice findByLastname(String lastname, Pageable page); Optional findByLastname(String lastname); + + Person findFirstByLastname(String lastname); + + @org.springframework.data.mongodb.repository.Query(sort = "{ age : 1 }") + List findByAge(Integer age); + + @org.springframework.data.mongodb.repository.Query(sort = "{ age : 1 }") + List findByAge(Integer age, Sort page); + + @org.springframework.data.mongodb.repository.Query(collation = "en_US") + List findWithCollationUsingSpimpleStringValueByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithCollationUsingDocumentByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "?1") + List findWithCollationUsingPlaceholderByFirstName(String firstname, Object collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : '?1' }") + List findWithCollationUsingPlaceholderInDocumentByFirstName(String firstname, String collation); + + List findWithCollationParameterByFirstName(String firstname, Collation collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithWithCollationParameterAndAnnotationByFirstName(String firstname, Collation collation); + + @Hint("idx-ln") + @Update("{ '$inc' : { 'visits' : ?1 } }") + void findAndIncreaseVisitsByLastname(String lastname, int value); + + @Hint("idx-fn") + void findWithHintByFirstname(String firstname); + + List findWithLimit(String firstname, Limit limit); + + List findWithSortAndLimit(String firstname, Sort sort, Limit limit); + + @ReadPreference(value = "secondaryPreferred") + List findWithReadPreferenceByFirstname(String firstname); + } + + // DATAMONGO-1872 + + @org.springframework.data.mongodb.core.mapping.Document("#{T(java.lang.Math).random()}") + static class DynamicallyMapped {} + + interface DynamicallyMappedRepository extends Repository { + DynamicallyMapped findBy(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQueryUnitTests.java new file mode 100644 index 0000000000..1f3602fc71 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQueryUnitTests.java @@ -0,0 +1,395 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ReactiveFindOperation.ReactiveFind; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Hint; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.Update; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.spel.standard.SpelExpressionParser; + +import com.mongodb.MongoClientSettings; +import com.mongodb.client.result.UpdateResult; + +/** + * Unit tests for {@link AbstractReactiveMongoQuery}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Jorge Rodríguez + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class AbstractReactiveMongoQueryUnitTests { + + @Mock ReactiveMongoOperations mongoOperationsMock; + @Mock BasicMongoPersistentEntity persitentEntityMock; + @Mock MongoMappingContext mappingContextMock; + + @Mock ReactiveFind executableFind; + @Mock FindWithQuery withQueryMock; + @Mock ReactiveUpdate executableUpdate; + @Mock UpdateWithQuery updateWithQuery; + @Mock TerminatingUpdate terminatingUpdate; + + @BeforeEach + void setUp() { + + doReturn("persons").when(persitentEntityMock).getCollection(); + doReturn(persitentEntityMock).when(mappingContextMock).getPersistentEntity(Mockito.any(Class.class)); + doReturn(persitentEntityMock).when(mappingContextMock).getRequiredPersistentEntity(Mockito.any(Class.class)); + doReturn(Person.class).when(persitentEntityMock).getType(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContextMock); + converter.afterPropertiesSet(); + + doReturn(converter).when(mongoOperationsMock).getConverter(); + + doReturn(executableFind).when(mongoOperationsMock).query(any()); + doReturn(withQueryMock).when(executableFind).as(any()); + doReturn(withQueryMock).when(withQueryMock).matching(any(Query.class)); + doReturn(Flux.empty()).when(withQueryMock).all(); + doReturn(Mono.empty()).when(withQueryMock).first(); + doReturn(Mono.empty()).when(withQueryMock).one(); + + doReturn(executableUpdate).when(mongoOperationsMock).update(any()); + doReturn(executableUpdate).when(executableUpdate).inCollection(anyString()); + doReturn(updateWithQuery).when(executableUpdate).matching(any(Query.class)); + doReturn(terminatingUpdate).when(updateWithQuery).apply(any(UpdateDefinition.class)); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollation() { + + createQueryForMethod("findWithCollationUsingSpimpleStringValueByFirstName", String.class) // + .executeBlocking(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingDocumentByFirstName", String.class) // + .executeBlocking(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsString() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", new Document("locale", "en_US") }); + + 
ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsLocale() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", Locale.US }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldThrowExceptionOnNonParsableCollation() { + + assertThatIllegalArgumentException().isThrownBy(() -> { + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", 100 }); + }); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationIn() { + + createQueryForMethod("findWithCollationUsingPlaceholderInDocumentByFirstName", String.class, String.class) // + .executeBlocking(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationWithMultiplePlaceholders() { + + createQueryForMethod("findWithCollationUsingPlaceholdersInDocumentByFirstName", String.class, String.class, + int.class) // + .executeBlocking(new Object[] { "dalinar", "en_US", 2 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").strength(2).toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyCollationParameter() { + + Collation collation = Collation.of("en_US"); + createQueryForMethod("findWithCollationParameterByFirstName", String.class, Collation.class) // + .executeBlocking(new Object[] { "dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldOverrideAnnotation() { + + Collation collation = Collation.of("de_AT"); + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .executeBlocking(new Object[] { "dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldNotBeAppliedWhenNullOverrideAnnotation() { + + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .executeBlocking(new Object[] { "dalinar", null }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + 
} + + @Test // GH-3230 + void findShouldApplyHint() { + + createQueryForMethod("findWithHintByFirstname", String.class).executeBlocking(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-fn"); + } + + @Test // GH-3230 + void updateShouldApplyHint() { + + when(terminatingUpdate.all()).thenReturn(Mono.just(mock(UpdateResult.class))); + + createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) // + .executeBlocking(new Object[] { "dalinar", 100 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(executableUpdate).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-ln"); + } + + @Test // GH-2971 + void findShouldApplyReadPreference() { + + createQueryForMethod("findWithReadPreferenceByFirstname", String.class).executeBlocking(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + private ReactiveMongoQueryFake createQueryForMethod(String methodName, Class... paramTypes) { + return createQueryForMethod(Repo.class, methodName, paramTypes); + } + + private ReactiveMongoQueryFake createQueryForMethod(Class repository, String methodName, Class... paramTypes) { + + try { + + Method method = repository.getMethod(methodName, paramTypes); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, + new DefaultRepositoryMetadata(repository), factory, mappingContextMock); + + return new ReactiveMongoQueryFake(queryMethod, mongoOperationsMock); + } catch (Exception e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + + private static class ReactiveMongoQueryFake extends AbstractReactiveMongoQuery { + + private boolean isDeleteQuery; + private boolean isLimitingQuery; + + ReactiveMongoQueryFake(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations) { + super(method, operations, + new ValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor( + new StandardEnvironment(), + Collections.emptySet()), + ValueExpressionParser.create(SpelExpressionParser::new))); + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + return Mono.just(new BasicQuery("{'foo':'bar'}")); + } + + Object executeBlocking(Object[] parameters) { + return Flux.from(super.execute(parameters)).collectList().block(); + } + + @Override + protected boolean isCountQuery() { + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; + } + + @Override + protected boolean isDeleteQuery() { + return isDeleteQuery; + } + + @Override + protected boolean isLimiting() { + return isLimitingQuery; + } + + public ReactiveMongoQueryFake setDeleteQuery(boolean isDeleteQuery) { + this.isDeleteQuery = isDeleteQuery; + return this; + } + + public ReactiveMongoQueryFake setLimitingQuery(boolean limitingQuery) { + + isLimitingQuery = limitingQuery; + return this; + } + + @Override + protected Mono getCodecRegistry() { + return Mono.just(MongoClientSettings.getDefaultCodecRegistry()); + } + } + + private interface Repo extends ReactiveMongoRepository { + + @org.springframework.data.mongodb.repository.Query(collation = "en_US") + 
List findWithCollationUsingSpimpleStringValueByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithCollationUsingDocumentByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "?1") + List findWithCollationUsingPlaceholderByFirstName(String firstname, Object collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : '?1' }") + List findWithCollationUsingPlaceholderInDocumentByFirstName(String firstname, String collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : '?1', 'strength' : ?#{[2]}}") + List findWithCollationUsingPlaceholdersInDocumentByFirstName(String firstname, String collation, + int strength); + + List findWithCollationParameterByFirstName(String firstname, Collation collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithWithCollationParameterAndAnnotationByFirstName(String firstname, Collation collation); + + @Hint("idx-ln") + @Update("{ '$inc' : { 'visits' : ?1 } }") + void findAndIncreaseVisitsByLastname(String lastname, int value); + + @Hint("idx-fn") + void findWithHintByFirstname(String firstname); + + @ReadPreference(value = "secondaryPreferred") + Flux findWithReadPreferenceByFirstname(String firstname); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java index 4692b0ef25..71b2ff7581 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,20 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.Arrays; import java.util.Collection; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; @@ -41,46 +42,41 @@ /** * Unit tests for {@link ConvertingParameterAccessor}. - * + * * @author Oliver Gierke * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class ConvertingParameterAccessorUnitTests { +@ExtendWith(MockitoExtension.class) +class ConvertingParameterAccessorUnitTests { - @Mock MongoDbFactory factory; + @Mock MongoDatabaseFactory factory; @Mock MongoParameterAccessor accessor; - MongoMappingContext context; - MappingMongoConverter converter; - DbRefResolver resolver; + private MongoMappingContext context; + private MappingMongoConverter converter; + private DbRefResolver resolver; - @Before - public void setUp() { + @BeforeEach + void setUp() { + when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); this.context = new MongoMappingContext(); this.resolver = new DefaultDbRefResolver(factory); this.converter = new MappingMongoConverter(resolver, context); } - @SuppressWarnings("deprecation") - @Test(expected = IllegalArgumentException.class) - public void rejectsNullMongoDbFactory() { - new MappingMongoConverter((MongoDbFactory) null, context); - } - - @Test(expected = IllegalArgumentException.class) - public void rejectsNullDbRefResolver() { - new MappingMongoConverter((DbRefResolver) null, context); + @Test + void rejectsNullDbRefResolver() { + assertThatIllegalArgumentException().isThrownBy(() -> new MappingMongoConverter((DbRefResolver) null, context)); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullContext() { - new MappingMongoConverter(resolver, null); + @Test + void rejectsNullContext() { + assertThatIllegalArgumentException().isThrownBy(() -> new MappingMongoConverter(resolver, null)); } @Test - public void convertsCollectionUponAccess() { + void convertsCollectionUponAccess() { when(accessor.getBindableValue(0)).thenReturn(Arrays.asList("Foo")); @@ -90,47 +86,41 @@ public void convertsCollectionUponAccess() { BasicDBList reference = new BasicDBList(); reference.add("Foo"); - assertThat(result, is((Object) reference)); + assertThat(result).isEqualTo((Object) reference); } - /** - * @see DATAMONGO-505 - */ - @Test - public void convertsAssociationsToDBRef() { + @Test // 
DATAMONGO-505 + void convertsAssociationsToDBRef() { Property property = new Property(); property.id = 5L; Object result = setupAndConvert(property); - assertThat(result, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(result).isInstanceOf(com.mongodb.DBRef.class); com.mongodb.DBRef dbRef = (com.mongodb.DBRef) result; - assertThat(dbRef.getCollectionName(), is("property")); - assertThat(dbRef.getId(), is((Object) 5L)); + assertThat(dbRef.getCollectionName()).isEqualTo("property"); + assertThat(dbRef.getId()).isEqualTo((Object) 5L); } - /** - * @see DATAMONGO-505 - */ - @Test - public void convertsAssociationsToDBRefForCollections() { + @Test // DATAMONGO-505 + void convertsAssociationsToDBRefForCollections() { Property property = new Property(); property.id = 5L; Object result = setupAndConvert(Arrays.asList(property)); - assertThat(result, is(instanceOf(Collection.class))); + assertThat(result).isInstanceOf(Collection.class); Collection collection = (Collection) result; - assertThat(collection, hasSize(1)); + assertThat(collection).hasSize(1); Object element = collection.iterator().next(); - assertThat(element, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(element).isInstanceOf(com.mongodb.DBRef.class); com.mongodb.DBRef dbRef = (com.mongodb.DBRef) element; - assertThat(dbRef.getCollectionName(), is("property")); - assertThat(dbRef.getId(), is((Object) 5L)); + assertThat(dbRef.getCollectionName()).isEqualTo("property"); + assertThat(dbRef.getId()).isEqualTo((Object) 5L); } private Object setupAndConvert(Object... parameters) { @@ -138,8 +128,8 @@ private Object setupAndConvert(Object... parameters) { MongoParameterAccessor delegate = new StubParameterAccessor(parameters); PotentiallyConvertingIterator iterator = new ConvertingParameterAccessor(converter, delegate).iterator(); - MongoPersistentEntity entity = context.getPersistentEntity(Entity.class); - MongoPersistentProperty property = entity.getPersistentProperty("property"); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(Entity.class); + MongoPersistentProperty property = entity.getRequiredPersistentProperty("property"); return iterator.nextConverted(property); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java index 821aab557c..19ac837513 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 by the original author(s). + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,76 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Persistable; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.mongodb.repository.support.MappingMongoEntityInformation; +import org.springframework.data.repository.core.EntityInformation; /** * Unit tests for {@link MappingMongoEntityInformation}. - * + * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class MappingMongoEntityInformationUnitTests { @Mock MongoPersistentEntity info; + @Mock MongoPersistentEntity persistableImplementingEntityTypeInfo; - @Before - public void setUp() { + @Test // DATAMONGO-248 + public void usesEntityCollectionIfNoCustomOneGiven() { - when(info.getType()).thenReturn(Person.class); when(info.getCollection()).thenReturn("Person"); - } - - /** - * @see DATAMONGO-248 - */ - @Test - public void usesEntityCollectionIfNoCustomOneGiven() { MongoEntityInformation information = new MappingMongoEntityInformation(info); - assertThat(information.getCollectionName(), is("Person")); + assertThat(information.getCollectionName()).isEqualTo("Person"); } - /** - * @see DATAMONGO-248 - */ - @Test + @Test // DATAMONGO-248 public void usesCustomCollectionIfGiven() { MongoEntityInformation information = new MappingMongoEntityInformation(info, "foobar"); - assertThat(information.getCollectionName(), is("foobar")); + assertThat(information.getCollectionName()).isEqualTo("foobar"); + } + + @Test // DATAMONGO-1590 + public void considersPersistableIsNew() { + + EntityInformation information = new MappingMongoEntityInformation<>( + persistableImplementingEntityTypeInfo); + + assertThat(information.isNew(new TypeImplementingPersistable(100L, false))).isFalse(); + } + + static final class TypeImplementingPersistable implements Persistable { + + private final Long id; + private final boolean isNew; + + public TypeImplementingPersistable(Long id, boolean isNew) { + this.id = id; + this.isNew = isNew; + } + + public Long getId() { + return this.id; + } + + public boolean isNew() { + return this.isNew; + } + + public String toString() { + return "MappingMongoEntityInformationUnitTests.TypeImplementingPersistable(id=" + this.getId() + ", isNew=" + + this.isNew() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java index df6dd2715a..1c856394d8 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,23 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.lang.reflect.Method; import java.util.List; -import org.hamcrest.core.IsNull; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -38,7 +41,7 @@ /** * Unit tests for {@link MongoParametersParameterAccessor}. 
- * + * * @author Oliver Gierke * @author Christoph Strobl */ @@ -50,14 +53,14 @@ public class MongoParametersParameterAccessorUnitTests { ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); @Test - public void returnsNullForDistanceIfNoneAvailable() throws NoSuchMethodException, SecurityException { + public void returnsUnboundedForDistanceIfNoneAvailable() throws NoSuchMethodException, SecurityException { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class); MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { new Point(10, 20) }); - assertThat(accessor.getDistanceRange().getUpperBound(), is(nullValue())); + assertThat(accessor.getDistanceRange().getUpperBound().isBounded()).isFalse(); } @Test @@ -68,13 +71,10 @@ public void returnsDistanceIfAvailable() throws NoSuchMethodException, SecurityE MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { new Point(10, 20), DISTANCE }); - assertThat(accessor.getDistanceRange().getUpperBound(), is(DISTANCE)); + assertThat(accessor.getDistanceRange().getUpperBound()).isEqualTo(Bound.inclusive(DISTANCE)); } - /** - * @see DATAMONGO-973 - */ - @Test + @Test // DATAMONGO-973 public void shouldReturnAsFullTextStringWhenNoneDefinedForMethod() throws NoSuchMethodException, SecurityException { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); @@ -82,13 +82,10 @@ public void shouldReturnAsFullTextStringWhenNoneDefinedForMethod() throws NoSuch MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { new Point(10, 20), DISTANCE }); - assertThat(accessor.getFullText(), IsNull.nullValue()); + assertThat(accessor.getFullText()).isNull(); } - /** - * @see DATAMONGO-973 - */ - @Test + @Test // DATAMONGO-973 public void shouldProperlyConvertTextCriteria() throws NoSuchMethodException, SecurityException { Method method = PersonRepository.class.getMethod("findByFirstname", String.class, TextCriteria.class); @@ -96,14 +93,11 @@ public void shouldProperlyConvertTextCriteria() throws NoSuchMethodException, Se MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { "spring", TextCriteria.forDefaultLanguage().matching("data") }); - assertThat(accessor.getFullText().getCriteriaObject().toString(), - equalTo("{ \"$text\" : { \"$search\" : \"data\"}}")); + assertThat(accessor.getFullText().getCriteriaObject().toJson()) + .isEqualTo(Document.parse("{ \"$text\" : { \"$search\" : \"data\"}}").toJson()); } - /** - * @see DATAMONGO-1110 - */ - @Test + @Test // DATAMONGO-1110 public void shouldDetectMinAndMaxDistance() throws NoSuchMethodException, SecurityException { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Range.class); @@ -117,8 +111,46 @@ public void shouldDetectMinAndMaxDistance() throws NoSuchMethodException, Securi Range range = accessor.getDistanceRange(); - assertThat(range.getLowerBound(), is(min)); - assertThat(range.getUpperBound(), is(max)); + assertThat(range.getLowerBound()).isEqualTo(Bound.inclusive(min)); + assertThat(range.getUpperBound()).isEqualTo(Bound.inclusive(max)); + } + + @Test // DATAMONGO-1854 + public void shouldDetectCollation() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findByFirstname", 
String.class, Collation.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + Collation collation = Collation.of("en_US"); + MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, + new Object[] { "dalinar", collation }); + + assertThat(accessor.getCollation()).isEqualTo(collation); + } + + @Test // GH-2107 + public void shouldReturnUpdateIfPresent() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findAndModifyByFirstname", String.class, UpdateDefinition.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + Update update = new Update(); + MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, + new Object[] { "dalinar", update }); + + assertThat(accessor.getUpdate()).isSameAs(update); + } + + @Test // GH-2107 + public void shouldReturnNullIfNoUpdatePresent() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, + new Object[] { new Point(0,0) }); + + assertThat(accessor.getUpdate()).isNull(); } interface PersonRepository extends Repository { @@ -130,5 +162,10 @@ interface PersonRepository extends Repository { List findByLocationNear(Point point, Range distances); List findByFirstname(String firstname, TextCriteria fullText); + + List findByFirstname(String firstname, Collation collation); + + List findAndModifyByFirstname(String firstname, UpdateDefinition update); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java index e7468d4a10..93674e23fc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,138 +15,173 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.lang.reflect.Method; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Near; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.repository.query.Parameter; +import org.springframework.data.repository.query.ParametersSource; /** * Unit tests for {@link MongoParameters}. - * + * * @author Oliver Gierke * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class MongoParametersUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoParametersUnitTests { @Mock MongoQueryMethod queryMethod; @Test - public void discoversDistanceParameter() throws NoSuchMethodException, SecurityException { + void discoversDistanceParameter() throws NoSuchMethodException, SecurityException { + Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); - MongoParameters parameters = new MongoParameters(method, false); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); - assertThat(parameters.getNumberOfParameters(), is(2)); - assertThat(parameters.getMaxDistanceIndex(), is(1)); - assertThat(parameters.getBindableParameters().getNumberOfParameters(), is(1)); + assertThat(parameters.getNumberOfParameters()).isEqualTo(2); + assertThat(parameters.getMaxDistanceIndex()).isEqualTo(1); + assertThat(parameters.getBindableParameters().getNumberOfParameters()).isOne(); Parameter parameter = parameters.getParameter(1); - assertThat(parameter.isSpecialParameter(), is(true)); - assertThat(parameter.isBindable(), is(false)); + assertThat(parameter.isSpecialParameter()).isTrue(); + assertThat(parameter.isBindable()).isFalse(); } @Test - public void doesNotConsiderPointAsNearForSimpleQuery() throws Exception { + void doesNotConsiderPointAsNearForSimpleQuery() throws Exception { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); - MongoParameters parameters = new MongoParameters(method, false); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); - assertThat(parameters.getNearIndex(), is(-1)); + assertThat(parameters.getNearIndex()).isEqualTo(-1); } - @Test(expected = IllegalStateException.class) - public void rejectsMultiplePointsForGeoNearMethod() throws Exception { + @Test + void 
rejectsMultiplePointsForGeoNearMethod() throws Exception { + Method method = PersonRepository.class.getMethod("findByLocationNearAndOtherLocation", Point.class, Point.class); - new MongoParameters(method, true); + + assertThatIllegalStateException().isThrownBy(() -> new MongoParameters(ParametersSource.of(method), true)); } - @Test(expected = IllegalStateException.class) - public void rejectsMultipleDoubleArraysForGeoNearMethod() throws Exception { + @Test + void rejectsMultipleDoubleArraysForGeoNearMethod() throws Exception { + Method method = PersonRepository.class.getMethod("invalidDoubleArrays", double[].class, double[].class); - new MongoParameters(method, true); + + assertThatIllegalStateException().isThrownBy(() -> new MongoParameters(ParametersSource.of(method), true)); } @Test - public void doesNotRejectMultiplePointsForSimpleQueryMethod() throws Exception { + void doesNotRejectMultiplePointsForSimpleQueryMethod() throws Exception { + Method method = PersonRepository.class.getMethod("someOtherMethod", Point.class, Point.class); - new MongoParameters(method, false); + new MongoParameters(ParametersSource.of(method), false); } @Test - public void findsAnnotatedPointForGeoNearQuery() throws Exception { + void findsAnnotatedPointForGeoNearQuery() throws Exception { + Method method = PersonRepository.class.getMethod("findByOtherLocationAndLocationNear", Point.class, Point.class); - MongoParameters parameters = new MongoParameters(method, true); - assertThat(parameters.getNearIndex(), is(1)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), true); + assertThat(parameters.getNearIndex()).isOne(); } @Test - public void findsAnnotatedDoubleArrayForGeoNearQuery() throws Exception { + void findsAnnotatedDoubleArrayForGeoNearQuery() throws Exception { + Method method = PersonRepository.class.getMethod("validDoubleArrays", double[].class, double[].class); - MongoParameters parameters = new MongoParameters(method, true); - assertThat(parameters.getNearIndex(), is(1)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), true); + assertThat(parameters.getNearIndex()).isOne(); } - /** - * @see DATAMONGO-973 - */ - @Test - public void shouldFindTextCriteriaAtItsIndex() throws SecurityException, NoSuchMethodException { + @Test // DATAMONGO-973 + void shouldFindTextCriteriaAtItsIndex() throws SecurityException, NoSuchMethodException { Method method = PersonRepository.class.getMethod("findByNameAndText", String.class, TextCriteria.class); - MongoParameters parameters = new MongoParameters(method, false); - assertThat(parameters.getFullTextParameterIndex(), is(1)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + assertThat(parameters.getFullTextParameterIndex()).isOne(); } - /** - * @see DATAMONGO-973 - */ - @Test - public void shouldTreatTextCriteriaParameterAsSpecialParameter() throws SecurityException, NoSuchMethodException { + @Test // DATAMONGO-973 + void shouldTreatTextCriteriaParameterAsSpecialParameter() throws SecurityException, NoSuchMethodException { Method method = PersonRepository.class.getMethod("findByNameAndText", String.class, TextCriteria.class); - MongoParameters parameters = new MongoParameters(method, false); - assertThat(parameters.getParameter(parameters.getFullTextParameterIndex()).isSpecialParameter(), is(true)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + 
assertThat(parameters.getParameter(parameters.getFullTextParameterIndex()).isSpecialParameter()).isTrue();
 	}

-	/**
-	 * @see DATAMONGO-1110
-	 */
-	@Test
-	public void shouldFindMinAndMaxDistanceParameters() throws NoSuchMethodException, SecurityException {
+	@Test // DATAMONGO-1110
+	void shouldFindMinAndMaxDistanceParameters() throws NoSuchMethodException, SecurityException {

 		Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Range.class);
-		MongoParameters parameters = new MongoParameters(method, false);
+		MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false);

-		assertThat(parameters.getRangeIndex(), is(1));
-		assertThat(parameters.getMaxDistanceIndex(), is(-1));
+		assertThat(parameters.getRangeIndex()).isOne();
+		assertThat(parameters.getMaxDistanceIndex()).isEqualTo(-1);
 	}

-	/**
-	 * @see DATAMONGO-1110
-	 */
-	@Test
-	public void shouldNotHaveMinDistanceIfOnlyOneDistanceParameterPresent() throws NoSuchMethodException,
-			SecurityException {
+	@Test // DATAMONGO-1110
+	void shouldNotHaveMinDistanceIfOnlyOneDistanceParameterPresent() throws NoSuchMethodException, SecurityException {
+
+		Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class);
+		MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false);
+
+		assertThat(parameters.getRangeIndex()).isEqualTo(-1);
+		assertThat(parameters.getMaxDistanceIndex()).isOne();
+	}
+
+	@Test // DATAMONGO-1854
+	void shouldReturnMinusOneIfCollationParameterDoesNotExist() throws NoSuchMethodException, SecurityException {

 		Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class);
-		MongoParameters parameters = new MongoParameters(method, false);
+		MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false);
+
+		assertThat(parameters.getCollationParameterIndex()).isEqualTo(-1);
+	}
+
+	@Test // DATAMONGO-1854
+	void shouldReturnIndexOfCollationParameterIfExists() throws NoSuchMethodException, SecurityException {
+
+		Method method = PersonRepository.class.getMethod("findByText", String.class, Collation.class);
+		MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false);
+
+		assertThat(parameters.getCollationParameterIndex()).isOne();
+	}

-		assertThat(parameters.getRangeIndex(), is(-1));
-		assertThat(parameters.getMaxDistanceIndex(), is(1));
+	@Test // GH-2107
+	void shouldReturnIndexUpdateIfExists() throws NoSuchMethodException, SecurityException {
+
+		Method method = PersonRepository.class.getMethod("findAndModifyByFirstname", String.class, UpdateDefinition.class, Pageable.class);
+		MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false);
+
+		assertThat(parameters.getUpdateIndex()).isOne();
+	}
+
+	@Test // GH-2107
+	void shouldReturnInvalidIndexIfUpdateDoesNotExist() throws NoSuchMethodException, SecurityException {
+
+		Method method = PersonRepository.class.getMethod("someOtherMethod", Point.class, Point.class);
+		MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false);
+
+		assertThat(parameters.getUpdateIndex()).isEqualTo(-1);
 	}

 	interface PersonRepository {
@@ -166,5 +201,9 @@ interface PersonRepository {

 		List<Person> findByNameAndText(String name, TextCriteria text);

 		List<Person> findByLocationNear(Point point, Range<Distance> range);
+
+		List<Person> findByText(String text, Collation collation);
+
+		List<Person> findAndModifyByFirstname(String firstname, UpdateDefinition update, Pageable page);
 	}
 }
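The GH-2107 additions above teach `MongoParameters` to locate an `UpdateDefinition` argument via `getUpdateIndex()`, which is what lets a derived `findAndModifyBy…` method receive the modification to apply at invocation time. A rough caller-side sketch mirroring the `PersonRepository` fixture (entity, field, and method names are invented for illustration; this is not code from the patch):

```java
import java.util.List;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.repository.Repository;

interface PersonRepository extends Repository<Person, String> {

	// The UpdateDefinition parameter is detected via getUpdateIndex() rather
	// than bound into the derived query; the query comes from the method name.
	List<Person> findAndModifyByLastname(String lastname, UpdateDefinition update);
}

// Caller side: Update implements UpdateDefinition, so a plain Update works.
// List<Person> touched = repository.findAndModifyByLastname("Kholin", new Update().inc("visits", 1));
```

diff --git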
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java index 9780f32be9..609e0a0018 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,35 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.repository.query.StubParameterAccessor.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.reflect.Method; import java.util.List; +import java.util.regex.Pattern; +import org.bson.BsonRegularExpression; +import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.Venue; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.geo.GeoJsonLineString; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; import org.springframework.data.mongodb.core.mapping.DBRef; @@ -58,77 +58,66 @@ import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; import org.springframework.data.repository.query.parser.PartTree; -import com.mongodb.DBObject; - /** * Unit test for {@link MongoQueryCreator}. 
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl */ -public class MongoQueryCreatorUnitTests { - - Method findByFirstname, findByFirstnameAndFriend, findByFirstnameNotNull; - - MappingContext, MongoPersistentProperty> context; - MongoConverter converter; +class MongoQueryCreatorUnitTests { - @Rule public ExpectedException expection = ExpectedException.none(); + private MappingContext, MongoPersistentProperty> context; + private MongoConverter converter; - @Before - public void setUp() throws SecurityException, NoSuchMethodException { + @BeforeEach + void beforeEach() { context = new MongoMappingContext(); - - DbRefResolver resolver = new DefaultDbRefResolver(mock(MongoDbFactory.class)); - converter = new MappingMongoConverter(resolver, context); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); } @Test - public void createsQueryCorrectly() throws Exception { + void createsQueryCorrectly() { PartTree tree = new PartTree("findByFirstName", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Oliver"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").is("Oliver")))); + assertThat(query).isEqualTo(query(where("firstName").is("Oliver"))); } - /** - * @see DATAMONGO-469 - */ - @Test - public void createsAndQueryCorrectly() { + @Test // DATAMONGO-469 + void createsAndQueryCorrectly() { Person person = new Person(); MongoQueryCreator creator = new MongoQueryCreator(new PartTree("findByFirstNameAndFriend", Person.class), getAccessor(converter, "Oliver", person), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").is("Oliver").and("friend").is(person)))); + assertThat(query).isEqualTo(query(where("firstName").is("Oliver").and("friend").is(person))); } @Test - public void createsNotNullQueryCorrectly() { + void createsNotNullQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameNotNull", Person.class); Query query = new MongoQueryCreator(tree, getAccessor(converter), context).createQuery(); - assertThat(query, is(new Query(Criteria.where("firstName").ne(null)))); + assertThat(query).isEqualTo(new Query(Criteria.where("firstName").ne(null))); } @Test - public void createsIsNullQueryCorrectly() { + void createsIsNullQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameIsNull", Person.class); Query query = new MongoQueryCreator(tree, getAccessor(converter), context).createQuery(); - assertThat(query, is(new Query(Criteria.where("firstName").is(null)))); + assertThat(query).isEqualTo(new Query(Criteria.where("firstName").is(null))); } @Test - public void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception { + void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception { Point point = new Point(10, 20); Distance distance = new Distance(2.5, Metrics.KILOMETERS); @@ -139,7 +128,7 @@ public void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception } @Test - public void bindsDistanceParameterToNearCorrectly() throws Exception { + void bindsDistanceParameterToNearCorrectly() throws Exception { Point point = new Point(10, 20); Distance distance = new Distance(2.5); @@ -150,139 +139,112 @@ public void bindsDistanceParameterToNearCorrectly() throws Exception { } @Test - public void createsLessThanEqualQueryCorrectly() throws Exception { + void createsLessThanEqualQueryCorrectly() { PartTree tree = new PartTree("findByAgeLessThanEqual", Person.class); 
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, 18), context); Query reference = query(where("age").lte(18)); - assertThat(creator.createQuery(), is(reference)); + assertThat(creator.createQuery()).isEqualTo(reference); } @Test - public void createsGreaterThanEqualQueryCorrectly() throws Exception { + void createsGreaterThanEqualQueryCorrectly() { PartTree tree = new PartTree("findByAgeGreaterThanEqual", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, 18), context); Query reference = query(where("age").gte(18)); - assertThat(creator.createQuery(), is(reference)); + assertThat(creator.createQuery()).isEqualTo(reference); } - /** - * @see DATAMONGO-338 - */ - @Test - public void createsExistsClauseCorrectly() { + @Test // DATAMONGO-338 + void createsExistsClauseCorrectly() { PartTree tree = new PartTree("findByAgeExists", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, true), context); Query query = query(where("age").exists(true)); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } - /** - * @see DATAMONGO-338 - */ - @Test - public void createsRegexClauseCorrectly() { + @Test // DATAMONGO-338 + void createsRegexClauseCorrectly() { PartTree tree = new PartTree("findByFirstNameRegex", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, ".*"), context); Query query = query(where("firstName").regex(".*")); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } - /** - * @see DATAMONGO-338 - */ - @Test - public void createsTrueClauseCorrectly() { + @Test // DATAMONGO-338 + void createsTrueClauseCorrectly() { PartTree tree = new PartTree("findByActiveTrue", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter), context); Query query = query(where("active").is(true)); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } - /** - * @see DATAMONGO-338 - */ - @Test - public void createsFalseClauseCorrectly() { + @Test // DATAMONGO-338 + void createsFalseClauseCorrectly() { PartTree tree = new PartTree("findByActiveFalse", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter), context); Query query = query(where("active").is(false)); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } - /** - * @see DATAMONGO-413 - */ - @Test - public void createsOrQueryCorrectly() { + @Test // DATAMONGO-413 + void createsOrQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameOrAge", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Dave", 42), context); Query query = creator.createQuery(); - assertThat(query, is(query(new Criteria().orOperator(where("firstName").is("Dave"), where("age").is(42))))); + assertThat(query).isEqualTo(query(new Criteria().orOperator(where("firstName").is("Dave"), where("age").is(42)))); } - /** - * @see DATAMONGO-347 - */ - @Test - public void createsQueryReferencingADBRefCorrectly() { + @Test // DATAMONGO-347 + void createsQueryReferencingADBRefCorrectly() { User user = new User(); user.id = new ObjectId(); PartTree tree = new PartTree("findByCreator", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, user), context); - DBObject queryObject = 
creator.createQuery().getQueryObject(); + Document queryObject = creator.createQuery().getQueryObject(); - assertThat(queryObject.get("creator"), is((Object) user)); + assertThat(queryObject.get("creator")).isEqualTo(user); } - /** - * @see DATAMONGO-418 - */ - @Test - public void createsQueryWithStartingWithPredicateCorrectly() { + @Test // DATAMONGO-418 + void createsQueryWithStartingWithPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameStartingWith", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Matt"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex("^Matt")))); + assertThat(query).isEqualTo(query(where("username").regex("^Matt"))); } - /** - * @see DATAMONGO-418 - */ - @Test - public void createsQueryWithEndingWithPredicateCorrectly() { + @Test // DATAMONGO-418 + void createsQueryWithEndingWithPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameEndingWith", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "ews"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex("ews$")))); + assertThat(query).isEqualTo(query(where("username").regex("ews$"))); } - /** - * @see DATAMONGO-418 - */ - @Test - public void createsQueryWithContainingPredicateCorrectly() { + @Test // DATAMONGO-418 + void createsQueryWithContainingPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex(".*thew.*")))); + assertThat(query).isEqualTo(query(where("username").regex(".*thew.*"))); } private void assertBindsDistanceToQuery(Point point, Distance distance, Query reference) throws Exception { @@ -298,178 +260,149 @@ private void assertBindsDistanceToQuery(Point point, Distance distance, Query re Query query = new MongoQueryCreator(tree, new ConvertingParameterAccessor(converter, accessor), context) .createQuery(); - assertThat(query, is(query)); + assertThat(query).isEqualTo(query); } - /** - * @see DATAMONGO-770 - */ - @Test - public void createsQueryWithFindByIgnoreCaseCorrectly() { + @Test // DATAMONGO-770 + void createsQueryWithFindByIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByfirstNameIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("^dave$", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i"))); } - /** - * @see DATAMONGO-770 - */ - @Test - public void createsQueryWithFindByNotIgnoreCaseCorrectly() { + @Test // GH-4404 + void createsQueryWithFindByInClauseHavingIgnoreCaseCorrectly() { + + PartTree tree = new PartTree("findAllByFirstNameInIgnoreCase", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, List.of("da've", "carter")), context); + + Query query = creator.createQuery(); + assertThat(query).isEqualTo(query(where("firstName") + .in(List.of(new BsonRegularExpression("^\\Qda've\\E$", "i"), new BsonRegularExpression("^carter$", "i"))))); + } + + @Test // DATAMONGO-770 + void createsQueryWithFindByNotIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameNotIgnoreCase", Person.class); 
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query.toString(), is(query(where("firstName").not().regex("^dave$", "i")).toString())); + assertThat(query.toString()).isEqualTo(query(where("firstName").not().regex("^dave$", "i")).toString()); } - /** - * @see DATAMONGO-770 - */ - @Test - public void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { + @Test // DATAMONGO-770 + void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameStartingWithIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("^dave", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave", "i"))); } - /** - * @see DATAMONGO-770 - */ - @Test - public void createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { + @Test // DATAMONGO-770 + void createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameEndingWithIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("dave$", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex("dave$", "i"))); } - /** - * @see DATAMONGO-770 - */ - @Test - public void createsQueryWithFindByContainingIgnoreCaseCorrectly() { + @Test // DATAMONGO-770 + void createsQueryWithFindByContainingIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameContainingIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex(".*dave.*", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex(".*dave.*", "i"))); } - /** - * @see DATAMONGO-770 - */ - @Test - public void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty() { - - expection.expect(IllegalArgumentException.class); - expection.expectMessage("must be of type String"); + @Test // DATAMONGO-770 + void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty() { PartTree tree = new PartTree("findByFirstNameAndAgeIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "foo", 42), context); - creator.createQuery(); + assertThatIllegalArgumentException().isThrownBy(creator::createQuery) + .withMessageContaining("must be of type String"); } - /** - * @see DATAMONGO-770 - */ - @Test - public void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase() { + @Test // DATAMONGO-770 + void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase() { PartTree tree = new PartTree("findByFirstNameAndAgeAllIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("^dave$", "i").and("age").is(42)))); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i").and("age").is(42))); } - /** - * @see DATAMONGO-566 - */ - @Test - public void shouldCreateDeleteByQueryCorrectly() { + @Test // DATAMONGO-566 + void shouldCreateDeleteByQueryCorrectly() { PartTree 
tree = new PartTree("deleteByFirstName", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); Query query = creator.createQuery(); - assertThat(tree.isDelete(), is(true)); - assertThat(query, is(query(where("firstName").is("dave")))); + assertThat(tree.isDelete()).isTrue(); + assertThat(query).isEqualTo(query(where("firstName").is("dave"))); } - /** - * @see DATAMONGO-566 - */ - @Test - public void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressions() { + @Test // DATAMONGO-566 + void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressions() { PartTree tree = new PartTree("deleteByFirstNameAndAgeAllIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); Query query = creator.createQuery(); - assertThat(tree.isDelete(), is(true)); - assertThat(query, is(query(where("firstName").regex("^dave$", "i").and("age").is(42)))); + assertThat(tree.isDelete()).isTrue(); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i").and("age").is(42))); } - /** - * @see DATAMONGO-1075 - */ - @Test - public void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { + @Test // DATAMONGO-1075 + void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { PartTree tree = new PartTree("findByEmailAddressesContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("emailAddresses").in("dave")))); + assertThat(query).isEqualTo(query(where("emailAddresses").in("dave"))); } - /** - * @see DATAMONGO-1075 - */ - @Test - public void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { + @Test // DATAMONGO-1075 + void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { PartTree tree = new PartTree("findByEmailAddressesNotContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("emailAddresses").not().in("dave")))); + assertThat(query).isEqualTo(query(where("emailAddresses").not().in("dave"))); } - /** - * @see DATAMONGO-1075 - */ - @Test - public void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { + @Test // DATAMONGO-1075, DATAMONGO-1425 + void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { PartTree tree = new PartTree("findByUsernameNotContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex(".*thew.*").not()))); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*thew.*")).getQueryObject().toJson()); } - /** - * @see DATAMONGO-1139 - */ - @Test - public void createsNonShericalNearForDistanceWithDefaultMetric() { + @Test // DATAMONGO-1139 + void createsNonSphericalNearForDistanceWithDefaultMetric() { Point point = new Point(1.0, 1.0); Distance distance = new Distance(1.0); @@ -478,14 +411,11 @@ public void createsNonShericalNearForDistanceWithDefaultMetric() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, distance), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("location").near(point).maxDistance(1.0)))); + 
assertThat(query).isEqualTo(query(where("location").near(point).maxDistance(1.0))); } - /** - * @see DATAMONGO-1136 - */ - @Test - public void shouldCreateWithinQueryCorrectly() { + @Test // DATAMONGO-1136 + void shouldCreateWithinQueryCorrectly() { Point first = new Point(1, 1); Point second = new Point(2, 2); @@ -496,14 +426,11 @@ public void shouldCreateWithinQueryCorrectly() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, shape), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address.geo").within(shape)))); + assertThat(query).isEqualTo(query(where("address.geo").within(shape))); } - /** - * @see DATAMONGO-1110 - */ - @Test - public void shouldCreateNearSphereQueryForSphericalProperty() { + @Test // DATAMONGO-1110 + void shouldCreateNearSphereQueryForSphericalProperty() { Point point = new Point(10, 20); @@ -511,14 +438,11 @@ public void shouldCreateNearSphereQueryForSphericalProperty() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address2dSphere.geo").nearSphere(point)))); + assertThat(query).isEqualTo(query(where("address2dSphere.geo").nearSphere(point))); } - /** - * @see DATAMONGO-1110 - */ - @Test - public void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDefaultMetric() { + @Test // DATAMONGO-1110 + void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDefaultMetric() { Point point = new Point(1.0, 1.0); Distance distance = new Distance(1.0); @@ -527,14 +451,11 @@ public void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDef MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, distance), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address2dSphere.geo").nearSphere(point).maxDistance(1.0)))); + assertThat(query).isEqualTo(query(where("address2dSphere.geo").nearSphere(point).maxDistance(1.0))); } - /** - * @see DATAMONGO-1110 - */ - @Test - public void shouldCreateNearQueryForMinMaxDistance() { + @Test // DATAMONGO-1110 + void shouldCreateNearQueryForMinMaxDistance() { Point point = new Point(10, 20); Range range = Distance.between(new Distance(10), new Distance(20)); @@ -543,129 +464,209 @@ public void shouldCreateNearQueryForMinMaxDistance() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, range), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address.geo").near(point).minDistance(10D).maxDistance(20D)))); + assertThat(query).isEqualTo(query(where("address.geo").near(point).minDistance(10D).maxDistance(20D))); } - /** - * @see DATAMONGO-1229 - */ - @Test - public void appliesIgnoreCaseToLeafProperty() { + @Test // DATAMONGO-1229 + void appliesIgnoreCaseToLeafProperty() { PartTree tree = new PartTree("findByAddressStreetIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "Street"); - assertThat(new MongoQueryCreator(tree, accessor, context).createQuery(), is(notNullValue())); + assertThat(new MongoQueryCreator(tree, accessor, context).createQuery()).isNotNull(); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void ignoreCaseShouldEscapeSource() { + @Test // DATAMONGO-1232 + void ignoreCaseShouldEscapeSource() { PartTree tree = new PartTree("findByUsernameIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, 
"con.flux+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("^\\Qcon.flux+\\E$", "i")))); + assertThat(query).isEqualTo(query(where("username").regex("^\\Qcon.flux+\\E$", "i"))); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { + @Test // DATAMONGO-1232 + void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { PartTree tree = new PartTree("findByUsernameStartingWithIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "dawns.light+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("^\\Qdawns.light+\\E", "i")))); + assertThat(query).isEqualTo(query(where("username").regex("^\\Qdawns.light+\\E", "i"))); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { + @Test // DATAMONGO-1232 + void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { PartTree tree = new PartTree("findByUsernameEndingWithIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "new.ton+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("\\Qnew.ton+\\E$", "i")))); + assertThat(query).isEqualTo(query(where("username").regex("\\Qnew.ton+\\E$", "i"))); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { + @Test // DATAMONGO-1232 + void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex(".*\\Qfire.fight+\\E.*")))); + assertThat(query).isEqualTo(query(where("username").regex(".*\\Qfire.fight+\\E.*"))); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { + @Test // DATAMONGO-1232 + void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex(".*\\Qsteel.heart+\\E")))); + assertThat(query).isEqualTo(query(where("username").regex(".*\\Qsteel.heart+\\E"))); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { + @Test // DATAMONGO-1232 + void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "cala.mity+*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("\\Qcala.mity+\\E.*")))); + assertThat(query).isEqualTo(query(where("username").regex("\\Qcala.mity+\\E.*"))); } - /** - * @see DATAMONGO-1232 - */ - @Test - public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { + @Test // DATAMONGO-1232 + void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { PartTree tree = new PartTree("findByUsernameLike", User.class); 
ConvertingParameterAccessor accessor = getAccessor(converter, "*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex(".*")))); + assertThat(query).isEqualTo(query(where("username").regex(".*"))); } - /** - * @see DATAMONGO-1342 - */ - @Test - public void bindsNullValueToContainsClause() { + @Test // DATAMONGO-1342 + void bindsNullValueToContainsClause() { PartTree partTree = new PartTree("emailAddressesContains", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, new Object[] { null }); Query query = new MongoQueryCreator(partTree, accessor, context).createQuery(); - assertThat(query, is(query(where("emailAddresses").in((Object) null)))); + assertThat(query).isEqualTo(query(where("emailAddresses").in((Object) null))); + } + + @Test // DATAMONGO-1424 + void notLikeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { + + PartTree tree = new PartTree("findByUsernameNotLike", User.class); + ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*"); + + Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); + + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*\\Qfire.fight+\\E.*")).getQueryObject().toJson()); + } + + @Test // DATAMONGO-1424 + void notLikeShouldEscapeSourceWhenUsedWithLeadingWildcard() { + + PartTree tree = new PartTree("findByUsernameNotLike", User.class); + ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+"); + + Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); + + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*\\Qsteel.heart+\\E")).getQueryObject().toJson()); + } + + @Test // DATAMONGO-1424 + void notLikeShouldEscapeSourceWhenUsedWithTrailingWildcard() { + + PartTree tree = new PartTree("findByUsernameNotLike", User.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "cala.mity+*"), context); + Query query = creator.createQuery(); + + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex("\\Qcala.mity+\\E.*")).getQueryObject().toJson()); + } + + @Test // DATAMONGO-1424 + void notLikeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { + + PartTree tree = new PartTree("findByUsernameNotLike", User.class); + ConvertingParameterAccessor accessor = getAccessor(converter, "*"); + + Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*")).getQueryObject().toJson()); + } + + @Test // DATAMONGO-1588 + void queryShouldAcceptSubclassOfDeclaredArgument() { + + PartTree tree = new PartTree("findByLocationNear", User.class); + ConvertingParameterAccessor accessor = getAccessor(converter, new GeoJsonPoint(-74.044502D, 40.689247D)); + + Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); + assertThat(query.getQueryObject()).containsKey("location"); + } + + @Test // DATAMONGO-1588 + void queryShouldThrowExceptionWhenArgumentDoesNotMatchDeclaration() { + + PartTree tree = new PartTree("findByLocationNear", User.class); + ConvertingParameterAccessor accessor = getAccessor(converter, + new GeoJsonLineString(new Point(-74.044502D, 40.689247D), new Point(-73.997330D, 40.730824D))); + + assertThatIllegalArgumentException().isThrownBy(() -> new MongoQueryCreator(tree, accessor, 
context).createQuery()) + .withMessageContaining("Expected parameter type of " + Point.class); + } + + @Test // DATAMONGO-2003 + void createsRegexQueryForPatternCorrectly() { + + PartTree tree = new PartTree("findByFirstNameRegex", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, Pattern.compile(".*")), context); + + assertThat(creator.createQuery()).isEqualTo(query(where("firstName").regex(".*"))); + } + + @Test // DATAMONGO-2003 + void createsRegexQueryForPatternWithOptionsCorrectly() { + + Pattern pattern = Pattern.compile(".*", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE); + + PartTree tree = new PartTree("findByFirstNameRegex", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, pattern), context); + assertThat(creator.createQuery()).isEqualTo(query(where("firstName").regex(".*", "iu"))); + } + + @Test // DATAMONGO-2071 + void betweenShouldAllowSingleRageParameter() { + + PartTree tree = new PartTree("findByAgeBetween", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, + getAccessor(converter, Range.of(Bound.exclusive(10), Bound.exclusive(11))), context); + + assertThat(creator.createQuery()).isEqualTo(query(where("age").gt(10).lt(11))); + } + + @Test // DATAMONGO-2394 + void nearShouldUseMetricDistanceForGeoJsonTypes() { + + GeoJsonPoint point = new GeoJsonPoint(27.987901, 86.9165379); + PartTree tree = new PartTree("findByLocationNear", User.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, + getAccessor(converter, point, new Distance(1, Metrics.KILOMETERS)), context); + + assertThat(creator.createQuery()).isEqualTo(query(where("location").nearSphere(point).maxDistance(1000.0D))); } interface PersonRepository extends Repository { @@ -686,16 +687,21 @@ class User { Address address; Address2dSphere address2dSphere; + + Point location; } static class Address { String street; + Point geo; } static class Address2dSphere { + String street; + @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE) Point geo; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java new file mode 100644 index 0000000000..74ff20b148 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java @@ -0,0 +1,231 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.Collections; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoPage; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.GeoResults; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFindNear; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution; +import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution; +import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.client.result.DeleteResult; + +/** + * Unit tests for {@link MongoQueryExecution}. 
+ * + * @author Mark Paluch + * @author Oliver Gierke + * @author Artyom Gabeev + * @author Christoph Strobl + * @soundtrack U Can't Touch This - MC Hammer + */ +@ExtendWith(MockitoExtension.class) +class MongoQueryExecutionUnitTests { + + @Mock MongoOperations mongoOperationsMock; + @Mock ExecutableFind findOperationMock; + @Mock FindWithQuery operationMock; + @Mock TerminatingFind terminatingMock; + @Mock TerminatingFindNear terminatingGeoMock; + @Mock DbRefResolver dbRefResolver; + + private SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); + private Point POINT = new Point(10, 20); + private Distance DISTANCE = new Distance(2.5, Metrics.KILOMETERS); + private RepositoryMetadata metadata = new DefaultRepositoryMetadata(PersonRepository.class); + private MongoMappingContext context = new MongoMappingContext(); + private ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + private Method method = ReflectionUtils.findMethod(PersonRepository.class, "findByLocationNear", Point.class, + Distance.class, + Pageable.class); + private MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + private MappingMongoConverter converter; + + @BeforeEach + @SuppressWarnings("unchecked") + void setUp() { + + converter = new MappingMongoConverter(dbRefResolver, context); + + } + + @Test // DATAMONGO-1464 + void pagedExecutionShouldNotGenerateCountQueryIfQueryReportedNoResults() { + + doReturn(terminatingMock).when(operationMock).matching(any(Query.class)); + doReturn(Collections.emptyList()).when(terminatingMock).all(); + + PagedExecution execution = new PagedExecution(operationMock, PageRequest.of(0, 10)); + execution.execute(new Query()); + + verify(terminatingMock).all(); + verify(terminatingMock, never()).count(); + } + + @Test // DATAMONGO-1464 + void pagedExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPageSize() { + + doReturn(terminatingMock).when(operationMock).matching(any(Query.class)); + doReturn(Arrays.asList(new Person(), new Person(), new Person(), new Person())).when(terminatingMock).all(); + + PagedExecution execution = new PagedExecution(operationMock, PageRequest.of(0, 10)); + execution.execute(new Query()); + + verify(terminatingMock).all(); + verify(terminatingMock, never()).count(); + } + + @Test // DATAMONGO-1464 + void pagedExecutionRetrievesObjectsForPageableOutOfRange() { + + doReturn(terminatingMock).when(operationMock).matching(any(Query.class)); + doReturn(Collections.emptyList()).when(terminatingMock).all(); + + PagedExecution execution = new PagedExecution(operationMock, PageRequest.of(2, 10)); + execution.execute(new Query()); + + verify(terminatingMock).all(); + verify(terminatingMock).count(); + } + + @Test // DATAMONGO-1464 + void pagingGeoExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPageSize() { + + GeoResult result = new GeoResult<>(new Person(), DISTANCE); + when(mongoOperationsMock.getConverter()).thenReturn(converter); + when(mongoOperationsMock.query(any(Class.class))).thenReturn(findOperationMock); + when(findOperationMock.near(any(NearQuery.class))).thenReturn(terminatingGeoMock); + doReturn(new GeoResults<>(Arrays.asList(result, result, result, result))).when(terminatingGeoMock).all(); + + ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(converter, + new MongoParametersParameterAccessor(queryMethod, new Object[] { POINT, DISTANCE, PageRequest.of(0, 10) })); + + PartTreeMongoQuery query = new PartTreeMongoQuery(queryMethod, mongoOperationsMock, 
EXPRESSION_PARSER, + QueryMethodEvaluationContextProvider.DEFAULT); + + PagingGeoNearExecution execution = new PagingGeoNearExecution(findOperationMock, queryMethod, accessor, query); + execution.execute(new Query()); + + verify(terminatingGeoMock).all(); + } + + @Test // DATAMONGO-1464 + void pagingGeoExecutionRetrievesObjectsForPageableOutOfRange() { + + when(mongoOperationsMock.getConverter()).thenReturn(converter); + when(mongoOperationsMock.query(any(Class.class))).thenReturn(findOperationMock); + when(findOperationMock.near(any(NearQuery.class))).thenReturn(terminatingGeoMock); + doReturn(new GeoResults<>(Collections.emptyList())).when(terminatingGeoMock).all(); + doReturn(terminatingMock).when(findOperationMock).matching(any(Query.class)); + + ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(converter, + new MongoParametersParameterAccessor(queryMethod, new Object[] { POINT, DISTANCE, PageRequest.of(2, 10) })); + + PartTreeMongoQuery query = new PartTreeMongoQuery(queryMethod, mongoOperationsMock, EXPRESSION_PARSER, + QueryMethodEvaluationContextProvider.DEFAULT); + + PagingGeoNearExecution execution = new PagingGeoNearExecution(findOperationMock, queryMethod, accessor, query); + execution.execute(new Query()); + + verify(terminatingGeoMock).all(); + verify(terminatingMock).count(); + } + + @Test // DATAMONGO-2351 + void acknowledgedDeleteReturnsDeletedCount() { + + Method method = ReflectionUtils.findMethod(PersonRepository.class, "deleteAllByLastname", String.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + when(mongoOperationsMock.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(DeleteResult.acknowledged(10)); + + assertThat(new DeleteExecution(mongoOperationsMock, queryMethod).execute(new Query())).isEqualTo(10L); + } + + @Test // DATAMONGO-2351 + void unacknowledgedDeleteReturnsZeroDeletedCount() { + + Method method = ReflectionUtils.findMethod(PersonRepository.class, "deleteAllByLastname", String.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + when(mongoOperationsMock.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(DeleteResult.unacknowledged()); + + assertThat(new DeleteExecution(mongoOperationsMock, queryMethod).execute(new Query())).isEqualTo(0L); + } + + @Test // DATAMONGO-1997 + void deleteExecutionWithEntityReturnTypeTriggersFindAndRemove() { + + Method method = ReflectionUtils.findMethod(PersonRepository.class, "deleteByLastname", String.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + Person person = new Person(); + + when(mongoOperationsMock.findAndRemove(any(Query.class), any(Class.class), anyString())).thenReturn(person); + + assertThat(new DeleteExecution(mongoOperationsMock, queryMethod).execute(new Query())).isEqualTo(person); + } + + interface PersonRepository extends Repository<Person, Long> { + + GeoPage<Person> findByLocationNear(Point point, Distance distance, Pageable pageable); + + Long deleteAllByLastname(String lastname); + + Person deleteByLastname(String lastname); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java index efea28a23b..8f9824e14d 100644 ---
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2016 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.lang.reflect.Method; import java.util.Collection; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoPage; @@ -31,11 +30,18 @@ import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.User; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.annotation.Collation; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Aggregation; import org.springframework.data.mongodb.repository.Contact; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.mongodb.repository.ReadPreference; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; @@ -47,12 +53,13 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ public class MongoQueryMethodUnitTests { MongoMappingContext context; - @Before + @BeforeEach public void setUp() { context = new MongoMappingContext(); } @@ -63,8 +70,8 @@ public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exce MongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method"); MongoEntityMetadata<?> metadata = queryMethod.getEntityInformation(); - assertThat(metadata.getJavaType(), is(typeCompatibleWith(Address.class))); - assertThat(metadata.getCollectionName(), is("contact")); + assertThat(metadata.getJavaType()).isAssignableFrom(Address.class); + assertThat(metadata.getCollectionName()).isEqualTo("contact"); } @Test @@ -73,8 +80,8 @@ public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Excep MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method"); MongoEntityMetadata<?> entityInformation = queryMethod.getEntityInformation(); -
assertThat(entityInformation.getJavaType(), is(typeCompatibleWith(Person.class))); - assertThat(entityInformation.getCollectionName(), is("person")); + assertThat(entityInformation.getJavaType()).isAssignableFrom(Person.class); + assertThat(entityInformation.getCollectionName()).isEqualTo("person"); } @Test @@ -82,34 +89,35 @@ public void discoversUserAsDomainTypeForGeoPageQueryMethod() throws Exception { MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class, Pageable.class); - assertThat(queryMethod.isGeoNearQuery(), is(true)); - assertThat(queryMethod.isPageQuery(), is(true)); + assertThat(queryMethod.isGeoNearQuery()).isTrue(); + assertThat(queryMethod.isPageQuery()).isTrue(); queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class); - assertThat(queryMethod.isGeoNearQuery(), is(true)); - assertThat(queryMethod.isPageQuery(), is(false)); - assertThat(queryMethod.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); - - assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery(), - is(true)); - assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery(), - is(true)); - assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery(), - is(true)); + assertThat(queryMethod.isGeoNearQuery()).isTrue(); + assertThat(queryMethod.isPageQuery()).isFalse(); + assertThat(queryMethod.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + + assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery()) + .isTrue(); } - @Test(expected = IllegalArgumentException.class) - public void rejectsGeoPageQueryWithoutPageable() throws Exception { - queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class); + @Test + public void rejectsGeoPageQueryWithoutPageable() { + assertThatIllegalArgumentException() + .isThrownBy(() -> queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullMappingContext() throws Exception { Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class); - new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), - new SpelAwareProxyProjectionFactory(), null); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new MongoQueryMethod(method, + new DefaultRepositoryMetadata(PersonRepository.class), new SpelAwareProxyProjectionFactory(), null)); } @Test @@ -118,8 +126,8 @@ public void considersMethodReturningGeoPageAsPagingMethod() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class, Pageable.class); - assertThat(method.isPageQuery(), is(true)); - assertThat(method.isCollectionQuery(), is(false)); + assertThat(method.isPageQuery()).isTrue(); + assertThat(method.isCollectionQuery()).isFalse(); } @Test @@ -128,88 +136,235 @@ public void createsMongoQueryMethodObjectForMethodReturningAnInterface() throws 
queryMethod(SampleRepository2.class, "methodReturningAnInterface"); } - /** - * @see DATAMONGO-957 - */ - @Test + @Test // DATAMONGO-957 public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().hasValues(), is(false)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().hasValues()).isFalse(); } - /** - * @see DATAMONGO-957 - */ - @Test + @Test // DATAMONGO-957 public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec()).isEqualTo(100L); } + @Test // DATAMONGO-1311 + public void createsMongoQueryMethodWithBatchSizeCorrectly() throws Exception { - /** - * @see DATAMONGO-1403 - */ - @Test - public void createsMongoQueryMethodWithSpellFixedMaxExecutionTimeCorrectly() throws Exception { + MongoQueryMethod method = queryMethod(PersonRepository.class, "batchSize"); - MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSpellFixedMaxExecutionTime"); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getCursorBatchSize()).isEqualTo(100); + } + + @Test // DATAMONGO-1311 + public void createsMongoQueryMethodWithNegativeBatchSizeCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "negativeBatchSize"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getCursorBatchSize()).isEqualTo(-200); } - /** - * @see DATAMONGO-957 - */ - @Test - public void createsMongoQueryMethodWithMaxScanCorrectly() throws Exception { + @Test // DATAMONGO-1403 + public void createsMongoQueryMethodWithSpellFixedMaxExecutionTimeCorrectly() throws Exception { - MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxScan"); + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSpellFixedMaxExecutionTime"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxScan(), is(10L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec()).isEqualTo(100L); } - /** - * @see DATAMONGO-957 - */ - @Test + @Test // DATAMONGO-957 public void createsMongoQueryMethodWithCommentCorrectly() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithComment"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getComment(), is("foo bar")); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getComment()).isEqualTo("foo bar"); } - /** - * @see DATAMONGO-957 - */ - @Test - public void createsMongoQueryMethodWithSnapshotCorrectly() throws Exception { + @Test // DATAMONGO-1480 + public void createsMongoQueryMethodWithNoCursorTimeoutCorrectly() throws Exception { - 
MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSnapshotUsage"); + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithNoCursorTimeout"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getSnapshot(), is(true)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getFlags()) + .contains(org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT); } - /** - * @see DATAMONGO-1266 - */ - @Test + @Test // DATAMONGO-1480, DATAMONGO-2572 + public void createsMongoQueryMethodWithMultipleFlagsCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMultipleFlags"); + + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getFlags()).contains( + org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT, + org.springframework.data.mongodb.core.query.Meta.CursorOption.SECONDARY_READS); + } + + @Test // DATAMONGO-1266 public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); - assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); + assertThat(method.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + } + + @Test // DATAMONGO-2153 + public void findsAnnotatedAggregation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregation"); + + assertThat(method.hasAnnotatedAggregation()).isTrue(); + assertThat(method.getAnnotatedAggregation()).hasSize(1); + } + + @Test // DATAMONGO-2153 + public void detectsCollationForAggregation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregationWithCollation"); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + @Test // GH-2107 + void detectsModifyingQueryByUpdateType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findAndUpdateBy", String.class, Update.class); + + assertThat(method.isModifyingQuery()).isTrue(); + } + + @Test // GH-2107 + void detectsModifyingQueryByUpdateDefinitionType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findAndUpdateBy", String.class, + UpdateDefinition.class); + + assertThat(method.isModifyingQuery()).isTrue(); + } + + @Test // GH-2107 + void detectsModifyingQueryByAggregationUpdateDefinitionType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findAndUpdateBy", String.class, + AggregationUpdate.class); + + assertThat(method.isModifyingQuery()).isTrue(); + } + + @Test // GH-2107 + void queryCreationFailsOnInvalidUpdate() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndUpdateByLastname", String.class).verify()) // + .withMessageContaining("Update") // + .withMessageContaining("findAndUpdateByLastname"); + } + + @Test // GH-2107 + void queryCreationForUpdateMethodFailsOnInvalidReturnType() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndIncrementVisitsByFirstname", String.class) + .verify()) // 
+ .withMessageContaining("Update") // + .withMessageContaining("numeric") // + .withMessageContaining("findAndIncrementVisitsByFirstname"); + } + + @Test // GH-3002 + void readsCollationFromAtCollationAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithCollationFromAtCollationByFirstname", + String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void readsCollationFromAtQueryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithCollationFromAtQueryByFirstname", + String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void annotatedCollationClashSelectsAtCollationAnnotationValue() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, + "findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + @Test // GH-2971 + void readsReadPreferenceAtQueryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceFromAtQueryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtQueryByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void annotatedReadPreferenceClashSelectsAtReadPreferenceAnnotationValue() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceAtRepositoryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("primaryPreferred"); + } + + @Test // GH-2971 + void detectsReadPreferenceForAggregation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregationWithReadPreference"); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-4546 + void errorsOnInvalidAggregation() { + + assertThatIllegalStateException() // + .isThrownBy(() -> queryMethod(InvalidAggregationMethodRepo.class, "findByAggregation").verify()) // + .withMessageContaining("Invalid aggregation") // + .withMessageContaining("findByAggregation"); } private MongoQueryMethod queryMethod(Class repository, String name, Class... parameters) throws Exception { @@ -219,6 +374,7 @@ private MongoQueryMethod queryMethod(Class repository, String name, Class. 
return new MongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context); } + @ReadPreference(value = "primaryPreferred") interface PersonRepository extends Repository<User, Long> { // Misses Pageable @@ -235,25 +391,66 @@ interface PersonRepository extends Repository<User, Long> { @Meta List<Person> emptyMetaAnnotation(); - @Meta(maxExcecutionTime = 100) + @Meta(cursorBatchSize = 100) + List<Person> batchSize(); + + @Meta(cursorBatchSize = -200) + List<Person> negativeBatchSize(); + + @Meta(maxExecutionTimeMs = 100) List<Person> metaWithMaxExecutionTime(); @Meta(maxExecutionTimeMs = 100) List<Person> metaWithSpellFixedMaxExecutionTime(); - @Meta(maxScanDocuments = 10) - List<Person> metaWithMaxScan(); @Meta(comment = "foo bar") List<Person> metaWithComment(); - @Meta(snapshot = true) - List<Person> metaWithSnapshotUsage(); + @Meta(flags = { org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT }) + List<Person> metaWithNoCursorTimeout(); + + @Meta(flags = { org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT, + org.springframework.data.mongodb.core.query.Meta.CursorOption.SECONDARY_READS }) + List<Person> metaWithMultipleFlags(); - /** - * @see DATAMONGO-1266 - */ + // DATAMONGO-1266 void deleteByUserName(String userName); + + @Aggregation("{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }") + List<Person> findByAggregation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", + collation = "de_AT") + List<Person> findByAggregationWithCollation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", readPreference = "secondaryPreferred") + List<Person> findByAggregationWithReadPreference(); + + void findAndUpdateBy(String firstname, Update update); + + void findAndUpdateBy(String firstname, UpdateDefinition update); + + void findAndUpdateBy(String firstname, AggregationUpdate update); + + @Collation("en_US") + List<Person> findWithCollationFromAtCollationByFirstname(String firstname); + + @Query(collation = "en_US") + List<Person> findWithCollationFromAtQueryByFirstname(String firstname); + + @Collation("de_AT") + @Query(collation = "en_US") + List<Person> findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + List<Person> findWithReadPreferenceFromAtReadPreferenceByFirstname(String firstname); + + @Query(readPreference = "secondaryPreferred") + List<Person> findWithReadPreferenceFromAtQueryByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + @Query(readPreference = "primaryPreferred") + List<Person> findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname(String firstname); } interface SampleRepository extends Repository<Contact, Long> { @@ -268,6 +465,21 @@ interface SampleRepository2 extends Repository<Person, Long> { Customer methodReturningAnInterface(); } + interface InvalidUpdateMethodRepo extends Repository<Person, Long> { + + @org.springframework.data.mongodb.repository.Update + void findAndUpdateByLastname(String lastname); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : 1 } }") + Person findAndIncrementVisitsByFirstname(String firstname); + } + + interface InvalidAggregationMethodRepo extends Repository<Person, Long> { + + @Aggregation("[{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }]") + List<Person> findByAggregation(); + } + interface Customer { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java index dc61cfbfd2..e0b9b77099 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2015 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,181 +15,172 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.core.query.IsTextQuery.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.reflect.Method; +import java.util.List; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.bson.json.JsonParseException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.Query; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; - -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; -import com.mongodb.util.JSONParseException; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.expression.spel.standard.SpelExpressionParser; /** * Unit tests for {@link PartTreeMongoQuery}. 
- * + * @author Christoph Strobl * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class PartTreeMongoQueryUnitTests { +@ExtendWith(MockitoExtension.class) +class PartTreeMongoQueryUnitTests { @Mock MongoOperations mongoOperationsMock; + @Mock ExecutableFind<Person> findOperationMock; - MongoMappingContext mappingContext; - - public @Rule ExpectedException exception = ExpectedException.none(); + private MongoMappingContext mappingContext; - @Before - public void setUp() { + @BeforeEach + void setUp() { mappingContext = new MongoMappingContext(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class)); - MongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); - when(mongoOperationsMock.getConverter()).thenReturn(converter); + doReturn(converter).when(mongoOperationsMock).getConverter(); + doReturn(findOperationMock).when(mongoOperationsMock).query(any()); } - /** - * @see DATAMOGO-952 - */ - @Test - public void rejectsInvalidFieldSpecification() { + @Test // DATAMONGO-952 + void rejectsInvalidFieldSpecification() { - exception.expect(IllegalStateException.class); - exception.expectMessage("findByLastname"); - - deriveQueryFromMethod("findByLastname", new Object[] { "foo" }); + assertThatIllegalStateException().isThrownBy(() -> deriveQueryFromMethod("findByLastname", "foo")) + .withMessageContaining("findByLastname"); } - /** - * @see DATAMOGO-952 - */ - @Test - public void singleFieldJsonIncludeRestrictionShouldBeConsidered() { + @Test // DATAMONGO-952 + void singleFieldJsonIncludeRestrictionShouldBeConsidered() { - org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstname", - new Object[] { "foo" }); + org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstname", "foo"); - assertThat(query.getFieldsObject(), is(new BasicDBObjectBuilder().add("firstname", 1).get())); + assertThat(query.getFieldsObject()).isEqualTo(new Document().append("firstname", 1)); } - /** - * @see DATAMOGO-952 - */ - @Test - public void multiFieldJsonIncludeRestrictionShouldBeConsidered() { + @Test // DATAMONGO-952 + void multiFieldJsonIncludeRestrictionShouldBeConsidered() { - org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstnameAndLastname", - new Object[] { "foo", "bar" }); + org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstnameAndLastname", "foo", + "bar"); - assertThat(query.getFieldsObject(), is(new BasicDBObjectBuilder().add("firstname", 1).add("lastname", 1).get())); + assertThat(query.getFieldsObject()).isEqualTo(new Document().append("firstname", 1).append("lastname", 1)); } - /** - * @see DATAMOGO-952 - */ - @Test - public void multiFieldJsonExcludeRestrictionShouldBeConsidered() { + @Test // DATAMONGO-952 + void multiFieldJsonExcludeRestrictionShouldBeConsidered() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstnameAndLastname", - new Object[] { "foo", "bar" }); + "foo", "bar"); - assertThat(query.getFieldsObject(), is(new BasicDBObjectBuilder().add("firstname", 0).add("lastname", 0).get())); + assertThat(query.getFieldsObject()).isEqualTo(new Document().append("firstname", 0).append("lastname", 0)); } - /** - * @see DATAMOGO-973 - */ - @Test - public void
shouldAddFullTextParamCorrectlyToDerivedQuery() { + @Test // DATAMONGO-973 + void shouldAddFullTextParamCorrectlyToDerivedQuery() { - org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstname", - new Object[] { "text", TextCriteria.forDefaultLanguage().matching("search") }); + org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstname", "text", + TextCriteria.forDefaultLanguage().matching("search")); - assertThat(query, isTextQuery().searchingFor("search").where(new Criteria("firstname").is("text"))); + assertThat(query.getQueryObject()).containsEntry("$text.$search", "search").containsEntry("firstname", "text"); } - /** - * @see DATAMONGO-1180 - */ - @Test - public void propagatesRootExceptionForInvalidQuery() { + @Test // DATAMONGO-1180 + void propagatesRootExceptionForInvalidQuery() { - exception.expect(IllegalStateException.class); - exception.expectCause(is(org.hamcrest.Matchers.<Throwable> instanceOf(JSONParseException.class))); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> deriveQueryFromMethod("findByAge", 1)) + .withCauseInstanceOf(JsonParseException.class); + } - deriveQueryFromMethod("findByAge", new Object[] { 1 }); + @Test // DATAMONGO-1345, DATAMONGO-1735 + void doesNotDeriveFieldSpecForNormalDomainType() { + assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEmpty(); } - /** - * @see DATAMONGO-1345 - */ - @Test - public void doesNotDeriveFieldSpecForNormalDomainType() { - assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject(), is(nullValue())); + @Test // DATAMONGO-1345 + void restrictsQueryToFieldsRequiredForProjection() { + + Document fieldsObject = deriveQueryFromMethod("findPersonProjectedBy", new Object[0]).getFieldsObject(); + + assertThat(fieldsObject.get("firstname")).isEqualTo(1); + assertThat(fieldsObject.get("lastname")).isEqualTo(1); + } + + @Test // DATAMONGO-1345 + void restrictsQueryToFieldsRequiredForDto() { + + Document fieldsObject = deriveQueryFromMethod("findPersonDtoByAge", new Object[] { 42 }).getFieldsObject(); + + assertThat(fieldsObject.get("firstname")).isEqualTo(1); + assertThat(fieldsObject.get("lastname")).isEqualTo(1); } - /** - * @see DATAMONGO-1345 - */ - @Test - public void restrictsQueryToFieldsRequiredForProjection() { + @Test // DATAMONGO-1345 + void usesDynamicProjection() { - DBObject fieldsObject = deriveQueryFromMethod("findPersonProjectedBy", new Object[0]).getFieldsObject(); + Document fields = deriveQueryFromMethod("findDynamicallyProjectedBy", ExtendedProjection.class).getFieldsObject(); - assertThat(fieldsObject.get("firstname"), is((Object) 1)); - assertThat(fieldsObject.get("lastname"), is((Object) 1)); + assertThat(fields.get("firstname")).isEqualTo(1); + assertThat(fields.get("lastname")).isEqualTo(1); + assertThat(fields.get("age")).isEqualTo(1); } - /** - * @see DATAMONGO-1345 - */ - @Test - public void restrictsQueryToFieldsRequiredForDto() { + @Test // DATAMONGO-1500 + void shouldLeaveParameterConversionToQueryMapper() { - DBObject fieldsObject = deriveQueryFromMethod("findPersonDtoByAge", new Object[] { 42 }).getFieldsObject(); + org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findBySex", Sex.FEMALE); - assertThat(fieldsObject.get("firstname"), is((Object) 1)); - assertThat(fieldsObject.get("lastname"), is((Object) 1)); + assertThat(query.getQueryObject().get("sex")).isEqualTo(Sex.FEMALE); +
assertThat(query.getFieldsObject().get("firstname")).isEqualTo(1); } - /** - * @see DATAMONGO-1345 - */ - @Test - public void usesDynamicProjection() { + @Test // DATAMONGO-1729, DATAMONGO-1735 + void doesNotCreateFieldsObjectForOpenProjection() { + + org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy"); - DBObject fields = deriveQueryFromMethod("findDynamicallyProjectedBy", ExtendedProjection.class).getFieldsObject(); + assertThat(query.getFieldsObject()).isEmpty(); + } + + @Test // DATAMONGO-1865 + void limitingReturnsTrueIfTreeIsLimiting() { + assertThat(createQueryForMethod("findFirstBy").isLimiting()).isTrue(); + } - assertThat(fields.get("firstname"), is((Object) 1)); - assertThat(fields.get("lastname"), is((Object) 1)); - assertThat(fields.get("age"), is((Object) 1)); + @Test // DATAMONGO-1865 + void limitingReturnsFalseIfTreeIsNotLimiting() { + assertThat(createQueryForMethod("findPersonBy").isLimiting()).isFalse(); } private org.springframework.data.mongodb.core.query.Query deriveQueryFromMethod(String method, Object... args) { @@ -215,10 +206,9 @@ private PartTreeMongoQuery createQueryForMethod(String methodName, Class<?>... p MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(Repo.class), factory, mappingContext); - return new PartTreeMongoQuery(queryMethod, mongoOperationsMock); - } catch (NoSuchMethodException e) { - throw new IllegalArgumentException(e.getMessage(), e); - } catch (SecurityException e) { + return new PartTreeMongoQuery(queryMethod, mongoOperationsMock, new SpelExpressionParser(), + QueryMethodEvaluationContextProvider.DEFAULT); + } catch (Exception e) { throw new IllegalArgumentException(e.getMessage(), e); } } @@ -249,6 +239,13 @@ interface Repo extends MongoRepository<Person, Long> { PersonDto findPersonDtoByAge(Integer age); <T> T findDynamicallyProjectedBy(Class<T> type); + + @Query(fields = "{ 'firstname' : 1 }") + List<Person> findBySex(Sex sex); + + OpenProjection findAllBy(); + + Person findFirstBy(); } interface PersonProjection { @@ -273,4 +270,12 @@ public PersonDto(String firstname, String lastname) { this.lastname = lastname; } } + + interface OpenProjection { + + String getFirstname(); + + @Value("#{target.firstname + ' ' + target.lastname}") + String getFullname(); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java new file mode 100644 index 0000000000..21d5dc71fb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java @@ -0,0 +1,139 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.lang.reflect.Method; +import java.util.Arrays; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.reactivestreams.Publisher; + +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.DeleteExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution; +import org.springframework.data.util.ClassTypeInformation; +import org.springframework.util.ClassUtils; + +import com.mongodb.client.result.DeleteResult; + +/** + * Unit tests for {@link ReactiveMongoQueryExecution}. + * + * @author Mark Paluch + * @author Artyom Gabeev + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveMongoQueryExecutionUnitTests { + + @Mock private ReactiveMongoOperations operations; + @Mock private MongoParameterAccessor parameterAccessor; + @Mock private MongoQueryMethod method; + + @Test // DATAMONGO-1444 + public void geoNearExecutionShouldApplyQuerySettings() throws Exception { + + Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear"); + Query query = new Query(); + when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2)); + when(parameterAccessor.getDistanceRange()) + .thenReturn(Range.from(Bound.inclusive(new Distance(10))).to(Bound.inclusive(new Distance(15)))); + when(parameterAccessor.getPageable()).thenReturn(PageRequest.of(1, 10)); + + new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query, + Person.class, "person"); + + ArgumentCaptor queryArgumentCaptor = ArgumentCaptor.forClass(NearQuery.class); + verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person")); + + NearQuery nearQuery = queryArgumentCaptor.getValue(); + assertThat(nearQuery.toDocument().get("near")).isEqualTo(Arrays.asList(1d, 2d)); + assertThat(nearQuery.getSkip()).isEqualTo(10L); + assertThat(nearQuery.getMinDistance()).isEqualTo(new Distance(10)); + assertThat(nearQuery.getMaxDistance()).isEqualTo(new Distance(15)); + } + + @Test // DATAMONGO-1444 + public void geoNearExecutionShouldApplyMinimalSettings() throws Exception { + + Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear"); + Query query = new Query(); + when(parameterAccessor.getPageable()).thenReturn(Pageable.unpaged()); + when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2)); + when(parameterAccessor.getDistanceRange()).thenReturn(Range.unbounded()); + + new 
GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query, + Person.class, "person"); + + ArgumentCaptor<NearQuery> queryArgumentCaptor = ArgumentCaptor.forClass(NearQuery.class); + verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person")); + + NearQuery nearQuery = queryArgumentCaptor.getValue(); + assertThat(nearQuery.toDocument().get("near")).isEqualTo(Arrays.asList(1d, 2d)); + assertThat(nearQuery.getSkip()).isEqualTo(0L); + assertThat(nearQuery.getMinDistance()).isNull(); + assertThat(nearQuery.getMaxDistance()).isNull(); + } + + @Test // DATAMONGO-2351 + public void acknowledgedDeleteReturnsDeletedCount() { + + when(operations.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(Mono.just(DeleteResult.acknowledged(10))); + + Mono.from((Publisher<Long>) new DeleteExecution(operations, method).execute(new Query(), Class.class, "")) // + .as(StepVerifier::create) // + .expectNext(10L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2351 + public void unacknowledgedDeleteReturnsZeroDeletedCount() { + + when(operations.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(Mono.just(DeleteResult.unacknowledged())); + + Mono.from((Publisher<Long>) new DeleteExecution(operations, method).execute(new Query(), Class.class, "")) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + interface GeoRepo { + Flux<GeoResult<Person>> geoNear(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java new file mode 100644 index 0000000000..82cd0a157c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java @@ -0,0 +1,366 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; + +import org.springframework.data.mongodb.repository.query.MongoQueryMethodUnitTests.PersonRepository; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.List; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.User; +import org.springframework.data.mongodb.core.annotation.Collation; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Contact; +import org.springframework.data.mongodb.repository.Meta; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; + +/** + * Unit test for {@link ReactiveMongoQueryMethod}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Jorge Rodríguez + */ +public class ReactiveMongoQueryMethodUnitTests { + + MongoMappingContext context; + + @BeforeEach + public void setUp() { + context = new MongoMappingContext(); + } + + @Test // DATAMONGO-1444 + public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exception { + + ReactiveMongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method"); + MongoEntityMetadata<?> metadata = queryMethod.getEntityInformation(); + + assertThat(metadata.getJavaType()).isAssignableFrom(Address.class); + assertThat(metadata.getCollectionName()).isEqualTo("contact"); + } + + @Test // DATAMONGO-1444 + public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Exception { + + MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method"); + MongoEntityMetadata<?> entityInformation = queryMethod.getEntityInformation(); + + assertThat(entityInformation.getJavaType()).isAssignableFrom(Person.class); + assertThat(entityInformation.getCollectionName()).isEqualTo("person"); + } + + @Test // DATAMONGO-1444 + public void discoversUserAsDomainTypeForGeoPagingQueryMethod() throws Exception { + + MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, + Distance.class, Pageable.class); + assertThat(queryMethod.isGeoNearQuery()).isFalse(); + assertThat(queryMethod.isPageQuery()).isFalse(); + + queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class); + assertThat(queryMethod.isGeoNearQuery()).isFalse(); + assertThat(queryMethod.isPageQuery()).isFalse(); + assertThat(queryMethod.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + + assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery()) + .isFalse(); + assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + } + + @Test // DATAMONGO-1444 + public void rejectsNullMappingContext() throws Exception { + + Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class); + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), + new SpelAwareProxyProjectionFactory(), null)); + } + + @Test // DATAMONGO-1444 + public void rejectsMonoPageableResult() { + assertThatIllegalStateException() + .isThrownBy(() -> queryMethod(PersonRepository.class, "findMonoByLastname", String.class, Pageable.class).verify()); + } + + @Test // DATAMONGO-1444 + public void createsMongoQueryMethodObjectForMethodReturningAnInterface() throws Exception { + queryMethod(SampleRepository2.class, "methodReturningAnInterface"); + } + + @Test // DATAMONGO-1444 + public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation"); + + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().hasValues()).isFalse(); + } + + @Test // DATAMONGO-1444 + public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime"); +
assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec()).isEqualTo(100L); + } + + @Test // DATAMONGO-1444 + public void throwsExceptionOnWrappedPage() { + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> queryMethod(PersonRepository.class, "findMonoPageByLastname", String.class, Pageable.class).verify()); + } + + @Test // DATAMONGO-1444 + public void throwsExceptionOnWrappedSlice() { + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> queryMethod(PersonRepository.class, "findMonoSliceByLastname", String.class, Pageable.class).verify()); + } + + @Test // DATAMONGO-1444 + public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + } + + @Test // DATAMONGO-2153 + public void findsAnnotatedAggregation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregation"); + + Assertions.assertThat(method.hasAnnotatedAggregation()).isTrue(); + Assertions.assertThat(method.getAnnotatedAggregation()).hasSize(1); + } + + @Test // DATAMONGO-2153 + public void detectsCollationForAggregation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregationWithCollation"); + + Assertions.assertThat(method.hasAnnotatedCollation()).isTrue(); + Assertions.assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + @Test // GH-2107 + public void queryCreationFailsOnInvalidUpdate() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndUpdateByLastname", String.class).verify()) // + .withMessageContaining("Update") // + .withMessageContaining("findAndUpdateByLastname"); + } + + @Test // GH-2107 + public void queryCreationForUpdateMethodFailsOnInvalidReturnType() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndIncrementVisitsByFirstname", String.class).verify()) // + .withMessageContaining("Update") // + .withMessageContaining("numeric") // + .withMessageContaining("findAndIncrementVisitsByFirstname"); + } + + @Test // GH-3002 + void readsCollationFromAtCollationAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(MongoQueryMethodUnitTests.PersonRepository.class, "findWithCollationFromAtCollationByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void readsCollationFromAtQueryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(MongoQueryMethodUnitTests.PersonRepository.class, "findWithCollationFromAtQueryByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void annotatedCollationClashSelectsAtCollationAnnotationValue() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname", String.class); + + 
assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + + @Test // GH-2971 + void readsReadPreferenceAtQueryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceFromAtQueryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtQueryByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void annotatedReadPreferenceClashSelectsAtReadPreferenceAnnotationValue() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceAtRepositoryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("primaryPreferred"); + } + + @Test // GH-2971 + void detectsReadPreferenceForAggregation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(MongoQueryMethodUnitTests.PersonRepository.class, "findByAggregationWithReadPreference"); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + private ReactiveMongoQueryMethod queryMethod(Class repository, String name, Class... 
parameters) + throws Exception { + + Method method = repository.getMethod(name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + return new ReactiveMongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context); + } + + @ReadPreference(value = "primaryPreferred") + interface PersonRepository extends Repository<User, Long> { + + Mono<Person> findMonoByLastname(String lastname, Pageable pageRequest); + + Mono<Page<Person>> findMonoPageByLastname(String lastname, Pageable pageRequest); + + Mono<Slice<Person>> findMonoSliceByLastname(String lastname, Pageable pageRequest); + + // Misses Pageable + Flux<Person> findByLocationNear(Point point, Distance distance); + + Flux<Person> findByLocationNear(Point point, Distance distance, Pageable pageable); + + Mono<GeoResult<User>> findByEmailAddress(String lastname, Point location); + + Flux<User> findByFirstname(String firstname, Point location); + + Flux<GeoResult<User>> findByLastname(String lastname, Point location); + + @Meta + Flux<Person> emptyMetaAnnotation(); + + @Meta(maxExecutionTimeMs = 100) + Flux<Person> metaWithMaxExecutionTime(); + + void deleteByUserName(String userName); + + @Aggregation("{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }") + Flux<Person> findByAggregation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", + collation = "de_AT") + Flux<Person> findByAggregationWithCollation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", readPreference = "secondaryPreferred") + Flux<Person> findByAggregationWithReadPreference(); + + @Collation("en_US") + List<Person> findWithCollationFromAtCollationByFirstname(String firstname); + + @Query(collation = "en_US") + List<Person> findWithCollationFromAtQueryByFirstname(String firstname); + + @Collation("de_AT") + @Query(collation = "en_US") + List<Person> findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + Flux<Person> findWithReadPreferenceFromAtReadPreferenceByFirstname(String firstname); + + @Query(readPreference = "secondaryPreferred") + Flux<Person> findWithReadPreferenceFromAtQueryByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + @Query(readPreference = "primaryPreferred") + Flux<Person> findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname(String firstname); + } + + interface SampleRepository extends Repository<Contact, Long> { + + List<Address>
                    method(); + } + + interface SampleRepository2 extends Repository { + + List method(); + + Customer methodReturningAnInterface(); + } + + interface InvalidUpdateMethodRepo extends Repository { + + @org.springframework.data.mongodb.repository.Update + Mono findAndUpdateByLastname(String lastname); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : 1 } }") + Mono findAndIncrementVisitsByFirstname(String firstname); + } + + interface Customer {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregationUnitTests.java new file mode 100644 index 0000000000..c6047ce30d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregationUnitTests.java @@ -0,0 +1,343 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Hint; +import org.springframework.data.mongodb.repository.Meta; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.projection.ProjectionFactory; +import 
org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.ReadPreference; + +/** + * Unit tests for {@link ReactiveStringBasedAggregation}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveStringBasedAggregationUnitTests { + + SpelExpressionParser PARSER = new SpelExpressionParser(); + + @Mock ReactiveMongoOperations operations; + @Mock DbRefResolver dbRefResolver; + MongoConverter converter; + + private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }"; + private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }"; + private static final String RAW_OUT = "{ '$out' : 'authors' }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', names : { '$addToSet' : '$?0' } } }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$?#{[0]}' } } }"; + + private static final Document SORT = Document.parse(RAW_SORT_STRING); + private static final Document GROUP_BY_LASTNAME = Document.parse(RAW_GROUP_BY_LASTNAME_STRING); + + @BeforeEach + public void setUp() { + + converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); + when(operations.getConverter()).thenReturn(converter); + when(operations.aggregate(any(TypedAggregation.class), any())).thenReturn(Flux.empty()); + when(operations.execute(any())).thenReturn(Flux.empty()); + } + + @Test // DATAMONGO-2153 + public void plainStringAggregation() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + } + + @Test // DATAMONGO-2153 + public void plainStringAggregationConsidersMeta() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).contains("expensive-aggregation"); + assertThat(options.getCursorBatchSize()).isEqualTo(42); + } + + @Test // DATAMONGO-2153 + public void plainStringAggregationWithSortParameter() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation", + Sort.by(Direction.DESC, "lastname")); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).isEmpty(); + assertThat(options.getCursorBatchSize()).isNull(); + } + + @Test // DATAMONGO-2153 + public void replaceParameter() { + + AggregationInvocation invocation = 
executeAggregation("parameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + public void replaceSpElParameter() { + + AggregationInvocation invocation = executeAggregation("spelParameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + public void aggregateWithCollation() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation"); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("de_AT")); + } + + @Test // DATAMONGO-2153 + public void aggregateWithCollationParameter() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation", Collation.of("en_US")); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("en_US")); + } + + @Test // DATAMONGO-2557 + void aggregationRetrievesCodecFromDriverJustOnceForMultipleAggregationOperationsInPipeline() { + + executeAggregation("multiOperationPipeline", "firstname"); + verify(operations).execute(any()); + } + + @Test // GH-3230 + void aggregatePicksUpHintFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withHint"); + assertThat(hintOf(invocation)).isEqualTo("idx"); + } + + private AggregationInvocation executeAggregation(String name, Object... args) { + + Class[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(size -> new Class[size]); + ReactiveStringBasedAggregation aggregation = createAggregationForMethod(name, argTypes); + + ArgumentCaptor aggregationCaptor = ArgumentCaptor.forClass(TypedAggregation.class); + ArgumentCaptor targetTypeCaptor = ArgumentCaptor.forClass(Class.class); + + Object result = Flux.from((Publisher) aggregation.execute(args)).blockLast(); + + verify(operations).aggregate(aggregationCaptor.capture(), targetTypeCaptor.capture()); + + return new AggregationInvocation(aggregationCaptor.getValue(), targetTypeCaptor.getValue(), result); + } + + @Test // GH-4088 + void aggregateWithVoidReturnTypeSkipsResultOnOutStage() { + + AggregationInvocation invocation = executeAggregation("outSkipResult"); + + assertThat(skipResultsOf(invocation)).isTrue(); + } + + @Test // GH-4088 + void aggregateWithOutStageDoesNotSkipResults() { + + AggregationInvocation invocation = executeAggregation("outDoNotSkipResult"); + + assertThat(skipResultsOf(invocation)).isFalse(); + } + + @Test // GH-2971 + void aggregatePicksUpReadPreferenceFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withReadPreference"); + assertThat(readPreferenceOf(invocation)).isEqualTo(ReadPreference.secondaryPreferred()); + } + + private ReactiveStringBasedAggregation createAggregationForMethod(String name, Class... 
+ + private ReactiveStringBasedAggregation createAggregationForMethod(String name, Class<?>... parameters) { + + Method method = ClassUtils.getMethod(SampleRepository.class, name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, + new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext()); + return new ReactiveStringBasedAggregation(queryMethod, operations, PARSER, + ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + } + + private List<Document> pipelineOf(AggregationInvocation invocation) { + + AggregationOperationContext context = new TypeBasedAggregationOperationContext( + invocation.aggregation.getInputType(), converter.getMappingContext(), new QueryMapper(converter)); + + return invocation.aggregation.toPipeline(context); + } + + private Class<?> inputTypeOf(AggregationInvocation invocation) { + return invocation.aggregation.getInputType(); + } + + @Nullable + private Collation collationOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getCollation().orElse(null) + : null; + } + + @Nullable + private Object hintOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getHintObject().orElse(null) + : null; + } + + private Boolean skipResultsOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().isSkipResults() + : false; + } + + @Nullable + private ReadPreference readPreferenceOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getReadPreference() + : null; + } + + private Class<?> targetTypeOf(AggregationInvocation invocation) { + return invocation.getTargetType(); + } + + private interface SampleRepository extends ReactiveCrudRepository<Person, Long> { + + @Meta(cursorBatchSize = 42, comment = "expensive-aggregation") + @Aggregation({ RAW_GROUP_BY_LASTNAME_STRING, RAW_SORT_STRING }) + Mono<PersonAggregate> plainStringAggregation(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Mono<PersonAggregate> plainStringAggregation(Sort sort); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER) + Mono<PersonAggregate> parameterReplacementAggregation(String attribute); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER) + Mono<PersonAggregate> spelParameterReplacementAggregation(String arg0); + + @Aggregation(pipeline = {RAW_GROUP_BY_LASTNAME_STRING, GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER}) + Mono<PersonAggregate> multiOperationPipeline(String arg0); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + Mono<PersonAggregate> aggregateWithCollation(); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + Mono<PersonAggregate> aggregateWithCollation(Collation collation); + + @Hint("idx") + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + String withHint(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + Flux<PersonAggregate> outDoNotSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + Mono<Void> outSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }, readPreference = "secondaryPreferred") + Mono<Void> withReadPreference(); + } + + static class PersonAggregate { + + } + + static final class AggregationInvocation { + + private final TypedAggregation aggregation; + private final Class targetType; + private final Object result; + + public AggregationInvocation(TypedAggregation aggregation, Class targetType, Object 
result) { + this.aggregation = aggregation; + this.targetType = targetType; + this.result = result; + } + + public TypedAggregation getAggregation() { + return this.aggregation; + } + + public Class getTargetType() { + return this.targetType; + } + + public Object getResult() { + return this.result; + } + + public String toString() { + return "ReactiveStringBasedAggregationUnitTests.AggregationInvocation(aggregation=" + this.getAggregation() + + ", targetType=" + this.getTargetType() + ", result=" + this.getResult() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java new file mode 100644 index 0000000000..72f9626a57 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java @@ -0,0 +1,360 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Base64; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.core.env.MapPropertySource; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.core.ReactiveFindOperation.ReactiveFind; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; 
+import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.spel.spi.ReactiveEvaluationContextExtension; +import org.springframework.expression.spel.standard.SpelExpressionParser; + +/** + * Unit tests for {@link ReactiveStringBasedMongoQuery}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class ReactiveStringBasedMongoQueryUnitTests { + + ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); + StandardEnvironment environment = new StandardEnvironment(); + + @Mock ReactiveMongoOperations operations; + @Mock DbRefResolver factory; + @Mock ReactiveFind reactiveFind; + + MongoConverter converter; + + Map properties = new HashMap<>(); + MapPropertySource propertySource = new MapPropertySource("mock", properties); + + @BeforeEach + public void setUp() { + + environment.getPropertySources().addFirst(propertySource); + + this.converter = new MappingMongoConverter(factory, new MongoMappingContext()); + + when(operations.query(any())).thenReturn(reactiveFind); + when(operations.execute(any())).thenReturn(Flux.empty()); + } + + @Test // DATAMONGO-1444 + public void bindsSimplePropertyCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // DATAMONGO-1444 + public void bindsComplexPropertyCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByAddress", Address.class); + + Address address = new Address("Foo", "0123", "Bar"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, address); + + Document dbObject = new Document(); + converter.write(address, dbObject); + dbObject.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + Document queryObject = new Document("address", dbObject); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1444 + public void constructsDeleteQueryCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("removeByLastname", String.class); + assertThat(mongoQuery.isDeleteQuery()).isTrue(); + } + + @Test // DATAMONGO-1444 + public void preventsDeleteAndCountFlagAtTheSameTime() { + assertThatIllegalArgumentException().isThrownBy(() -> createQueryForMethod("invalidMethod", String.class)); + } 
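+ + // 'invalidMethod' (declared in SampleRepository below) combines delete = true and count = true on @Query; the two execution modes are mutually exclusive, so creating the query method is expected to fail fast with an IllegalArgumentException.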
+ + @Test // DATAMONGO-2030 + public void shouldSupportExistsProjection() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("existsByLastname", String.class); + + assertThat(mongoQuery.isExistsQuery()).isTrue(); + } + + @Test // DATAMONGO-1444 + public void shouldSupportFindByParameterizedCriteriaAndFields() throws Exception { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + new Document("firstname", "first").append("lastname", "last"), Collections.singletonMap("lastname", 1)); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields", + Document.class, Map.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor).block(); + + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject()); + assertThat(query.getFieldsObject()).isEqualTo(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject()); + } + + @Test // DATAMONGO-1444 + public void shouldParseQueryWithParametersInExpression() throws Exception { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 1, 2, 3, 4); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithParametersInExpression", int.class, + int.class, int.class, int.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor).block(); + + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") + .getQueryObject()); + } + + @Test // DATAMONGO-1444 + public void shouldParseJsonKeyReplacementCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithPlaceholderInKeyOfJsonStructure", + String.class, String.class); + ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "key", "value"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor).block(); + + assertThat(query.getQueryObject()).isEqualTo(new Document().append("key", "value")); + } + + @Test // DATAMONGO-1444 + public void shouldSupportExpressionsInCustomQueries() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpression", String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldSupportPropertiesInCustomQueries() throws Exception { + + properties.put("foo", "bar"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithProperty"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'bar'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // DATAMONGO-1444 + public void 
shouldSupportExpressionsInCustomQueriesWithNestedObject() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1"); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject", + boolean.class, String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // DATAMONGO-1444 + public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndMultipleNestedObjects", + boolean.class, String.class, String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // DATAMONGO-1444 + public void shouldSupportNonQuotedBinaryDataReplacement() throws Exception { + + byte[] binaryData = "Matthews".getBytes("UTF-8"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, binaryData); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinary", byte[].class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { '$binary' : '" + Base64.getEncoder().encodeToString(binaryData) + "', '$type' : '" + 0 + "'}}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1894 + void shouldConsiderReactiveSpelExtension() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("withReactiveSpelExtensions"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{lastname: true}", "{project: true}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + private ReactiveStringBasedMongoQuery createQueryForMethod( + String name, Class... 
parameters) + throws Exception { + + Method method = SampleRepository.class.getMethod(name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, + new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext()); + QueryMethodValueEvaluationContextAccessor accessor = new QueryMethodValueEvaluationContextAccessor( + environment, Collections.singletonList(ReactiveSpelExtension.INSTANCE)); + return new ReactiveStringBasedMongoQuery(queryMethod, operations, new ValueExpressionDelegate(accessor, PARSER)); + } + + private interface SampleRepository extends Repository<Person, Long> { + + @Query("{ 'lastname' : ?0 }") + Mono<Person> findByLastname(String lastname); + + @Query("{ 'lastname' : ?0 }") + Mono<Person> findByLastnameAsBinary(byte[] lastname); + + @Query("{ 'address' : ?0 }") + Mono<Person> findByAddress(Address address); + + @Query(value = "{ 'lastname' : ?0 }", delete = true) + Mono<Void> removeByLastname(String lastname); + + @Query(value = "{ 'lastname' : ?0 }", delete = true, count = true) + Mono<Void> invalidMethod(String lastname); + + @Query(value = "?0", fields = "?1") + Mono<Person> findByParameterizedCriteriaAndFields(Document criteria, Map fields); + + @Query("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}") + Flux<Person> findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4); + + @Query("{ ?0 : ?1}") + Mono<Person> methodWithPlaceholderInKeyOfJsonStructure(String keyReplacement, String valueReplacement); + + @Query("{'lastname': ?#{[0]} }") + Flux<Person> findByQueryWithExpression(String param0); + + @Query("{'lastname': ?${foo} }") + Flux<Person> findByQueryWithProperty(); + + @Query("{'id':?#{ [0] ? { $exists :true} : [1] }}") + Flux<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1); + + @Query("{'id':?#{ [0] ? { $exists :true} : [1] }, 'foo':42, 'bar': ?#{ [0] ? { $exists :false} : [1] }}") + Flux<Person> findByQueryWithExpressionAndMultipleNestedObjects(boolean param0, String param1, String param2); + + @Query(value = "{ 'lastname' : ?0 }", exists = true) + Mono<Boolean> existsByLastname(String lastname); + + @Query(value = "{ 'lastname' : ?#{hasRole()} }", fields = "{project: ?#{hasRole()}}") + Mono<Person> withReactiveSpelExtensions(); + } + + public enum ReactiveSpelExtension implements ReactiveEvaluationContextExtension { + + INSTANCE; + + @Override + public Mono<? extends EvaluationContextExtension> getExtension() { + return Mono.just(SpelExtension.INSTANCE); + } + + @Override + public String getExtensionId() { + return "sample"; + } + } + + public enum SpelExtension implements EvaluationContextExtension { + + INSTANCE; + + @Override + public Object getRootObject() { + return this; + } + + @Override + public String getExtensionId() { + return "sample"; + } + + public boolean hasRole() { + return true; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationOperationUnitTests.java new file mode 100644 index 0000000000..32f9092fbf --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationOperationUnitTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; + +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +/** + * Unit tests for {@link StringAggregationOperation}. + * + * @author Christoph Strobl + */ +public class StringBasedAggregationOperationUnitTests { + + @ParameterizedTest // GH-4712 + @ValueSource(strings = { "$project", "'$project'", "\"$project\"" }) + void extractsAggregationOperatorFromAggregationStringWithoutBindingParameters(String operator) { + + StringAggregationOperation agg = new StringAggregationOperation("{ %s : { 'fn' : 1 } }".formatted(operator), + Object.class, (it) -> Assertions.fail("o_O Parameter binding")); + + assertThat(agg.getOperator()).isEqualTo("$project"); + } + + @Test // GH-4712 + void fallbackToParameterBindingIfAggregationOperatorCannotBeExtractedFromAggregationStringWithoutBindingParameters() { + + StringAggregationOperation agg = new StringAggregationOperation("{ happy-madison : { 'fn' : 1 } }", Object.class, + (it) -> new Document("$project", "")); + + assertThat(agg.getOperator()).isEqualTo("$project"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java new file mode 100644 index 0000000000..85a8650b26 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java @@ -0,0 +1,454 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Hint; +import org.springframework.data.mongodb.repository.Meta; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ReadPreference; + +/** + * Unit tests for {@link StringBasedAggregation}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class StringBasedAggregationUnitTests { + + private SpelExpressionParser PARSER = new SpelExpressionParser(); + + @Mock MongoOperations operations; + @Mock DbRefResolver dbRefResolver; + @Mock AggregationResults aggregationResults; + private MongoConverter converter; + + private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }"; + private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }"; + private static final String RAW_OUT = "{ '$out' : 'authors' }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', names : { '$addToSet' : '$?0' } } }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$?#{[0]}' } } }"; + + private static final Document SORT = Document.parse(RAW_SORT_STRING); + private static final Document GROUP_BY_LASTNAME = Document.parse(RAW_GROUP_BY_LASTNAME_STRING); + + @BeforeEach + void setUp() { + + converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); + when(operations.getConverter()).thenReturn(converter); + when(operations.aggregate(any(TypedAggregation.class), any())).thenReturn(aggregationResults); + when(operations.execute(any())).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + } + + @Test // DATAMONGO-2153 + void plainStringAggregation() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + } + + @Test // DATAMONGO-2153, DATAMONGO-2449 + void plainStringAggregationConsidersMeta() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).contains("expensive-aggregation"); + assertThat(options.getCursorBatchSize()).isEqualTo(42); + assertThat(options.isAllowDiskUse()).isTrue(); + assertThat(options.getMaxTime()).isEqualTo(Duration.ofMillis(100)); + } + + @Test // DATAMONGO-2153, DATAMONGO-2449 + void returnSingleObject() { + + PersonAggregate expected = new PersonAggregate(); + when(aggregationResults.getUniqueMappedResult()).thenReturn(Collections.singletonList(expected)); + + AggregationInvocation invocation = executeAggregation("returnSingleEntity"); + assertThat(invocation.result).isEqualTo(expected); + + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).isEmpty(); + assertThat(options.getCursorBatchSize()).isNull(); + assertThat(options.isAllowDiskUse()).isFalse(); + assertThat(options.getMaxTime()).isEqualTo(Duration.ZERO); + } + + @Test // DATAMONGO-2153 + void returnSingleObjectThrowsError() { + + when(aggregationResults.getUniqueMappedResult()).thenThrow(new IllegalArgumentException("o_O")); + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> executeAggregation("returnSingleEntity")); + } + + @Test // DATAMONGO-2153 + void returnCollection() { + + List expected = Collections.singletonList(new 
PersonAggregate()); + when(aggregationResults.getMappedResults()).thenReturn(expected); + + assertThat(executeAggregation("returnCollection").result).isEqualTo(expected); + } + + @Test // GH-3623 + void returnNullWhenSingleResultIsNotPresent() { + + when(aggregationResults.getMappedResults()).thenReturn(Collections.emptyList()); + + assertThat(executeAggregation("simpleReturnType").result).isNull(); + } + + @Test // DATAMONGO-2153 + void returnRawResultType() { + assertThat(executeAggregation("returnRawResultType").result).isEqualTo(aggregationResults); + } + + @Test // DATAMONGO-2153 + void plainStringAggregationWithSortParameter() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation", + Sort.by(Direction.DESC, "lastname")); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + } + + @Test // DATAMONGO-2153 + void replaceParameter() { + + AggregationInvocation invocation = executeAggregation("parameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + void replaceSpElParameter() { + + AggregationInvocation invocation = executeAggregation("spelParameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + void aggregateWithCollation() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation"); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("de_AT")); + } + + @Test // DATAMONGO-2153 + void aggregateWithCollationParameter() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation", Collation.of("en_US")); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("en_US")); + } + + @Test // GH-3543 + void aggregationWithSliceReturnType() { + + StringBasedAggregation sba = createAggregationForMethod("aggregationWithSliceReturnType", Pageable.class); + + Object result = sba.execute(new Object[] { PageRequest.of(0, 1) }); + + assertThat(result).isInstanceOf(Slice.class); + } + + @Test // GH-3543 + void aggregationWithStreamReturnType() { + + when(operations.aggregateStream(any(TypedAggregation.class), any())).thenReturn(Stream.empty()); + + StringBasedAggregation sba = createAggregationForMethod("aggregationWithStreamReturnType", Pageable.class); + + Object result = sba.execute(new Object[] { PageRequest.of(0, 1) }); + + assertThat(result).isInstanceOf(Stream.class); + } + + @Test // DATAMONGO-2506 + void aggregateRaisesErrorOnInvalidReturnType() { + + Method method = ClassUtils.getMethod(UnsupportedRepository.class, "pageIsUnsupported", Pageable.class); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), + factory, converter.getMappingContext()); + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class) // + .isThrownBy(() -> new StringBasedAggregation(queryMethod, operations, PARSER, + QueryMethodEvaluationContextProvider.DEFAULT)) 
// + .withMessageContaining("pageIsUnsupported") // + .withMessageContaining("Page"); + } + + @Test // GH-3230 + void aggregatePicksUpHintFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withHint"); + assertThat(hintOf(invocation)).isEqualTo("idx"); + } + + @Test // GH-4088 + void aggregateWithVoidReturnTypeSkipsResultOnOutStage() { + + AggregationInvocation invocation = executeAggregation("outSkipResult"); + + assertThat(skipResultsOf(invocation)).isTrue(); + } + + @Test // GH-4088 + void aggregateWithOutStageDoesNotSkipResults() { + + AggregationInvocation invocation = executeAggregation("outDoNotSkipResult"); + + assertThat(skipResultsOf(invocation)).isFalse(); + } + + @Test // GH-2971 + void aggregatePicksUpReadPreferenceFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withReadPreference"); + assertThat(readPreferenceOf(invocation)).isEqualTo(ReadPreference.secondaryPreferred()); + } + + private AggregationInvocation executeAggregation(String name, Object... args) { + + Class<?>[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(Class[]::new); + StringBasedAggregation aggregation = createAggregationForMethod(name, argTypes); + + ArgumentCaptor<TypedAggregation> aggregationCaptor = ArgumentCaptor.forClass(TypedAggregation.class); + ArgumentCaptor<Class> targetTypeCaptor = ArgumentCaptor.forClass(Class.class); + + Object result = aggregation.execute(args); + + verify(operations).aggregate(aggregationCaptor.capture(), targetTypeCaptor.capture()); + + return new AggregationInvocation(aggregationCaptor.getValue(), targetTypeCaptor.getValue(), result); + } + + private StringBasedAggregation createAggregationForMethod(String name, Class<?>... parameters) { + + Method method = ClassUtils.getMethod(SampleRepository.class, name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), + factory, converter.getMappingContext()); + return new StringBasedAggregation(queryMethod, operations, PARSER, QueryMethodEvaluationContextProvider.DEFAULT); + } + + private List<Document> pipelineOf(AggregationInvocation invocation) { + + AggregationOperationContext context = new TypeBasedAggregationOperationContext( + invocation.aggregation.getInputType(), converter.getMappingContext(), new QueryMapper(converter)); + + return invocation.aggregation.toPipeline(context); + } + + private Class<?> inputTypeOf(AggregationInvocation invocation) { + return invocation.aggregation.getInputType(); + } + + @Nullable + private Collation collationOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getCollation().orElse(null) + : null; + } + + @Nullable + private Object hintOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getHintObject().orElse(null) + : null; + } + + private Boolean skipResultsOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().isSkipResults() + : false; + } + + @Nullable + private ReadPreference readPreferenceOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getReadPreference() + : null; + }
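+ + // These accessors null-guard the resolved AggregationOptions; the collation, hint and read preference they expose originate from the @Aggregation and @Hint annotations on the SampleRepository methods below.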
+ + private Class<?> targetTypeOf(AggregationInvocation invocation) { + return invocation.getTargetType(); + } + + private interface SampleRepository extends Repository<Person, Long> { + + @Meta(cursorBatchSize = 42, comment = "expensive-aggregation", allowDiskUse = true, maxExecutionTimeMs = 100) + @Aggregation({ RAW_GROUP_BY_LASTNAME_STRING, RAW_SORT_STRING }) + PersonAggregate plainStringAggregation(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + PersonAggregate plainStringAggregation(Sort sort); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + PersonAggregate returnSingleEntity(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + List<PersonAggregate> returnCollection(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + AggregationResults<PersonAggregate> returnRawResultType(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + AggregationResults<PersonAggregate> returnRawResults(); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER) + PersonAggregate parameterReplacementAggregation(String attribute); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER) + PersonAggregate spelParameterReplacementAggregation(String arg0); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER }) + PersonAggregate multiOperationPipeline(String arg0); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + PersonAggregate aggregateWithCollation(); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + PersonAggregate aggregateWithCollation(Collation collation); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Slice<PersonAggregate> aggregationWithSliceReturnType(Pageable page); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Stream<PersonAggregate> aggregationWithStreamReturnType(Pageable page); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + String simpleReturnType(); + + @Hint("idx") + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + String withHint(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + List<PersonAggregate> outDoNotSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + void outSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }, readPreference = "secondaryPreferred") + void withReadPreference(); + } + + private interface UnsupportedRepository extends Repository<Person, Long> { + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Page<PersonAggregate> pageIsUnsupported(Pageable page); + } + + static class PersonAggregate { + + } + + private static final class AggregationInvocation { + + private final TypedAggregation<?> aggregation; + private final Class<?> targetType; + private final Object result; + + public AggregationInvocation(TypedAggregation<?> aggregation, Class<?> targetType, Object result) { + this.aggregation = aggregation; + this.targetType = targetType; + this.result = result; + } + + public TypedAggregation<?> getAggregation() { + return this.aggregation; + } + + public Class<?> getTargetType() { + return this.targetType; + } + + public Object getResult() { + return this.result; + } + + public String toString() { + return "StringBasedAggregationUnitTests.AggregationInvocation(aggregation=" + this.getAggregation() + + ", targetType=" + this.getTargetType() + ", result=" + this.getResult() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java index 41e0c6ef8c..51f210f024 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,40 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; - -import javax.xml.bind.DatatypeConverter; - -import org.bson.BSON; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import java.util.UUID; + +import org.bson.BsonBinarySubType; +import org.bson.Document; +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; -import org.springframework.data.mongodb.core.DBObjectTestUtils; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.core.env.MapPropertySource; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.core.DbCallback; +import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper; @@ -47,246 +63,225 @@ import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; -import org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.EvaluationException; import org.springframework.expression.spel.standard.SpelExpressionParser; -import com.mongodb.BasicDBObject; -import 
com.mongodb.BasicDBObjectBuilder; -import com.mongodb.DBObject; -import com.mongodb.DBRef; +import com.mongodb.MongoClientSettings; +import com.mongodb.reactivestreams.client.MongoClients; /** * Unit tests for {@link StringBasedMongoQuery}. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class StringBasedMongoQueryUnitTests { - SpelExpressionParser PARSER = new SpelExpressionParser(); + ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); + StandardEnvironment environment = new StandardEnvironment(); @Mock MongoOperations operations; + @Mock ExecutableFind findOperation; @Mock DbRefResolver factory; MongoConverter converter; - @Before - public void setUp() { + Map properties = new HashMap<>(); + MapPropertySource propertySource = new MapPropertySource("mock", properties); - when(operations.getConverter()).thenReturn(converter); + @BeforeEach + public void setUp() { this.converter = new MappingMongoConverter(factory, new MongoMappingContext()); + environment.getPropertySources().addFirst(propertySource); + + doReturn(findOperation).when(operations).query(any()); + doReturn(MongoClientSettings.getDefaultCodecRegistry()).when(operations).execute(any()); } @Test - public void bindsSimplePropertyCorrectly() throws Exception { + public void bindsSimplePropertyCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class); - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews"); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test - public void bindsComplexPropertyCorrectly() throws Exception { + public void bindsComplexPropertyCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("findByAddress", Address.class); Address address = new Address("Foo", "0123", "Bar"); - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, address); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, address); - DBObject dbObject = new BasicDBObject(); - converter.write(address, dbObject); - dbObject.removeField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); + Document document = new Document(); + converter.write(address, document); + document.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); - BasicDBObject queryObject = new BasicDBObject("address", dbObject); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + Document queryObject = new Document("address", document); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); } 
@Test - public void bindsMultipleParametersCorrectly() throws Exception { + public void bindsMultipleParametersCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAndAddress", String.class, Address.class); Address address = new Address("Foo", "0123", "Bar"); - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews", address); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews", address); - DBObject addressDbObject = new BasicDBObject(); - converter.write(address, addressDbObject); - addressDbObject.removeField(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); + Document addressDocument = new Document(); + converter.write(address, addressDocument); + addressDocument.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); - DBObject reference = new BasicDBObject("address", addressDbObject); - reference.put("lastname", "Matthews"); + Document reference = new Document("lastname", "Matthews"); + reference.append("address", addressDocument); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); - assertThat(query.getQueryObject(), is(reference)); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.toJson()); } @Test - public void bindsNullParametersCorrectly() throws Exception { + public void bindsNullParametersCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("findByAddress", Address.class); ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { null }); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject().containsField("address"), is(true)); - assertThat(query.getQueryObject().get("address"), is(nullValue())); + assertThat(query.getQueryObject().containsKey("address")).isTrue(); + assertThat(query.getQueryObject().get("address")).isNull(); } - /** - * @see DATAMONGO-821 - */ - @Test - public void bindsDbrefCorrectly() throws Exception { + @Test // DATAMONGO-821 + public void bindsDbrefCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("findByHavingSizeFansNotZero"); - ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] {}); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new BasicQuery("{ fans : { $not : { $size : 0 } } }").getQueryObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{ fans : { $not : { $size : 0 } } }").getQueryObject()); } - /** - * @see DATAMONGO-566 - */ - @Test - public void constructsDeleteQueryCorrectly() throws Exception { + @Test // DATAMONGO-566 + public void constructsDeleteQueryCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("removeByLastname", String.class); - assertThat(mongoQuery.isDeleteQuery(), is(true)); + assertThat(mongoQuery.isDeleteQuery()).isTrue(); } - /** - * @see DATAMONGO-566 - */ - @Test(expected = IllegalArgumentException.class) - public void preventsDeleteAndCountFlagAtTheSameTime() throws Exception { - createQueryForMethod("invalidMethod", String.class); + @Test // DATAMONGO-566 + public void preventsDeleteAndCountFlagAtTheSameTime() { + assertThatIllegalArgumentException().isThrownBy(() -> 
createQueryForMethod("invalidMethod", String.class)); } - /** - * @see DATAMONGO-420 - */ - @Test - public void shouldSupportFindByParameterizedCriteriaAndFields() throws Exception { + @Test // DATAMONGO-420 + public void shouldSupportFindByParameterizedCriteriaAndFields() { - ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { - new BasicDBObject("firstname", "first").append("lastname", "last"), Collections.singletonMap("lastname", 1) }); - StringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields", DBObject.class, - Map.class); + ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(converter, + StubParameterAccessor.getAccessor(converter, // + new Document("firstname", "first").append("lastname", "last"), // + Collections.singletonMap("lastname", 1))); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields", Document.class, + Map.class); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject())); - assertThat(query.getFieldsObject(), is(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject()); + assertThat(query.getFieldsObject()).isEqualTo(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject()); } - /** - * @see DATAMONGO-420 - */ - @Test - public void shouldSupportRespectExistingQuotingInFindByTitleBeginsWithExplicitQuoting() throws Exception { + @Test // DATAMONGO-420 + public void shouldSupportRespectExistingQuotingInFindByTitleBeginsWithExplicitQuoting() { - ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { "fun" }); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "fun"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByTitleBeginsWithExplicitQuoting", String.class); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new BasicQuery("{title: {$regex: '^fun', $options: 'i'}}").getQueryObject())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(new BasicQuery("{title: {$regex: '^fun', $options: 'i'}}").getQueryObject().toJson()); } - /** - * @see DATAMONGO-995, DATAMONGO-420 - */ - @Test - public void shouldParseQueryWithParametersInExpression() throws Exception { + @Test // DATAMONGO-995, DATAMONGO-420 + public void shouldParseQueryWithParametersInExpression() { - ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { 1, 2, 3, 4 }); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 1, 2, 3, 4); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithParametersInExpression", int.class, int.class, int.class, int.class); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") - .getQueryObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") + .getQueryObject()); } - /** - * @see 
DATAMONGO-995, DATAMONGO-420 - */ - @Test - public void bindsSimplePropertyAlreadyQuotedCorrectly() throws Exception { + @Test // DATAMONGO-995, DATAMONGO-420 + public void bindsSimplePropertyAlreadyQuotedCorrectly() { - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } - /** - * @see DATAMONGO-995, DATAMONGO-420 - */ - @Test - public void bindsSimplePropertyAlreadyQuotedWithRegexCorrectly() throws Exception { + @Test // DATAMONGO-995, DATAMONGO-420 + public void bindsSimplePropertyAlreadyQuotedWithRegexCorrectly() { - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "^Mat.*"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "^Mat.*"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : '^Mat.*'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } - /** - * @see DATAMONGO-995, DATAMONGO-420 - */ - @Test - public void bindsSimplePropertyWithRegexCorrectly() throws Exception { + @Test // DATAMONGO-995, DATAMONGO-420 + public void bindsSimplePropertyWithRegexCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class); - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "^Mat.*"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "^Mat.*"); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : '^Mat.*'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } - /** - * @see DATAMONGO-1070 - */ - @Test - public void parsesDbRefDeclarationsCorrectly() throws Exception { + @Test // DATAMONGO-1070 + public void parsesDbRefDeclarationsCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithManuallyDefinedDbRef", String.class); ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "myid"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor); - DBRef dbRef = DBObjectTestUtils.getTypedValue(query.getQueryObject(), "reference", DBRef.class); - assertThat(dbRef.getId(), is((Object) "myid")); - 
assertThat(dbRef.getCollectionName(), is("reference")); + Document dbRef = DocumentTestUtils.getTypedValue(query.getQueryObject(), "reference", Document.class); + assertThat(dbRef).isEqualTo(new Document("$ref", "reference").append("$id", "myid")); } - /** - * @see DATAMONGO-1072 - */ - @Test - public void shouldParseJsonKeyReplacementCorrectly() throws Exception { + @Test // DATAMONGO-1072 + public void shouldParseJsonKeyReplacementCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithPlaceholderInKeyOfJsonStructure", String.class, String.class); @@ -294,81 +289,478 @@ public void shouldParseJsonKeyReplacementCorrectly() throws Exception { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor); - assertThat(query.getQueryObject(), is(new BasicDBObjectBuilder().add("key", "value").get())); + assertThat(query.getQueryObject()).isEqualTo(new Document().append("key", "value")); } - /** - * @see DATAMONGO-990 - */ - @Test - public void shouldSupportExpressionsInCustomQueries() throws Exception { + @Test // DATAMONGO-990 + public void shouldSupportExpressionsInCustomQueries() { - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpression", String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } - /** - * @see DATAMONGO-1244 - */ - @Test - public void shouldSupportExpressionsInCustomQueriesWithNestedObject() throws Exception { + @Test // GH-3050 + public void shouldSupportExpressionsAndPropertiesInCustomQueries() { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews"); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndProperty", String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : 'Matthews', 'firstname' : 'some-default'}"); - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldSupportPropertiesInCustomQueries() { + + properties.put("foo", "bar"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithProperty"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'bar'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldFailWhenPropertiesWithNoDefaultValueInCustomQueries() { + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter); + StringBasedMongoQuery mongoQuery 
= createQueryForMethod("findByQueryWithProperty"); + + assertThatThrownBy(() -> mongoQuery.createQuery(accessor)) + .isInstanceOf(EvaluationException.class) + .hasMessageContaining("Could not resolve placeholder 'foo' in value \"${foo}\""); + } + + @Test // DATAMONGO-1244 + public void shouldSupportExpressionsInCustomQueriesWithNestedObject() { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, true, "param1"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject", boolean.class, String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } - /** - * @see DATAMONGO-1244 - */ - @Test - public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() throws Exception { + @Test // DATAMONGO-1244 + public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() { - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndMultipleNestedObjects", boolean.class, String.class, String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( "{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } - /** - * @see DATAMONGO-1290 - */ - @Test - public void shouldSupportNonQuotedBinaryDataReplacement() throws Exception { + @Test // DATAMONGO-1290 + public void shouldSupportNonQuotedBinaryDataReplacement() { - byte[] binaryData = "Matthews".getBytes("UTF-8"); - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, binaryData); + byte[] binaryData = "Matthews".getBytes(StandardCharsets.UTF_8); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, (Object) binaryData); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinary", byte[].class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { '$binary' : '" - + DatatypeConverter.printBase64Binary(binaryData) + "', '$type' : " + BSON.B_GENERAL + "}}"); + + Base64.getEncoder().encodeToString(binaryData) + "', '$type' : '" + BsonBinarySubType.BINARY.getValue() + "'}}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-2029 + public void shouldSupportNonQuotedBinaryCollectionDataReplacement() { + + byte[] binaryData = 
"Matthews".getBytes(StandardCharsets.UTF_8); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + (Object) Collections.singletonList(binaryData)); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinaryIn", List.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { $in: [{'$binary' : '" + + Base64.getEncoder().encodeToString(binaryData) + "', '$type' : '" + BsonBinarySubType.BINARY.getValue() + "'}] }}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1911 + public void shouldSupportNonQuotedUUIDReplacement() { + + UUID uuid = UUID.fromString("864de43b-e3ea-f1e4-3663-fb8240b659b9"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, (Object) uuid); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsUUID", UUID.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { $binary:\"5PHq4zvkTYa5WbZAgvtjNg==\", $type: \"03\"}}"); + + // CodecRegistry registry = + // MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.JAVA_LEGACY).build().getCodecRegistry(); + + // TODO: use OverridableUuidRepresentationCodecRegistry instead to save resources + CodecRegistry registry = MongoClients + .create(MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.JAVA_LEGACY).build()) + .getDatabase("database").getCodecRegistry(); + + // OverridableUuidRepresentationCodecRegistry + + assertThat(query.getQueryObject().toJson(registry.get(Document.class))) + .isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-2029 + public void shouldSupportNonQuotedUUIDCollectionReplacement() { + + UUID uuid1 = UUID.fromString("864de43b-e3ea-f1e4-3663-fb8240b659b9"); + UUID uuid2 = UUID.fromString("864de43b-cafe-f1e4-3663-fb8240b659b9"); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + (Object) Arrays.asList(uuid1, uuid2)); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsUUIDIn", List.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { $in: [{ $binary : \"5PHq4zvkTYa5WbZAgvtjNg==\", $type : \"03\" }, { $binary : \"5PH+yjvkTYa5WbZAgvtjNg==\", $type : \"03\" }]}}"); + + // TODO: use OverridableUuidRepresentationCodecRegistry instead to save resources + CodecRegistry registry = MongoClients + .create(MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.JAVA_LEGACY).build()) + .getDatabase("database").getCodecRegistry(); + assertThat(query.getQueryObject().toJson(registry.get(Document.class))) + .isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-2427 + public void shouldSupportNonQuotedUUIDCollectionReplacementWhenUsingNonLegacyUUIDCodec() { + + // TODO: use OverridableUuidRepresentationCodecRegistry instead to save resources + CodecRegistry registry = MongoClients + .create(MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.STANDARD).build()) + .getDatabase("database").getCodecRegistry(); + 
when(operations.execute(any(DbCallback.class))).thenReturn(registry); + + UUID uuid1 = UUID.fromString("864de43b-e3ea-f1e4-3663-fb8240b659b9"); + UUID uuid2 = UUID.fromString("864de43b-cafe-f1e4-3663-fb8240b659b9"); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + (Object) Arrays.asList(uuid1, uuid2)); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsUUIDIn", List.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { $in: [{ $binary : \"hk3kO+Pq8eQ2Y/uCQLZZuQ==\", $type : \"04\" }, { $binary : \"hk3kO8r+8eQ2Y/uCQLZZuQ==\", $type : \"04\" }]}}"); + + assertThat(query.getQueryObject().toJson(registry.get(Document.class))) + .isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1911 + public void shouldSupportQuotedUUIDReplacement() { + + UUID uuid = UUID.randomUUID(); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, (Object) uuid); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsStringUUID", UUID.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : '" + uuid.toString() + "'}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1454 + public void shouldSupportExistsProjection() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("existsByLastname", String.class); + + assertThat(mongoQuery.isExistsQuery()).isTrue(); + } + + @Test // DATAMONGO-1565 + public void bindsPropertyReferenceMultipleTimesCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByAgeQuotedAndUnquoted", Integer.TYPE); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 3); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + List or = new ArrayList<>(); + or.add(new Document("age", 3)); + or.add(new Document("displayAge", "3")); + Document queryObject = new Document("$or", or); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // DATAMONGO-1565 + public void shouldIgnorePlaceholderPatternInReplacementValue() { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "argWith?1andText", + "nothing-special"); + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByStringWithWildcardChar", String.class, String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()) + .isEqualTo(Document.parse("{ \"arg0\" : \"argWith?1andText\" , \"arg1\" : \"nothing-special\"}")); + } + + @Test // DATAMONGO-1565 + public void shouldQuoteStringReplacementCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews', password: 'foo"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()) + .isNotEqualTo(new Document().append("lastname", 
"Matthews").append("password", "foo")); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "Matthews', password: 'foo")); + } + + @Test // DATAMONGO-1565 + public void shouldQuoteStringReplacementContainingQuotesCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews\", password: \"foo"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()) + .isNotEqualTo(new Document().append("lastname", "Matthews").append("password", "foo")); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "Matthews\", password: \"foo")); + } + + @Test // DATAMONGO-1565 + public void shouldQuoteStringReplacementWithQuotationsCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + "\"Dave Matthews\", password: 'foo"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "\"Dave Matthews\", password: 'foo")); + } + + @Test // DATAMONGO-1565, DATAMONGO-1575 + public void shouldQuoteComplexQueryStringCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "{ $ne : \"calamity\" }"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "{ $ne : \"calamity\" }")); + } + + @Test // DATAMONGO-1565, DATAMONGO-1575 + public void shouldQuotationInQuotedComplexQueryString() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + "{ $ne : \"\\\"calamity\\\"\" }"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "{ $ne : \"\\\"calamity\\\"\" }")); + } + + @Test // DATAMONGO-1575, DATAMONGO-1770 + public void shouldTakeBsonParameterAsIs() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByWithBsonArgument", Document.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + new Document("$regex", "^calamity$")); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", new Document("$regex", "^calamity$"))); + } + + @Test // DATAMONGO-1575, DATAMONGO-1770 + public void shouldReplaceParametersInInQuotedExpressionOfNestedQueryOperator() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameRegex", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", new Document("$regex", "^(calamity)"))); + } + + @Test // DATAMONGO-1603 + public void 
shouldAllowReuseOfPlaceholderWithinQuery() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByReusingPlaceholdersMultipleTimes", String.class, + String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()) + .isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamity")); + } + + @Test // DATAMONGO-1603 + public void shouldAllowReuseOfQuotedPlaceholderWithinQuery() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByReusingPlaceholdersMultipleTimesWhenQuoted", + String.class, String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()) + .isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamity")); + } + + @Test // DATAMONGO-1603 + public void shouldAllowReuseOfQuotedPlaceholderWithinQueryAndIncludeSuffixCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod( + "findByReusingPlaceholdersMultipleTimesWhenQuotedAndSomeStuffAppended", String.class, String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()) + .isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamitys")); + } + + @Test // DATAMONGO-1603 + public void shouldAllowQuotedParameterWithSuffixAppended() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByWhenQuotedAndSomeStuffAppended", String.class, + String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalias")); + } + + @Test // DATAMONGO-1603 + public void shouldCaptureReplacementWithComplexSuffixCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByMultiRegex", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject()).isEqualTo(Document.parse( + "{ \"$or\" : [ { \"firstname\" : { \"$regex\" : \".*calamity.*\" , \"$options\" : \"i\"}} , { \"lastname\" : { \"$regex\" : \".*calamityxyz.*\" , \"$options\" : \"i\"}}]}")); + } + + @Test // DATAMONGO-1603 + public void shouldAllowPlaceholderReuseInQuotedValue() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameRegex", String.class, String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject()) + .isEqualTo(Document.parse("{ 'lastname' : { '$regex' : '^(calamity|John regalia|regalia)'} }")); + } + + @Test // DATAMONGO-1605 + public void 
findUsingSpelShouldRetainParameterType() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByUsingSpel", Object.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 100.01D); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", 100.01D)); + } + + @Test // DATAMONGO-1605 + public void findUsingSpelShouldRetainNullValues() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByUsingSpel", Object.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { null }); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", null)); } - private StringBasedMongoQuery createQueryForMethod(String name, Class... parameters) throws Exception { + @Test // DATAMONGO-2119 + public void spelShouldIgnoreJsonParseErrorsForRegex() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByPersonLastnameRegex", Person.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + new Person("Molly", "Chandler")); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject().toJson()) + .isEqualTo(new BasicQuery("{lastname: {$regex: 'Chandler'}}").getQueryObject().toJson()); + } + + @Test // DATAMONGO-2149 + public void shouldParseFieldsProjectionWithSliceCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findWithSliceInProjection", String.class, int.class, + int.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Bruce Banner", 0, 5); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getFieldsObject()).isEqualTo(Document.parse("{ \"fans\" : { \"$slice\" : [0, 5] } }")); + } + + @Test // DATAMONGO-1593 + public void shouldRenderObjectIdParameterCorrectly() { + + ObjectId id = new ObjectId(); + + StringBasedMongoQuery mongoQuery = createQueryForMethod("singeObjectIdArgInQueryString", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, id.toString()); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", id)); + } + + @Test // DATAMONGO-1593 + public void shouldRenderMultipleObjectIdParametersCorrectly() { + + ObjectId id = new ObjectId(); + ObjectId readUsersId = new ObjectId(); + + StringBasedMongoQuery mongoQuery = createQueryForMethod("multipleObjectIdArgsInQueryString", String.class, + String.class); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, id.toString(), + readUsersId.toString()); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - Method method = SampleRepository.class.getMethod(name, parameters); - ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); - MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), - factory, converter.getMappingContext()); - return new StringBasedMongoQuery(queryMethod, operations, PARSER, DefaultEvaluationContextProvider.INSTANCE); + 
assertThat(query.getQueryObject().get("arg0")).isEqualTo(id); + assertThat(query.getQueryObject().get("$or")).isInstanceOf(List.class); + assertThat(DocumentTestUtils.getAsDBList(query.getQueryObject(), "$or").get(0)) + .isEqualTo(new Document("arg1.value0", readUsersId)); + assertThat(DocumentTestUtils.getAsDBList(query.getQueryObject(), "$or").get(1)) + .isEqualTo(new Document("arg1.value1", readUsersId)); + } + + private StringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) { + + try { + + Method method = SampleRepository.class.getMethod(name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), + factory, converter.getMappingContext()); + QueryMethodValueEvaluationContextAccessor accessor = new QueryMethodValueEvaluationContextAccessor( + environment, Collections.emptySet()); + return new StringBasedMongoQuery(queryMethod, operations, new ValueExpressionDelegate(accessor, PARSER)); + + } catch (Exception e) { + throw new IllegalArgumentException(e.getMessage(), e); + } } private interface SampleRepository extends Repository<Person, Long> { @@ -379,9 +771,27 @@ private interface SampleRepository extends Repository<Person, Long> { @Query("{ 'lastname' : ?0 }") Person findByLastnameAsBinary(byte[] lastname); + @Query("{ 'lastname' : { $in: ?0} }") + Person findByLastnameAsBinaryIn(List<byte[]> lastname); + + @Query("{ 'lastname' : ?0 }") + Person findByLastnameAsUUID(UUID lastname); + + @Query("{ 'lastname' : { $in : ?0} }") + Person findByLastnameAsUUIDIn(List<UUID> lastname); + + @Query("{ 'lastname' : '?0' }") + Person findByLastnameAsStringUUID(UUID lastname); + @Query("{ 'lastname' : '?0' }") Person findByLastnameQuoted(String lastname); + @Query("{ 'lastname' : { '$regex' : '^(?0)'} }") + Person findByLastnameRegex(String lastname); + + @Query("{'$or' : [{'firstname': {'$regex': '.*?0.*', '$options': 'i'}}, {'lastname' : {'$regex': '.*?0xyz.*', '$options': 'i'}} ]}") + Person findByMultiRegex(String arg0); + @Query("{ 'address' : ?0 }") Person findByAddress(Address address); @@ -398,13 +808,13 @@ private interface SampleRepository extends Repository<Person, Long> { void invalidMethod(String lastname); @Query(value = "?0", fields = "?1") - DBObject findByParameterizedCriteriaAndFields(DBObject criteria, Map<String, Integer> fields); + Document findByParameterizedCriteriaAndFields(Document criteria, Map<String, Integer> fields); @Query("{'title': { $regex : '^?0', $options : 'i'}}") - List findByTitleBeginsWithExplicitQuoting(String title); + List findByTitleBeginsWithExplicitQuoting(String title); @Query("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}") - List findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4); + List findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4); @Query("{ 'reference' : { $ref : 'reference', $id : ?0 }}") Object methodWithManuallyDefinedDbRef(String id); @@ -415,10 +825,60 @@ private interface SampleRepository extends Repository<Person, Long> { @Query("{'lastname': ?#{[0]} }") List<Person> findByQueryWithExpression(String param0); + @Query("{'lastname': ?#{[0]}, 'firstname': ?${absent-property:some-default} }") + List<Person> findByQueryWithExpressionAndProperty(String param0); + + @Query("{'lastname': ?${foo} }") + List<Person> findByQueryWithProperty(); + @Query("{'id':?#{ [0] ? 
{ $exists :true} : [1] }, 'foo':42, 'bar': ?#{ [0] ? { $exists :false} : [1] }}") List<Person> findByQueryWithExpressionAndMultipleNestedObjects(boolean param0, String param1, String param2); + + @Query(value = "{ $or : [{'age' : ?0 }, {'displayAge' : '?0'}] }") + boolean findByAgeQuotedAndUnquoted(int age); + + @Query(value = "{ 'lastname' : ?0 }", exists = true) + boolean existsByLastname(String lastname); + + @Query("{ 'arg0' : ?0, 'arg1' : ?1 }") + List<Person> findByStringWithWildcardChar(String arg0, String arg1); + + @Query("{ 'arg0' : ?0 }") + List<Person> findByWithBsonArgument(Document arg0); + + @Query("{ 'arg0' : ?0, 'arg1' : ?1, 'arg2' : ?0 }") + List<Person> findByReusingPlaceholdersMultipleTimes(String arg0, String arg1); + + @Query("{ 'arg0' : '?0', 'arg1' : ?1, 'arg2' : '?0' }") + List<Person> findByReusingPlaceholdersMultipleTimesWhenQuoted(String arg0, String arg1); + + @Query("{ 'arg0' : '?0', 'arg1' : ?1, 'arg2' : '?0s' }") + List<Person> findByReusingPlaceholdersMultipleTimesWhenQuotedAndSomeStuffAppended(String arg0, String arg1); + + @Query("{ 'arg0' : '?0', 'arg1' : '?1s' }") + List<Person> findByWhenQuotedAndSomeStuffAppended(String arg0, String arg1); + + @Query("{ 'lastname' : { '$regex' : '^(?0|John ?1|?1)'} }") + // use SpEL or some regex string; this is bad + Person findByLastnameRegex(String lastname, String alternative); + + @Query("{ arg0 : ?#{[0]} }") + List<Person> findByUsingSpel(Object arg0); + + @Query("{ 'lastname' : { '$regex' : ?#{[0].lastname} } }") + Person findByPersonLastnameRegex(Person key); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'fans': { '$slice': [ ?1, ?2 ] } }") + Person findWithSliceInProjection(String id, int skip, int limit); + + @Query("{ 'arg0' : { \"$oid\" : ?0} }") + List<Person> singeObjectIdArgInQueryString(String arg0); + + @Query("{ 'arg0' : { \"$oid\" : ?0} , '$or' : [ { 'arg1.value0' : { \"$oid\" : ?1 } }, { 'arg1.value1' : { \"$oid\" : ?1 } } ] }") + List<Person> multipleObjectIdArgsInQueryString(String arg0, String arg1); } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java index c13b27eb09..1927378e80 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2015 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +20,21 @@ import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.lang.Nullable; /** * Simple {@link ParameterAccessor} that returns the given parameters unfiltered. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont @@ -37,12 +42,13 @@ class StubParameterAccessor implements MongoParameterAccessor { private final Object[] values; - private Range<Distance> range = new Range<Distance>(null, null); + private Range<Distance> range = Range.unbounded(); + private @Nullable Collation collation; /** * Creates a new {@link ConvertingParameterAccessor} backed by a {@link StubParameterAccessor} simply returning the * given parameters converted but unfiltered. - * + * * @param converter * @param parameters * @return @@ -60,77 +66,57 @@ public StubParameterAccessor(Object... values) { if (value instanceof Range) { this.range = (Range<Distance>) value; } else if (value instanceof Distance) { - this.range = new Range<Distance>(null, (Distance) value); + this.range = Range.from(Bound. 
unbounded()).to(Bound.inclusive((Distance) value)); + } else if (value instanceof Collation) { + this.collation = Collation.class.cast(value); + } } } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getPageable() - */ + @Override + public ScrollPosition getScrollPosition() { + return null; + } + public Pageable getPageable() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int) - */ public Object getBindableValue(int index) { return values[index]; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#hasBindableNullValue() - */ public boolean hasBindableNullValue() { return false; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getSort() - */ public Sort getSort() { - return null; + return Sort.unsorted(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getDistanceRange() - */ @Override public Range<Distance> getDistanceRange() { return range; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#iterator() - */ public Iterator<Object> iterator() { return Arrays.asList(values).iterator(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getGeoNearLocation() - */ public Point getGeoNearLocation() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getFullText() - */ @Override public TextCriteria getFullText() { return null; } + @Override + public Collation getCollation() { + return this.collation; + } + /* (non-Javadoc) * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues() */ @@ -139,12 +125,13 @@ public Object[] getValues() { return this.values; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection() - */ @Override - public Class<?> getDynamicProjection() { + public Class<?> findDynamicProjection() { + return null; + } + + @Override + public UpdateDefinition getUpdate() { + return null; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/DefaultCrudMethodMetadataUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/DefaultCrudMethodMetadataUnitTests.java new file mode 100644 index 0000000000..85b487af65 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/DefaultCrudMethodMetadataUnitTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Optional; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadataPostProcessor.DefaultCrudMethodMetadata; +import org.springframework.data.repository.CrudRepository; +import org.springframework.util.ReflectionUtils; + +/** + * Unit tests for {@link DefaultCrudMethodMetadata}. + * + * @author Christoph Strobl + */ +class DefaultCrudMethodMetadataUnitTests { + + @Test // GH-4542 + void detectsReadPreferenceOnRepositoryInterface() { + + DefaultCrudMethodMetadata metadata = new DefaultCrudMethodMetadata(ReadPreferenceAnnotated.class, + ReflectionUtils.findMethod(ReadPreferenceAnnotated.class, "findAll")); + + assertThat(metadata.getReadPreference()).hasValue(com.mongodb.ReadPreference.primary()); + } + + @Test // GH-4542 + void favorsReadPreferenceOfAnnotatedMethod() { + + DefaultCrudMethodMetadata metadata = new DefaultCrudMethodMetadata(ReadPreferenceAnnotated.class, + ReflectionUtils.findMethod(ReadPreferenceAnnotated.class, "findById", Object.class)); + + assertThat(metadata.getReadPreference()).hasValue(com.mongodb.ReadPreference.secondary()); + } + + @ReadPreference("primary") + interface ReadPreferenceAnnotated extends CrudRepository<Person, String> { + + @Override + @ReadPreference("secondary") + Optional<Person> findById(String s); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java new file mode 100644 index 0000000000..10c513dda4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java @@ -0,0 +1,162 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.index.IndexOperationsProvider; +import org.springframework.data.mongodb.repository.query.MongoEntityMetadata; +import org.springframework.data.mongodb.repository.query.MongoQueryMethod; +import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; +import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.data.util.Streamable; + +/** + * Unit tests for {@link IndexEnsuringQueryCreationListener}. + * + * @author Oliver Gierke + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class IndexEnsuringQueryCreationListenerUnitTests { + + private IndexEnsuringQueryCreationListener listener; + + @Mock IndexOperationsProvider provider; + @Mock PartTree partTree; + @Mock PartTreeMongoQuery partTreeQuery; + @Mock MongoQueryMethod queryMethod; + @Mock IndexOperations indexOperations; + @Mock MongoEntityMetadata entityInformation; + + @BeforeEach + void setUp() { + + this.listener = new IndexEnsuringQueryCreationListener(provider); + + partTreeQuery = mock(PartTreeMongoQuery.class, Answers.RETURNS_MOCKS); + when(partTreeQuery.getTree()).thenReturn(partTree); + when(provider.indexOps(anyString(), any())).thenReturn(indexOperations); + when(queryMethod.getEntityInformation()).thenReturn(entityInformation); + when(entityInformation.getCollectionName()).thenReturn("persons"); + } + + @Test // DATAMONGO-1753 + void skipsQueryCreationForMethodWithoutPredicate() { + + when(partTree.hasPredicate()).thenReturn(false); + + listener.onCreation(partTreeQuery); + + verify(provider, times(0)).indexOps(any()); + } + + @Test // DATAMONGO-1854 + void usesCollationWhenPresentAndFixedValue() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(true); + when(queryMethod.getAnnotatedCollation()).thenReturn("en_US"); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new Document("collation", new Document("locale", "en_US"))); + } + + @Test // DATAMONGO-1854 + void usesCollationWhenPresentAndFixedDocumentValue() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + 
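+ // Editorial note, not part of the original change set: the annotated collation may be given
+ // either as a locale shorthand ("en_US", see the previous test) or as a full collation
+ // document (stubbed below); both forms are expected to resolve to the same index option,
+ // { collation: { locale: 'en_US' } }.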
when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(true); + when(queryMethod.getAnnotatedCollation()).thenReturn("{ 'locale' : 'en_US' }"); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new Document("collation", new Document("locale", "en_US"))); + } + + @Test // DATAMONGO-1854 + void skipsCollationWhenPresentButDynamic() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(true); + when(queryMethod.getAnnotatedCollation()).thenReturn("{ 'locale' : '?0' }"); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEmpty(); + } + + @Test // DATAMONGO-1854 + void skipsCollationWhenNotPresent() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(false); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEmpty(); + } + + interface SampleRepository { + + Object findAllBy(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java index 0d0365e557..dcfd57b018 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2013 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,16 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.convert.MongoConverter; @@ -34,10 +34,10 @@ /** * Unit tests for {@link MongoRepositoryFactoryBean}. - * + * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class MongoRepositoryFactoryBeanUnitTests { @Mock MongoOperations operations; @@ -48,20 +48,20 @@ public class MongoRepositoryFactoryBeanUnitTests { @SuppressWarnings("rawtypes") public void addsIndexEnsuringQueryCreationListenerIfConfigured() { - MongoRepositoryFactoryBean factory = new MongoRepositoryFactoryBean(); + MongoRepositoryFactoryBean factory = new MongoRepositoryFactoryBean(ContactRepository.class); factory.setCreateIndexesForQueryMethods(true); List listeners = getListenersFromFactory(factory); - assertThat(listeners.isEmpty(), is(false)); - assertThat(listeners, hasItem(instanceOf(IndexEnsuringQueryCreationListener.class))); + assertThat(listeners.isEmpty()).isFalse(); + assertThat(listeners.stream().filter(IndexEnsuringQueryCreationListener.class::isInstance)).isNotEmpty(); } @Test @SuppressWarnings("rawtypes") public void doesNotAddIndexEnsuringQueryCreationListenerByDefault() { - List listeners = getListenersFromFactory(new MongoRepositoryFactoryBean()); - assertThat(listeners.size(), is(1)); + List listeners = getListenersFromFactory(new MongoRepositoryFactoryBean(ContactRepository.class)); + assertThat(listeners.size()).isEqualTo(1); } @SuppressWarnings({ "unchecked", "rawtypes" }) @@ -72,7 +72,6 @@ private List getListenersFromFactory(MongoRepositoryFactoryBean factoryB factoryBean.setLazyInit(true); factoryBean.setMongoOperations(operations); - factoryBean.setRepositoryInterface(ContactRepository.class); factoryBean.afterPropertiesSet(); RepositoryFactorySupport factory = factoryBean.createRepositoryFactory(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java index be030e07ad..c40f24dacb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,82 +15,103 @@
  */
 package org.springframework.data.mongodb.repository.support;
 
-import static org.hamcrest.CoreMatchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;
 import static org.mockito.Mockito.*;
 
 import java.io.Serializable;
+import java.util.Optional;
+import java.util.Set;
 
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.mockito.junit.jupiter.MockitoSettings;
+import org.mockito.quality.Strictness;
+
 import org.springframework.data.mapping.context.MappingContext;
+import org.springframework.data.mongodb.core.MongoOperations;
 import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
 import org.springframework.data.mongodb.core.convert.MongoConverter;
-import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
+import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.repository.Person;
+import org.springframework.data.mongodb.repository.ReadPreference;
 import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
+import org.springframework.data.repository.ListCrudRepository;
 import org.springframework.data.repository.Repository;
 
 /**
  * Unit test for {@link MongoRepositoryFactory}.
- * 
+ *
  * @author Oliver Gierke
+ * @author Mark Paluch
+ * @author Christoph Strobl
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
+@MockitoSettings(strictness = Strictness.LENIENT)
 public class MongoRepositoryFactoryUnitTests {
 
-	@Mock
-	MongoTemplate template;
-
-	@Mock
-	MongoConverter converter;
+	@Mock MongoOperations template;
 
-	@Mock
-	@SuppressWarnings("rawtypes")
-	MappingContext mappingContext;
+	MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext());
 
-	@Mock
-	@SuppressWarnings("rawtypes")
-	MongoPersistentEntity entity;
-
-	@Before
-	@SuppressWarnings("unchecked")
+	@BeforeEach
 	public void setUp() {
 		when(template.getConverter()).thenReturn(converter);
-		when(converter.getMappingContext()).thenReturn(mappingContext);
 	}
 
 	@Test
-	@SuppressWarnings("unchecked")
 	public void usesMappingMongoEntityInformationIfMappingContextSet() {
 
-		when(mappingContext.getPersistentEntity(Person.class)).thenReturn(entity);
-		when(entity.getType()).thenReturn(Person.class);
-
 		MongoRepositoryFactory factory = new MongoRepositoryFactory(template);
 		MongoEntityInformation<Person, Serializable> entityInformation = factory.getEntityInformation(Person.class);
-		assertTrue(entityInformation instanceof MappingMongoEntityInformation);
+		assertThat(entityInformation instanceof MappingMongoEntityInformation).isTrue();
 	}
 
-	/**
-	 * @see DATAMONGO-385
-	 */
-	@Test
-	@SuppressWarnings("unchecked")
+	@Test // DATAMONGO-385
 	public void createsRepositoryWithIdTypeLong() {
 
-		when(mappingContext.getPersistentEntity(Person.class)).thenReturn(entity);
-		when(entity.getType()).thenReturn(Person.class);
+		MongoRepositoryFactory factory = new MongoRepositoryFactory(template);
+		MyPersonRepository repository = factory.getRepository(MyPersonRepository.class);
+		assertThat(repository).isNotNull();
+	}
+
+	@Test // GH-2971
+	void considersCrudMethodMetadata() {
 
 		MongoRepositoryFactory factory = new MongoRepositoryFactory(template);
 		MyPersonRepository repository = factory.getRepository(MyPersonRepository.class);
-		assertThat(repository, is(notNullValue()));
+		repository.findById(42L);
+
+		ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
+		verify(template).findOne(captor.capture(), eq(Person.class), eq("person"));
+
+		Query value = captor.getValue();
+		assertThat(value.getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondary());
+	}
+
+	@Test // GH-2971
+	void ignoresCrudMethodMetadataOnNonAnnotatedMethods() {
+
+		MongoRepositoryFactory factory = new MongoRepositoryFactory(template);
+		MyPersonRepository repository = factory.getRepository(MyPersonRepository.class);
+		repository.findAllById(Set.of(42L));
+
+		ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
+		verify(template).find(captor.capture(), eq(Person.class), eq("person"));
+
+		Query value = captor.getValue();
+		assertThat(value.getReadPreference()).isNull();
 	}
 
-	interface MyPersonRepository extends Repository<Person, Long> {
+	interface MyPersonRepository extends ListCrudRepository<Person, Long> {
 
+		@ReadPreference("secondary")
+		Optional<Person> findById(Long id);
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QueryDslMongoRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QueryDslMongoRepositoryIntegrationTests.java
deleted file mode 100644
index 419683ccc3..0000000000
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QueryDslMongoRepositoryIntegrationTests.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright 2015 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.repository.support;
-
-import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
-
-import java.util.Arrays;
-import java.util.List;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.data.domain.Sort;
-import org.springframework.data.domain.Sort.Direction;
-import org.springframework.data.mongodb.core.MongoOperations;
-import org.springframework.data.mongodb.repository.Person;
-import org.springframework.data.mongodb.repository.QPerson;
-import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
-
-import com.querydsl.core.types.Predicate;
-
-/**
- * Integration test for {@link QueryDslMongoRepository}.
- *
- * @author Thomas Darimont
- */
-@ContextConfiguration(
-		locations = "/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml")
-@RunWith(SpringJUnit4ClassRunner.class)
-public class QueryDslMongoRepositoryIntegrationTests {
-
-	@Autowired MongoOperations operations;
-	QueryDslMongoRepository<Person, String> repository;
-
-	Person dave, oliver, carter;
-	QPerson person;
-
-	@Before
-	public void setup() {
-
-		MongoRepositoryFactory factory = new MongoRepositoryFactory(operations);
-		MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
-		repository = new QueryDslMongoRepository<Person, String>(entityInformation, operations);
-
-		operations.dropCollection(Person.class);
-
-		dave = new Person("Dave", "Matthews", 42);
-		oliver = new Person("Oliver August", "Matthews", 4);
-		carter = new Person("Carter", "Beauford", 49);
-
-		person = new QPerson("person");
-
-		repository.save(Arrays.asList(oliver, dave, carter));
-	}
-
-	/**
-	 * @see DATAMONGO-1146
-	 */
-	@Test
-	public void shouldSupportExistsWithPredicate() throws Exception {
-
-		assertThat(repository.exists(person.firstname.eq("Dave")), is(true));
-		assertThat(repository.exists(person.firstname.eq("Unknown")), is(false));
-		assertThat(repository.exists((Predicate) null), is(true));
-	}
-
-	/**
-	 * @see DATAMONGO-1167
-	 */
-	@Test
-	public void shouldSupportFindAllWithPredicateAndSort() {
-
-		List<Person> users = repository.findAll(person.lastname.isNotNull(), new Sort(Direction.ASC, "firstname"));
-
-		assertThat(users, hasSize(3));
-		assertThat(users.get(0).getFirstname(), is(carter.getFirstname()));
-		assertThat(users.get(2).getFirstname(), is(oliver.getFirstname()));
-		assertThat(users, hasItems(carter, dave, oliver));
-	}
-}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java
new file mode 100644
index 0000000000..7d9024e2fb
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java
@@ -0,0 +1,371 @@
+/*
+ * Copyright 2015-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.support;
+
+import static org.assertj.core.api.Assertions.*;
+
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.dao.IncorrectResultSizeDataAccessException;
+import org.springframework.dao.PermissionDeniedDataAccessException;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.domain.Sort;
+import org.springframework.data.domain.Sort.Direction;
+import org.springframework.data.mongodb.MongoDatabaseFactory;
+import org.springframework.data.mongodb.core.MongoOperations;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.data.mongodb.repository.Address;
+import org.springframework.data.mongodb.repository.Person;
+import org.springframework.data.mongodb.repository.QAddress;
+import org.springframework.data.mongodb.repository.QPerson;
+import org.springframework.data.mongodb.repository.QUser;
+import org.springframework.data.mongodb.repository.User;
+import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
+import org.springframework.data.repository.query.FluentQuery;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import com.mongodb.MongoException;
+import com.mongodb.client.MongoDatabase;
+
+/**
+ * Integration test for {@link QuerydslMongoPredicateExecutor}.
+ *
+ * @author Thomas Darimont
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ */
+@ContextConfiguration(
+		locations = "/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml")
+@RunWith(SpringRunner.class)
+public class QuerydslMongoPredicateExecutorIntegrationTests {
+
+	@Autowired MongoOperations operations;
+	@Autowired MongoDatabaseFactory dbFactory;
+
+	QuerydslMongoPredicateExecutor<Person> repository;
+
+	Person dave, oliver, carter;
+	QPerson person;
+
+	@Before
+	public void setup() {
+
+		MongoRepositoryFactory factory = new MongoRepositoryFactory(operations);
+		MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
+		repository = new QuerydslMongoPredicateExecutor<>(entityInformation, operations);
+
+		operations.dropCollection(Person.class);
+
+		dave = new Person("Dave", "Matthews", 42);
+		oliver = new Person("Oliver August", "Matthews", 4);
+		carter = new Person("Carter", "Beauford", 49);
+
+		person = new QPerson("person");
+
+		operations.insertAll(Arrays.asList(oliver, dave, carter));
+	}
+
+	@Test // DATAMONGO-1146
+	public void shouldSupportExistsWithPredicate() throws Exception {
+
+		assertThat(repository.exists(person.firstname.eq("Dave"))).isTrue();
+		assertThat(repository.exists(person.firstname.eq("Unknown"))).isFalse();
+	}
+
+	@Test // DATAMONGO-1167
+	public void shouldSupportFindAllWithPredicateAndSort() {
+
+		List<Person> users = repository.findAll(person.lastname.isNotNull(), Sort.by(Direction.ASC, "firstname"));
+
+		assertThat(users).containsExactly(carter, dave, oliver);
+	}
+
+	@Test // DATAMONGO-1690
+	public void findOneWithPredicateReturnsResultCorrectly() {
+		assertThat(repository.findOne(person.firstname.eq(dave.getFirstname()))).contains(dave);
+	}
+
+	@Test // DATAMONGO-1690
+	public void findOneWithPredicateReturnsOptionalEmptyWhenNoDataFound() {
+		assertThat(repository.findOne(person.firstname.eq("batman"))).isNotPresent();
+	}
+
+	@Test // DATAMONGO-1690
+	public void findOneWithPredicateThrowsExceptionForNonUniqueResults() {
+		assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class)
+				.isThrownBy(() -> repository.findOne(person.firstname.contains("e")));
+	}
+
+	@Test // DATAMONGO-1848
+	public void findUsingAndShouldWork() {
+
+		assertThat(repository.findAll(
+				person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname()))))
+						.containsExactly(dave);
+	}
+
+	@Test // GH-3751
+	public void findPage() {
+
+		assertThat(repository
+				.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())),
+						PageRequest.of(0, 10))
+				.getContent()).containsExactly(dave);
+
+		assertThat(repository
+				.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())),
+						Pageable.unpaged())
+				.getContent()).containsExactly(dave);
+	}
+
+	@Test // GH-4771
+	public void findUnpagedPage() {
+
+		assertThat(repository.findAll(person.lastname.isNotNull(), Pageable.unpaged(Sort.by("firstname"))))
+				.containsExactly(carter, dave, oliver);
+	}
+
+	@Test // DATAMONGO-362, DATAMONGO-1848
+	public void springDataMongodbQueryShouldAllowJoinOnDBref() {
+
+		User user1 = new User();
+		user1.setUsername("user-1");
+
+		User user2 = new User();
+		user2.setUsername("user-2");
+
+		User user3 = new User();
+		user3.setUsername("user-3");
+
+		operations.save(user1);
+		operations.save(user2);
+		operations.save(user3);
+
+		Person person1 = new Person("Max", "The Mighty");
+		person1.setCoworker(user1);
+
+		Person person2 = new Person("Jack", "The Ripper");
+		person2.setCoworker(user2);
+
+		Person person3 = new Person("Bob", "The Builder");
+		person3.setCoworker(user3);
+
+		operations.save(person1);
+		operations.save(person2);
+		operations.save(person3);
+
+		List<Person> result = new SpringDataMongodbQuery<>(operations, Person.class).where()
+				.join(person.coworker, QUser.user).on(QUser.user.username.eq("user-2")).fetch();
+
+		assertThat(result).containsExactly(person2);
+	}
+
+	@Test // DATAMONGO-362, DATAMONGO-1848
+	public void springDataMongodbQueryShouldReturnEmptyOnJoinWithNoResults() {
+
+		User user1 = new User();
+		user1.setUsername("user-1");
+
+		User user2 = new User();
+		user2.setUsername("user-2");
+
+		operations.save(user1);
+		operations.save(user2);
+
+		Person person1 = new Person("Max", "The Mighty");
+		person1.setCoworker(user1);
+
+		Person person2 = new Person("Jack", "The Ripper");
+		person2.setCoworker(user2);
+
+		operations.save(person1);
+		operations.save(person2);
+
+		List<Person> result = new SpringDataMongodbQuery<>(operations, Person.class).where()
+				.join(person.coworker, QUser.user).on(QUser.user.username.eq("does-not-exist")).fetch();
+
+		assertThat(result).isEmpty();
+	}
+
+	@Test // DATAMONGO-595, DATAMONGO-1848
+	public void springDataMongodbQueryShouldAllowElemMatchOnArrays() {
+
+		Address adr1 = new Address("Hauptplatz", "4020", "Linz");
+		Address adr2 = new Address("Stephansplatz", "1010", "Wien");
+		Address adr3 = new Address("Tower of London", "EC3N 4AB", "London");
+
+		Person person1 = new Person("Max", "The Mighty");
+		person1.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr1, adr2)));
+
+		Person person2 = new Person("Jack", "The Ripper");
+		person2.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr2, adr3)));
+
+		operations.save(person1);
+		operations.save(person2);
+
+		List<Person> result = new SpringDataMongodbQuery<>(operations, Person.class).where()
+				.anyEmbedded(person.shippingAddresses, QAddress.address).on(QAddress.address.city.eq("London")).fetch();
+
+		assertThat(result).containsExactly(person2);
+	}
+
+	@Test(expected = PermissionDeniedDataAccessException.class)
+	// DATAMONGO-1434, DATAMONGO-1848
+	public void translatesExceptionsCorrectly() {
+
+		MongoOperations ops = new MongoTemplate(dbFactory) {
+
+			@Override
+			protected MongoDatabase doGetDatabase() {
+				throw new MongoException(18, "Authentication Failed");
+			}
+		};
+
+		MongoRepositoryFactory factory = new MongoRepositoryFactory(ops);
+		MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
+		repository = new QuerydslMongoPredicateExecutor<>(entityInformation, ops);
+
+		repository.findOne(person.firstname.contains("batman"));
+	}
+
+	@Test // GH-3757
+	public void findByShouldReturnFirstResult() {
+
+		Person result = repository.findBy(person.firstname.eq(oliver.getFirstname()),
+				FluentQuery.FetchableFluentQuery::oneValue);
+
+		assertThat(result).isEqualTo(oliver);
+	}
+
+	@Test // GH-3757
+	public void findByShouldReturnOneResult() {
+
+		Person result = repository.findBy(person.firstname.eq(oliver.getFirstname()),
+				FluentQuery.FetchableFluentQuery::oneValue);
+
+		assertThat(result).isEqualTo(oliver);
+
+		assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class).isThrownBy(
+				() -> repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.FetchableFluentQuery::one));
+	}
+
+	@Test // GH-3757
+	public void findByShouldReturnAll() {
+
+		List<Person> result = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				FluentQuery.FetchableFluentQuery::all);
+
+		assertThat(result).hasSize(2);
+	}
+
+	@Test // GH-3757
+	public void findByShouldApplySortAll() {
+
+		Person probe = new Person();
+		probe.setLastname(oliver.getLastname());
+
+		List<Person> result = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.sortBy(Sort.by("firstname")).all());
+		assertThat(result).containsSequence(dave, oliver);
+
+		result = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.sortBy(Sort.by(Sort.Direction.DESC, "firstname")).all());
+		assertThat(result).containsSequence(oliver, dave);
+	}
+
+	@Test // GH-3757
+	public void findByShouldApplyProjection() {
+
+		Person probe = new Person();
+		probe.setLastname(oliver.getLastname());
+
+		Person result = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.project("firstname").firstValue());
+
+		assertThat(result.getFirstname()).isNotNull();
+		assertThat(result.getLastname()).isNull();
+	}
+
+	@Test // GH-3757
+	public void findByShouldApplyPagination() {
+
+		Page<Person> first = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.page(PageRequest.of(0, 1, Sort.by("firstname"))));
+
+		assertThat(first.getTotalElements()).isEqualTo(2);
+		assertThat(first.getContent()).contains(dave);
+
+		Page<Person> next = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.page(PageRequest.of(1, 1, Sort.by("firstname"))));
+
+		assertThat(next.getTotalElements()).isEqualTo(2);
+		assertThat(next.getContent()).contains(oliver);
+	}
+
+	@Test // GH-4889
+	public void findByShouldApplySlice() {
+
+		Slice<Person> first = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname"))));
+
+		assertThat(first.hasNext()).isTrue();
+		assertThat(first.getContent()).contains(dave);
+
+		Slice<Person> next = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname"))));
+
+		assertThat(next.hasNext()).isFalse();
+		assertThat(next.getContent()).contains(oliver);
+	}
+
+	@Test // GH-3757
+	public void findByShouldCount() {
+
+		long count = repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.FetchableFluentQuery::count);
+		assertThat(count).isEqualTo(2L);
+
+		count = repository.findBy(person.lastname.eq("foo"), FluentQuery.FetchableFluentQuery::count);
+		assertThat(count).isEqualTo(0L);
+	}
+
+	@Test // GH-3757
+	public void findByShouldReportExists() {
+
+		Person probe = new Person();
+		probe.setLastname(oliver.getLastname());
+
+		boolean exists = repository.findBy(person.lastname.eq(oliver.getLastname()),
+				FluentQuery.FetchableFluentQuery::exists);
+		assertThat(exists).isTrue();
+
+		probe = new Person();
+		probe.setLastname("foo");
+
+		exists = repository.findBy(person.lastname.eq("foo"), FluentQuery.FetchableFluentQuery::exists);
+		assertThat(exists).isFalse();
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java
index ccb848cb3b..89b82f4171 100644
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2011-2014 the original author or authors.
+ * Copyright 2011-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,41 +15,55 @@
  */
 package org.springframework.data.mongodb.repository.support;
 
-import static org.hamcrest.CoreMatchers.*;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.*;
 
 import java.util.Arrays;
+import java.util.Objects;
 
+import org.bson.types.ObjectId;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+
+import org.springframework.beans.DirectFieldAccessor;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.annotation.Id;
 import org.springframework.data.mongodb.core.MongoOperations;
+import org.springframework.data.mongodb.core.mapping.Document;
+import org.springframework.data.mongodb.core.mapping.FieldType;
+import org.springframework.data.mongodb.core.mapping.MongoId;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.repository.Person;
 import org.springframework.data.mongodb.repository.QPerson;
+import org.springframework.data.mongodb.repository.User;
 import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.util.StringUtils;
 
 /**
  * Unit tests for {@link QuerydslRepositorySupport}.
- * 
+ *
  * @author Oliver Gierke
  * @author Christoph Strobl
+ * @author Mark Paluch
  */
-@RunWith(SpringJUnit4ClassRunner.class)
+@RunWith(SpringRunner.class)
 @ContextConfiguration("classpath:infrastructure.xml")
 public class QuerydslRepositorySupportTests {
 
 	@Autowired MongoOperations operations;
 	Person person;
+	QuerydslRepositorySupport repoSupport;
 
 	@Before
 	public void setUp() {
+
+		operations.remove(new Query(), Outer.class);
 		operations.remove(new Query(), Person.class);
+
 		person = new Person("Dave", "Matthews");
 		operations.save(person);
+
+		repoSupport = new QuerydslRepositorySupport(operations) {};
 	}
 
 	@Test
@@ -58,13 +72,10 @@ public void providesMongoQuery() {
 		QPerson p = QPerson.person;
 		QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};
 		SpringDataMongodbQuery<Person> query = support.from(p).where(p.lastname.eq("Matthews"));
-		assertThat(query.fetchOne(), is(person));
+		assertThat(query.fetchOne()).isEqualTo(person);
 	}
 
-	/**
-	 * @see DATAMONGO-1063
-	 */
-	@Test
+	@Test // DATAMONGO-1063
 	public void shouldAllowAny() {
 
 		person.setSkills(Arrays.asList("vocalist", "songwriter", "guitarist"));
@@ -72,10 +83,320 @@ public void shouldAllowAny() {
 		operations.save(person);
 
 		QPerson p = QPerson.person;
-		QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};
-		SpringDataMongodbQuery query = support.from(p).where(p.skills.any().in("guitarist"));
+		SpringDataMongodbQuery<Person> query = repoSupport.from(p).where(p.skills.any().in("guitarist"));
+
+		assertThat(query.fetchOne()).isEqualTo(person);
+	}
+
+	@Test // DATAMONGO-1394
+	public void shouldAllowDbRefAgainstIdProperty() {
+
+		User bart = new User();
+		bart.setUsername("bart@simpson.com");
+		operations.save(bart);
+
+		person.setCoworker(bart);
+		operations.save(person);
+
+		QPerson p = QPerson.person;
+
+		SpringDataMongodbQuery<Person> queryUsingIdField = repoSupport.from(p).where(p.coworker.id.eq(bart.getId()));
+		SpringDataMongodbQuery<Person> queryUsingRefObject = repoSupport.from(p).where(p.coworker.eq(bart));
+
+		assertThat(queryUsingIdField.fetchOne()).isEqualTo(person);
+		assertThat(queryUsingIdField.fetchOne()).isEqualTo(queryUsingRefObject.fetchOne());
+	}
+
+	@Test // DATAMONGO-1998
+	public void shouldLeaveStringIdThatIsNoValidObjectIdAsItIs() {
+
+		Outer outer = new Outer();
+		outer.id = "outer-1";
+		outer.inner = new Inner();
+		outer.inner.id = "inner-1";
+		outer.inner.value = "go climb a rock";
+
+		operations.save(outer);
+
+		QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer;
+		SpringDataMongodbQuery<Outer> query = repoSupport.from(o).where(o.inner.id.eq(outer.inner.id));
+
+		assertThat(query.fetchOne()).isEqualTo(outer);
+	}
+
+	@Test // DATAMONGO-1998
+	public void shouldConvertStringIdThatIsAValidObjectIdIntoTheSuch() {
+
+		Outer outer = new Outer();
+		outer.id = new ObjectId().toHexString();
+		outer.inner = new Inner();
+		outer.inner.id = new ObjectId().toHexString();
+		outer.inner.value = "eat sleep workout repeat";
+
+		operations.save(outer);
+
+		QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer;
+		SpringDataMongodbQuery<Outer> query = repoSupport.from(o).where(o.inner.id.eq(outer.inner.id));
+
+		assertThat(query.fetchOne()).isEqualTo(outer);
+	}
+
+	@Test // DATAMONGO-1810, DATAMONGO-1848
+	public void shouldFetchObjectsViaStringWhenUsingInOnDbRef() {
+
+		User bart = new User();
+		DirectFieldAccessor dfa = new DirectFieldAccessor(bart);
+		dfa.setPropertyValue("id", "bart");
+
+		bart.setUsername("bart@simpson.com");
+		operations.save(bart);
+
+		User lisa = new User();
+		dfa = new DirectFieldAccessor(lisa);
+		dfa.setPropertyValue("id", "lisa");
+
+		lisa.setUsername("lisa@simposon.com");
+		operations.save(lisa);
+
+		person.setCoworker(bart);
+		operations.save(person);
+
+		QPerson p = QPerson.person;
+
+		SpringDataMongodbQuery<Person> queryUsingIdFieldWithinInClause = repoSupport.from(p)
+				.where(p.coworker.id.in(Arrays.asList(bart.getId(), lisa.getId())));
+
+		SpringDataMongodbQuery<Person> queryUsingRefObject = repoSupport.from(p).where(p.coworker.eq(bart));
+
+		assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(person);
+		assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(queryUsingRefObject.fetchOne());
+	}
+
+	@Test // DATAMONGO-1810, DATAMONGO-1848
+	public void shouldFetchObjectsViaStringStoredAsObjectIdWhenUsingInOnDbRef() {
+
+		User bart = new User();
+		bart.setUsername("bart@simpson.com");
+		operations.save(bart);
+
+		User lisa = new User();
+		lisa.setUsername("lisa@simposon.com");
+		operations.save(lisa);
+
+		person.setCoworker(bart);
+		operations.save(person);
+
+		QPerson p = QPerson.person;
+
+		SpringDataMongodbQuery<Person> queryUsingIdFieldWithinInClause = repoSupport.from(p)
+				.where(p.coworker.id.in(Arrays.asList(bart.getId(), lisa.getId())));
+
+		SpringDataMongodbQuery<Person> queryUsingRefObject = repoSupport.from(p).where(p.coworker.eq(bart));
+
+		assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(person);
+		assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(queryUsingRefObject.fetchOne());
+	}
+
+	@Test // DATAMONGO-1848, DATAMONGO-2010
+	public void shouldConvertStringIdThatIsAValidObjectIdWhenUsedInInPredicateIntoTheSuch() {
+
+		Outer outer = new Outer();
+		outer.id = new ObjectId().toHexString();
+		outer.inner = new Inner();
+		outer.inner.id = new ObjectId().toHexString();
+		outer.inner.value = "eat sleep workout repeat";
+
+		operations.save(outer);
+
+		QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer;
+		SpringDataMongodbQuery<Outer> query = repoSupport.from(o).where(o.inner.id.in(outer.inner.id, outer.inner.id));
+
+		assertThat(query.fetchOne()).isEqualTo(outer);
+	}
+
+	@Test // DATAMONGO-1798
+	public void shouldRetainIdPropertyTypeIfInvalidObjectId() {
+
+		Outer outer = new Outer();
+		outer.id = "foobar";
+
+		operations.save(outer);
+
+		QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer;
+		SpringDataMongodbQuery<Outer> query = repoSupport.from(o).where(o.id.eq(outer.id));
+
+		assertThat(query.fetchOne()).isEqualTo(outer);
+	}
+
+	@Test // DATAMONGO-1798
+	public void shouldUseStringForValidObjectIdHexStrings() {
+
+		WithMongoId document = new WithMongoId();
+		document.id = new ObjectId().toHexString();
+
+		operations.save(document);
+
+		QQuerydslRepositorySupportTests_WithMongoId o = QQuerydslRepositorySupportTests_WithMongoId.withMongoId;
+		SpringDataMongodbQuery<WithMongoId> eqQuery = repoSupport.from(o).where(o.id.eq(document.id));
+
+		assertThat(eqQuery.fetchOne()).isEqualTo(document);
+
+		SpringDataMongodbQuery<WithMongoId> inQuery = repoSupport.from(o).where(o.id.in(document.id));
+
+		assertThat(inQuery.fetchOne()).isEqualTo(document);
+	}
+
+	@Test // DATAMONGO-2327
+	public void toJsonShouldRenderQuery() {
+
+		QPerson p = QPerson.person;
+		SpringDataMongodbQuery<Person> query = repoSupport.from(p).where(p.lastname.eq("Matthews"))
+				.orderBy(p.firstname.asc()).offset(1).limit(5);
+
+		assertThat(StringUtils.trimAllWhitespace(query.toJson())).isEqualTo("{\"lastname\":\"Matthews\"}");
+	}
+
+	@Test // DATAMONGO-2327
+	public void toStringShouldRenderQuery() {
+
+		QPerson p = QPerson.person;
+		User user = new User();
+		user.setId("id");
+		SpringDataMongodbQuery<Person> query = repoSupport.from(p)
+				.where(p.lastname.eq("Matthews").and(p.coworker.eq(user)));
+
+		assertThat(StringUtils.trimAllWhitespace(query.toString()))
+				.isEqualTo("find({\"lastname\":\"Matthews\",\"coworker\":{\"$ref\":\"user\",\"$id\":\"id\"}})");
+
+		query = query.orderBy(p.firstname.asc());
+		assertThat(StringUtils.trimAllWhitespace(query.toString())).isEqualTo(
+				"find({\"lastname\":\"Matthews\",\"coworker\":{\"$ref\":\"user\",\"$id\":\"id\"}}).sort({\"firstname\":1})");
+
+		query = query.offset(1).limit(5);
+		assertThat(StringUtils.trimAllWhitespace(query.toString())).isEqualTo(
+				"find({\"lastname\":\"Matthews\",\"coworker\":{\"$ref\":\"user\",\"$id\":\"id\"}}).sort({\"firstname\":1}).skip(1).limit(5)");
+	}
+
+	@Document
+	public static class Outer {
+
+		@Id String id;
+		Inner inner;
+
+		public String getId() {
+			return this.id;
+		}
+
+		public Inner getInner() {
+			return this.inner;
+		}
+
+		public void setId(String id) {
+			this.id = id;
+		}
+
+		public void setInner(Inner inner) {
+			this.inner = inner;
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			Outer outer = (Outer) o;
+			return Objects.equals(id, outer.id) && Objects.equals(inner, outer.inner);
+		}
+
+		@Override
+		public int hashCode() {
+			return Objects.hash(id, inner);
+		}
+
+		public String toString() {
+			return "QuerydslRepositorySupportTests.Outer(id=" + this.getId() + ", inner=" + this.getInner() + ")";
+		}
+	}
+
+	public static class Inner {
+
+		@Id String id;
+		String value;
+
+		public String getId() {
+			return this.id;
+		}
+
+		public String getValue() {
+			return this.value;
+		}
+
+		public void setId(String id) {
+			this.id = id;
+		}
+
+		public void setValue(String value) {
+			this.value = value;
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			Inner inner = (Inner) o;
+			return Objects.equals(id, inner.id) && Objects.equals(value, inner.value);
+		}
+
+		@Override
+		public int hashCode() {
+			return Objects.hash(id, value);
+		}
+
+		public String toString() {
+			return "QuerydslRepositorySupportTests.Inner(id=" + this.getId() + ", value=" + this.getValue() + ")";
+		}
+	}
+
+	@Document
+	public static class WithMongoId {
+
+		@MongoId(FieldType.STRING) String id;
+
+		public String getId() {
+			return this.id;
+		}
+
+		public void setId(String id) {
+			this.id = id;
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			if (o == this) {
+				return true;
+			}
+			if (o == null || getClass() != o.getClass()) {
+				return false;
+			}
+			WithMongoId that = (WithMongoId) o;
+			return Objects.equals(id, that.id);
+		}
+
+		@Override
+		public int hashCode() {
+			return Objects.hash(id);
+		}
-		assertThat(query.fetchOne(), is(person));
+		public String toString() {
+			return "QuerydslRepositorySupportTests.WithMongoId(id=" + this.getId() + ")";
+		}
 	}
 }
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryUnitTests.java
new file mode 100644
index 0000000000..57ee473e6e
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryUnitTests.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2023-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.support;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.mockito.Mockito.*;
+
+import java.util.Set;
+
+import org.springframework.data.repository.reactive.ReactiveCrudRepository;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.mockito.junit.jupiter.MockitoSettings;
+import org.mockito.quality.Strictness;
+import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
+import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.MongoConverter;
+import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+import org.springframework.data.mongodb.core.query.Query;
+import org.springframework.data.mongodb.repository.Person;
+import org.springframework.data.mongodb.repository.ReadPreference;
+import org.springframework.data.repository.Repository;
+
+/**
+ * Unit test for {@link ReactiveMongoRepositoryFactory}.
+ *
+ * @author Mark Paluch
+ */
+@ExtendWith(MockitoExtension.class)
+@MockitoSettings(strictness = Strictness.LENIENT)
+public class ReactiveMongoRepositoryFactoryUnitTests {
+
+	@Mock ReactiveMongoTemplate template;
+
+	MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext());
+
+	@BeforeEach
+	public void setUp() {
+		when(template.getConverter()).thenReturn(converter);
+	}
+
+	@Test // GH-2971
+	void considersCrudMethodMetadata() {
+
+		when(template.findOne(any(), any(), anyString())).thenReturn(Mono.empty());
+
+		ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(template);
+		MyPersonRepository repository = factory.getRepository(MyPersonRepository.class);
+		repository.findById(42L);
+
+		ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
+		verify(template).findOne(captor.capture(), eq(Person.class), eq("person"));
+
+		Query value = captor.getValue();
+		assertThat(value.getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondary());
+	}
+
+	@Test // GH-2971
+	void ignoresCrudMethodMetadataOnNonAnnotatedMethods() {
+
+		when(template.find(any(), any(), anyString())).thenReturn(Flux.empty());
+
+		ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(template);
+		MyPersonRepository repository = factory.getRepository(MyPersonRepository.class);
+		repository.findAllById(Set.of(42L));
+
+		ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
+		verify(template).find(captor.capture(), eq(Person.class), eq("person"));
+
+		Query value = captor.getValue();
+		assertThat(value.getReadPreference()).isNull();
+	}
+
+	interface MyPersonRepository extends ReactiveCrudRepository<Person, Long> {
+
+		@ReadPreference("secondary")
+		Mono<Person> findById(Long id);
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutorTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutorTests.java
new file mode 100644
index 0000000000..807b7aec22
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutorTests.java
@@ -0,0 +1,467 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.repository.support;
+
+import static org.assertj.core.api.Assertions.*;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedHashSet;
+import java.util.Set;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.dao.IncorrectResultSizeDataAccessException;
+import org.springframework.dao.PermissionDeniedDataAccessException;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Sort;
+import org.springframework.data.domain.Sort.Direction;
+import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
+import org.springframework.data.mongodb.core.ReactiveMongoOperations;
+import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
+import org.springframework.data.mongodb.core.query.BasicQuery;
+import org.springframework.data.mongodb.repository.Address;
+import org.springframework.data.mongodb.repository.Person;
+import org.springframework.data.mongodb.repository.QAddress;
+import org.springframework.data.mongodb.repository.QPerson;
+import org.springframework.data.mongodb.repository.QUser;
+import org.springframework.data.mongodb.repository.User;
+import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
+import org.springframework.data.mongodb.test.util.MongoTestUtils;
+import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration;
+import org.springframework.data.repository.query.FluentQuery;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import com.mongodb.MongoException;
+import com.mongodb.reactivestreams.client.MongoClient;
+import com.mongodb.reactivestreams.client.MongoDatabase;
+
+/**
+ * Tests for {@link ReactiveQuerydslMongoPredicateExecutor}.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @author Rocco Lagrotteria
+ */
+@RunWith(SpringRunner.class)
+@ContextConfiguration
+public class ReactiveQuerydslMongoPredicateExecutorTests {
+
+	@Autowired ReactiveMongoOperations operations;
+	@Autowired ReactiveMongoDatabaseFactory dbFactory;
+
+	ReactiveQuerydslMongoPredicateExecutor<Person> repository;
+
+	Person dave, oliver, carter;
+	QPerson person;
+
+	@Configuration
+	static class Config extends ReactiveMongoClientClosingTestConfiguration {
+
+		@Override
+		public MongoClient reactiveMongoClient() {
+			return MongoTestUtils.reactiveClient();
+		}
+
+		@Override
+		protected String getDatabaseName() {
+			return "reactive";
+		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() {
+			return Collections.singleton(Person.class);
+		}
+	}
+
+	@BeforeClass
+	public static void cleanDb() {
+
+		try (MongoClient client = MongoTestUtils.reactiveClient()) {
+			MongoTestUtils.createOrReplaceCollectionNow("reactive", "person", client);
+			MongoTestUtils.createOrReplaceCollectionNow("reactive", "user", client);
+		}
+	}
+
+	@Before
+	public void setup() {
+
+		ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(operations);
+		MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
+		repository = new ReactiveQuerydslMongoPredicateExecutor<>(entityInformation, operations);
+
+		dave = new Person("Dave", "Matthews", 42);
+		oliver = new Person("Oliver August", "Matthews", 4);
+		carter = new Person("Carter", "Beauford", 49);
+
+		person = new QPerson("person");
+
+		Flux.merge(operations.insert(oliver), operations.insert(dave), operations.insert(carter)).then() //
+				.as(StepVerifier::create).verifyComplete();
+	}
+
+	@After
+	public void tearDown() {
+		operations.remove(new BasicQuery("{}"), "person").then().as(StepVerifier::create).verifyComplete();
+		operations.remove(new BasicQuery("{}"), "user").then().as(StepVerifier::create).verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void shouldSupportExistsWithPredicate() {
+
+		repository.exists(person.firstname.eq("Dave")) //
+				.as(StepVerifier::create) //
+				.expectNext(true) //
+				.verifyComplete();
+
+		repository.exists(person.firstname.eq("Unknown")) //
+				.as(StepVerifier::create) //
+				.expectNext(false) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void shouldSupportCountWithPredicate() {
+
+		repository.count(person.firstname.eq("Dave")) //
+				.as(StepVerifier::create) //
+				.expectNext(1L) //
+				.verifyComplete();
+
+		repository.count(person.firstname.eq("Unknown")) //
+				.as(StepVerifier::create) //
+				.expectNext(0L) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void shouldSupportFindAllWithPredicateAndSort() {
+
+		repository.findAll(person.lastname.isNotNull(), Sort.by(Direction.ASC, "firstname")) //
+				.as(StepVerifier::create) //
+				.expectNext(carter, dave, oliver) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void findOneWithPredicateReturnsResultCorrectly() {
+
+		repository.findOne(person.firstname.eq(dave.getFirstname())) //
+				.as(StepVerifier::create) //
+				.expectNext(dave) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void findOneWithPredicateReturnsEmptyWhenNoDataFound() {
+
+		repository.findOne(person.firstname.eq("batman")) //
+				.as(StepVerifier::create) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void findOneWithPredicateThrowsExceptionForNonUniqueResults() {
+
+		repository.findOne(person.firstname.contains("e")) //
+				.as(StepVerifier::create) //
+				.expectError(IncorrectResultSizeDataAccessException.class) //
+				.verify();
+	}
+
+	@Test // DATAMONGO-2182
+	public void findUsingAndShouldWork() {
+
+		repository
+				.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname()))) //
+				.as(StepVerifier::create) //
+				.expectNext(dave) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182
+	public void queryShouldTerminateWithUnsupportedOperationWithJoinOnDBref() {
+
+		User user1 = new User();
+		user1.setUsername("user-1");
+
+		User user2 = new User();
+		user2.setUsername("user-2");
+
+		User user3 = new User();
+		user3.setUsername("user-3");
+
+		Flux.merge(operations.save(user1), operations.save(user2), operations.save(user3)) //
+				.then() //
+				.as(StepVerifier::create) //
+				.verifyComplete(); //
+
+		Person person1 = new Person("Max", "The Mighty");
+		person1.setCoworker(user1);
+
+		Person person2 = new Person("Jack", "The Ripper");
+		person2.setCoworker(user2);
+
+		Person person3 = new Person("Bob", "The Builder");
+		person3.setCoworker(user3);
+
+		operations.save(person1) //
+				.as(StepVerifier::create) //
+				.expectNextCount(1) //
+				.verifyComplete();
+		operations.save(person2)//
+				.as(StepVerifier::create) //
+				.expectNextCount(1) //
+				.verifyComplete();
+		operations.save(person3) //
+				.as(StepVerifier::create) //
+				.expectNextCount(1) //
+				.verifyComplete();
+
+		Flux<Person> result = new ReactiveSpringDataMongodbQuery<>(operations, Person.class).where()
+				.join(person.coworker, QUser.user).on(QUser.user.username.eq("user-2")).fetch();
+
+		result.as(StepVerifier::create) //
+				.expectError(UnsupportedOperationException.class) //
+				.verify();
+	}
+
+	@Test // DATAMONGO-2182
+	public void queryShouldTerminateWithUnsupportedOperationOnJoinWithNoResults() {
+
+		User user1 = new User();
+		user1.setUsername("user-1");
+
+		User user2 = new User();
+		user2.setUsername("user-2");
+
+		operations.insertAll(Arrays.asList(user1, user2)) //
+				.as(StepVerifier::create) //
+				.expectNextCount(2) //
+				.verifyComplete();
+
+		Person person1 = new Person("Max", "The Mighty");
+		person1.setCoworker(user1);
+
+		Person person2 = new Person("Jack", "The Ripper");
+		person2.setCoworker(user2);
+
+		operations.save(person1) //
+				.as(StepVerifier::create) //
+				.expectNextCount(1) //
+				.verifyComplete();
+
+		operations.save(person2) //
+				.as(StepVerifier::create) //
+				.expectNextCount(1) //
+				.verifyComplete();
+
+		Flux<Person> result = new ReactiveSpringDataMongodbQuery<>(operations, Person.class).where()
+				.join(person.coworker, QUser.user).on(QUser.user.username.eq("does-not-exist")).fetch();
+
+		result.as(StepVerifier::create) //
+				.expectError(UnsupportedOperationException.class) //
+				.verify();
+	}
+
+	@Test // DATAMONGO-2182
+	public void springDataMongodbQueryShouldAllowElemMatchOnArrays() {
+
+		Address adr1 = new Address("Hauptplatz", "4020", "Linz");
+		Address adr2 = new Address("Stephansplatz", "1010", "Wien");
+		Address adr3 = new Address("Tower of London", "EC3N 4AB", "London");
+
+		Person person1 = new Person("Max", "The Mighty");
+		person1.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr1, adr2)));
+
+		Person person2 = new Person("Jack", "The Ripper");
+		person2.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr2, adr3)));
+
+		operations.insertAll(Arrays.asList(person1, person2)) //
+				.as(StepVerifier::create) //
+				.expectNextCount(2) //
+				.verifyComplete();
+
+		Flux<Person> result = new ReactiveSpringDataMongodbQuery<>(operations, Person.class).where()
+				.anyEmbedded(person.shippingAddresses, QAddress.address).on(QAddress.address.city.eq("London")).fetch();
+
+		result.as(StepVerifier::create) //
+				.expectNext(person2) //
+				.verifyComplete();
+	}
+
+	@Test // DATAMONGO-2182, DATAMONGO-2265
+	public void translatesExceptionsCorrectly() {
+
+		ReactiveMongoOperations ops = new ReactiveMongoTemplate(dbFactory) {
+
+			@Override
+			protected Mono<MongoDatabase> doGetDatabase() {
+				return Mono.error(new MongoException(18, "Authentication Failed"));
+			}
+		};
+
+		ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(ops);
+		MongoEntityInformation<Person, String> entityInformation = factory.getEntityInformation(Person.class);
+		repository = new ReactiveQuerydslMongoPredicateExecutor<>(entityInformation, ops);
+
+		repository.findOne(person.firstname.contains("batman")) //
+				.as(StepVerifier::create) //
+				.expectError(PermissionDeniedDataAccessException.class) //
+				.verify();
+	}
+
+	@Test // GH-3757
+	public void findByShouldReturnFirstResult() {
+
+		repository.findBy(person.firstname.eq(oliver.getFirstname()), FluentQuery.ReactiveFluentQuery::first) //
+				.as(StepVerifier::create) //
+				.expectNext(oliver) //
+				.verifyComplete();
+	}
+
+	@Test // GH-3757
+	public void findByShouldReturnOneResult() {
+
+		repository.findBy(person.firstname.eq(oliver.getFirstname()), FluentQuery.ReactiveFluentQuery::one) //
+				.as(StepVerifier::create) //
+				.expectNext(oliver) //
+				.verifyComplete();
+
+		repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::one) //
+				.as(StepVerifier::create) //
+				.verifyError(IncorrectResultSizeDataAccessException.class);
+	}
+
+	@Test // GH-3757
+	public void findByShouldReturnAll() {
+
+		repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::all) //
+				.as(StepVerifier::create) //
+				.expectNextCount(2) //
+				.verifyComplete();
+	}
+
+	@Test // GH-3757
+	public void findByShouldApplySortAll() {
+
+		repository.findBy(person.lastname.eq(oliver.getLastname()), it -> it.sortBy(Sort.by("firstname")).all()) //
+				.as(StepVerifier::create) //
+				.expectNext(dave, oliver) //
+				.verifyComplete();
+
+		repository
+				.findBy(person.lastname.eq(oliver.getLastname()), it -> it.sortBy(Sort.by(Direction.DESC, "firstname")).all()) //
+				.as(StepVerifier::create) //
+				.expectNext(oliver, dave) //
+				.verifyComplete();
+	}
+
+	@Test // GH-3757
+	public void findByShouldApplyProjection() {
+
+		repository.findBy(person.lastname.eq(oliver.getLastname()), it -> it.project("firstname").first()) //
+				.as(StepVerifier::create) //
+				.assertNext(it -> {
+
+					assertThat(it.getFirstname()).isNotNull();
+					assertThat(it.getLastname()).isNull();
+				}).verifyComplete();
+	}
+
+	@Test // GH-3757
+	public void findByShouldApplyPagination() {
+
+		repository
+				.findBy(person.lastname.eq(oliver.getLastname()), it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))) //
+				.as(StepVerifier::create) //
+				.assertNext(it -> {
+
+					assertThat(it.getTotalElements()).isEqualTo(2);
+					assertThat(it.getContent()).containsOnly(dave);
+				}).verifyComplete();
+
+		repository
+				.findBy(person.lastname.eq(oliver.getLastname()), it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))) //
+				.as(StepVerifier::create) //
+				.assertNext(it -> {
+
+					assertThat(it.getTotalElements()).isEqualTo(2);
+					assertThat(it.getContent()).containsOnly(oliver);
+				}).verifyComplete();
+	}
+
+	@Test // GH-4889
+	public void findByShouldApplySlice() {
+
+		repository
+				.findBy(person.lastname.eq(oliver.getLastname()), it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname")))) //
+				.as(StepVerifier::create) //
+				.assertNext(it -> {
+
+					assertThat(it.hasNext()).isTrue();
+					assertThat(it.getContent()).containsOnly(dave);
+				}).verifyComplete();
+
+		repository
+				.findBy(person.lastname.eq(oliver.getLastname()), it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))) //
+				.as(StepVerifier::create) //
+				.assertNext(it -> {
+
+					assertThat(it.hasNext()).isFalse();
+					assertThat(it.getContent()).containsOnly(oliver);
+				}).verifyComplete();
+	}
+
+	@Test // GH-3757
+	public void findByShouldCount() {
+
+		repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::count) //
+				.as(StepVerifier::create) //
+				.expectNext(2L) //
+				.verifyComplete();
+
+		repository.findBy(person.lastname.eq("foo"), FluentQuery.ReactiveFluentQuery::count) //
+				.as(StepVerifier::create) //
+				.expectNext(0L) //
+				.verifyComplete();
+	}
+
+	@Test // GH-3757
+	public void findByShouldReportExists() {
+
+		repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::exists) //
+				.as(StepVerifier::create) //
+				.expectNext(true) //
+				.verifyComplete();
+
+		repository.findBy(person.lastname.eq("foo"), FluentQuery.ReactiveFluentQuery::exists) //
+				.as(StepVerifier::create) //
+				.expectNext(false) //
+				.verifyComplete();
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java
index d7221de3f4..60c02ee775 100755
--- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2010-2016 the original author or authors.
+ * Copyright 2010-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,12 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static java.util.Arrays.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.domain.ExampleMatcher.*; +import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -28,38 +28,52 @@ import java.util.Set; import java.util.UUID; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.domain.Example; -import org.springframework.data.domain.ExampleMatcher.StringMatcher; +import org.springframework.data.domain.ExampleMatcher; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.MongoTransactionManager; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.repository.Address; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.User; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoServerCondition; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.data.repository.query.FluentQuery; import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.transaction.support.TransactionTemplate; /** - * @author A. B. M. Kowser + * @author A. B. M. 
Kowser * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Jens Schauder */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") -public class SimpleMongoRepositoryTests { +@ExtendWith({ MongoTemplateExtension.class, MongoServerCondition.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class SimpleMongoRepositoryTests implements StateFunctions { - @Autowired private MongoTemplate template; + @Template(initialEntitySet = Person.class) // + private static MongoTestTemplate template; private Person oliver, dave, carter, boyd, stefan, leroi, alicia; private List<Person> all; @@ -67,10 +81,23 @@ public class SimpleMongoRepositoryTests { private MongoEntityInformation<Person, String> personEntityInformation = new CustomizedPersonInformation(); private SimpleMongoRepository<Person, String> repository; - @Before - public void setUp() { - repository = new SimpleMongoRepository<Person, String>(personEntityInformation, template); + @BeforeEach + void setUp() { + repository = new SimpleMongoRepository<>(personEntityInformation, template); + } + + @Override + public void clear() { + + if (repository == null) { + setUp(); + } + repository.deleteAll(); + } + + @Override + public void setupState() { oliver = new Person("Oliver August", "Matthews", 4); dave = new Person("Dave", "Matthews", 42); @@ -80,60 +107,52 @@ public void setUp() { leroi = new Person("Leroi", "Moore", 41); alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); - all = repository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); + all = repository.saveAll(asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); } @Test - public void findALlFromCustomCollectionName() { - List<Person> result = repository.findAll(); - assertThat(result, hasSize(all.size())); + void findAllFromCustomCollectionName() { + assertThat(repository.findAll()).hasSameSizeAs(all); } @Test - public void findOneFromCustomCollectionName() { - Person result = repository.findOne(dave.getId()); - assertThat(result, is(dave)); + void findOneFromCustomCollectionName() { + assertThat(repository.findById(dave.getId())).contains(dave); } @Test - public void deleteFromCustomCollectionName() { + @DirtiesState + void deleteFromCustomCollectionName() { + repository.delete(dave); - List<Person> result = repository.findAll(); - assertThat(result, hasSize(all.size() - 1)); - assertThat(result, not(hasItem(dave))); + assertThat(repository.findAll()).hasSize(all.size() - 1).doesNotContain(dave); } @Test - public void deleteByIdFromCustomCollectionName() { - repository.delete(dave.getId()); - List<Person> result = repository.findAll(); + @DirtiesState + void deleteByIdFromCustomCollectionName() { + + repository.deleteById(dave.getId()); - assertThat(result, hasSize(all.size() - 1)); - assertThat(result, not(hasItem(dave))); + assertThat(repository.findAll()).hasSize(all.size() - 1).doesNotContain(dave); } - /** - * @see DATAMONGO-1054 - */ - @Test - public void shouldInsertSingle() { + @Test // DATAMONGO-1054 + @DirtiesState + void shouldInsertSingle() { String randomId = UUID.randomUUID().toString(); Person person1 = new Person("First1" + randomId, "Last2" + randomId, 42); person1 = repository.insert(person1); - Person saved = repository.findOne(person1.getId()); - - assertThat(saved, is(equalTo(person1))); + assertThat(repository.findById(person1.getId())).contains(person1); } - /** - * @see DATAMONGO-1054 - */ - @Test - public void shouldInsertMultipleFromList() { + @Test // DATAMONGO-1054 + @DirtiesState + void
shouldInsertMultipleFromList() { String randomId = UUID.randomUUID().toString(); Map<String, Person> idToPerson = new HashMap<String, Person>(); @@ -147,15 +166,13 @@ public void shouldInsertMultipleFromList() { List<Person> saved = repository.insert(persons); - assertThat(saved, hasSize(persons.size())); + assertThat(saved).hasSameSizeAs(persons); assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved); } - /** - * @see DATAMONGO-1054 - */ - @Test - public void shouldInsertMutlipleFromSet() { + @Test // DATAMONGO-1054 + @DirtiesState + void shouldInsertMutlipleFromSet() { String randomId = UUID.randomUUID().toString(); Map<String, Person> idToPerson = new HashMap<String, Person>(); @@ -169,47 +186,62 @@ public void shouldInsertMutlipleFromSet() { List<Person> saved = repository.insert(persons); - assertThat(saved, hasSize(persons.size())); + assertThat(saved).hasSameSizeAs(persons); assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findByExampleShouldLookUpEntriesCorrectly() { + @Test // DATAMONGO-1245, DATAMONGO-1464 + void findByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); trimDomainType(sample, "id", "createdAt", "email"); - Page<Person> result = repository.findAll(Example.of(sample), new PageRequest(0, 10)); + Page<Person> result = repository.findAll(Example.of(sample), PageRequest.of(0, 10)); - assertThat(result.getContent(), hasItems(dave, oliver)); - assertThat(result.getContent(), hasSize(2)); + assertThat(result.getContent()).hasSize(2).contains(dave, oliver); + assertThat(result.getTotalPages()).isEqualTo(1); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldLookUpEntriesCorrectly() { + @Test // GH-3751 + void findByExampleShouldReturnUnpagedResults() { Person sample = new Person(); sample.setLastname("Matthews"); trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); + Page<Person> result = repository.findAll(Example.of(sample), Pageable.unpaged()); - assertThat(result, containsInAnyOrder(dave, oliver)); - assertThat(result, hasSize(2)); + assertThat(result.getContent()).hasSize(2).contains(dave, oliver); + assertThat(result.getTotalPages()).isEqualTo(1); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() { + @Test // DATAMONGO-1464 + void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() { + + Person sample = new Person(); + sample.setLastname("Matthews"); + trimDomainType(sample, "id", "createdAt", "email"); + + Page<Person> result = repository.findAll(Example.of(sample), PageRequest.of(0, 1)); + + assertThat(result.getContent()).hasSize(1); + assertThat(result.getTotalPages()).isEqualTo(2); } + + @Test // DATAMONGO-1245 + void findAllByExampleShouldLookUpEntriesCorrectly() { + + Person sample = new Person(); + sample.setLastname("Matthews"); + trimDomainType(sample, "id", "createdAt", "email"); + + assertThat(repository.findAll(Example.of(sample))).hasSize(2).contains(dave, oliver); + } + + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -221,17 +253,12 @@ public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() sample.setAddress(dave.getAddress()); trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); - - assertThat(result,
hasItem(dave)); - assertThat(result, hasSize(1)); + assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(dave); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedObject() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedObject() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -243,17 +270,12 @@ public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedOb sample.setAddress(new Address(null, null, "Washington")); trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); - - assertThat(result, hasItems(dave, oliver)); - assertThat(result, hasSize(2)); + assertThat(repository.findAll(Example.of(sample))).hasSize(2).contains(dave, oliver); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInStrictMode() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInStrictMode() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -263,16 +285,13 @@ public void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInSt trimDomainType(sample, "id", "createdAt", "email"); Example<Person> example = Example.of(sample, matching().withIncludeNullValues()); - List<Person> result = repository.findAll(example); - assertThat(result, empty()); + assertThat(repository.findAll(example)).isEmpty(); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInStrictMode() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInStrictMode() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -282,34 +301,25 @@ public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInS trimDomainType(sample, "id", "createdAt", "email"); Example<Person> example = Example.of(sample, matching().withIncludeNullValues()); - List<Person> result = repository.findAll(example); - assertThat(result, hasItem(dave)); - assertThat(result, hasSize(1)); + assertThat(repository.findAll(example)).hasSize(1).contains(dave); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldRespectStringMatchMode() { + @Test // DATAMONGO-1245 + void findAllByExampleShouldRespectStringMatchMode() { Person sample = new Person(); sample.setLastname("Mat"); trimDomainType(sample, "id", "createdAt", "email"); Example<Person> example = Example.of(sample, matching().withStringMatcher(StringMatcher.STARTING)); - List<Person> result = repository.findAll(example); - assertThat(result, hasItems(dave, oliver)); - assertThat(result, hasSize(2)); + assertThat(repository.findAll(example)).hasSize(2).contains(dave, oliver); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldResolveDbRefCorrectly() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldResolveDbRefCorrectly() { User user = new User(); user.setId("c0nf1ux"); @@ -325,17 +335,12 @@ public void findAllByExampleShouldResolveDbRefCorrectly() { sample.setCreator(user); trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); - - assertThat(result,
hasItem(megan)); - assertThat(result, hasSize(1)); + assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(megan); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() { Person megan = new Person("megan", "tarash"); megan.setLocation(new Point(41.85003D, -87.65005D)); @@ -346,17 +351,12 @@ public void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() { sample.setLocation(megan.getLocation()); trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); - - assertThat(result, hasItem(megan)); - assertThat(result, hasSize(1)); + assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(megan); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() { Person megan = new Person("megan", "tarash"); megan.setLocation(new GeoJsonPoint(41.85003D, -87.65005D)); @@ -367,17 +367,12 @@ public void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() { sample.setLocation(megan.getLocation()); trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); - - assertThat(result, hasItem(megan)); - assertThat(result, hasSize(1)); + assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(megan); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findAllByExampleShouldProcessInheritanceCorrectly() { + @Test // DATAMONGO-1245 + @DirtiesState + void findAllByExampleShouldProcessInheritanceCorrectly() { PersonExtended reference = new PersonExtended(); reference.setLastname("Matthews"); @@ -389,64 +384,261 @@ public void findAllByExampleShouldProcessInheritanceCorrectly() { trimDomainType(sample, "id", "createdAt", "email"); - List<Person> result = repository.findAll(Example.of(sample)); - - assertThat(result, hasSize(1)); - assertThat(result, hasItem(reference)); + assertThat(repository.findAll(Example.of(sample))).hasSize(1).contains(reference); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void findOneByExampleShouldLookUpEntriesCorrectly() { + @Test // DATAMONGO-1245 + void findOneByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setFirstname("Dave"); sample.setLastname("Matthews"); trimDomainType(sample, "id", "createdAt", "email"); - Person result = repository.findOne(Example.of(sample)); - - assertThat(result, is(equalTo(dave))); + assertThat(repository.findOne(Example.of(sample))).isPresent().contains(dave); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void existsByExampleShouldLookUpEntriesCorrectly() { + @Test // DATAMONGO-1245 + void existsByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setFirstname("Dave"); sample.setLastname("Matthews"); trimDomainType(sample, "id", "createdAt", "email"); - boolean result = repository.exists(Example.of(sample)); - - assertThat(result, is(true)); + assertThat(repository.exists(Example.of(sample))).isTrue(); } - /** - * @see DATAMONGO-1245 - */ - @Test - public void countByExampleShouldLookUpEntriesCorrectly() { + @Test // DATAMONGO-1245 + void countByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); trimDomainType(sample, "id", "createdAt", "email"); - 
long result = repository.count(Example.of(sample)); + assertThat(repository.count(Example.of(sample))).isEqualTo(2L); + } + + @Test // DATAMONGO-1896 + @DirtiesState + void saveAllUsesEntityCollection() { + + Person first = new PersonExtended(); + first.setEmail("foo@bar.com"); + ReflectionTestUtils.setField(first, "id", null); + + Person second = new PersonExtended(); + second.setEmail("bar@foo.com"); + ReflectionTestUtils.setField(second, "id", null); + + repository.deleteAll(); + + repository.saveAll(asList(first, second)); + + assertThat(repository.findAll()).containsExactlyInAnyOrder(first, second); + } + + @Test // DATAMONGO-2130 + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @DirtiesState + void countShouldBePossibleInTransaction() { + + MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionTemplate tt = new TransactionTemplate(txmgr); + tt.afterPropertiesSet(); + + long countPreTx = repository.count(); + + long count = tt.execute(status -> { + + Person sample = new Person(); + sample.setLastname("Matthews"); + + repository.save(sample); + + return repository.count(); + }); + + assertThat(count).isEqualTo(countPreTx + 1); + } + + @Test // DATAMONGO-2130 + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @DirtiesState + void existsShouldBePossibleInTransaction() { + + MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionTemplate tt = new TransactionTemplate(txmgr); + tt.afterPropertiesSet(); + + boolean exists = tt.execute(status -> { + + Person sample = new Person(); + sample.setLastname("Matthews"); + + repository.save(sample); + + return repository.existsById(sample.getId()); + }); + + assertThat(exists).isTrue(); + } + + @Test // DATAMONGO-2652 + @DirtiesState + void deleteAllByIds() { + + repository.deleteAllById(asList(dave.getId(), carter.getId())); + + assertThat(repository.findAll()) // + .hasSize(all.size() - 2).doesNotContain(dave, carter); + } + + @Test // GH-3757 + void findByShouldReturnFirstResult() { + + Person probe = new Person(); + probe.setFirstname(oliver.getFirstname()); + + Person result = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::firstValue); + + assertThat(result).isEqualTo(oliver); + } + + @Test // GH-3757 + void findByShouldReturnOneResult() { + + Person probe = new Person(); + probe.setFirstname(oliver.getFirstname()); + + Person result = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::oneValue); + + assertThat(result).isEqualTo(oliver); - assertThat(result, is(equalTo(2L))); + Person probeByLastname = new Person(); + probeByLastname.setLastname(oliver.getLastname()); + + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class).isThrownBy( + () -> repository.findBy(Example.of(probeByLastname, getMatcher()), FluentQuery.FetchableFluentQuery::one)); + } + + @Test // GH-3757 + void findByShouldReturnAll() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + List result = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::all); + + assertThat(result).hasSize(2); + } + + @Test // GH-3757 + void findByShouldApplySortAll() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + List result = repository.findBy(Example.of(probe, getMatcher()), + it -> it.sortBy(Sort.by("firstname")).all()); 
+ assertThat(result).containsSequence(dave, oliver); + + result = repository.findBy(Example.of(probe, getMatcher()), + it -> it.sortBy(Sort.by(Sort.Direction.DESC, "firstname")).all()); + assertThat(result).containsSequence(oliver, dave); + } + + @Test // GH-3757 + void findByShouldApplyProjection() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + Person result = repository.findBy(Example.of(probe, getMatcher()), it -> it.project("firstname").firstValue()); + + assertThat(result.getFirstname()).isNotNull(); + assertThat(result.getLastname()).isNull(); + } + + @Test // GH-3757 + void findByShouldApplyPagination() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + Page first = repository.findBy(Example.of(probe, getMatcher()), + it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))); + assertThat(first.getTotalElements()).isEqualTo(2); + assertThat(first.getContent()).contains(dave); + + Page next = repository.findBy(Example.of(probe, getMatcher()), + it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))); + + assertThat(next.getTotalElements()).isEqualTo(2); + assertThat(next.getContent()).contains(oliver); + } + + @Test // GH-4889 + void findByShouldApplySlice() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + Slice first = repository.findBy(Example.of(probe, getMatcher()), + it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname")))); + assertThat(first.hasNext()).isTrue(); + assertThat(first.getContent()).contains(dave); + + Slice next = repository.findBy(Example.of(probe, getMatcher()), + it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))); + + assertThat(next.hasNext()).isFalse(); + assertThat(next.getContent()).contains(oliver); + } + + @Test // GH-3757 + void findByShouldCount() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + long count = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::count); + assertThat(count).isEqualTo(2L); + + probe = new Person(); + probe.setLastname("foo"); + + count = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::count); + assertThat(count).isEqualTo(0L); + } + + @Test // GH-3757 + void findByShouldReportExists() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + boolean exists = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::exists); + assertThat(exists).isTrue(); + + probe = new Person(); + probe.setLastname("foo"); + + exists = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::exists); + assertThat(exists).isFalse(); + } + + private ExampleMatcher getMatcher() { + return matching().withIgnorePaths("age", "createdAt", "sex", "email", "id"); } private void assertThatAllReferencePersonsWereStoredCorrectly(Map references, List saved) { for (Person person : saved) { Person reference = references.get(person.getId()); - assertThat(person, is(equalTo(reference))); + assertThat(person).isEqualTo(reference); } } @@ -488,6 +680,11 @@ public String getCollectionName() { public String getIdAttribute() { return "id"; } + + @Override + public Collation getCollation() { + return null; + } } @Document diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryUnitTests.java new file mode 100644 index 0000000000..f784aea6e8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryUnitTests.java @@ -0,0 +1,242 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.function.Consumer; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadataPostProcessor.DefaultCrudMethodMetadata; +import org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery; + +/** + * Unit tests for {@link SimpleMongoRepository}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class SimpleMongoRepositoryUnitTests { + + SimpleMongoRepository repository; + @Mock MongoOperations mongoOperations; + @Mock MongoEntityInformation entityInformation; + + @BeforeEach + public void setUp() { + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToCountForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.count(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).count(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToExistsForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.exists(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).exists(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindWithSortForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy()), Sort.by("nothing")); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindWithPageableForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy()), PageRequest.of(1, 1, Sort.by("nothing"))); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindOneForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findOne(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).findOne(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @ParameterizedTest // GH-2971 + @MethodSource("findAllCalls") + void shouldAddReadPreferenceToFindAllMethods(Consumer> findCall) + throws NoSuchMethodException { + + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + 
repository.setRepositoryMethodMetadata( + new DefaultCrudMethodMetadata(TestRepositoryWithReadPreference.class, TestRepositoryWithReadPreference.class.getMethod("dummy"))); + + findCall.accept(repository); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFindOne() throws NoSuchMethodException { + + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata( + new DefaultCrudMethodMetadata(TestRepositoryWithReadPreference.class, TestRepositoryWithReadPreference.class.getMethod("dummy"))); + + repository.findOne(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).findOne(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFluentFetchable() throws NoSuchMethodException { + + ExecutableFind finder = mock(ExecutableFind.class); + when(mongoOperations.query(any())).thenReturn(finder); + when(finder.inCollection(any())).thenReturn(finder); + when(finder.matching(any(Query.class))).thenReturn(finder); + when(finder.as(any())).thenReturn(finder); + + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata( + new DefaultCrudMethodMetadata(TestRepositoryWithReadPreferenceMethod.class, TestRepositoryWithReadPreferenceMethod.class.getMethod("dummy"))); + + repository.findBy(Example.of(new TestDummy()), FetchableFluentQuery::all); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(finder).matching(query.capture()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + private static Stream findAllCalls() { + + Consumer> findAll = SimpleMongoRepository::findAll; + Consumer> findAllWithSort = repo -> repo.findAll(Sort.by("age")); + Consumer> findAllWithPage = repo -> repo + .findAll(PageRequest.of(1, 20, Sort.by("age"))); + Consumer> findAllWithExample = repo -> repo + .findAll(Example.of(new TestDummy())); + Consumer> findAllWithExampleAndSort = repo -> repo + .findAll(Example.of(new TestDummy()), Sort.by("age")); + Consumer> findAllWithExampleAndPage = repo -> repo + .findAll(Example.of(new TestDummy()), PageRequest.of(1, 20, Sort.by("age"))); + + return Stream.of(Arguments.of(findAll), // + Arguments.of(findAllWithSort), // + Arguments.of(findAllWithPage), // + Arguments.of(findAllWithExample), // + Arguments.of(findAllWithExampleAndSort), // + Arguments.of(findAllWithExampleAndPage)); + } + + static class TestDummy { + + } + + interface TestRepository { + + } + + @ReadPreference("secondaryPreferred") + interface TestRepositoryWithReadPreference { + + void dummy(); + } + + interface TestRepositoryWithReadPreferenceMethod { + + @ReadPreference("secondaryPreferred") + void dummy(); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryVersionedEntityTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryVersionedEntityTests.java new file mode 100644 index 0000000000..ad53592b76 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryVersionedEntityTests.java @@ -0,0 +1,182 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assumptions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.VersionedPerson; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReplicaSet; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.transaction.support.TransactionTemplate; + +import com.mongodb.client.MongoClient; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration +public class SimpleMongoRepositoryVersionedEntityTests { + + @Configuration + static class Config extends MongoClientClosingTestConfiguration { + + @Override + public MongoClient mongoClient() { + return MongoTestUtils.client(); + } + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return new HashSet<>(Arrays.asList(VersionedPerson.class)); + } + } + + @Autowired private MongoTemplate template; + + private MongoEntityInformation personEntityInformation; + private SimpleMongoRepository repository; + + private VersionedPerson sarah; + + @BeforeEach + public void setUp() { + + MongoPersistentEntity entity = template.getConverter().getMappingContext() + .getRequiredPersistentEntity(VersionedPerson.class); + + personEntityInformation = new MappingMongoEntityInformation(entity); + repository = new SimpleMongoRepository<>(personEntityInformation, template); + repository.deleteAll(); + + sarah = repository.save(new 
VersionedPerson("Sarah", "Connor")); + } + + @Test // DATAMONGO-2195 + public void deleteWithMatchingVersion() { + + repository.delete(sarah); + + assertThat(template.count(query(where("id").is(sarah.getId())), VersionedPerson.class)).isZero(); + } + + @Test // DATAMONGO-2195 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void deleteWithMatchingVersionInTx() { + + assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + + long countBefore = repository.count(); + + initTxTemplate().execute(status -> { + + VersionedPerson t800 = repository.save(new VersionedPerson("T-800")); + repository.delete(t800); + + return Void.TYPE; + }); + + assertThat(repository.count()).isEqualTo(countBefore); + } + + @Test // DATAMONGO-2195 + public void deleteWithVersionMismatch() { + + sarah.setVersion(5L); + + assertThatExceptionOfType(OptimisticLockingFailureException.class).isThrownBy(() -> repository.delete(sarah)); + + assertThat(template.count(query(where("id").is(sarah.getId())), VersionedPerson.class)).isOne(); + } + + @Test // DATAMONGO-2195 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void deleteWithVersionMismatchInTx() { + + assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + + long countBefore = repository.count(); + + assertThatExceptionOfType(OptimisticLockingFailureException.class) + .isThrownBy(() -> initTxTemplate().execute(status -> { + + VersionedPerson t800 = repository.save(new VersionedPerson("T-800")); + t800.setVersion(5L); + repository.delete(t800); + + return Void.TYPE; + })); + + assertThat(repository.count()).isEqualTo(countBefore); + } + + @Test // DATAMONGO-2195 + public void deleteNonExisting() { + assertThatThrownBy(() -> repository.delete(new VersionedPerson("T-800"))) + .isInstanceOf(OptimisticLockingFailureException.class); + } + + @Test // DATAMONGO-2195 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void deleteNonExistingInTx() { + + assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + + initTxTemplate().execute(status -> { + + assertThatThrownBy(() -> repository.delete(new VersionedPerson("T-800"))) + .isInstanceOf(OptimisticLockingFailureException.class); + + return Void.TYPE; + }); + } + + TransactionTemplate initTxTemplate() { + + MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionTemplate tt = new TransactionTemplate(txmgr); + tt.afterPropertiesSet(); + + return tt; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryUnitTests.java new file mode 100644 index 0000000000..0b172de2cd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryUnitTests.java @@ -0,0 +1,246 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.ReactiveFindOperation.ReactiveFind; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.repository.query.FluentQuery; + +/** + * Unit tests for {@link SimpleReactiveMongoRepository}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +class SimpleReactiveMongoRepositoryUnitTests { + + private SimpleReactiveMongoRepository repository; + @Mock Mono mono; + @Mock Flux flux; + @Mock ReactiveMongoOperations mongoOperations; + @Mock MongoEntityInformation entityInformation; + + @BeforeEach + void setUp() { + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToCountForExampleIfPresent() { + + when(mongoOperations.count(any(), any(), any())).thenReturn(mono); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.count(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).count(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToExistsForExampleIfPresent() { + + when(mongoOperations.exists(any(), any(), any())).thenReturn(mono); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.exists(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).exists(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToFindForExampleIfPresent() { + + when(mongoOperations.find(any(), any(), any())).thenReturn(flux); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + 
verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToFindWithSortForExampleIfPresent() { + + when(mongoOperations.find(any(), any(), any())).thenReturn(flux); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy()), Sort.by("nothing")).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToFindOneForExampleIfPresent() { + + when(entityInformation.getCollectionName()).thenReturn("testdummy"); + doReturn(flux).when(mongoOperations).find(any(Query.class), eq(TestDummy.class), eq("testdummy")); + when(flux.buffer(anyInt())).thenReturn(flux); + when(flux.map(any())).thenReturn(flux); + when(flux.next()).thenReturn(mono); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findOne(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @ParameterizedTest // GH-2971 + @MethodSource("findAllCalls") + void shouldAddReadPreferenceToFindAllMethods( + Function, Flux> findCall) { + + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata(new CrudMethodMetadata() { + @Override + public Optional getReadPreference() { + return Optional.of(com.mongodb.ReadPreference.secondaryPreferred()); + } + }); + when(mongoOperations.find(any(), any(), any())).thenReturn(Flux.just("ok")); + + findCall.apply(repository).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFindOne() { + + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata(new CrudMethodMetadata() { + @Override + public Optional getReadPreference() { + return Optional.of(com.mongodb.ReadPreference.secondaryPreferred()); + } + }); + when(mongoOperations.find(any(), any(), any())).thenReturn(Flux.just("ok")); + + repository.findOne(Example.of(new SimpleMongoRepositoryUnitTests.TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFluentFetchable() { + + ReactiveFind finder = mock(ReactiveFind.class); + when(mongoOperations.query(any())).thenReturn(finder); + when(finder.inCollection(any())).thenReturn(finder); + when(finder.matching(any(Query.class))).thenReturn(finder); + when(finder.as(any())).thenReturn(finder); + when(finder.all()).thenReturn(Flux.just("ok")); + + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + 
repository.setRepositoryMethodMetadata(new CrudMethodMetadata() { + @Override + public Optional getReadPreference() { + return Optional.of(com.mongodb.ReadPreference.secondaryPreferred()); + } + }); + + repository.findBy(Example.of(new TestDummy()), FluentQuery.ReactiveFluentQuery::all).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(finder).matching(query.capture()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + private static Stream findAllCalls() { + + Function, Flux> findAll = SimpleReactiveMongoRepository::findAll; + Function, Flux> findAllWithSort = repo -> repo + .findAll(Sort.by("age")); + Function, Flux> findAllWithExample = repo -> repo + .findAll(Example.of(new TestDummy())); + Function, Flux> findAllWithExampleAndSort = repo -> repo + .findAll(Example.of(new TestDummy()), Sort.by("age")); + + return Stream.of(Arguments.of(findAll), // + Arguments.of(findAllWithSort), // + Arguments.of(findAllWithExample), // + Arguments.of(findAllWithExampleAndSort)); + } + + private static class TestDummy { + + } + + @ReadPreference("secondaryPreferred") + interface TestRepositoryWithReadPreference { + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryVersionedEntityTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryVersionedEntityTests.java new file mode 100644 index 0000000000..10f5f334a8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryVersionedEntityTests.java @@ -0,0 +1,120 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.test.StepVerifier; + +import java.util.Collections; +import java.util.Set; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.VersionedPerson; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class SimpleReactiveMongoRepositoryVersionedEntityTests { + + @Configuration + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return MongoTestUtils.reactiveClient(); + } + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + protected Set> getInitialEntitySet() { + return Collections.singleton(VersionedPerson.class); + } + } + + @Autowired // + private ReactiveMongoTemplate template; + + private MongoEntityInformation personEntityInformation; + private SimpleReactiveMongoRepository repository; + + private VersionedPerson sarah; + + @Before + public void setUp() { + + MongoPersistentEntity entity = template.getConverter().getMappingContext() + .getRequiredPersistentEntity(VersionedPerson.class); + + personEntityInformation = new MappingMongoEntityInformation(entity); + repository = new SimpleReactiveMongoRepository<>(personEntityInformation, template); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + sarah = repository.save(new VersionedPerson("Sarah", "Connor")).block(); + } + + @Test // DATAMONGO-2195 + public void deleteWithMatchingVersion() { + + repository.delete(sarah).as(StepVerifier::create).verifyComplete(); + + template.count(query(where("id").is(sarah.getId())), VersionedPerson.class) // + .as(StepVerifier::create) // + .expectNext(0L).verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteWithVersionMismatch() { + + sarah.setVersion(5L); + + repository.delete(sarah).as(StepVerifier::create).verifyError(OptimisticLockingFailureException.class); + + template.count(query(where("id").is(sarah.getId())), VersionedPerson.class) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteNonExisting() { + + repository.delete(new VersionedPerson("T-800")).as(StepVerifier::create) + .verifyError(OptimisticLockingFailureException.class); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java index 00fb05b0b7..56e17b7590 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2014 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,41 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.DBObjectTestUtils.*; +import static com.querydsl.core.types.ExpressionUtils.path; +import static com.querydsl.core.types.ExpressionUtils.predicate; +import static com.querydsl.core.types.dsl.Expressions.*; +import static org.assertj.core.api.Assertions.*; +import java.util.Arrays; +import java.util.Collections; + +import org.bson.Document; import org.bson.types.ObjectId; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.WritingConverter; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.QAddress; import org.springframework.data.mongodb.repository.QPerson; +import org.springframework.data.mongodb.repository.User; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import com.querydsl.core.types.Ops; +import com.querydsl.core.types.Predicate; +import com.querydsl.core.types.PredicateOperation; +import com.querydsl.core.types.dsl.BooleanExpression; import com.querydsl.core.types.dsl.BooleanOperation; import com.querydsl.core.types.dsl.PathBuilder; import com.querydsl.core.types.dsl.SimplePath; @@ -44,19 +57,22 @@ /** * Unit tests for {@link SpringDataMongodbSerializer}. 
- * + * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch + * @author Mikhail Kaduchka + * @author Enrique Leon Molina */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class SpringDataMongodbSerializerUnitTests { @Mock DbRefResolver dbFactory; - MongoConverter converter; - SpringDataMongodbSerializer serializer; + private MongoConverter converter; + private SpringDataMongodbSerializer serializer; - @Before - public void setUp() { + @BeforeEach + void setUp() { MongoMappingContext context = new MongoMappingContext(); @@ -65,111 +81,207 @@ public void setUp() { } @Test - public void uses_idAsKeyForIdProperty() { + void uses_idAsKeyForIdProperty() { StringPath path = QPerson.person.id; - assertThat(serializer.getKeyForPath(path, path.getMetadata()), is("_id")); + assertThat(serializer.getKeyForPath(path, path.getMetadata())).isEqualTo("_id"); } @Test - public void buildsNestedKeyCorrectly() { + void buildsNestedKeyCorrectly() { StringPath path = QPerson.person.address.street; - assertThat(serializer.getKeyForPath(path, path.getMetadata()), is("street")); + assertThat(serializer.getKeyForPath(path, path.getMetadata())).isEqualTo("street"); } @Test - public void convertsComplexObjectOnSerializing() { + void convertsComplexObjectOnSerializing() { Address address = new Address(); address.street = "Foo"; address.zipCode = "01234"; - DBObject result = serializer.asDBObject("foo", address); - assertThat(result, is(instanceOf(BasicDBObject.class))); - BasicDBObject dbObject = (BasicDBObject) result; + Document document = serializer.asDocument("foo", address); - Object value = dbObject.get("foo"); - assertThat(value, is(notNullValue())); - assertThat(value, is(instanceOf(BasicDBObject.class))); + Object value = document.get("foo"); + assertThat(value).isNotNull().isInstanceOf(Document.class); Object reference = converter.convertToMongoType(address); - assertThat(value, is(reference)); + assertThat(value).isEqualTo(reference); } - /** - * @see DATAMONGO-376 - */ - @Test - public void returnsEmptyStringIfNoPathExpressionIsGiven() { + @Test // DATAMONGO-376 + void returnsEmptyStringIfNoPathExpressionIsGiven() { QAddress address = QPerson.person.shippingAddresses.any(); - assertThat(serializer.getKeyForPath(address, address.getMetadata()), is("")); + assertThat(serializer.getKeyForPath(address, address.getMetadata())).isEmpty(); } - /** - * @see DATAMONGO-467 - */ - @Test - public void convertsIdPropertyCorrectly() { + @Test // DATAMONGO-467, DATAMONGO-1798 + void appliesImplicitIdConversion() { ObjectId id = new ObjectId(); PathBuilder
<Address> builder = new PathBuilder<Address>
                    (Address.class, "address"); StringPath idPath = builder.getString("id"); - DBObject result = (DBObject) serializer.visit((BooleanOperation) idPath.eq(id.toString()), (Void) null); - assertThat(result.get("_id"), is(notNullValue())); - assertThat(result.get("_id"), is(instanceOf(ObjectId.class))); - assertThat(result.get("_id"), is((Object) id)); + Document result = (Document) serializer.visit((BooleanOperation) idPath.eq(id.toString()), null); + assertThat(result.get("_id")).isNotNull().isInstanceOf(ObjectId.class); } - /** - * @see DATAMONGO-761 - */ - @Test - public void looksUpKeyForNonPropertyPath() { + @Test // DATAMONGO-761 + void looksUpKeyForNonPropertyPath() { PathBuilder
<Address> builder = new PathBuilder<Address>
                    (Address.class, "address"); SimplePath firstElementPath = builder.getArray("foo", String[].class).get(0); String path = serializer.getKeyForPath(firstElementPath, firstElementPath.getMetadata()); - assertThat(path, is("0")); + assertThat(path).isEqualTo("0"); } - /** - * @see DATAMONGO-969 - */ - @Test - public void shouldConvertObjectIdEvenWhenNestedInOperatorDbObject() { + @Test // DATAMONGO-1485 + void takesCustomConversionForEnumsIntoAccount() { - ObjectId value = new ObjectId("53bb9fd14438765b29c2d56e"); - DBObject serialized = serializer.asDBObject("_id", new BasicDBObject("$ne", value.toString())); + MongoMappingContext context = new MongoMappingContext(); + + MappingMongoConverter converter = new MappingMongoConverter(dbFactory, context); + converter.setCustomConversions(new MongoCustomConversions(Collections.singletonList(new SexTypeWriteConverter()))); + converter.afterPropertiesSet(); + + this.converter = converter; + this.serializer = new SpringDataMongodbSerializer(this.converter); + + Object mappedPredicate = serializer.handle(QPerson.person.sex.eq(Sex.FEMALE)); - DBObject _id = getAsDBObject(serialized, "_id"); - ObjectId $ne = getTypedValue(_id, "$ne", ObjectId.class); - assertThat($ne, is(value)); + assertThat(mappedPredicate).isInstanceOf(Document.class); + assertThat(((Document) mappedPredicate).get("sex")).isEqualTo("f"); } - /** - * @see DATAMONGO-969 - */ - @Test - public void shouldConvertCollectionOfObjectIdEvenWhenNestedInOperatorDbObject() { + @Test // DATAMONGO-1848, DATAMONGO-1943 + void shouldRemarshallListsAndDocuments() { + + BooleanExpression criteria = QPerson.person.lastname.isNotEmpty() + .and(QPerson.person.firstname.containsIgnoreCase("foo")).not(); + + assertThat(serializer.handle(criteria)).isEqualTo(Document.parse("{ \"$or\" : [ { \"lastname\" : { \"$not\" : { " + + "\"$ne\" : \"\"}}} , { \"firstname\" : { \"$not\" : { \"$regex\" : \".*\\\\Qfoo\\\\E.*\" , \"$options\" : \"i\"}}}]}")); + } + + @Test // DATAMONGO-2228 + void retainsOpsInAndExpression() { + + PredicateOperation testExpression = predicate(Ops.AND, + predicate(Ops.OR, predicate(Ops.EQ, path(Object.class, "firstname"), constant("John")), + predicate(Ops.EQ, path(Object.class, "firstname"), constant("Sarah"))), + predicate(Ops.OR, predicate(Ops.EQ, path(Object.class, "lastname"), constant("Smith")), + predicate(Ops.EQ, path(Object.class, "lastname"), constant("Connor")))); + + assertThat(serializer.handle(testExpression)).isEqualTo(Document.parse( + "{\"$and\": [{\"$or\": [{\"firstname\": \"John\"}, {\"firstname\": \"Sarah\"}]}, {\"$or\": [{\"lastname\": \"Smith\"}, {\"lastname\": \"Connor\"}]}]}")); + } + + @Test // DATAMONGO-2475 + void chainedOrsInSameDocument() { + + Predicate predicate = QPerson.person.firstname.eq("firstname_value") + .or(QPerson.person.lastname.eq("lastname_value")).or(QPerson.person.age.goe(30)).or(QPerson.person.age.loe(20)) + .or(QPerson.person.uniqueId.isNull()); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse( + "{\"$or\": [{\"firstname\": \"firstname_value\"}, {\"lastname\": \"lastname_value\"}, {\"age\": {\"$gte\": 30}}, {\"age\": {\"$lte\": 20}}, {\"uniqueId\": {\"$exists\": false}}]}")); + } + + @Test // DATAMONGO-2475 + void chainedNestedOrsInSameDocument() { + + Predicate predicate = QPerson.person.firstname.eq("firstname_value") + .or(QPerson.person.lastname.eq("lastname_value")).or(QPerson.person.address.street.eq("spring")); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse( + "{\"$or\": 
[{\"firstname\": \"firstname_value\"}, {\"lastname\": \"lastname_value\"}, {\"add.street\": \"spring\"}]}")); + } + + @Test // DATAMONGO-2475 + void chainedAndsInSameDocument() { - ObjectId firstId = new ObjectId("53bb9fd14438765b29c2d56e"); - ObjectId secondId = new ObjectId("53bb9fda4438765b29c2d56f"); + Predicate predicate = QPerson.person.firstname.eq("firstname_value") + .and(QPerson.person.lastname.eq("lastname_value")).and(QPerson.person.age.goe(30)) + .and(QPerson.person.age.loe(20)).and(QPerson.person.uniqueId.isNull()); - BasicDBList objectIds = new BasicDBList(); - objectIds.add(firstId.toString()); - objectIds.add(secondId.toString()); + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse( + "{\"$and\": [{\"firstname\": \"firstname_value\", \"lastname\": \"lastname_value\", \"age\": {\"$gte\": 30}, \"uniqueId\": {\"$exists\": false}}, {\"age\": {\"$lte\": 20}}]}")); + } + + @Test // DATAMONGO-2475 + void chainMultipleAndFlattensCorrectly() { + + Document p1doc = Document.parse("{ \"$or\" : [ { \"firstname\" : \"fn\"}, { \"lastname\" : \"ln\" } ] }"); + Document p2doc = Document + .parse("{ \"$or\" : [ { \"age\" : { \"$gte\" : 20 } }, { \"age\" : { \"$lte\" : 30} } ] }"); + Document p3doc = Document.parse("{ \"$or\" : [ { \"add.city\" : \"c\"}, { \"add.zipCode\" : \"0\" } ] }"); + Document expected = new Document("$and", Arrays.asList(p1doc, p2doc, p3doc)); + + Predicate predicate1 = QPerson.person.firstname.eq("fn").or(QPerson.person.lastname.eq("ln")); + Predicate predicate2 = QPerson.person.age.goe(20).or(QPerson.person.age.loe(30)); + Predicate predicate3 = QPerson.person.address.city.eq("c").or(QPerson.person.address.zipCode.eq("0")); + PredicateOperation testExpression = predicate(Ops.AND, predicate1, predicate2, predicate3); + + assertThat(serializer.handle(testExpression)).isEqualTo(expected); + } + + @Test // GH-4037 + void parsesDocumentReference() { + + User user = new User(); + user.setId("007"); + Predicate predicate = QPerson.person.spiritAnimal.eq(user); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse("{ 'spiritAnimal' : '007' }")); + } + + @Test // GH-4037 + void parsesDocumentReferenceOnId() { + + User user = new User(); + user.setId("007"); + Predicate predicate = QPerson.person.spiritAnimal.id.eq("007"); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse("{ 'spiritAnimal' : '007' }")); + } - DBObject serialized = serializer.asDBObject("_id", new BasicDBObject("$in", objectIds)); + @Test // GH-4709 + void appliesConversionToIdType() { - DBObject _id = getAsDBObject(serialized, "_id"); - Object[] $in = getTypedValue(_id, "$in", Object[].class); + Predicate predicate = QSpringDataMongodbSerializerUnitTests_Outer.outer.embeddedObject.id + .eq("64268a7b17ac6a00018bf312"); - assertThat($in, Matchers. 
<Object> arrayContaining(firstId, secondId)); + assertThat(serializer.handle(predicate)) + .isEqualTo(new Document("embedded_object._id", new ObjectId("64268a7b17ac6a00018bf312"))); + } + + @Test // GH-4709 + void appliesConversionToIdTypeForExplicitTypeRef() { + + Predicate predicate = QQuerydslRepositorySupportTests_WithMongoId.withMongoId.id.eq("64268a7b17ac6a00018bf312"); + + assertThat(serializer.handle(predicate)).isEqualTo(new Document("_id", "64268a7b17ac6a00018bf312")); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "record") + class Outer { + + @Id private String id; + + @Field("embedded_object") private Inner embeddedObject; + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "embedded_object") + class Inner { + @Id private String id; + } + + public class WithMongoId { + @MongoId private String id; } class Address { @@ -178,4 +290,25 @@ class Address { @Field("zip_code") String zipCode; @Field("bar") String[] foo; } + + @WritingConverter + public class SexTypeWriteConverter implements Converter<Sex, String> { + + @Override + public String convert(Sex source) { + + if (source == null) { + return null; + } + + switch (source) { + case MALE: + return "m"; + case FEMALE: + return "f"; + default: + throw new IllegalArgumentException("o_O"); + } + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/util/SliceUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/util/SliceUtilsUnitTests.java new file mode 100644 index 0000000000..8dc952e8a7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/util/SliceUtilsUnitTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.util; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verifyNoInteractions; + +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit test for {@link SliceUtils}.
+ * + * @author Christoph Strobl + */ +class SliceUtilsUnitTests { + + @ParameterizedTest // GH-4889 + @MethodSource("paged") + void pagedPageableModifiesQuery(Pageable page) { + + Query source = new BasicQuery(Document.parse("{ 'spring' : 'data' }")); + + Query target = SliceUtils.limitResult(source, page); + + assertThat(target.getQueryObject()).isEqualTo(source.getQueryObject()); + assertThat(target).isNotSameAs(source); + assertThat(target.isLimited()).isTrue(); + assertThat(target.getSkip()).isEqualTo(page.getOffset()); + assertThat(target.getLimit()).isEqualTo(page.toLimit().max() + 1); + assertThat(target.getSortObject()).isEqualTo(source.getSortObject()); + } + + @ParameterizedTest // GH-4889 + @MethodSource("unpaged") + void unpagedPageableDoesNotModifyQuery(Pageable page) { + + Query source = spy(new BasicQuery(Document.parse("{ 'spring' : 'data' }"))); + + Query target = SliceUtils.limitResult(source, page); + + verifyNoInteractions(source); + + assertThat(target).isSameAs(source); + assertThat(target.isLimited()).isFalse(); + } + + public static Stream<Arguments> paged() { + return Stream.of(Arguments.of(Pageable.ofSize(1)), Arguments.of(PageRequest.of(0, 10)), + Arguments.of(PageRequest.of(0, 10, Direction.ASC, "name"))); + } + + public static Stream<Arguments> unpaged() { + return Stream.of(Arguments.of(Pageable.unpaged()), Arguments.of(Pageable.unpaged(Sort.by("name")))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AfterTransactionAssertion.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AfterTransactionAssertion.java new file mode 100644 index 0000000000..b0a9b5608d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AfterTransactionAssertion.java @@ -0,0 +1,60 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.test.util; + +import org.springframework.data.domain.Persistable; + +/** + * @author Christoph Strobl + * @currentRead Shadow's Edge - Brent Weeks + */ +public class AfterTransactionAssertion<T extends Persistable<?>> { + + private final T persistable; + private boolean expectToBePresent; + + public AfterTransactionAssertion(T persistable) { + this.persistable = persistable; + } + + public void isPresent() { + expectToBePresent = true; + } + + public void isNotPresent() { + expectToBePresent = false; + } + + public Object getId() { + return persistable.getId(); + } + + public boolean shouldBePresent() { + return expectToBePresent; + } + + public T getPersistable() { + return this.persistable; + } + + public boolean isExpectToBePresent() { + return this.expectToBePresent; + } + + public void setExpectToBePresent(boolean expectToBePresent) { + this.expectToBePresent = expectToBePresent; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java new file mode 100644 index 0000000000..9e6eef1e6e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java @@ -0,0 +1,80 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.assertj.core.error.BasicErrorMessageFactory; +import org.assertj.core.error.ErrorMessageFactory; +import org.assertj.core.internal.StandardComparisonStrategy; + +/** + * Utility class providing factory methods for {@link ErrorMessageFactory}. + * + * @author Mark Paluch + */ +class AssertErrors { + + /** + * Creates a new {@link ShouldHaveProperty}. + * + * @param actual the actual value in the failed assertion. + * @param key the key used in the failed assertion to compare the actual property key to. + * @param value the value used in the failed assertion to compare the actual property value to. + * @return the created {@link ErrorMessageFactory}. + */ + public static ErrorMessageFactory shouldHaveProperty(Object actual, String key, Object value) { + return new ShouldHaveProperty(actual, key, value); + } + + /** + * Creates a new {@link ShouldNotHaveProperty}. + * + * @param actual the actual value in the failed assertion. + * @param key the key used in the failed assertion to compare the actual property key to. + * @param value the value used in the failed assertion to compare the actual property value to. + * @return the created {@link ErrorMessageFactory}.
+ */ + public static ErrorMessageFactory shouldNotHaveProperty(Object actual, String key, Object value) { + return new ShouldNotHaveProperty(actual, key, value); + } + + private static class ShouldHaveProperty extends BasicErrorMessageFactory { + + private ShouldHaveProperty(Object actual, String key, Object value) { + super("\n" + // + "Expecting:\n" + // + " <%s>\n" + // + "to have property with key:\n" + // + " <%s>\n" + // + "and value:\n" + // + " <%s>\n" + // + "%s", actual, key, value, StandardComparisonStrategy.instance()); + } + } + + private static class ShouldNotHaveProperty extends BasicErrorMessageFactory { + + private ShouldNotHaveProperty(Object actual, String key, Object value) { + super("\n" + // + "Expecting:\n" + // + " <%s>\n" + // + "not to have property with key:\n" + // + " <%s>\n" + // + "and value:\n" + // + " <%s>\n" + // + "but actually found such property %s", actual, key, value, StandardComparisonStrategy.instance()); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java new file mode 100644 index 0000000000..7449a66020 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java @@ -0,0 +1,41 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.bson.Document; + +/** + * The entry point for all MongoDB assertions. This class extends {@link org.assertj.core.api.Assertions} for + * convenience to statically import a single class. + * + * @author Mark Paluch + */ +public abstract class Assertions extends org.assertj.core.api.Assertions { + + private Assertions() { + // no instances allowed. + } + + /** + * Create assertion for {@link Document}. + * + * @param document the actual value. + * @return the created assertion object. + */ + public static DocumentAssert assertThat(Document document) { + return new DocumentAssert(document); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AtlasContainer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AtlasContainer.java new file mode 100644 index 0000000000..c3a97a03bc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AtlasContainer.java @@ -0,0 +1,58 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.springframework.core.env.StandardEnvironment; + +import org.testcontainers.mongodb.MongoDBAtlasLocalContainer; +import org.testcontainers.utility.DockerImageName; + +/** + * Extension to MongoDBAtlasLocalContainer. + * + * @author Christoph Strobl + */ +public class AtlasContainer extends MongoDBAtlasLocalContainer { + + private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("mongodb/mongodb-atlas-local"); + private static final String DEFAULT_TAG = "8.0.0"; + private static final String LATEST = "latest"; + + private AtlasContainer(String dockerImageName) { + super(DockerImageName.parse(dockerImageName)); + } + + private AtlasContainer(DockerImageName dockerImageName) { + super(dockerImageName); + } + + public static AtlasContainer bestMatch() { + return tagged(new StandardEnvironment().getProperty("mongodb.atlas.version", DEFAULT_TAG)); + } + + public static AtlasContainer latest() { + return tagged(LATEST); + } + + public static AtlasContainer version8() { + return tagged(DEFAULT_TAG); + } + + public static AtlasContainer tagged(String tag) { + return new AtlasContainer(DEFAULT_IMAGE_NAME.withTag(tag)); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java index f34e6650db..b3c2361eea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusions.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusions.java new file mode 100644 index 0000000000..94971e8f59 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusions.java @@ -0,0 +1,45 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * Annotation used to exclude entries from the classpath. + * Simplified version of ClassPathExclusions. + * + * @author Christoph Strobl + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@ExtendWith(ClassPathExclusionsExtension.class) +public @interface ClassPathExclusions { + + /** + * One or more packages that should be excluded from the classpath. + * + * @return the excluded packages + */ + String[] packages(); + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusionsExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusionsExtension.java new file mode 100644 index 0000000000..db2b4730b2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusionsExtension.java @@ -0,0 +1,129 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.reflect.Method; + +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.InvocationInterceptor; +import org.junit.jupiter.api.extension.ReflectiveInvocationContext; +import org.junit.platform.engine.discovery.DiscoverySelectors; +import org.junit.platform.launcher.Launcher; +import org.junit.platform.launcher.LauncherDiscoveryRequest; +import org.junit.platform.launcher.TestPlan; +import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder; +import org.junit.platform.launcher.core.LauncherFactory; +import org.junit.platform.launcher.listeners.SummaryGeneratingListener; +import org.junit.platform.launcher.listeners.TestExecutionSummary; +import org.springframework.util.CollectionUtils; + +/** + * Simplified version of ModifiedClassPathExtension. 
+ * + * @author Christoph Strobl + */ +class ClassPathExclusionsExtension implements InvocationInterceptor { + + @Override + public void interceptBeforeAllMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptBeforeEachMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptAfterEachMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptAfterAllMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptTestMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) throws Throwable { + interceptMethod(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptTestTemplateMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { + interceptMethod(invocation, invocationContext, extensionContext); + } + + private void interceptMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) throws Throwable { + + if (isModifiedClassPathClassLoader(extensionContext)) { + invocation.proceed(); + return; + } + + Class<?> testClass = extensionContext.getRequiredTestClass(); + Method testMethod = invocationContext.getExecutable(); + PackageExcludingClassLoader modifiedClassLoader = PackageExcludingClassLoader.get(testClass, testMethod); + if (modifiedClassLoader == null) { + invocation.proceed(); + return; + } + invocation.skip(); + ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader(); + Thread.currentThread().setContextClassLoader(modifiedClassLoader); + try { + runTest(extensionContext.getUniqueId()); + } finally { + Thread.currentThread().setContextClassLoader(originalClassLoader); + } + } + + private void runTest(String testId) throws Throwable { + + LauncherDiscoveryRequest request = LauncherDiscoveryRequestBuilder.request() + .selectors(DiscoverySelectors.selectUniqueId(testId)).build(); + Launcher launcher = LauncherFactory.create(); + TestPlan testPlan = launcher.discover(request); + SummaryGeneratingListener listener = new SummaryGeneratingListener(); + launcher.registerTestExecutionListeners(listener); + launcher.execute(testPlan); + TestExecutionSummary summary = listener.getSummary(); + if (!CollectionUtils.isEmpty(summary.getFailures())) { + throw summary.getFailures().get(0).getException(); + } + } + + private void intercept(Invocation<Void> invocation, ExtensionContext extensionContext) throws Throwable { + if (isModifiedClassPathClassLoader(extensionContext)) { + invocation.proceed(); + return; + } + invocation.skip(); + } + + private boolean isModifiedClassPathClassLoader(ExtensionContext extensionContext) { + Class<?> testClass = extensionContext.getRequiredTestClass(); + ClassLoader classLoader = testClass.getClassLoader(); + return classLoader.getClass().getName().equals(PackageExcludingClassLoader.class.getName()); + } +}
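For orientation, a sketch of how a test could wire the annotation and extension above together; the test class, excluded package, and assertion are hypothetical and only illustrate the intended use, they are not part of the diff:

// Hypothetical usage sketch for @ClassPathExclusions (not part of this change):
// the extension intercepts the test method, builds a PackageExcludingClassLoader
// without the named packages, and re-runs the test on that loader, so the test
// can assert behavior in the absence of an optional dependency.
class OptionalDependencyAbsentTests {

	@org.junit.jupiter.api.Test
	@ClassPathExclusions(packages = { "com.example.optional" }) // hypothetical package
	void failsGracefullyWithoutOptionalDependency() {
		org.assertj.core.api.Assertions.assertThatExceptionOfType(ClassNotFoundException.class)
				.isThrownBy(() -> Class.forName("com.example.optional.SomeType"));
	}
}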
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java index 880b6a2173..8f28f0fdf0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,19 +20,22 @@ import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Set; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; /** * {@link CleanMongoDB} is a junit {@link TestRule} implementation to be used as for wiping data from MongoDB instance. @@ -40,13 +43,13 @@ * after the base {@link Statement}.
                    * Use as {@link org.junit.ClassRule} to wipe data after finishing all tests within a class or as {@link org.junit.Rule} * to do so after each {@link org.junit.Test}. - * + * * @author Christoph Strobl * @since 1.6 */ public class CleanMongoDB implements TestRule { - private static final Logger LOGGER = LoggerFactory.getLogger(CleanMongoDB.class); + private static final Log LOGGER = LogFactory.getLog(CleanMongoDB.class); /** * Defines contents of MongoDB. @@ -55,7 +58,7 @@ public enum Struct { DATABASE, COLLECTION, INDEX; } - @SuppressWarnings("serial")// + @SuppressWarnings("serial") // private Set preserveDatabases = new HashSet() { { add("admin"); @@ -77,18 +80,18 @@ public CleanMongoDB() { /** * Create new instance using an internal {@link MongoClient} connecting to specified instance running at host:port. - * + * * @param host * @param port * @throws UnknownHostException */ public CleanMongoDB(String host, int port) throws UnknownHostException { - this(new MongoClient(host, port)); + this(MongoTestUtils.client(host, port)); } /** * Create new instance using the given client. - * + * * @param client */ public CleanMongoDB(MongoClient client) { @@ -96,8 +99,8 @@ public CleanMongoDB(MongoClient client) { } /** - * Removes everything by dropping every single {@link DB}. - * + * Removes everything by dropping every single {@link MongoDatabase}. + * * @return */ public static CleanMongoDB everything() { @@ -108,8 +111,8 @@ public static CleanMongoDB everything() { } /** - * Removes everything from the databases with given name by dropping the according {@link DB}. - * + * Removes everything from the databases with given name by dropping the according {@link MongoDatabase}. + * * @param dbNames * @return */ @@ -122,8 +125,8 @@ public static CleanMongoDB databases(String... dbNames) { } /** - * Drops the {@link DBCollection} with given names from every single {@link DB} containing them. - * + * Drops the {@link MongoCollection} with given names from every single {@link MongoDatabase} containing them. + * * @param collectionNames * @return */ @@ -132,8 +135,8 @@ public static CleanMongoDB collections(String... collectionNames) { } /** - * Drops the {@link DBCollection} with given names from the named {@link DB}. - * + * Drops the {@link MongoCollection} with given names from the named {@link MongoDatabase}. + * * @param dbName * @param collectionNames * @return @@ -147,8 +150,8 @@ public static CleanMongoDB collections(String dbName, Collection collect } /** - * Drops all index structures from every single {@link DBCollection}. - * + * Drops all index structures from every single {@link MongoCollection}. + * * @return */ public static CleanMongoDB indexes() { @@ -156,8 +159,8 @@ public static CleanMongoDB indexes() { } /** - * Drops all index structures from every single {@link DBCollection}. - * + * Drops all index structures from every single {@link MongoCollection}. + * * @param collectionNames * @return */ @@ -171,7 +174,7 @@ public static CleanMongoDB indexes(Collection collectionNames) { /** * Define {@link Struct} to be cleaned. - * + * * @param types * @return */ @@ -182,14 +185,14 @@ public CleanMongoDB clean(Struct... types) { } /** - * Defines the {@link DB}s to be used.
                    + * Defines the {@link MongoDatabase}s to be used.
* Impact along with {@link CleanMongoDB#clean(Struct...)}:
* <ul>
* <li>{@link Struct#DATABASE}: Forces drop of named databases.</li>
* <li>{@link Struct#COLLECTION}: Forces drop of collections within named databases.</li>
* <li>{@link Struct#INDEX}: Removes index within collections of named databases.</li>
* </ul>
                    - * + * * @param dbNames * @return */ @@ -200,8 +203,8 @@ public CleanMongoDB useDatabases(String... dbNames) { } /** - * Excludes the given {@link DB}s from being processed. - * + * Excludes the given {@link MongoDatabase}s from being processed. + * * @param dbNames * @return */ @@ -211,13 +214,13 @@ public CleanMongoDB preserveDatabases(String... dbNames) { } /** - * Defines the {@link DBCollection}s to be used.
                    + * Defines the {@link MongoCollection}s to be used.
* Impact along with {@link CleanMongoDB#clean(Struct...)}:
* <ul>
* <li>{@link Struct#COLLECTION}: Forces drop of named collections.</li>
* <li>{@link Struct#INDEX}: Removes index within named collections.</li>
* </ul>
                    - * + * * @param collectionNames * @return */ @@ -230,13 +233,13 @@ private CleanMongoDB useCollections(Collection collectionNames) { } /** - * Defines the {@link DBCollection}s and {@link DB} to be used.
                    + * Defines the {@link MongoCollection}s and {@link MongoDatabase} to be used.
* Impact along with {@link CleanMongoDB#clean(Struct...)}:
* <ul>
* <li>{@link Struct#COLLECTION}: Forces drop of named collections in given db.</li>
* <li>{@link Struct#INDEX}: Removes index within named collections in given db.</li>
* </ul>
                    - * + * * @param collectionNames * @return */ @@ -256,10 +259,6 @@ Statement apply() { return apply(null, null); } - /* - * (non-Javadoc) - * @see org.junit.rules.TestRule#apply(org.junit.runners.model.Statement, org.junit.runner.Description) - */ public Statement apply(Statement base, Description description) { return new MongoCleanStatement(base); } @@ -274,7 +273,7 @@ private void doClean() { continue; } - DB db = client.getDB(dbName); + MongoDatabase db = client.getDatabase(dbName); dropCollectionsOrIndexIfRequried(db, initCollectionNames(db)); } } @@ -285,26 +284,35 @@ private boolean dropDbIfRequired(String dbName) { return false; } - client.dropDatabase(dbName); - LOGGER.debug("Dropping DB '{}'. ", dbName); + client.getDatabase(dbName).drop(); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropping DB '%s'; ", dbName)); + } return true; } - private void dropCollectionsOrIndexIfRequried(DB db, Collection collectionsToUse) { + private void dropCollectionsOrIndexIfRequried(MongoDatabase db, Collection collectionsToUse) { + + Collection availableCollections = db.listCollectionNames().into(new LinkedHashSet<>()); for (String collectionName : collectionsToUse) { - if (db.collectionExists(collectionName)) { + if (availableCollections.contains(collectionName)) { - DBCollection collection = db.getCollectionFromString(collectionName); + MongoCollection collection = db.getCollection(collectionName); if (collection != null) { if (types.contains(Struct.COLLECTION)) { collection.drop(); - LOGGER.debug("Dropping collection '{}' for DB '{}'. ", collectionName, db.getName()); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropping collection '%s' for DB '%s'; ", collectionName, db.getName())); + } } else if (types.contains(Struct.INDEX)) { collection.dropIndexes(); - LOGGER.debug("Dropping indexes in collection '{}' for DB '{}'. 
", collectionName, db.getName()); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Dropping indexes in collection '%s' for DB '%s'; ", collectionName, db.getName())); + } } } } @@ -319,16 +327,16 @@ private Collection initDbNames() { Collection dbNamesToUse = dbNames; if (dbNamesToUse.isEmpty()) { - dbNamesToUse = client.getDatabaseNames(); + dbNamesToUse = client.listDatabaseNames().into(new LinkedHashSet<>()); } return dbNamesToUse; } - private Collection initCollectionNames(DB db) { + private Collection initCollectionNames(MongoDatabase db) { Collection collectionsToUse = collectionNames; if (CollectionUtils.isEmpty(collectionsToUse)) { - collectionsToUse = db.getCollectionNames(); + collectionsToUse = db.listCollectionNames().into(new LinkedHashSet<>()); } return collectionsToUse; } @@ -354,14 +362,13 @@ public void evaluate() throws Throwable { boolean isInternal = false; if (client == null) { - client = new MongoClient(); + client = MongoTestUtils.client(); isInternal = true; } doClean(); if (isInternal) { - client.close(); client = null; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java index ce2beefc9e..ecb18d4e04 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,7 @@ /** * {@link RunListener} implementation to be used for wiping MongoDB index structures after all test runs have finished. - * + * * @author Christoph Strobl * @since 1.6 */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java index 209dd61c09..f2fd993ef8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,38 @@ */ package org.springframework.data.mongodb.test.util; -import static org.mockito.Matchers.*; import static org.mockito.Mockito.*; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; -import java.util.HashSet; -import org.junit.Before; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.junit.runner.Description; -import org.junit.runner.RunWith; import org.junit.runners.model.Statement; import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import org.springframework.data.mongodb.test.util.CleanMongoDB.Struct; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.MongoClient; +import com.mongodb.client.ListDatabasesIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoIterable; /** * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class CleanMongoDBTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class CleanMongoDBTests { private CleanMongoDB cleaner; @@ -51,97 +58,96 @@ public class CleanMongoDBTests { private @Mock MongoClient mongoClientMock; // Some Mock DBs - private @Mock DB db1mock, db2mock; - private @Mock DBCollection db1collection1mock, db1collection2mock, db2collection1mock; + private @Mock MongoDatabase db1mock, db2mock, admin; + private @Mock MongoCollection db1collection1mock, db1collection2mock, db2collection1mock; - @SuppressWarnings("serial") - @Before - public void setUp() { + @SuppressWarnings({ "serial", "unchecked" }) + @BeforeEach + void setUp() throws ClassNotFoundException { // DB setup - when(mongoClientMock.getDatabaseNames()).thenReturn(Arrays.asList("admin", "db1", "db2")); - when(mongoClientMock.getDB(eq("db1"))).thenReturn(db1mock); - when(mongoClientMock.getDB(eq("db2"))).thenReturn(db2mock); + + ListDatabasesIterable dbIterable = mock(ListDatabasesIterable.class); + when(dbIterable.into(any(Collection.class))).thenReturn(Arrays.asList("admin", "db1", "db2")); + when(mongoClientMock.listDatabaseNames()).thenReturn(dbIterable); + when(mongoClientMock.getDatabase(eq("db1"))).thenReturn(db1mock); + when(mongoClientMock.getDatabase(eq("db2"))).thenReturn(db2mock); // collections have to exist - when(db1mock.collectionExists(anyString())).thenReturn(true); - when(db2mock.collectionExists(anyString())).thenReturn(true); - - // init collection names per database - when(db1mock.getCollectionNames()).thenReturn(new HashSet() { - { - add("db1collection1"); - add("db1collection2"); - } - }); - when(db2mock.getCollectionNames()).thenReturn(Collections.singleton("db2collection1")); + MongoIterable collectionIterable = mock(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db1mock).collectionNameIterableType()); + 
when(collectionIterable.into(any(Collection.class))).thenReturn(Arrays.asList("db1collection1", "db1collection2")); + doReturn(collectionIterable).when(db1mock).listCollectionNames(); + + MongoIterable collectionIterable2 = mock(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db2mock).collectionNameIterableType()); + when(collectionIterable2.into(any(Collection.class))).thenReturn(Collections.singletonList("db2collection1")); + doReturn(collectionIterable2).when(db2mock).listCollectionNames(); // return collections according to names - when(db1mock.getCollectionFromString(eq("db1collection1"))).thenReturn(db1collection1mock); - when(db1mock.getCollectionFromString(eq("db1collection2"))).thenReturn(db1collection2mock); - when(db2mock.getCollectionFromString(eq("db2collection1"))).thenReturn(db2collection1mock); + when(db1mock.getCollection(eq("db1collection1"))).thenReturn(db1collection1mock); + when(db1mock.getCollection(eq("db1collection2"))).thenReturn(db1collection2mock); + when(db2mock.getCollection(eq("db2collection1"))).thenReturn(db2collection1mock); cleaner = new CleanMongoDB(mongoClientMock); } @Test - public void preservesSystemDBsCorrectlyWhenCleaningDatabase() throws Throwable { + void preservesSystemDBsCorrectlyWhenCleaningDatabase() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("admin")); + verify(admin, never()).drop(); } @Test - public void preservesNamedDBsCorrectlyWhenCleaningDatabase() throws Throwable { + void preservesNamedDBsCorrectlyWhenCleaningDatabase() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.preserveDatabases("db1"); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); + verify(db1mock, never()).drop(); } @Test - public void dropsAllDBsCorrectlyWhenCleaingDatabaseAndNotExplictDBNamePresent() throws Throwable { + void dropsAllDBsCorrectlyWhenCleaingDatabaseAndNotExplictDBNamePresent() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, times(1)).dropDatabase(eq("db1")); - verify(mongoClientMock, times(1)).dropDatabase(eq("db2")); + verify(db1mock).drop(); + verify(db2mock).drop(); } @Test - public void dropsSpecifiedDBsCorrectlyWhenExplicitNameSet() throws Throwable { + void dropsSpecifiedDBsCorrectlyWhenExplicitNameSet() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.useDatabases("db2"); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, times(1)).dropDatabase(eq("db2")); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); + verify(db2mock).drop(); + verify(db1mock, never()).drop(); } @Test - public void doesNotRemoveAnyDBwhenCleaningCollections() throws Throwable { + void doesNotRemoveAnyDBwhenCleaningCollections() throws Throwable { cleaner.clean(Struct.COLLECTION); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); - verify(mongoClientMock, never()).dropDatabase(eq("db2")); - verify(mongoClientMock, never()).dropDatabase(eq("admin")); + verify(db1mock, never()).drop(); + verify(db2mock, never()).drop(); + verify(admin, never()).drop(); } @Test - public void doesNotDropCollectionsFromPreservedDBs() throws Throwable { + void doesNotDropCollectionsFromPreservedDBs() throws Throwable { cleaner.clean(Struct.COLLECTION); 
cleaner.preserveDatabases("db1"); @@ -154,7 +160,7 @@ public void doesNotDropCollectionsFromPreservedDBs() throws Throwable { } @Test - public void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() throws Throwable { + void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() throws Throwable { cleaner.clean(Struct.COLLECTION); @@ -166,7 +172,7 @@ public void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() } @Test - public void removesOnlyNamedCollectionsWhenSpecified() throws Throwable { + void removesOnlyNamedCollectionsWhenSpecified() throws Throwable { cleaner.clean(Struct.COLLECTION); cleaner.useCollections("db1collection2"); @@ -179,15 +185,15 @@ public void removesOnlyNamedCollectionsWhenSpecified() throws Throwable { } @Test - public void removesIndexesCorrectly() throws Throwable { + void removesIndexesCorrectly() throws Throwable { cleaner.clean(Struct.INDEX); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); - verify(mongoClientMock, never()).dropDatabase(eq("db2")); - verify(mongoClientMock, never()).dropDatabase(eq("admin")); + verify(db1mock, never()).drop(); + verify(db2mock, never()).drop(); + verify(admin, never()).drop(); verify(db1collection1mock, times(1)).dropIndexes(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Client.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Client.java new file mode 100644 index 0000000000..064c6edf7b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Client.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * Marks a field or method as to be autowired by JUnit's dependency injection facilities for injection of a MongoDB + * client instance. Depends on {@link MongoClientExtension}. 
+ * + * @author Christoph Strobl + * @see com.mongodb.client.MongoClient + * @see com.mongodb.reactivestreams.client.MongoClient + * @see ReplSetClient + * @see MongoClientExtension + */ +@Target({ ElementType.FIELD, ElementType.PARAMETER }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@ExtendWith(MongoClientExtension.class) +public @interface Client { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CollectionInfo.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CollectionInfo.java new file mode 100644 index 0000000000..2ae41f734d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CollectionInfo.java @@ -0,0 +1,98 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.List; + +import org.bson.Document; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.Collation; + +/** + * Value Object providing methods for accessing collection/view information within a raw {@link Document}. + * + * @author Christoph Strobl + */ +public class CollectionInfo { + + private final Document source; + + public static CollectionInfo from(Document source) { + return new CollectionInfo(source); + } + + CollectionInfo(Document source) { + this.source = source; + } + + /** + * @return the collection/view name. + */ + public String getName() { + return source.getString("name"); + } + + /** + * @return {@literal true} if the {@literal type} equals {@literal view}. + */ + public boolean isView() { + return ObjectUtils.nullSafeEquals("view", source.get("type")); + } + + /** + * @return the {@literal options.viewOn} value. + * @throws IllegalStateException if not {@link #isView() a view}. + */ + public String getViewTarget() { + + if (isView()) { + return getOptionValue("viewOn", String.class); + } + throw new IllegalStateException(getName() + " is not a view"); + } + + /** + * @return the {@literal options.pipeline} value. + * @throws IllegalStateException if not {@link #isView() a view}. + */ + public List<Document> getViewPipeline() { + + if (isView()) { + return getOptions().getList("pipeline", Document.class); + } + + throw new IllegalStateException(getName() + " is not a view"); + } + + /** + * @return the {@literal options.collation} value. + * @throws IllegalStateException if not {@link #isView() a view}.
+ */ + public Collation getCollation() { + + return org.springframework.data.mongodb.core.query.Collation.from(getOptionValue("collation", Document.class)) + .toMongoCollation(); + } + + private Document getOptions() { + return source.get("options", Document.class); + } + + private <T> T getOptionValue(String key, Class<T> type) { + return getOptions().get(key, type); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DirtiesStateExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DirtiesStateExtension.java new file mode 100644 index 0000000000..2407208fe0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DirtiesStateExtension.java @@ -0,0 +1,112 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.reflect.Method; + +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +/** + * Extension to consider tests that {@code @DirtiesState} and {@code @ProvidesState} through annotations. + * + * @author Mark Paluch + */ +public class DirtiesStateExtension implements BeforeEachCallback, AfterEachCallback { + + /** + * Test method that changes the data state by saving or deleting objects. + */ + @Retention(RetentionPolicy.RUNTIME) + public @interface DirtiesState { + + } + + /** + * Test method that sets up its state within the test method itself. + */ + @Retention(RetentionPolicy.RUNTIME) + public @interface ProvidesState { + + } + + /** + * Interface to be implemented by tests that make use of {@link DirtiesStateExtension}. + */ + public interface StateFunctions { + + /** + * Clear the state. + */ + void clear(); + + /** + * Setup the test fixture.
+ */ + void setupState(); + } + + static final String STATE_KEY = "state"; + + @Override + public void beforeEach(ExtensionContext context) throws Exception { + + Method method = context.getTestMethod().orElse(null); + Object instance = context.getTestInstance().orElse(null); + + if (method == null || instance == null) { + return; + } + + if (method.isAnnotationPresent(ProvidesState.class)) { + ((StateFunctions) instance).clear(); + return; + } + + ExtensionContext.Store mongo = getStore(context); + Boolean state = mongo.get(STATE_KEY, Boolean.class); + + if (state == null) { + + ((StateFunctions) instance).clear(); + ((StateFunctions) instance).setupState(); + mongo.put(STATE_KEY, true); + } + } + + private ExtensionContext.Store getStore(ExtensionContext context) { + return context.getParent().get() + .getStore(ExtensionContext.Namespace.create("mongo-" + context.getRequiredTestClass().getName())); + } + + @Override + public void afterEach(ExtensionContext context) throws Exception { + + Method method = context.getTestMethod().orElse(null); + + if (method == null) { + return; + } + + if (method.isAnnotationPresent(DirtiesState.class) || method.isAnnotationPresent(ProvidesState.class)) { + ExtensionContext.Store mongo = getStore(context); + mongo.remove(STATE_KEY); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java new file mode 100644 index 0000000000..d4360b4d95 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java @@ -0,0 +1,359 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import static org.assertj.core.error.ElementsShouldBe.*; +import static org.assertj.core.error.ShouldContain.*; +import static org.assertj.core.error.ShouldContainKeys.*; +import static org.assertj.core.error.ShouldNotContain.*; +import static org.assertj.core.error.ShouldNotContainKeys.*; + +import java.util.Arrays; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.function.Consumer; + +import org.assertj.core.api.AbstractMapAssert; +import org.assertj.core.api.Condition; +import org.assertj.core.error.ShouldContainAnyOf; +import org.assertj.core.internal.Failures; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Assertions for Mongo's {@link Document}. Assertions based on keys/entries are translated to document paths allowing + * to assert nested elements. + * + *
                    + * 
+ * <pre class="code">
+ * Document document = Document.parse("{ $set: { concreteInnerList: [ { foo: 'bar', _class: … }] } }");
+ *
+ * assertThat(mappedUpdate).containsKey("$set.concreteInnerList.[0].foo").doesNotContainKey("$set.concreteInnerList.[0].bar");
+ * </pre>
                    + * + * @author Mark Paluch + */ +public class DocumentAssert extends AbstractMapAssert, String, Object> { + + private final Document actual; + + DocumentAssert(Document actual) { + + super(actual, DocumentAssert.class); + this.actual = actual; + } + + @Override + public DocumentAssert containsEntry(String key, Object value) { + + Assert.hasText(key, "The key to look for must not be empty"); + + Lookup lookup = lookup(key); + + if (!lookup.isPathFound() || !ObjectUtils.nullSafeEquals(value, lookup.getValue())) { + throw Failures.instance().failure(info, AssertErrors.shouldHaveProperty(actual, key, value)); + } + + return myself; + } + + /** + * Verifies that the actual value is equal to the given one by accepting the expected {@link Document} in its + * JSON/BSON representation. + *

                    + * Example: + * + *

+	 * <pre class="code">
+	 * // assertions will pass
+	 * assertThat(Document.parse("{foo: 1}")).isEqualTo("{foo: 1}");
+	 * </pre>
                    + * + * @param expectedBson the given value to compare the actual value to in BSON/JSON format. + * @return {@code this} assertion object. + * @throws AssertionError if the actual value is not equal to the given one. + * @see Document#parse(String) + */ + public DocumentAssert isEqualTo(String expectedBson) { + + isEqualTo(Document.parse(expectedBson)); + return myself; + } + + @Override + public DocumentAssert doesNotContainEntry(String key, Object value) { + + Assert.hasText(key, "The key to look for must not be empty"); + + Lookup lookup = lookup(key); + + if (lookup.isPathFound() && ObjectUtils.nullSafeEquals(value, lookup.getValue())) { + throw Failures.instance().failure(info, AssertErrors.shouldNotHaveProperty(actual, key, value)); + } + + return myself; + } + + @Override + public DocumentAssert containsKey(String key) { + return containsKeys(key); + } + + @Override + protected DocumentAssert containsKeysForProxy(String[] keys) { + + Set notFound = new LinkedHashSet<>(); + + for (String key : keys) { + + if (!lookup(key).isPathFound()) { + notFound.add(key); + } + } + + if (!notFound.isEmpty()) { + throw Failures.instance().failure(info, shouldContainKeys(actual, notFound)); + } + + return myself; + } + + @Override + public DocumentAssert doesNotContainKey(String key) { + return doesNotContainKeys(key); + } + + @Override + protected DocumentAssert doesNotContainKeysForProxy(String[] keys) { + + Set found = new LinkedHashSet<>(); + for (String key : keys) { + + if (lookup(key).isPathFound()) { + found.add(key); + } + } + if (!found.isEmpty()) { + throw Failures.instance().failure(info, shouldNotContainKeys(actual, found)); + } + + return myself; + } + + // override methods to annotate them with @SafeVarargs, we unfortunately can't do that in AbstractMapAssert as it is + // used in soft assertions which need to be able to proxy method - @SafeVarargs requiring method to be final prevents + // using proxies. + + @Override + protected DocumentAssert containsForProxy(Entry[] entries) { + + // if both actual and values are empty, then assertion passes. 
+ if (actual.isEmpty() && entries.length == 0) { + return myself; + } + Set> notFound = new LinkedHashSet<>(); + for (Map.Entry entry : entries) { + if (!containsEntry(entry)) { + notFound.add(entry); + } + } + if (!notFound.isEmpty()) { + throw Failures.instance().failure(info, shouldContain(actual, entries, notFound)); + } + + return myself; + } + + @Override + protected DocumentAssert containsAnyOfForProxy(Entry[] entries) { + for (Map.Entry entry : entries) { + if (containsEntry(entry)) { + return myself; + } + } + + throw Failures.instance().failure(info, ShouldContainAnyOf.shouldContainAnyOf(actual, entries)); + } + + @Override + protected DocumentAssert containsOnlyForProxy(Entry[] entries) { + throw new UnsupportedOperationException(); + } + + @Override + protected DocumentAssert doesNotContainForProxy(Entry[] entries) { + Set> found = new LinkedHashSet<>(); + + for (Map.Entry entry : entries) { + if (containsEntry(entry)) { + found.add(entry); + } + } + if (!found.isEmpty()) { + throw Failures.instance().failure(info, shouldNotContain(actual, entries, found)); + } + + return myself; + } + + @Override + protected DocumentAssert containsExactlyForProxy(Entry[] entries) { + throw new UnsupportedOperationException(); + } + + private boolean containsEntry(Entry entry) { + + Lookup lookup = lookup(entry.getKey()); + + return lookup.isPathFound() && ObjectUtils.nullSafeEquals(entry.getValue(), lookup.getValue()); + } + + private Lookup lookup(String path) { + return lookup(actual, path); + } + + @SuppressWarnings("unchecked") + private static Lookup lookup(Bson source, String path) { + + Document lookupDocument = (Document) source; + String pathToUse = path.replace("\\.", "."); + + if (lookupDocument.containsKey(pathToUse)) { + return Lookup.found((T) lookupDocument.get(pathToUse)); + } + + String[] fragments = path.split("(? 
it = Arrays.asList(fragments).iterator(); + + Object current = source; + while (it.hasNext()) { + + String key = it.next().replace("\\.", "."); + + if ((!(current instanceof Bson) && !(current instanceof Map)) && !key.startsWith("[")) { + return Lookup.notFound(); + } + + if (key.startsWith("[")) { + + String indexNumber = key.substring(1, key.indexOf("]")); + + if (current instanceof List) { + current = ((List) current).get(Integer.valueOf(indexNumber)); + } + + if (!it.hasNext()) { + return Lookup.found((T) current); + } + } else { + + if (current instanceof Document) { + + Document document = (Document) current; + + if (!it.hasNext() && !document.containsKey(key)) { + return Lookup.notFound(); + } + + current = document.get(key); + } + + else if (current instanceof Map) { + + Map document = (Map) current; + + if (!it.hasNext() && !document.containsKey(key)) { + return Lookup.notFound(); + } + + current = document.get(key); + } + + if (!it.hasNext()) { + return Lookup.found((T) current); + } + } + } + + return Lookup.notFound(); + } + + @Override + public DocumentAssert hasEntrySatisfying(String key, Condition valueCondition) { + + Lookup value = lookup(key); + + if (!value.isPathFound() || !valueCondition.matches(value.getValue())) { + throw Failures.instance().failure(info, elementsShouldBe(actual, value, valueCondition)); + } + + return myself; + } + + @Override + public DocumentAssert hasEntrySatisfying(String key, Consumer valueRequirements) { + + containsKey(key); + + valueRequirements.accept(lookup(key).getValue()); + + return myself; + } + + static class Lookup { + + private final T value; + private final boolean pathFound; + + private Lookup(T value, boolean pathFound) { + this.value = value; + this.pathFound = pathFound; + } + + /** + * Factory method to construct a lookup with a hit. + * + * @param value the actual value. + * @return the lookup object. + */ + static Lookup found(T value) { + return new Lookup<>(value, true); + } + + /** + * Factory method to construct a lookup that yielded no match. + * + * @return the lookup object. + */ + static Lookup notFound() { + return new Lookup<>(null, false); + } + + public T getValue() { + return this.value; + } + + public boolean isPathFound() { + return this.pathFound; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfMongoServerVersion.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfMongoServerVersion.java new file mode 100644 index 0000000000..fa56d2c2a1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfMongoServerVersion.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
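For orientation, a minimal usage sketch of the path-based assertions `DocumentAssert` provides. The static `assertThat(Document)` entry point is assumed to live in the same `test.util` package (the `DocumentAssert` constructor is package-private); all other names are illustrative.

[source,java]
----
import static org.springframework.data.mongodb.test.util.Assertions.assertThat; // assumed entry point

import org.bson.Document;

class DocumentAssertSketch {

	void nestedPathAssertions() {

		Document update = Document.parse("{ $set: { items: [ { name: 'spring' } ] } }");

		// keys resolve as dot paths; list elements use the [n] notation handled by lookup(...)
		assertThat(update).containsEntry("$set.items.[0].name", "spring") //
				.containsKey("$set.items.[0]") //
				.doesNotContainKey("$set.items.[0].missing");
	}
}
----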
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * {@code @EnableIfMongoServerVersion} is used to signal that the annotated test class or test method is only + * enabled if the value of the specified version boundaries {@link #isGreaterThanEqual()} and + * {@link #isLessThan()} match the connected MongoDB server version. + * + * @author Christoph Strobl + * @since 3.0 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@Tag("version-specific") +@ExtendWith(MongoServerCondition.class) +public @interface EnableIfMongoServerVersion { + + /** + * Inclusive lower bound of MongoDB server range. + * + * @return {@code 0.0.0} by default. + */ + String isGreaterThanEqual() default "0.0.0"; + + /** + * Exclusive upper bound of MongoDB server range. + * + * @return {@code 9999.9999.9999} by default. + */ + String isLessThan() default "9999.9999.9999"; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfReplicaSetAvailable.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfReplicaSetAvailable.java new file mode 100644 index 0000000000..bc22e53569 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfReplicaSetAvailable.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * {@link EnableIfReplicaSetAvailable} marks a specific test class or method to be only executed against a server + * running in replicaSet mode. Intended to be used along with {@link MongoServerCondition}. 
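A short sketch of how the version guard defined above is applied in practice; class and method names are illustrative, the annotation attributes are the ones declared in `EnableIfMongoServerVersion`.

[source,java]
----
import org.junit.jupiter.api.Test;
import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion;

class VersionGuardedTests {

	@Test
	@EnableIfMongoServerVersion(isGreaterThanEqual = "4.4.0", isLessThan = "5.0.0") // enabled for [4.4.0, 5.0.0)
	void exercisesVersionSpecificFeature() {
		// body relying on a server feature available in the 4.4.x line
	}
}
----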
+ * + * @author Christoph Strobl + * @since 3.0 + */ +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Tag("replSet") +@ExtendWith(MongoServerCondition.class) +public @interface EnableIfReplicaSetAvailable { + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Version.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfVectorSearchAvailable.java similarity index 52% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Version.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfVectorSearchAvailable.java index d96670675e..da008d9ee4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Version.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfVectorSearchAvailable.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,27 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.core.mapping; - -import static java.lang.annotation.ElementType.*; -import static java.lang.annotation.RetentionPolicy.*; +package org.springframework.data.mongodb.test.util; import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtendWith; + /** - * Demarcates a property to be used as version field to implement optimistic locking on entities. - * - * @since 1.4 - * @author Patryk Wasik - * @deprecated use {@link org.springframework.data.annotation.Version} instead. + * @author Christoph Strobl */ -@Deprecated +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Retention(RetentionPolicy.RUNTIME) @Documented -@Target({ FIELD }) -@Retention(RUNTIME) -@org.springframework.data.annotation.Version -public @interface Version { +@Tag("vector-search") +@ExtendWith(MongoServerCondition.class) +public @interface EnableIfVectorSearchAvailable { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeReactiveClientFromClassPath.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeReactiveClientFromClassPath.java new file mode 100644 index 0000000000..894f2ec882 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeReactiveClientFromClassPath.java @@ -0,0 +1,34 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Christoph Strobl + * @see ClassPathExclusions + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@ClassPathExclusions(packages = { "com.mongodb.reactivestreams.client" }) +public @interface ExcludeReactiveClientFromClassPath { + +} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/RelatedDocument.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeSyncClientFromClassPath.java similarity index 62% rename from spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/RelatedDocument.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeSyncClientFromClassPath.java index 33ff2a2bfa..eba7d228ef 100644 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/RelatedDocument.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeSyncClientFromClassPath.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2024-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. 
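`EnableIfReplicaSetAvailable` (defined above) is applied the same way; a minimal sketch with illustrative names:

[source,java]
----
import org.junit.jupiter.api.Test;
import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable;

class ReplicaSetGuardedTests {

	@Test
	@EnableIfReplicaSetAvailable // MongoServerCondition disables this unless the server runs with --replSet
	void requiresClientSessions() {
		// e.g. a transaction- or change-stream-related test body
	}
}
----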
*/ -package org.springframework.data.mongodb.crossstore; +package org.springframework.data.mongodb.test.util; +import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * @author Thomas Risberg + * @author Christoph Strobl + * @see ClassPathExclusions */ @Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.FIELD }) -public @interface RelatedDocument { +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@ClassPathExclusions(packages = { "com.mongodb.client" }) +public @interface ExcludeSyncClientFromClassPath { + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/IsBsonObject.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/IsBsonObject.java deleted file mode 100644 index d42cec2845..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/IsBsonObject.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.test.util; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; - -import org.bson.BSONObject; -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; -import org.hamcrest.core.IsEqual; -import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.util.ClassUtils; - -import com.mongodb.DBObject; - -/** - * @author Christoph Strobl - * @param - */ -public class IsBsonObject extends TypeSafeMatcher { - - private List expectations = new ArrayList();; - - public static IsBsonObject isBsonObject() { - return new IsBsonObject(); - } - - @Override - protected void describeMismatchSafely(T item, Description mismatchDescription) { - mismatchDescription.appendText("was ").appendValue(SerializationUtils.serializeToJsonSafely(item)); - } - - @Override - public void describeTo(Description description) { - - for (ExpectedBsonContent expectation : expectations) { - - if (expectation.not) { - description.appendText(String.format("Path %s should not be present.", expectation.path)); - } else if (expectation.value == null) { - description.appendText(String.format("Expected to find path %s.", expectation.path)); - } else { - description.appendText(String.format("Expected to find %s for path %s.", expectation.value, expectation.path)); - } - } - - } - - @Override - protected boolean matchesSafely(T item) { - - if (expectations.isEmpty()) { - return true; - } - - for (ExpectedBsonContent expectation : expectations) { - - Object o = getValue(item, expectation.path); - - if (o == null && expectation.not) { - return true; - } - - if (o == null) { - return false; - } - - if (expectation.type != null && !ClassUtils.isAssignable(expectation.type, 
o.getClass())) { - return false; - } - - if (expectation.value != null && !new IsEqual(expectation.value).matches(o)) { - return false; - } - - if (o != null && expectation.not) { - return false; - } - - } - return true; - } - - public IsBsonObject containing(String key) { - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject containing(String key, Class type) { - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - expected.type = type; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject containing(String key, Object value) { - - if (value == null) { - return notContaining(key); - } - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - expected.type = ClassUtils.getUserClass(value); - expected.value = value; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject notContaining(String key) { - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - expected.not = true; - - this.expectations.add(expected); - return this; - } - - static class ExpectedBsonContent { - String path; - Class type; - Object value; - boolean not = false; - } - - Object getValue(BSONObject source, String path) { - - String[] fragments = path.split("(? it = Arrays.asList(fragments).iterator(); - - Object current = source; - while (it.hasNext()) { - - String key = it.next().replace("\\.", "."); - - if (!(current instanceof BSONObject) && !key.startsWith("[")) { - return null; - } - - if (key.startsWith("[")) { - String indexNumber = key.substring(1, key.indexOf("]")); - if (current instanceof List) { - current = ((List) current).get(Integer.valueOf(indexNumber)); - } - if (!it.hasNext()) { - return current; - } - } else { - - if (current instanceof DBObject) { - current = ((DBObject) current).get(key); - } - - if (!it.hasNext()) { - return current; - } - - } - } - - throw new NoSuchElementException(String.format("Unable to find '%s' in %s.", path, source)); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java new file mode 100644 index 0000000000..15a0538600 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.lang.Nullable; + +/** + * Utility to configure {@link org.springframework.data.mongodb.core.mapping.MongoMappingContext} properties. 
+ * + * @author Christoph Strobl + */ +public class MappingContextConfigurer { + + private @Nullable Set> intitalEntitySet; + boolean autocreateIndex = false; + + public void autocreateIndex(boolean autocreateIndex) { + this.autocreateIndex = autocreateIndex; + } + + public void initialEntitySet(Set> initialEntitySet) { + this.intitalEntitySet = initialEntitySet; + } + + public void initialEntitySet(Class... initialEntitySet) { + this.intitalEntitySet = Set.of(initialEntitySet); + } + + Set> initialEntitySet() { + return intitalEntitySet != null ? intitalEntitySet : Collections.emptySet(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientClosingTestConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientClosingTestConfiguration.java new file mode 100644 index 0000000000..c5c23162d1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientClosingTestConfiguration.java @@ -0,0 +1,42 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import javax.annotation.PreDestroy; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * @author Christoph Strobl + */ +public abstract class MongoClientClosingTestConfiguration extends AbstractMongoClientConfiguration { + + @Autowired(required = false) MongoDatabaseFactory dbFactory; + + @PreDestroy + public void destroy() { + + if (dbFactory != null) { + Object mongo = ReflectionTestUtils.getField(dbFactory, "mongoClient"); + if (mongo != null) { + ReflectionTestUtils.invokeMethod(mongo, "close"); + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientExtension.java new file mode 100644 index 0000000000..357a87168e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientExtension.java @@ -0,0 +1,196 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
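The configurer above is consumed by `MongoTestMappingContext` and `MongoTestTemplateConfiguration` further down in this change; a sketch of the intended call pattern, with illustrative entity types:

[source,java]
----
import org.springframework.data.mongodb.test.util.MappingContextConfigurer;

class MappingContextConfigurerSketch {

	MappingContextConfigurer configure() {

		MappingContextConfigurer config = new MappingContextConfigurer();

		// seed the context up front and keep index auto-creation off in tests
		config.initialEntitySet(Person.class, Order.class); // illustrative entity types
		config.autocreateIndex(false);

		return config;
	}

	static class Person {}

	static class Order {}
}
----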
+ */ +package org.springframework.data.mongodb.test.util; + +import static org.junit.platform.commons.util.AnnotationUtils.*; +import static org.junit.platform.commons.util.ReflectionUtils.*; + +import java.lang.reflect.Field; +import java.util.function.Predicate; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.Extension; +import org.junit.jupiter.api.extension.ExtensionConfigurationException; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; +import org.junit.jupiter.api.extension.ExtensionContext.Store; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; +import org.junit.platform.commons.util.ExceptionUtils; +import org.junit.platform.commons.util.ReflectionUtils; + +import org.springframework.util.ClassUtils; + +import com.mongodb.client.MongoClient; + +/** + * JUnit {@link Extension} providing parameter resolution for synchronous and reactive MongoDB client instances. + * + * @author Christoph Strobl + * @see Client + * @see ReplSetClient + */ +public class MongoClientExtension implements Extension, BeforeAllCallback, AfterAllCallback, ParameterResolver { + + private static final Log LOGGER = LogFactory.getLog(MongoClientExtension.class); + + private static final Namespace NAMESPACE = MongoExtensions.Client.NAMESPACE; + + private static final String SYNC_KEY = MongoExtensions.Client.SYNC_KEY; + private static final String REACTIVE_KEY = MongoExtensions.Client.REACTIVE_KEY; + private static final String SYNC_REPLSET_KEY = MongoExtensions.Client.SYNC_REPLSET_KEY; + private static final String REACTIVE_REPLSET_KEY = MongoExtensions.Client.REACTIVE_REPLSET_KEY; + + @Override + public void afterAll(ExtensionContext extensionContext) throws Exception { + + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + injectFields(context, null, ReflectionUtils::isStatic); + } + + private void injectFields(ExtensionContext context, Object testInstance, Predicate predicate) { + + findAnnotatedFields(context.getRequiredTestClass(), Client.class, predicate).forEach(field -> { + assertValidFieldCandidate(field); + try { + makeAccessible(field).set(testInstance, getMongoClient(field.getType(), context, false)); + } catch (Throwable t) { + ExceptionUtils.throwAsUncheckedException(t); + } + }); + + findAnnotatedFields(context.getRequiredTestClass(), ReplSetClient.class, predicate).forEach(field -> { + assertValidFieldCandidate(field); + try { + makeAccessible(field).set(testInstance, getMongoClient(field.getType(), context, true)); + } catch (Throwable t) { + ExceptionUtils.throwAsUncheckedException(t); + } + }); + } + + protected Object getMongoClient(Class type, ExtensionContext extensionContext, boolean replSet) { + + Store store = extensionContext.getStore(NAMESPACE); + + if (ClassUtils.isAssignable(com.mongodb.client.MongoClient.class, type)) { + + LOGGER.debug("Obtaining sync client from store."); + return store.getOrComputeIfAbsent(replSet ? 
SYNC_REPLSET_KEY : SYNC_KEY, it -> syncClient(replSet), + SyncClientHolder.class).client; + } + + if (ClassUtils.isAssignable(com.mongodb.reactivestreams.client.MongoClient.class, type)) { + + LOGGER.debug("Obtaining reactive client from store."); + return store.getOrComputeIfAbsent(replSet ? REACTIVE_REPLSET_KEY : REACTIVE_KEY, key -> reactiveClient(replSet), + ReactiveClientHolder.class).client; + } + + throw new IllegalStateException("Damn - something went wrong."); + } + + private ReactiveClientHolder reactiveClient(boolean replSet) { + + LOGGER.debug(String.format("Creating new reactive %sclient.", replSet ? "replica set " : "")); + return new ReactiveClientHolder(replSet ? MongoTestUtils.reactiveReplSetClient() : MongoTestUtils.reactiveClient()); + } + + private SyncClientHolder syncClient(boolean replSet) { + + LOGGER.debug(String.format("Creating new sync %sclient.", replSet ? "replica set " : "")); + return new SyncClientHolder(replSet ? MongoTestUtils.replSetClient() : MongoTestUtils.client()); + } + + boolean holdsReplSetClient(ExtensionContext context) { + + Store store = context.getStore(NAMESPACE); + return store.get(SYNC_REPLSET_KEY) != null || store.get(REACTIVE_REPLSET_KEY) != null; + } + + private void assertValidFieldCandidate(Field field) { + + assertSupportedType("field", field.getType()); + } + + private void assertSupportedType(String target, Class type) { + + if (type != com.mongodb.client.MongoClient.class && type != com.mongodb.reactivestreams.client.MongoClient.class) { + throw new ExtensionConfigurationException(String.format( + "Can only resolve @MongoClient %s of type %s or %s but was: %s", target, MongoClient.class.getName(), + com.mongodb.reactivestreams.client.MongoClient.class.getName(), type.getName())); + } + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return parameterContext.isAnnotated(Client.class) || parameterContext.isAnnotated(ReplSetClient.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + + Class parameterType = parameterContext.getParameter().getType(); + boolean replSet = parameterContext.getParameter().getAnnotation(ReplSetClient.class) != null; + return getMongoClient(parameterType, extensionContext, replSet); + } + + static class SyncClientHolder implements Store.CloseableResource { + + final MongoClient client; + + SyncClientHolder(MongoClient client) { + this.client = client; + } + + @Override + public void close() { + try { + client.close(); + } catch (RuntimeException e) { + // so what? + } + } + } + + static class ReactiveClientHolder implements Store.CloseableResource { + + final com.mongodb.reactivestreams.client.MongoClient client; + + ReactiveClientHolder(com.mongodb.reactivestreams.client.MongoClient client) { + this.client = client; + } + + @Override + public void close() { + + try { + client.close(); + } catch (RuntimeException e) { + // so what? 
+ } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java new file mode 100644 index 0000000000..44b7ae3e45 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; + +/** + * Utility to configure {@link MongoCustomConversions}. + * + * @author Christoph Strobl + */ +public class MongoConverterConfigurer { + + CustomConversions customConversions; + + public void customConversions(CustomConversions customConversions) { + this.customConversions = customConversions; + } + + public void customConverters(Converter... converters) { + customConversions(new MongoCustomConversions(Arrays.asList(converters))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoExtensions.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoExtensions.java new file mode 100644 index 0000000000..c90f7e999b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoExtensions.java @@ -0,0 +1,41 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
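Putting `MongoClientExtension` to work: a hedged sketch of field injection, assuming `@Client` and `@ReplSetClient` are the field-level annotations referenced in the extension's javadoc above.

[source,java]
----
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.data.mongodb.test.util.Client;
import org.springframework.data.mongodb.test.util.MongoClientExtension;
import org.springframework.data.mongodb.test.util.ReplSetClient;

@ExtendWith(MongoClientExtension.class)
class ClientInjectionTests {

	// static fields are injected in beforeAll; the clients are cached in the
	// extension store and closed together with it
	@Client static com.mongodb.client.MongoClient client;

	@ReplSetClient static com.mongodb.reactivestreams.client.MongoClient reactiveReplSetClient;
}
----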
+ */ +package org.springframework.data.mongodb.test.util; + +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; + +/** + * @author Christoph Strobl + */ +class MongoExtensions { + + static class Client { + + static final Namespace NAMESPACE = Namespace.create(MongoClientExtension.class); + static final String SYNC_KEY = "mongo.client.sync"; + static final String REACTIVE_KEY = "mongo.client.reactive"; + static final String SYNC_REPLSET_KEY = "mongo.client.replset.sync"; + static final String REACTIVE_REPLSET_KEY = "mongo.client.replset.reactive"; + } + + static class Termplate { + + static final Namespace NAMESPACE = Namespace.create(MongoTemplateExtension.class); + static final String SYNC = "mongo.template.sync"; + static final String REACTIVE = "mongo.template.reactive"; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoServerCondition.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoServerCondition.java new file mode 100644 index 0000000000..d811e0a1ef --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoServerCondition.java @@ -0,0 +1,97 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
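Because the guard annotations above carry `@Tag` and `@ExtendWith` as meta-annotations, they should compose via JUnit's meta-annotation support; a hypothetical composed annotation combining the replica-set and version conditions (MongoDB transactions require a replica set on 4.0+):

[source,java]
----
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion;
import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable;

// hypothetical composed annotation; relies on JUnit discovering @Tag/@ExtendWith through meta-annotations
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0.0")
@EnableIfReplicaSetAvailable
@interface EnableIfTransactionSupportAvailable {
}
----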
+ */ +package org.springframework.data.mongodb.test.util; + +import org.junit.jupiter.api.extension.ConditionEvaluationResult; +import org.junit.jupiter.api.extension.ExecutionCondition; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.data.util.Version; + +/** + * @author Christoph Strobl + */ +public class MongoServerCondition implements ExecutionCondition { + + private static final Namespace NAMESPACE = Namespace.create("mongodb", "server"); + + private static final Version ANY = new Version(9999, 9999, 9999); + private static final Version DEFAULT_HIGH = ANY; + private static final Version DEFAULT_LOW = new Version(0, 0, 0); + + @Override + public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) { + + if (context.getTags().contains("replSet")) { + if (!serverIsPartOfReplicaSet(context)) { + return ConditionEvaluationResult.disabled("Disabled for servers not running in replicaSet mode."); + } + } + + if(context.getTags().contains("vector-search")) { + if(!atlasEnvironment(context)) { + return ConditionEvaluationResult.disabled("Disabled for servers not supporting Vector Search."); + } + } + + if (context.getTags().contains("version-specific") && context.getElement().isPresent()) { + + EnableIfMongoServerVersion version = AnnotatedElementUtils.findMergedAnnotation(context.getElement().get(), + EnableIfMongoServerVersion.class); + + Version serverVersion = serverVersion(context); + + if (version != null && !serverVersion.equals(ANY)) { + + Version expectedMinVersion = Version.parse(version.isGreaterThanEqual()); + if (!expectedMinVersion.equals(ANY) && !expectedMinVersion.equals(DEFAULT_LOW)) { + if (serverVersion.isLessThan(expectedMinVersion)) { + return ConditionEvaluationResult.disabled(String + .format("Disabled for server version %s; Requires at least %s.", serverVersion, expectedMinVersion)); + } + } + + Version expectedMaxVersion = Version.parse(version.isLessThan()); + if (!expectedMaxVersion.equals(ANY) && !expectedMaxVersion.equals(DEFAULT_HIGH)) { + if (serverVersion.isGreaterThanOrEqualTo(expectedMaxVersion)) { + return ConditionEvaluationResult.disabled(String + .format("Disabled for server version %s; Only supported until %s.", serverVersion, expectedMaxVersion)); + } + } + } + } + + return ConditionEvaluationResult.enabled("Enabled by default"); + } + + private boolean serverIsPartOfReplicaSet(ExtensionContext context) { + + return context.getStore(NAMESPACE).getOrComputeIfAbsent("--replSet", (key) -> MongoTestUtils.serverIsReplSet(), + Boolean.class); + } + + private Version serverVersion(ExtensionContext context) { + + return context.getStore(NAMESPACE).getOrComputeIfAbsent(Version.class, (key) -> MongoTestUtils.serverVersion(), + Version.class); + } + + private boolean atlasEnvironment(ExtensionContext context) { + return context.getStore(NAMESPACE).getOrComputeIfAbsent(Version.class, (key) -> MongoTestUtils.isVectorSearchEnabled(), + Boolean.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTemplateExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTemplateExtension.java new file mode 100644 index 0000000000..301d1ef499 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTemplateExtension.java @@ -0,0 +1,152 @@ +/* 
+ * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.reflect.Field; +import java.util.List; +import java.util.function.Predicate; + +import org.junit.jupiter.api.extension.Extension; +import org.junit.jupiter.api.extension.ExtensionConfigurationException; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Store; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.TestInstancePostProcessor; +import org.junit.platform.commons.util.AnnotationUtils; +import org.junit.platform.commons.util.ExceptionUtils; +import org.junit.platform.commons.util.ReflectionUtils; +import org.junit.platform.commons.util.StringUtils; + +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.test.util.MongoExtensions.Termplate; +import org.springframework.data.util.ParsingUtils; +import org.springframework.util.ClassUtils; + +/** + * JUnit {@link Extension} providing parameter resolution for synchronous and reactive MongoDB Template API objects. 
+ * + * @author Christoph Strobl + * @see Template + * @see MongoTestTemplate + * @see ReactiveMongoTestTemplate + */ +public class MongoTemplateExtension extends MongoClientExtension implements TestInstancePostProcessor { + + private static final String DEFAULT_DATABASE = "database"; + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + + super.beforeAll(context); + + injectFields(context, null, ReflectionUtils::isStatic); + } + + @Override + public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception { + injectFields(context, testInstance, it -> true); + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return super.supportsParameter(parameterContext, extensionContext) || parameterContext.isAnnotated(Template.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + + if (parameterContext.getParameter().getAnnotation(Template.class) == null) { + return super.resolveParameter(parameterContext, extensionContext); + } + + Class parameterType = parameterContext.getParameter().getType(); + return getMongoTemplate(parameterType, parameterContext.getParameter().getAnnotation(Template.class), + extensionContext); + } + + private void injectFields(ExtensionContext context, Object testInstance, Predicate predicate) { + + AnnotationUtils.findAnnotatedFields(context.getRequiredTestClass(), Template.class, predicate).forEach(field -> { + + assertValidFieldCandidate(field); + + try { + + ReflectionUtils.makeAccessible(field).set(testInstance, + getMongoTemplate(field.getType(), field.getAnnotation(Template.class), context)); + } catch (Throwable t) { + ExceptionUtils.throwAsUncheckedException(t); + } + }); + } + + private void assertValidFieldCandidate(Field field) { + + assertSupportedType("field", field.getType()); + } + + private void assertSupportedType(String target, Class type) { + + if (!ClassUtils.isAssignable(MongoOperations.class, type) + && !ClassUtils.isAssignable(ReactiveMongoOperations.class, type)) { + throw new ExtensionConfigurationException( + String.format("Can only resolve @%s %s of type %s or %s but was: %s", Template.class.getSimpleName(), target, + MongoOperations.class.getName(), ReactiveMongoOperations.class.getName(), type.getName())); + } + } + + private Object getMongoTemplate(Class type, Template options, ExtensionContext extensionContext) { + + Store templateStore = extensionContext.getStore(MongoExtensions.Termplate.NAMESPACE); + + boolean replSetClient = holdsReplSetClient(extensionContext) || options.replicaSet(); + + String dbName = StringUtils.isNotBlank(options.database()) ? 
options.database() + : extensionContext.getTestClass().map(it -> { + List target = ParsingUtils.splitCamelCaseToLower(it.getSimpleName()); + return org.springframework.util.StringUtils.collectionToDelimitedString(target, "-"); + }).orElse(DEFAULT_DATABASE); + + if (ClassUtils.isAssignable(MongoOperations.class, type)) { + + String key = Termplate.SYNC + "-" + dbName; + return templateStore.getOrComputeIfAbsent(key, it -> { + + com.mongodb.client.MongoClient client = (com.mongodb.client.MongoClient) getMongoClient( + com.mongodb.client.MongoClient.class, extensionContext, replSetClient); + return new MongoTestTemplate(client, dbName, options.initialEntitySet()); + }); + } + + if (ClassUtils.isAssignable(ReactiveMongoOperations.class, type)) { + + String key = Termplate.REACTIVE + "-" + dbName; + return templateStore.getOrComputeIfAbsent(key, it -> { + + com.mongodb.reactivestreams.client.MongoClient client = (com.mongodb.reactivestreams.client.MongoClient) getMongoClient( + com.mongodb.reactivestreams.client.MongoClient.class, extensionContext, replSetClient); + return new ReactiveMongoTestTemplate(client, dbName, options.initialEntitySet()); + }); + } + + throw new IllegalStateException("Damn - something went wrong."); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java new file mode 100644 index 0000000000..b65d6278fe --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java @@ -0,0 +1,91 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
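A sketch of the template injection the extension above enables; the entity type is illustrative. As the code shows, the database name falls back to the dash-separated simple class name when `@Template#database` is left empty.

[source,java]
----
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.data.mongodb.test.util.MongoTemplateExtension;
import org.springframework.data.mongodb.test.util.MongoTestTemplate;
import org.springframework.data.mongodb.test.util.Template;

@ExtendWith(MongoTemplateExtension.class)
class TemplateInjectionTests {

	// resolves to database "template-injection-tests" since no name is given
	@Template(initialEntitySet = Person.class) // Person is an illustrative entity
	static MongoTestTemplate template;

	static class Person {}
}
----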
+ */ +package org.springframework.data.mongodb.test.util; + +import java.util.Collections; +import java.util.Optional; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.MethodInvocationRecorder; + +/** + * @author Christoph Strobl + */ +public class MongoTestMappingContext extends MongoMappingContext { + + private MappingContextConfigurer contextConfigurer; + private MongoConverterConfigurer converterConfigurer; + + public static MongoTestMappingContext newTestContext() { + return new MongoTestMappingContext(conig -> {}).init(); + } + + public MongoTestMappingContext(MappingContextConfigurer contextConfig) { + + this.contextConfigurer = contextConfig; + this.converterConfigurer = new MongoConverterConfigurer(); + } + + public MongoTestMappingContext(Consumer contextConfig) { + + this(new MappingContextConfigurer()); + contextConfig.accept(contextConfigurer); + } + + public MongoPersistentProperty getPersistentPropertyFor(Class type, Function property) { + + MongoPersistentEntity persistentEntity = getRequiredPersistentEntity(type); + return persistentEntity.getPersistentProperty(MethodInvocationRecorder.forProxyOf(type).record(property).getPropertyPath().get()); + } + + public MongoTestMappingContext customConversions(MongoConverterConfigurer converterConfig) { + + this.converterConfigurer = converterConfig; + return this; + } + + public MongoTestMappingContext customConversions(Consumer converterConfig) { + + converterConfig.accept(converterConfigurer); + return this; + } + + public MongoTestMappingContext init() { + + setInitialEntitySet(contextConfigurer.initialEntitySet()); + setAutoIndexCreation(contextConfigurer.autocreateIndex); + if (converterConfigurer.customConversions != null) { + setSimpleTypeHolder(converterConfigurer.customConversions.getSimpleTypeHolder()); + } else { + setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + } + + super.afterPropertiesSet(); + return this; + } + + @Override + public void afterPropertiesSet() { + init(); + } + + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java new file mode 100644 index 0000000000..40948a0e22 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java @@ -0,0 +1,179 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
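The recorder-based property lookup above avoids brittle string paths; a minimal sketch with an illustrative entity:

[source,java]
----
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.test.util.MongoTestMappingContext;

class MappingContextLookupSketch {

	void resolveProperty() {

		MongoTestMappingContext context = MongoTestMappingContext.newTestContext();

		// records the getter invocation on a proxy and resolves the property path from it
		MongoPersistentProperty property = context.getPersistentPropertyFor(Person.class, Person::getName);
	}

	static class Person {

		String name;

		public String getName() {
			return name;
		}
	}
}
----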
+ */ +package org.springframework.data.mongodb.test.util; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.context.ApplicationContext; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.testcontainers.shaded.org.awaitility.Awaitility; + +import com.mongodb.MongoWriteException; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; + +/** + * A {@link MongoTemplate} with configuration hooks and extension suitable for tests. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class MongoTestTemplate extends MongoTemplate { + + private final MongoTestTemplateConfiguration cfg; + + public MongoTestTemplate(MongoClient client, String database, Class... initialEntities) { + this(cfg -> { + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(database); + }); + + cfg.configureMappingContext(it -> { + + it.autocreateIndex(false); + it.initialEntitySet(initialEntities); + }); + }); + } + + public MongoTestTemplate(Consumer cfg) { + + this(() -> { + + MongoTestTemplateConfiguration config = new MongoTestTemplateConfiguration(); + cfg.accept(config); + return config; + }); + } + + public MongoTestTemplate(Supplier config) { + this(config.get()); + } + + public MongoTestTemplate(MongoTestTemplateConfiguration config) { + super(config.databaseFactory(), config.mongoConverter()); + + ApplicationContext applicationContext = config.getApplicationContext(); + EntityCallbacks callbacks = config.getEntityCallbacks(); + if (callbacks != null) { + setEntityCallbacks(callbacks); + } + if (applicationContext != null) { + setApplicationContext(applicationContext); + } + + this.cfg = config; + } + + public void flush() { + flush(PersistentEntities.of(getConverter().getMappingContext()).stream().map(it -> getCollectionName(it.getType())) + .collect(Collectors.toList())); + } + + public void flushDatabase() { + flush(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(getDb()).listCollectionNames()); + } + + public void flush(Iterable collections) { + + for (String collection : collections) { + MongoCollection mongoCollection = getCollection(collection); + try { + mongoCollection.deleteMany(new Document()); + } catch (MongoWriteException e) { + mongoCollection.drop(); + } + } + } + + public void flush(Class... entities) { + flush(Arrays.stream(entities).map(this::getCollectionName).collect(Collectors.toList())); + } + + public void flush(String... collections) { + flush(Arrays.asList(collections)); + } + + public void flush(Object... objects) { + + flush(Arrays.stream(objects).map(it -> { + + if (it instanceof String) { + return (String) it; + } + if (it instanceof Class) { + return getCollectionName((Class) it); + } + return it.toString(); + }).collect(Collectors.toList())); + } + + public void dropDatabase() { + getDb().drop(); + } + + public void dropIndexes(String... collections) { + for (String collection : collections) { + getCollection(collection).dropIndexes(); + } + } + + public void dropIndexes(Class... 
entities) { + for (Class entity : entities) { + getCollection(getCollectionName(entity)).dropIndexes(); + } + } + + public void doInCollection(Class entityClass, Consumer> callback) { + execute(entityClass, (collection -> { + callback.accept(collection); + return null; + })); + } + + public void awaitIndexCreation(Class type, String indexName) { + awaitIndexCreation(getCollectionName(type), indexName, Duration.ofSeconds(10)); + } + + public void awaitIndexCreation(String collectionName, String indexName, Duration timeout) { + + Awaitility.await().atMost(timeout).pollInterval(Duration.ofMillis(200)).until(() -> { + + List execute = this.execute(collectionName, + coll -> coll + .aggregate(List.of(Document.parse("{'$listSearchIndexes': { 'name' : '%s'}}".formatted(indexName)))) + .into(new ArrayList<>())); + for (Document doc : execute) { + if (doc.getString("name").equals(indexName)) { + return doc.getString("status").equals("READY"); + } + } + return false; + }); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java new file mode 100644 index 0000000000..09149c02ef --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -0,0 +1,269 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
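The `flush` overloads of `MongoTestTemplate` above make per-test cleanup cheap; a sketch of the typical pattern, with illustrative entity types and the template assumed to be injected, e.g. via `MongoTemplateExtension`:

[source,java]
----
import org.junit.jupiter.api.AfterEach;
import org.springframework.data.mongodb.test.util.MongoTestTemplate;

class CleanupSketch {

	static MongoTestTemplate template; // assumed to be injected, e.g. via @Template

	@AfterEach
	void cleanUp() {
		// issues deleteMany(new Document()) per collection, dropping the collection if the delete fails
		template.flush(Person.class, Order.class);
	}

	static class Person {}

	static class Order {}
}
----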
+ */ +package org.springframework.data.mongodb.test.util; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.ObjectFactory; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + * @since 3.0 + */ +public class MongoTestTemplateConfiguration { + + private final DatabaseFactoryConfigurer dbFactoryConfig = new DatabaseFactoryConfigurer(); + private final MappingContextConfigurer mappingContextConfigurer = new MappingContextConfigurer(); + private final MongoConverterConfigurer mongoConverterConfigurer = new MongoConverterConfigurer(); + private final AuditingConfigurer auditingConfigurer = new AuditingConfigurer(); + private final ApplicationContextConfigurer applicationContextConfigurer = new ApplicationContextConfigurer(); + + private MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private ApplicationContext context; + + private com.mongodb.client.MongoClient syncClient; + private com.mongodb.reactivestreams.client.MongoClient reactiveClient; + private MongoDatabaseFactory syncFactory; + private SimpleReactiveMongoDatabaseFactory reactiveFactory; + + MongoConverter mongoConverter() { + + if (converter == null) { + + if (dbFactoryConfig.syncClient != null || syncClient != null) { + converter = new MappingMongoConverter(new DefaultDbRefResolver(databaseFactory()), mappingContext()); + } else { + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext()); + } + + if (mongoConverterConfigurer.customConversions != null) { + converter.setCustomConversions(mongoConverterConfigurer.customConversions); + } + if (auditingConfigurer.hasAuditingHandler()) { + converter.setEntityCallbacks(getEntityCallbacks()); + } + converter.afterPropertiesSet(); + } + + return converter; + } + + EntityCallbacks getEntityCallbacks() { + + EntityCallbacks callbacks = null; + if (getApplicationContext() != null) { + callbacks = EntityCallbacks.create(getApplicationContext()); + } + if (!auditingConfigurer.hasAuditingHandler()) { + return callbacks; + } + if (callbacks == null) { + callbacks = EntityCallbacks.create(); + } + + callbacks.addEntityCallback(new 
AuditingEntityCallback(new ObjectFactory<IsNewAwareAuditingHandler>() { + @Override + public IsNewAwareAuditingHandler getObject() throws BeansException { + return auditingConfigurer.auditingHandlerFunction.apply(converter.getMappingContext()); + } + })); + return callbacks; + + } + + List<ApplicationListener<?>> getApplicationEventListener() { + return new ArrayList<>(applicationContextConfigurer.listeners); + } + + @Nullable + ApplicationContext getApplicationContext() { + + if (applicationContextConfigurer.applicationContext == null) { + return null; + } + + if (context != null) { + return context; + } + + context = applicationContextConfigurer.applicationContext; + + if (context instanceof ConfigurableApplicationContext) { + + ConfigurableApplicationContext configurableApplicationContext = (ConfigurableApplicationContext) this.context; + getApplicationEventListener().forEach(configurableApplicationContext::addApplicationListener); + + configurableApplicationContext.refresh(); + } + return context; + } + + MongoMappingContext mappingContext() { + + if (mappingContext == null) { + mappingContext = new MongoTestMappingContext(mappingContextConfigurer).customConversions(mongoConverterConfigurer) + .init(); + } + + return mappingContext; + } + + MongoDatabaseFactory databaseFactory() { + + if (syncFactory == null) { + syncFactory = new SimpleMongoClientDatabaseFactory(syncClient(), defaultDatabase()); + } + + return syncFactory; + } + + ReactiveMongoDatabaseFactory reactiveDatabaseFactory() { + + if (reactiveFactory == null) { + reactiveFactory = new SimpleReactiveMongoDatabaseFactory(reactiveClient(), defaultDatabase()); + } + + return reactiveFactory; + } + + public MongoTestTemplateConfiguration configureDatabaseFactory(Consumer<DatabaseFactoryConfigurer> dbFactory) { + + dbFactory.accept(dbFactoryConfig); + return this; + } + + public MongoTestTemplateConfiguration configureMappingContext( + Consumer<MappingContextConfigurer> mappingContextConfigurerConsumer) { + mappingContextConfigurerConsumer.accept(mappingContextConfigurer); + return this; + } + + public MongoTestTemplateConfiguration configureApplicationContext( + Consumer<ApplicationContextConfigurer> applicationContextConfigurerConsumer) { + + applicationContextConfigurerConsumer.accept(applicationContextConfigurer); + return this; + } + + public MongoTestTemplateConfiguration configureAuditing(Consumer<AuditingConfigurer> auditingConfigurerConsumer) { + + auditingConfigurerConsumer.accept(auditingConfigurer); + return this; + } + + public MongoTestTemplateConfiguration configureConversion( + Consumer<MongoConverterConfigurer> mongoConverterConfigurerConsumer) { + + mongoConverterConfigurerConsumer.accept(mongoConverterConfigurer); + return this; + } + + com.mongodb.client.MongoClient syncClient() { + + if (syncClient == null) { + syncClient = dbFactoryConfig.syncClient != null ? dbFactoryConfig.syncClient : MongoTestUtils.client(); + } + + return syncClient; + } + + com.mongodb.reactivestreams.client.MongoClient reactiveClient() { + + if (reactiveClient == null) { + reactiveClient = dbFactoryConfig.reactiveClient != null ? dbFactoryConfig.reactiveClient + : MongoTestUtils.reactiveClient(); + } + + return reactiveClient; + } + + String defaultDatabase() { + return dbFactoryConfig.defaultDatabase != null ? 
dbFactoryConfig.defaultDatabase : "test"; + } + + public static class DatabaseFactoryConfigurer { + + com.mongodb.client.MongoClient syncClient; + com.mongodb.reactivestreams.client.MongoClient reactiveClient; + String defaultDatabase; + + public void client(com.mongodb.client.MongoClient client) { + this.syncClient = client; + } + + public void client(com.mongodb.reactivestreams.client.MongoClient client) { + this.reactiveClient = client; + } + + public void defaultDb(String defaultDatabase) { + this.defaultDatabase = defaultDatabase; + } + } + + public static class AuditingConfigurer { + + Function auditingHandlerFunction; + + public void auditingHandler(Function auditingHandlerFunction) { + this.auditingHandlerFunction = auditingHandlerFunction; + } + + IsNewAwareAuditingHandler auditingHandlers(MongoMappingContext mongoMappingContext) { + return auditingHandlerFunction.apply(mongoMappingContext); + } + + boolean hasAuditingHandler() { + return auditingHandlerFunction != null; + } + } + + public static class ApplicationContextConfigurer { + + List>> listeners = new ArrayList<>(); + ApplicationContext applicationContext; + + public void applicationContext(ApplicationContext context) { + this.applicationContext = context; + } + + public void addEventListener(ApplicationListener> listener) { + this.listeners.add(listener); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java new file mode 100644 index 0000000000..f88caf80dd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java @@ -0,0 +1,308 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; +import reactor.util.retry.Retry; + +import java.time.Duration; +import java.util.List; + +import org.bson.Document; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.mongodb.SpringDataMongoDB; +import org.springframework.data.util.Version; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ConnectionString; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Utility to create (and reuse) imperative and reactive {@code MongoClient} instances. 
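+ * <p> + * A minimal usage sketch (illustrative only; the database and collection names are placeholders, not part of this utility): + * <pre>{@code + * MongoClient client = MongoTestUtils.client(); // connects to 127.0.0.1:27017 + * MongoCollection<Document> people = MongoTestUtils.createOrReplaceCollection("test-db", "people", client); + * MongoTestUtils.flushCollection("test-db", "people", client); // remove all documents again + * }</pre>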
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +public class MongoTestUtils { + + private static final Environment ENV = new StandardEnvironment(); + private static final Duration DEFAULT_TIMEOUT = Duration.ofMillis(10); + + public static final String CONNECTION_STRING = "mongodb://127.0.0.1:27017/?replicaSet=rs0&w=majority&uuidrepresentation=javaLegacy"; + + private static final String CONNECTION_STRING_PATTERN = "mongodb://%s:%s/?w=majority&uuidrepresentation=javaLegacy"; + + private static final Version ANY = new Version(9999, 9999, 9999); + + /** + * Create a new {@link com.mongodb.client.MongoClient} with defaults. + * + * @return new instance of {@link com.mongodb.client.MongoClient}. + */ + public static MongoClient client() { + return client("127.0.0.1", 27017); + } + + public static MongoClient client(String host, int port) { + return client(new ConnectionString(String.format(CONNECTION_STRING_PATTERN, host, port))); + } + + public static MongoClient client(ConnectionString connectionString) { + return com.mongodb.client.MongoClients.create(connectionString, SpringDataMongoDB.driverInformation()); + } + + /** + * Create a new {@link com.mongodb.reactivestreams.client.MongoClient} with defaults. + * + * @return new instance of {@link com.mongodb.reactivestreams.client.MongoClient}. + */ + public static com.mongodb.reactivestreams.client.MongoClient reactiveClient() { + return reactiveClient("127.0.0.1", 27017); + } + + public static com.mongodb.reactivestreams.client.MongoClient reactiveClient(String host, int port) { + + ConnectionString connectionString = new ConnectionString(String.format(CONNECTION_STRING_PATTERN, host, port)); + return MongoClients.create(connectionString, SpringDataMongoDB.driverInformation()); + } + + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static MongoCollection<Document> createOrReplaceCollection(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + MongoDatabase database = client.getDatabase(dbName).withWriteConcern(WriteConcern.MAJORITY) + .withReadPreference(ReadPreference.primary()); + + boolean collectionExists = database.listCollections().filter(new Document("name", collectionName)).first() != null; + + if (collectionExists) { + + database.getCollection(collectionName).drop(); + giveTheServerALittleTimeToThink(); + } + + database.createCollection(collectionName); + giveTheServerALittleTimeToThink(); + + return database.getCollection(collectionName); + } + + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. 
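+ * <p> + * Note that the returned {@link Mono} only does its work once subscribed. A sketch (database and collection names are placeholders): + * <pre>{@code + * createOrReplaceCollection("test-db", "people", reactiveClient()) // + * .as(StepVerifier::create) // subscribe and block until completion + * .verifyComplete(); + * }</pre>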
+ */ + public static Mono<Void> createOrReplaceCollection(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + com.mongodb.reactivestreams.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + return Mono.from(database.getCollection(collectionName).drop()) // + .delayElement(getTimeout()) // server replication time + .then(Mono.from(database.createCollection(collectionName))) // + .delayElement(getTimeout()); // server replication time + } + + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does and + * verify the operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static void createOrReplaceCollectionNow(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + createOrReplaceCollection(dbName, collectionName, client) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + /** + * Drop the {@link com.mongodb.client.MongoCollection} with the given name and verify the operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static void dropCollectionNow(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + com.mongodb.reactivestreams.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + Mono.from(database.getCollection(collectionName).drop()) // + .delayElement(getTimeout()).retryWhen(Retry.backoff(3, Duration.ofMillis(250))) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + /** + * Drop the {@link com.mongodb.client.MongoCollection} with the given name and verify the operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static void dropCollectionNow(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + com.mongodb.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + database.getCollection(collectionName).drop(); + } + + /** + * Remove all documents from the {@link MongoCollection} with the given name in the given {@link MongoDatabase + * database}. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. 
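+ * <p> + * Illustrative call (placeholder names); the method itself subscribes via {@code StepVerifier} and blocks until the delete has completed: + * <pre>{@code + * flushCollection("test-db", "people", reactiveClient()); // deletes all documents, verified on completion + * }</pre>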
+ */ + public static void flushCollection(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + com.mongodb.reactivestreams.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + Mono.from(database.getCollection(collectionName).deleteMany(new Document())) // + .delayElement(getTimeout()).then() // + .as(StepVerifier::create) // + .verifyComplete(); + } + + public static void flushCollection(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + com.mongodb.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + database.getCollection(collectionName).deleteMany(new Document()); + } + + /** + * Create a new {@link com.mongodb.client.MongoClient} with defaults suitable for replica set usage. + * + * @return new instance of {@link com.mongodb.client.MongoClient}. + */ + public static com.mongodb.client.MongoClient replSetClient() { + return com.mongodb.client.MongoClients.create(CONNECTION_STRING); + } + + /** + * Create a new {@link com.mongodb.reactivestreams.client.MongoClient} with defaults suitable for replica set usage. + * + * @return new instance of {@link com.mongodb.reactivestreams.client.MongoClient}. + */ + public static com.mongodb.reactivestreams.client.MongoClient reactiveReplSetClient() { + return MongoClients.create(CONNECTION_STRING); + } + + /** + * @return the server version extracted from buildInfo. + * @since 3.0 + */ + public static Version serverVersion() { + + try (MongoClient client = client()) { + + MongoDatabase database = client.getDatabase("test"); + Document result = database.runCommand(new Document("buildInfo", 1)); + + return Version.parse(result.get("version", String.class)); + } catch (Exception e) { + return ANY; + } + } + + /** + * @return {@literal true} if the server is running as part of a replica set. + * @since 3.0 + */ + public static boolean serverIsReplSet() { + + try (MongoClient client = MongoTestUtils.client()) { + + return client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")).get("argv", List.class) + .contains("--replSet"); + } catch (Exception e) { + return false; + } + } + + @SuppressWarnings("unchecked") + public static boolean isVectorSearchEnabled() { + try (MongoClient client = MongoTestUtils.client()) { + + return client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")).get("argv", List.class) + .stream().anyMatch(it -> { + if(it instanceof String cfgString) { + return cfgString.startsWith("searchIndexManagementHostAndPort"); + } + return false; + }); + } catch (Exception e) { + return false; + } + } + + public static Duration getTimeout() { + + return ObjectUtils.nullSafeEquals("jenkins", ENV.getProperty("user.name")) ? 
Duration.ofMillis(100) + : DEFAULT_TIMEOUT; + } + + private static void giveTheServerALittleTimeToThink() { + + try { + Thread.sleep(getTimeout().toMillis()); // server replication time + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + public static CollectionInfo readCollectionInfo(MongoDatabase db, String collectionName) { + + List<Document> list = db.runCommand(new Document().append("listCollections", 1).append("filter", new Document("name", collectionName))) + .get("cursor", Document.class).get("firstBatch", List.class); + + if(list.isEmpty()) { + throw new IllegalStateException(String.format("Collection %s not found.", collectionName)); + } + return CollectionInfo.from(list.get(0)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java new file mode 100644 index 0000000000..c632d0326e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java @@ -0,0 +1,56 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * {@link MongoVersion} allows specifying a version range of MongoDB that is applicable to a specific test method. To + * be used along with {@link MongoVersionRule} or {@link MongoServerCondition}. + * + * @author Christoph Strobl + * @since 2.1 + * @deprecated Use {@link EnableIfMongoServerVersion} instead. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@EnableIfMongoServerVersion +@Deprecated +public @interface MongoVersion { + + /** + * Inclusive lower bound of MongoDB server range. + * + * @return {@code 0.0.0} by default. + */ + @AliasFor(annotation = EnableIfMongoServerVersion.class, attribute = "isGreaterThanEqual") + String asOf() default "0.0.0"; + + /** + * Exclusive upper bound of MongoDB server range. + * + * @return {@code 9999.9999.9999} by default. 
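+ * <p> + * For example (illustrative), {@code @MongoVersion(asOf = "3.6", until = "4.2")} restricts a test to servers in the range [3.6, 4.2): the lower bound is inclusive, the upper bound exclusive.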
+ */ + @AliasFor(annotation = EnableIfMongoServerVersion.class, attribute = "isLessThan") + String until() default "9999.9999.9999"; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java index 8d4feffa1a..a680d33a32 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,95 +15,183 @@ */ package org.springframework.data.mongodb.test.util; +import java.util.concurrent.atomic.AtomicReference; + +import org.bson.Document; +import org.junit.AssumptionViolatedException; import org.junit.ClassRule; import org.junit.Rule; -import org.junit.internal.AssumptionViolatedException; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; import org.springframework.data.util.Version; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.BasicDBObjectBuilder; -import com.mongodb.CommandResult; -import com.mongodb.DB; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; /** * {@link TestRule} verifying server tests are executed against match a given version. This one can be used as - * {@link ClassRule} eg. in context depending tests run with {@link SpringJUnit4ClassRunner} when the context would fail - * to start in case of invalid version, or as simple {@link Rule} on specific tests. - * + * {@link ClassRule} e.g. in context-dependent tests run with {@link SpringRunner} when the context would fail to start + * in case of invalid version, or as a simple {@link Rule} on specific tests. + * * @author Christoph Strobl + * @author Mark Paluch * @since 1.6 + * @deprecated Use {@link MongoServerCondition} instead. 
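+ * <p> + * A typical JUnit 4 setup, sketched for illustration (the field name is arbitrary): + * <pre>{@code + * @ClassRule public static MongoVersionRule mongoVersion = MongoVersionRule.atLeast(Version.parse("4.2")); + * }</pre>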
*/ +@Deprecated public class MongoVersionRule implements TestRule { - private String host = "localhost"; - private int port = 27017; + private static final Version ANY = new Version(9999, 9999, 9999); + private static final Version DEFAULT_HIGH = ANY; + private static final Version DEFAULT_LOW = new Version(0, 0, 0); + public static MongoVersionRule REQUIRES_4_2 = MongoVersionRule + .atLeast(org.springframework.data.util.Version.parse("4.2")); + + private final AtomicReference<Version> currentVersion = new AtomicReference<>(null); private final Version minVersion; private final Version maxVersion; - private Version currentVersion; + private String host = "127.0.0.1"; + private int port = 27017; public MongoVersionRule(Version min, Version max) { + this.minVersion = min; this.maxVersion = max; } public static MongoVersionRule any() { - return new MongoVersionRule(new Version(0, 0, 0), new Version(9999, 9999, 9999)); + return new MongoVersionRule(ANY, ANY); } public static MongoVersionRule atLeast(Version minVersion) { - return new MongoVersionRule(minVersion, new Version(9999, 9999, 9999)); + return new MongoVersionRule(minVersion, DEFAULT_HIGH); } public static MongoVersionRule atMost(Version maxVersion) { - return new MongoVersionRule(new Version(0, 0, 0), maxVersion); + return new MongoVersionRule(DEFAULT_LOW, maxVersion); } public MongoVersionRule withServerRunningAt(String host, int port) { + this.host = host; this.port = port; return this; } + /** + * @see Version#isGreaterThan(Version) + */ + public boolean isGreaterThan(Version version) { + return getCurrentVersion().isGreaterThan(version); + } + + /** + * @see Version#isGreaterThanOrEqualTo(Version) + */ + public boolean isGreaterThanOrEqualTo(Version version) { + return getCurrentVersion().isGreaterThanOrEqualTo(version); + } + + /** + * @see Version#is(Version) + */ + public boolean is(Version version) { + return getCurrentVersion().equals(version); + } + + /** + * @see Version#isLessThan(Version) + */ + public boolean isLessThan(Version version) { + return getCurrentVersion().isLessThan(version); + } + + /** + * @see Version#isLessThanOrEqualTo(Version) + */ + public boolean isLessThanOrEqualTo(Version version) { + return getCurrentVersion().isLessThanOrEqualTo(version); + } + @Override public Statement apply(final Statement base, Description description) { - initCurrentVersion(); return new Statement() { @Override public void evaluate() throws Throwable { - if (currentVersion != null) { - if (currentVersion.isLessThan(minVersion) || currentVersion.isGreaterThan(maxVersion)) { - throw new AssumptionViolatedException(String.format( - "Expected mongodb server to be in range %s to %s but found %s", minVersion, maxVersion, currentVersion)); + + if (!getCurrentVersion().equals(ANY)) { + + Version minVersion = MongoVersionRule.this.minVersion.equals(ANY) ? DEFAULT_LOW + : MongoVersionRule.this.minVersion; + Version maxVersion = MongoVersionRule.this.maxVersion.equals(ANY) ? 
DEFAULT_HIGH + : MongoVersionRule.this.maxVersion; + + if (description.getAnnotation(MongoVersion.class) != null) { + MongoVersion version = description.getAnnotation(MongoVersion.class); + if (version != null) { + + Version expectedMinVersion = Version.parse(version.asOf()); + if (!expectedMinVersion.equals(ANY) && !expectedMinVersion.equals(DEFAULT_LOW)) { + minVersion = expectedMinVersion; + } + + Version expectedMaxVersion = Version.parse(version.until()); + if (!expectedMaxVersion.equals(ANY) && !expectedMaxVersion.equals(DEFAULT_HIGH)) { + maxVersion = expectedMaxVersion; + } + } } + + validateVersion(minVersion, maxVersion); } + + base.evaluate(); } }; } - private void initCurrentVersion() { - - if (currentVersion == null) { - try { - MongoClient client; - client = new MongoClient(host, port); - DB db = client.getDB("test"); - CommandResult result = db.command(new BasicDBObjectBuilder().add("buildInfo", 1).get()); - this.currentVersion = Version.parse(result.get("version").toString()); - } catch (Exception e) { - e.printStackTrace(); - } + private void validateVersion(Version min, Version max) { + + if (getCurrentVersion().isLessThan(min) || getCurrentVersion().isGreaterThanOrEqualTo(max)) { + + throw new AssumptionViolatedException( + String.format("Expected MongoDB server to be in range [%s, %s) but found %s", min, max, currentVersion)); + } + + } + + private Version getCurrentVersion() { + + if (currentVersion.get() == null) { + currentVersion.compareAndSet(null, fetchCurrentVersion()); } + return currentVersion.get(); + } + + private Version fetchCurrentVersion() { + + try { + + MongoClient client = MongoTestUtils.client(host, port); + MongoDatabase database = client.getDatabase("test"); + Document result = database.runCommand(new Document("buildInfo", 1)); + + return Version.parse(result.get("version", String.class)); + } catch (Exception e) { + return ANY; + } } + @Override + public String toString() { + return getCurrentVersion().toString(); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/PackageExcludingClassLoader.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/PackageExcludingClassLoader.java new file mode 100644 index 0000000000..caec182aad --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/PackageExcludingClassLoader.java @@ -0,0 +1,142 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.io.File; +import java.lang.management.ManagementFactory; +import java.lang.reflect.Method; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.BinaryOperator; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collector; +import java.util.stream.Stream; + +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.util.ClassUtils; + +/** + * Simplified version of ModifiedClassPathClassLoader. + * + * @author Christoph Strobl + */ +class PackageExcludingClassLoader extends URLClassLoader { + + private final Set<String> excludedPackages; + private final ClassLoader junitLoader; + + PackageExcludingClassLoader(URL[] urls, ClassLoader parent, Collection<String> excludedPackages, + ClassLoader junitClassLoader) { + + super(urls, parent); + this.excludedPackages = Set.copyOf(excludedPackages); + this.junitLoader = junitClassLoader; + } + + @Override + public Class<?> loadClass(String name) throws ClassNotFoundException { + + if (name.startsWith("org.junit") || name.startsWith("org.hamcrest")) { + return Class.forName(name, false, this.junitLoader); + } + + String packageName = ClassUtils.getPackageName(name); + if (this.excludedPackages.contains(packageName)) { + throw new ClassNotFoundException(name); + } + return super.loadClass(name); + } + + static PackageExcludingClassLoader get(Class<?> testClass, Method testMethod) { + + List<String> excludedPackages = readExcludedPackages(testClass, testMethod); + + if (excludedPackages.isEmpty()) { + return null; + } + + ClassLoader testClassClassLoader = testClass.getClassLoader(); + Stream<URL> urls = null; + if (testClassClassLoader instanceof URLClassLoader urlClassLoader) { + urls = Stream.of(urlClassLoader.getURLs()); + } else { + urls = Stream.of(ManagementFactory.getRuntimeMXBean().getClassPath().split(File.pathSeparator)) + .map(PackageExcludingClassLoader::toURL); + } + + return new PackageExcludingClassLoader(urls.toArray(URL[]::new), testClassClassLoader.getParent(), excludedPackages, + testClassClassLoader); + } + + private static List<String> readExcludedPackages(Class<?> testClass, Method testMethod) { + + return Stream.of( // + AnnotatedElementUtils.findMergedAnnotation(testClass, ClassPathExclusions.class), + AnnotatedElementUtils.findMergedAnnotation(testMethod, ClassPathExclusions.class) // + ).filter(Objects::nonNull) // + .map(ClassPathExclusions::packages) // + .collect(new CombingArrayCollector<String>()); + } + + private static URL toURL(String entry) { + try { + return new File(entry).toURI().toURL(); + } catch (Exception ex) { + throw new IllegalArgumentException(ex); + } + } + + private static class CombingArrayCollector<T> implements Collector<T[], List<T>, List<T>> { + + @Override + public Supplier<List<T>> supplier() { + return ArrayList::new; + } + + @Override + public BiConsumer<List<T>, T[]> accumulator() { + return (target, values) -> target.addAll(Arrays.asList(values)); + } + + @Override + public BinaryOperator<List<T>> combiner() { + return (r1, r2) -> { + r1.addAll(r2); + return r1; + }; + } + + @Override + public Function<List<T>, List<T>> finisher() { + return i -> (List<T>) i; + } + + @Override + public Set<Characteristics> characteristics() { + return 
Collections.unmodifiableSet(EnumSet.of(Characteristics.IDENTITY_FINISH)); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoClientClosingTestConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoClientClosingTestConfiguration.java new file mode 100644 index 0000000000..bc61f81625 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoClientClosingTestConfiguration.java @@ -0,0 +1,44 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import javax.annotation.PreDestroy; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Christoph Strobl + */ +public abstract class ReactiveMongoClientClosingTestConfiguration extends AbstractReactiveMongoConfiguration { + + @Autowired(required = false) ReactiveMongoDatabaseFactory dbFactory; + + @PreDestroy + public void destroy() { + + if (dbFactory != null) { + Object mongo = ReflectionTestUtils.getField(dbFactory, "mongo"); + if (mongo != null) { + ((MongoClient) mongo).close(); + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java new file mode 100644 index 0000000000..9955daa98e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java @@ -0,0 +1,147 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Arrays; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.context.ApplicationContext; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * A {@link ReactiveMongoTemplate} with configuration hooks and extensions suitable for tests. + * + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 3.0 + */ +public class ReactiveMongoTestTemplate extends ReactiveMongoTemplate { + + private final MongoTestTemplateConfiguration cfg; + + public ReactiveMongoTestTemplate(MongoClient client, String database, Class<?>... initialEntities) { + this(cfg -> { + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(database); + }); + + cfg.configureMappingContext(it -> { + + it.autocreateIndex(false); + it.initialEntitySet(initialEntities); + }); + }); + } + + public ReactiveMongoTestTemplate(Consumer<MongoTestTemplateConfiguration> cfg) { + + this(new Supplier<MongoTestTemplateConfiguration>() { + @Override + public MongoTestTemplateConfiguration get() { + + MongoTestTemplateConfiguration config = new MongoTestTemplateConfiguration(); + cfg.accept(config); + return config; + } + }); + } + + public ReactiveMongoTestTemplate(Supplier<MongoTestTemplateConfiguration> config) { + this(config.get()); + } + + public ReactiveMongoTestTemplate(MongoTestTemplateConfiguration config) { + super(config.reactiveDatabaseFactory(), config.mongoConverter()); + + ApplicationContext applicationContext = config.getApplicationContext(); + if (applicationContext != null) { + setApplicationContext(applicationContext); + } + + this.cfg = config; + } + + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return cfg.reactiveDatabaseFactory(); + } + + public Mono<Void> flush() { + return flush(Flux.fromStream( + PersistentEntities.of(getConverter().getMappingContext()).stream().map(it -> getCollectionName(it.getType())))); + } + + public Mono<Void> flushDatabase() { + return flush(getMongoDatabase().flatMapMany(MongoDatabase::listCollectionNames)); + } + + public Mono<Void> flush(Class<?>... entities) { + return flush(Flux.fromStream(Arrays.asList(entities).stream().map(this::getCollectionName))); + } + + public Mono<Void> flush(String... collections) { + return flush(Flux.fromArray(collections)); + } + + public Mono<Void> flush(Publisher<String> collectionNames) { + + return Flux.from(collectionNames) + .flatMap(collection -> getCollection(collection).flatMapMany(it -> it.deleteMany(new Document())).then() + .onErrorResume(it -> getCollection(collection).flatMapMany(MongoCollection::drop).then())) + .then(); + } + + public Mono<Void> flush(Object... objects) { + + return flush(Flux.fromStream(Arrays.asList(objects).stream().map(it -> { + + if (it instanceof String) { + return (String) it; + } + if (it instanceof Class) { + return getCollectionName((Class<?>) it); + } + return it.toString(); + }))); + } + + public Mono<Void> dropDatabase() { + return getMongoDatabase().map(MongoDatabase::drop).then(); + } + + public Mono<Void> dropIndexes(String... 
collections) { + return Flux.fromArray(collections).flatMap(it -> getCollection(it).map(MongoCollection::dropIndexes).then()).then(); + } + + public Mono<Void> dropIndexes(Class<?>... entities) { + return Flux.fromArray(entities) + .flatMap(it -> getCollection(getCollectionName(it)).map(MongoCollection::dropIndexes).then()).then(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplSetClient.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplSetClient.java new file mode 100644 index 0000000000..8342c5b5ee --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplSetClient.java @@ -0,0 +1,39 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Marks a field or method as to be autowired by JUnit's dependency injection facilities for injection of a MongoDB + * client instance connected to a replica set. Depends on {@link MongoClientExtension}. + * + * @author Christoph Strobl + * @see com.mongodb.client.MongoClient + * @see com.mongodb.reactivestreams.client.MongoClient + * @see Client + * @see MongoClientExtension + */ +@Target({ ElementType.FIELD, ElementType.PARAMETER }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface ReplSetClient { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java new file mode 100644 index 0000000000..d2b770b818 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java @@ -0,0 +1,107 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; + +import org.bson.Document; +import org.junit.AssumptionViolatedException; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; +import org.springframework.test.annotation.IfProfileValue; + +import com.mongodb.client.MongoClient; + +/** + * {@link TestRule} evaluating if MongoDB Server is running with {@code --replSet} flag. + * + * @author Christoph Strobl + * @deprecated Use {@link MongoServerCondition} with {@link EnableIfReplicaSetAvailable} instead. + */ +@Deprecated +public class ReplicaSet implements TestRule { + + boolean required = false; + AtomicReference<Boolean> runsAsReplicaSet = new AtomicReference<>(); + + private ReplicaSet(boolean required) { + this.required = required; + } + + /** + * A MongoDB server running with {@code --replSet} flag is required to execute tests. + * + * @return new instance of {@link ReplicaSet}. + */ + public static ReplicaSet required() { + return new ReplicaSet(true); + } + + /** + * A MongoDB server running with {@code --replSet} flag might be required to execute some tests. Those tests are + * marked with {@code @IfProfileValue(name="replSet", value="true")}. + * + * @return new instance of {@link ReplicaSet}. + */ + public static ReplicaSet none() { + return new ReplicaSet(false); + } + + @Override + public Statement apply(Statement base, Description description) { + + return new Statement() { + + @Override + public void evaluate() throws Throwable { + + if (!required) { + + IfProfileValue profileValue = description.getAnnotation(IfProfileValue.class); + if (profileValue == null || !profileValue.name().equalsIgnoreCase("replSet")) { + base.evaluate(); + return; + } + + if (!Boolean.valueOf(profileValue.value())) { + base.evaluate(); + return; + } + } + + if (!runsAsReplicaSet()) { + throw new AssumptionViolatedException("Not running in repl set mode"); + } + base.evaluate(); + } + }; + } + + public boolean runsAsReplicaSet() { + + if (runsAsReplicaSet.get() == null) { + + MongoClient client = MongoTestUtils.client(); + + boolean tmp = client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")).get("argv", List.class) + .contains("--replSet"); + runsAsReplicaSet.compareAndSet(null, tmp); + } + return runsAsReplicaSet.get(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Template.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Template.java new file mode 100644 index 0000000000..a50497f335 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Template.java @@ -0,0 +1,61 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.ExtensionContext; + +/** + * Annotation to inject {@link org.springframework.data.mongodb.core.MongoOperations} and + * {@link org.springframework.data.mongodb.core.ReactiveMongoOperations} parameters as method arguments and into + * {@code static} fields. + * + * @author Christoph Strobl + * @since 3.0 + * @see MongoTemplateExtension + */ +@Target({ ElementType.FIELD, ElementType.PARAMETER }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@ExtendWith(MongoTemplateExtension.class) +public @interface Template { + + /** + * @return name of the database to use. Use empty String to generate the database name for the + * {@link ExtensionContext#getTestClass() test class}. + */ + String database() default ""; + + /** + * Pre-initialize the {@link org.springframework.data.mapping.context.MappingContext} with the given entities. + * + * @return empty by default. + */ + Class<?>[] initialEntitySet() default {}; + + /** + * Use a {@link ReplSetClient} if {@literal true}. + * + * @return false by default. + */ + boolean replicaSet() default false; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoClientVersionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoClientVersionUnitTests.java new file mode 100644 index 0000000000..053498ebbd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoClientVersionUnitTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.test.util.ClassPathExclusions; +import org.springframework.util.ClassUtils; + +import com.mongodb.internal.build.MongoDriverVersion; + +/** + * Tests for {@link MongoClientVersion}. 
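+ * <p> + * The class-path variant below uses {@code @ClassPathExclusions(packages = { "com.mongodb.internal.build" })} to hide the driver's build metadata, forcing {@code MongoClientVersion} onto its class-lookup fallback.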
+ * + * @author Christoph Strobl + */ +class MongoClientVersionUnitTests { + + @Test // GH-4578 + void parsesClientVersionCorrectly() { + assertThat(MongoClientVersion.isVersion5orNewer()).isEqualTo(MongoDriverVersion.VERSION.startsWith("5")); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.internal.build" }) + void fallsBackToClassLookupIfDriverVersionNotPresent() { + assertThat(MongoClientVersion.isVersion5orNewer()).isEqualTo( + ClassUtils.isPresent("com.mongodb.internal.connection.StreamFactoryFactory", this.getClass().getClassLoader())); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapterUnitTests.java new file mode 100644 index 0000000000..ab8e17a469 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapterUnitTests.java @@ -0,0 +1,49 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.test.util.ExcludeReactiveClientFromClassPath; +import org.springframework.data.mongodb.test.util.ExcludeSyncClientFromClassPath; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + */ +class MongoCompatibilityAdapterUnitTests { + + @Test // GH-4578 + @ExcludeReactiveClientFromClassPath + void returnsListCollectionNameIterableTypeCorrectly() { + + String expectedType = MongoClientVersion.isVersion5orNewer() ? "ListCollectionNamesIterable" : "MongoIterable"; + assertThat(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(null).collectionNameIterableType()) + .satisfies(type -> assertThat(ClassUtils.getShortName(type)).isEqualTo(expectedType)); + + } + + @Test // GH-4578 + @ExcludeSyncClientFromClassPath + void returnsListCollectionNamePublisherTypeCorrectly() { + + String expectedType = MongoClientVersion.isVersion5orNewer() ? "ListCollectionNamesPublisher" : "Publisher"; + assertThat(MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(null).collectionNamePublisherType()) + .satisfies(type -> assertThat(ClassUtils.getShortName(type)).isEqualTo(expectedType)); + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java new file mode 100644 index 0000000000..1dc7030e70 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java @@ -0,0 +1,254 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.temporal.Temporal; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Stream; + +import org.bson.BsonArray; +import org.bson.BsonDouble; +import org.bson.BsonInt32; +import org.bson.BsonInt64; +import org.bson.BsonObjectId; +import org.bson.BsonString; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.util.BsonUtils; + +import com.mongodb.BasicDBList; + +/** + * Unit tests for {@link BsonUtils}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +class BsonUtilsTest { + + @Test // DATAMONGO-625 + void simpleToBsonValue() { + + assertThat(BsonUtils.simpleToBsonValue(Long.valueOf(10))).isEqualTo(new BsonInt64(10)); + assertThat(BsonUtils.simpleToBsonValue(new Integer(10))).isEqualTo(new BsonInt32(10)); + assertThat(BsonUtils.simpleToBsonValue(Double.valueOf(0.1D))).isEqualTo(new BsonDouble(0.1D)); + assertThat(BsonUtils.simpleToBsonValue("value")).isEqualTo(new BsonString("value")); + } + + @Test // DATAMONGO-625 + void primitiveToBsonValue() { + assertThat(BsonUtils.simpleToBsonValue(10L)).isEqualTo(new BsonInt64(10)); + } + + @Test // DATAMONGO-625 + void objectIdToBsonValue() { + + ObjectId source = new ObjectId(); + assertThat(BsonUtils.simpleToBsonValue(source)).isEqualTo(new BsonObjectId(source)); + } + + @Test // DATAMONGO-625 + void bsonValueToBsonValue() { + + BsonObjectId source = new BsonObjectId(new ObjectId()); + assertThat(BsonUtils.simpleToBsonValue(source)).isSameAs(source); + } + + @Test // DATAMONGO-625 + void unsupportedToBsonValue() { + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> BsonUtils.simpleToBsonValue(new Object())); + } + + @Test // GH-3571 + void removeNullIdIfNull() { + + Document source = new Document("_id", null).append("value", "v-1"); + + assertThat(BsonUtils.removeNullId(source)).isTrue(); + assertThat(source).doesNotContainKey("_id").containsKey("value"); + } + + @Test // GH-3571 + void removeNullIdDoesNotTouchNonNullOn() { + + Document source = new Document("_id", "id-value").append("value", "v-1"); + + assertThat(BsonUtils.removeNullId(source)).isFalse(); + assertThat(source).containsKeys("_id", "value"); + } + + @Test // GH-3571 + void asCollectionDoesNotModifyCollection() { + + Object source = new ArrayList<>(0); + + assertThat(BsonUtils.asCollection(source)).isSameAs(source); + } + + @Test // GH-3571 + void asCollectionConvertsArrayToCollection() 
+
+		Object source = new String[] { "one", "two" };
+
+		assertThat((Collection<Object>) BsonUtils.asCollection(source)).containsExactly("one", "two");
+	}
+
+	@Test // GH-3571
+	void asCollectionWrapsNonIterable() {
+
+		Object source = 100L;
+
+		assertThat((Collection<Object>) BsonUtils.asCollection(source)).containsExactly(source);
+	}
+
+	@Test // GH-3702
+	void supportsBsonShouldReportIfConversionSupported() {
+
+		assertThat(BsonUtils.supportsBson("foo")).isFalse();
+		assertThat(BsonUtils.supportsBson(new Document())).isTrue();
+		assertThat(BsonUtils.supportsBson(new BasicDBList())).isTrue();
+		assertThat(BsonUtils.supportsBson(Collections.emptyMap())).isTrue();
+	}
+
+	@ParameterizedTest // GH-4432
+	@MethodSource("javaTimeInstances")
+	void convertsJavaTimeTypesToBsonDateTime(Temporal source) {
+
+		assertThat(BsonUtils.simpleToBsonValue(source))
+				.isEqualTo(new Document("value", source).toBsonDocument().get("value"));
+	}
+
+	@ParameterizedTest // GH-4432
+	@MethodSource("collectionLikeInstances")
+	void convertsCollectionLikeToBsonArray(Object source) {
+
+		assertThat(BsonUtils.simpleToBsonValue(source))
+				.isEqualTo(new Document("value", source).toBsonDocument().get("value"));
+	}
+
+	@Test // GH-4432
+	void convertsPrimitiveArrayToBsonArray() {
+
+		assertThat(BsonUtils.simpleToBsonValue(new int[] { 1, 2, 3 }))
+				.isEqualTo(new BsonArray(List.of(new BsonInt32(1), new BsonInt32(2), new BsonInt32(3))));
+	}
+
+	@ParameterizedTest
+	@MethodSource("fieldNames")
+	void resolveValueForField(FieldName fieldName, boolean exists) {
+
+		Map<String, Object> source = new LinkedHashMap<>();
+		source.put("a", "a-value"); // top level
+		source.put("b", new Document("a", "b.a-value")); // path
+		source.put("c.a", "c.a-value"); // key
+
+		if (exists) {
+			assertThat(BsonUtils.resolveValue(source, fieldName)).isEqualTo(fieldName.name() + "-value");
+		} else {
+			assertThat(BsonUtils.resolveValue(source, fieldName)).isNull();
+		}
+	}
+
+	@Test
+	void retainsOrderWhenMappingValues() {
+
+		Document source = new Document();
+		source.append("z", "first-entry");
+		source.append("a", "second-entry");
+		source.append("0", "third-entry");
+		source.append("9", "fourth-entry");
+
+		Document target = BsonUtils.mapValues(source, (key, value) -> value);
+		assertThat(source).isNotSameAs(target).containsExactlyEntriesOf(source);
+	}
+
+	@Test
+	void retainsOrderWhenMappingKeys() {
+
+		Document source = new Document();
+		source.append("z", "first-entry");
+		source.append("a", "second-entry");
+
+		Document target = BsonUtils.mapEntries(source, entry -> entry.getKey().toUpperCase(), Entry::getValue);
+		assertThat(target).containsExactly(Map.entry("Z", "first-entry"), Map.entry("A", "second-entry"));
+	}
+
+	@Test
+	void appliesValueMapping() {
+
+		Document source = new Document();
+		source.append("z", "first-entry");
+		source.append("a", "second-entry");
+
+		Document target = BsonUtils.mapValues(source,
+				(key, value) -> new StringBuilder(value.toString()).reverse().toString());
+		assertThat(target).containsValues("yrtne-tsrif", "yrtne-dnoces");
+	}
+
+	@Test
+	void appliesKeyMapping() {
+
+		Document source = new Document();
+		source.append("z", "first-entry");
+		source.append("a", "second-entry");
+
+		Document target = BsonUtils.mapEntries(source, entry -> entry.getKey().toUpperCase(), Entry::getValue);
+		assertThat(target).containsKeys("Z", "A");
+	}
+
+	static Stream<Arguments> fieldNames() {
+		return Stream.of(//
+				Arguments.of(FieldName.path("a"), true), //
+				Arguments.of(FieldName.path("b.a"), true), //
+				Arguments.of(FieldName.path("c.a"), false), //
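+				// path(..) traverses nested documents key by key, while name(..) matches a
+				// single literal (possibly dotted) key, so the flat "c.a" entry above is
+				// only found via name("c.a") below: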
+				Arguments.of(FieldName.name("d"), false), //
+				Arguments.of(FieldName.name("b.a"), false), //
+				Arguments.of(FieldName.name("c.a"), true) //
+		);
+	}
+
+	static Stream<Arguments> javaTimeInstances() {
+
+		return Stream.of(Arguments.of(Instant.now()), Arguments.of(LocalDate.now()), Arguments.of(LocalDateTime.now()),
+				Arguments.of(LocalTime.now()));
+	}
+
+	static Stream<Arguments> collectionLikeInstances() {
+
+		return Stream.of(Arguments.of(new String[] { "1", "2", "3" }), Arguments.of(List.of("1", "2", "3")),
+				Arguments.of(new Integer[] { 1, 2, 3 }), Arguments.of(List.of(1, 2, 3)),
+				Arguments.of(new Date[] { new Date() }), Arguments.of(List.of(new Date())),
+				Arguments.of(new LocalDate[] { LocalDate.now() }), Arguments.of(List.of(LocalDate.now())));
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java
new file mode 100644
index 0000000000..20b5060f77
--- /dev/null
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java
@@ -0,0 +1,699 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util.json;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.bson.BsonBinary;
+import org.bson.BsonBinarySubType;
+import org.bson.BsonRegularExpression;
+import org.bson.Document;
+import org.bson.codecs.DecoderContext;
+import org.junit.jupiter.api.Test;
+import org.springframework.data.expression.ValueExpressionParser;
+import org.springframework.data.spel.EvaluationContextProvider;
+import org.springframework.data.spel.ExpressionDependencies;
+import org.springframework.expression.EvaluationContext;
+import org.springframework.expression.ParseException;
+import org.springframework.expression.TypedValue;
+import org.springframework.expression.spel.standard.SpelExpressionParser;
+import org.springframework.expression.spel.support.StandardEvaluationContext;
+
+/**
+ * Unit tests for {@link ParameterBindingJsonReader}.
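+ * Exercises positional placeholder ({@code ?0}) and expression-based ({@code ?#{...}},
+ * {@code :#{...}}) parameter binding while parsing JSON queries.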
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @author Rocco Lagrotteria
+ */
+class ParameterBindingJsonReaderUnitTests {
+
+	@Test
+	void bindUnquotedStringValue() {
+
+		Document target = parse("{ 'lastname' : ?0 }", "kohlin");
+		assertThat(target).isEqualTo(new Document("lastname", "kohlin"));
+	}
+
+	@Test
+	void bindQuotedStringValue() {
+
+		Document target = parse("{ 'lastname' : '?0' }", "kohlin");
+		assertThat(target).isEqualTo(new Document("lastname", "kohlin"));
+	}
+
+	@Test
+	void bindUnquotedIntegerValue() {
+
+		Document target = parse("{ 'lastname' : ?0 } ", 100);
+		assertThat(target).isEqualTo(new Document("lastname", 100));
+	}
+
+	@Test
+	void bindMultiplePlaceholders() {
+
+		Document target = parse("{ 'lastname' : ?0, 'firstname' : '?1' }", "Kohlin", "Dalinar");
+		assertThat(target).isEqualTo(Document.parse("{ 'lastname' : 'Kohlin', 'firstname' : 'Dalinar' }"));
+	}
+
+	@Test
+	void bindQuotedIntegerValue() {
+
+		Document target = parse("{ 'lastname' : '?0' }", 100);
+		assertThat(target).isEqualTo(new Document("lastname", "100"));
+	}
+
+	@Test // GH-4806
+	void regexConsidersOptions() {
+
+		Document target = parse("{ 'c': /^true$/i }");
+
+		BsonRegularExpression pattern = target.get("c", BsonRegularExpression.class);
+		assertThat(pattern.getPattern()).isEqualTo("^true$");
+		assertThat(pattern.getOptions()).isEqualTo("i");
+	}
+
+	@Test // GH-4806
+	void regexConsidersBindValueWithOptions() {
+
+		Document target = parse("{ 'c': /^?0$/i }", "foo");
+
+		BsonRegularExpression pattern = target.get("c", BsonRegularExpression.class);
+		assertThat(pattern.getPattern()).isEqualTo("^foo$");
+		assertThat(pattern.getOptions()).isEqualTo("i");
+	}
+
+	@Test // GH-4806
+	void treatsQuotedValueThatLooksLikeRegexAsPlainString() {
+
+		Document target = parse("{ 'c': '/^?0$/i' }", "foo");
+
+		assertThat(target.get("c")).isInstanceOf(String.class);
+	}
+
+	@Test // GH-4806
+	void treatsStringParameterValueThatLooksLikeRegexAsPlainString() {
+
+		Document target = parse("{ 'c': ?0 }", "/^foo$/i");
+
+		assertThat(target.get("c")).isInstanceOf(String.class);
+	}
+
+	@Test
+	void bindValueToRegex() {
+
+		Document target = parse("{ 'lastname' : { '$regex' : '^(?0)'} }", "kohlin");
+		assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { '$regex' : '^(kohlin)'} }"));
+	}
+
+	@Test
+	void bindValueToMultiRegex() {
+
+		Document target = parse(
+				"{'$or' : [{'firstname': {'$regex': '.*?0.*', '$options': 'i'}}, {'lastname' : {'$regex': '.*?0xyz.*', '$options': 'i'}} ]}",
+				"calamity");
+		assertThat(target).isEqualTo(Document.parse(
+				"{ \"$or\" : [ { \"firstname\" : { \"$regex\" : \".*calamity.*\" , \"$options\" : \"i\"}} , { \"lastname\" : { \"$regex\" : \".*calamityxyz.*\" , \"$options\" : \"i\"}}]}"));
+	}
+
+	@Test
+	void bindMultipleValuesToSingleToken() {
+
+		Document target = parse("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}", 0, 1, 2,
+				3, 4);
+		assertThat(target)
+				.isEqualTo(Document.parse("{$where: 'return this.date.getUTCMonth() == 2 && this.date.getUTCDay() == 3;'}"));
+	}
+
+	@Test
+	void bindValueToDbRef() {
+
+		Document target = parse("{ 'reference' : { $ref : 'reference', $id : ?0 }}", "kohlin");
+		assertThat(target).isEqualTo(Document.parse("{ 'reference' : { $ref : 'reference', $id : 'kohlin' }}"));
+	}
+
+	@Test
+	void bindToKey() {
+
+		Document target = parse("{ ?0 : ?1 }", "firstname", "kaladin");
+		assertThat(target).isEqualTo(Document.parse("{ 'firstname' : 'kaladin' }"));
+	}
+
+	@Test
+	void bindListValue() {
+
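+		// a List argument bound into $in expands to a BSON array of its elements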
+		Document target = parse("{ 'lastname' : { $in : ?0 } }", Arrays.asList("Kohlin", "Davar"));
+		assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { $in : ['Kohlin', 'Davar' ]} }"));
+	}
+
+	@Test
+	void bindListOfBinaryValue() {
+
+		byte[] value = "Kohlin".getBytes(StandardCharsets.UTF_8);
+		List<byte[]> args = Collections.singletonList(value);
+
+		Document target = parse("{ 'lastname' : { $in : ?0 } }", args);
+		assertThat(target).isEqualTo(new Document("lastname", new Document("$in", args)));
+	}
+
+	@Test
+	void bindExtendedExpression() {
+
+		Document target = parse("{'id':?#{ [0] ? { $exists :true} : [1] }}", true, "firstname", "kaladin");
+		assertThat(target).isEqualTo(Document.parse("{ \"id\" : { \"$exists\" : true}}"));
+	}
+
+	@Test
+	void bindDocumentValue() {
+
+		Document target = parse("{ 'lastname' : ?0 }", new Document("$eq", "Kohlin"));
+		assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { '$eq' : 'Kohlin' } }"));
+	}
+
+	@Test
+	void arrayWithoutBinding() {
+
+		Document target = parse("{ 'lastname' : { $in : [\"Kohlin\", \"Davar\"] } }");
+		assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { $in : ['Kohlin', 'Davar' ]} }"));
+	}
+
+	@Test
+	void bindSpEL() {
+
+		Document target = parse("{ arg0 : ?#{[0]} }", 100.01D);
+		assertThat(target).isEqualTo(new Document("arg0", 100.01D));
+	}
+
+	@Test // DATAMONGO-2315
+	void bindDateAsDate() {
+
+		Date date = new Date();
+		Document target = parse("{ 'end_date' : { $gte : { $date : ?0 } } }", date);
+
+		assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + date.getTime() + " } } } "));
+	}
+
+	@Test // DATAMONGO-2315
+	void bindQuotedDateAsDate() {
+
+		Date date = new Date();
+		Document target = parse("{ 'end_date' : { $gte : { $date : '?0' } } }", date);
+
+		assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + date.getTime() + " } } } "));
+	}
+
+	@Test // DATAMONGO-2315
+	void bindStringAsDate() {
+
+		Document target = parse("{ 'end_date' : { $gte : { $date : ?0 } } }", "2019-07-04T12:19:23.000Z");
+
+		assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : '2019-07-04T12:19:23.000Z' } } } "));
+	}
+
+	@Test // DATAMONGO-2315
+	void bindNumberAsDate() {
+
+		Long time = new Date().getTime();
+		Document target = parse("{ 'end_date' : { $gte : { $date : ?0 } } }", time);
+
+		assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + time + " } } } "));
+	}
+
+	@Test // GH-3750
+	public void shouldParseISODate() {
+
+		String json = "{ 'value' : ISODate(\"1970-01-01T00:00:00Z\") }";
+		Date value = parse(json).get("value", Date.class);
+		assertThat(value.getTime()).isZero();
+	}
+
+	@Test // GH-3750
+	public void shouldParseISODateWith24HourTimeSpecification() {
+
+		String json = "{ 'value' : ISODate(\"2013-10-04T12:07:30.443Z\") }";
+		Date value = parse(json).get("value", Date.class);
+		assertThat(value.getTime()).isEqualTo(1380888450443L);
+	}
+
+	@Test // GH-3750
+	public void shouldParse$date() {
+
+		String json = "{ 'value' : { \"$date\" : \"2015-04-16T14:55:57.626Z\" } }";
+		Date value = parse(json).get("value", Date.class);
+		assertThat(value.getTime()).isEqualTo(1429196157626L);
+	}
+
+	@Test // GH-3750
+	public void shouldParse$dateWithTimeOffset() {
+
+		String json = "{ 'value' :{ \"$date\" : \"2015-04-16T16:55:57.626+02:00\" } }";
+		Date value = parse(json).get("value", Date.class);
+		assertThat(value.getTime()).isEqualTo(1429196157626L);
+	}
+
+	@Test // GH-4282
+	public void shouldReturnNullAsSuch() {
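+		// ObjectId(...) requires a hex string, so a null argument cannot be bound
+		// here and is rejected with an IllegalArgumentException instead of yielding null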
+
+		String json = "{ 'value' : ObjectId(?0) }";
+		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> parse(json, new Object[] { null }))
+				.withMessageContaining("hexString");
+	}
+
+	@Test // DATAMONGO-2418
+	void shouldNotAccessSpElEvaluationContextWhenNoSpElPresentInBindableTarget() {
+
+		Object[] args = new Object[] { "value" };
+		EvaluationContext evaluationContext = new StandardEvaluationContext() {
+
+			@Override
+			public TypedValue getRootObject() {
+				throw new RuntimeException("o_O");
+			}
+		};
+
+		ParameterBindingJsonReader reader = new ParameterBindingJsonReader("{ 'name':'?0' }",
+				new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext));
+		Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build());
+
+		assertThat(target).isEqualTo(new Document("name", "value"));
+	}
+
+	@Test // DATAMONGO-2476
+	void bindUnquotedParameterInArray() {
+
+		Document target = parse("{ 'name' : { $in : [?0] } }", "kohlin");
+		assertThat(target).isEqualTo(new Document("name", new Document("$in", Collections.singletonList("kohlin"))));
+	}
+
+	@Test // DATAMONGO-2476
+	void bindMultipleUnquotedParameterInArray() {
+
+		Document target = parse("{ 'name' : { $in : [?0,?1] } }", "dalinar", "kohlin");
+		assertThat(target).isEqualTo(new Document("name", new Document("$in", Arrays.asList("dalinar", "kohlin"))));
+	}
+
+	@Test // DATAMONGO-2476
+	void bindUnquotedParameterInArrayWithSpaces() {
+
+		Document target = parse("{ 'name' : { $in : [ ?0 ] } }", "kohlin");
+		assertThat(target).isEqualTo(new Document("name", new Document("$in", Collections.singletonList("kohlin"))));
+	}
+
+	@Test // DATAMONGO-2476
+	void bindQuotedParameterInArray() {
+
+		Document target = parse("{ 'name' : { $in : ['?0'] } }", "kohlin");
+		assertThat(target).isEqualTo(new Document("name", new Document("$in", Collections.singletonList("kohlin"))));
+	}
+
+	@Test // DATAMONGO-2476
+	void bindQuotedMultiParameterInArray() {
+
+		Document target = parse("{ 'name' : { $in : ['?0,?1'] } }", "dalinar", "kohlin");
+		assertThat(target)
+				.isEqualTo(new Document("name", new Document("$in", Collections.singletonList("dalinar,kohlin"))));
+	}
+
+	@Test // DATAMONGO-1894
+	void discoversNoDependenciesInExpression() {
+
+		String json = "{ $and : [?#{ [0] == null ? { '$where' : 'true' } : { 'v1' : { '$in' : {[0]} } } }]}";
+
+		ExpressionDependencies expressionDependencies = new ParameterBindingDocumentCodec().captureExpressionDependencies(
+				json, it -> new Object(), ValueExpressionParser.create(SpelExpressionParser::new));
+
+		assertThat(expressionDependencies).isEqualTo(ExpressionDependencies.none());
+	}
+
+	@Test // DATAMONGO-1894
+	void discoversCorrectlyDependenciesInExpression() {
+
+		String json = "{ hello: ?#{hasRole('foo')} }";
+
+		ExpressionDependencies expressionDependencies = new ParameterBindingDocumentCodec().captureExpressionDependencies(
+				json, it -> new Object(), ValueExpressionParser.create(SpelExpressionParser::new));
+
+		assertThat(expressionDependencies).isNotEmpty();
+		assertThat(expressionDependencies.get()).hasSize(1);
+	}
+
+	@Test // DATAMONGO-2523
+	void bindSpelExpressionInArrayCorrectly/* closing bracket must not have leading whitespace! */() {
+
+		Document target = parse("{ $and : [?#{ [0] == null ? { '$where' : 'true' } : { 'v1' : { '$in' : {[0]} } } }]}", 1);
+
+		assertThat(target).isEqualTo(Document.parse("{\"$and\": [{\"v1\": {\"$in\": [1]}}]}"));
+	}
+
+	@Test // DATAMONGO-2545
+	void shouldBindArgumentsViaIndexInSpelExpressions() {
+
+		Object[] args = new Object[] { "yess", "nooo" };
+		StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT
+				.getEvaluationContext(args);
+
+		ParameterBindingJsonReader reader = new ParameterBindingJsonReader(
+				"{ 'isBatman' : ?#{ T(" + this.getClass().getName() + ").isBatman() ? [0] : [1] }}",
+				new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext));
+		Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build());
+
+		assertThat(target).isEqualTo(new Document("isBatman", "nooo"));
+	}
+
+	@Test // DATAMONGO-2545
+	void shouldAllowMethodArgumentPlaceholdersInSpelExpressions/*because this worked before*/() {
+
+		Object[] args = new Object[] { "yess", "nooo" };
+		StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT
+				.getEvaluationContext(args);
+
+		ParameterBindingJsonReader reader = new ParameterBindingJsonReader(
+				"{ 'isBatman' : ?#{ T(" + this.getClass().getName() + ").isBatman() ? '?0' : '?1' }}",
+				new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext));
+		Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build());
+
+		assertThat(target).isEqualTo(new Document("isBatman", "nooo"));
+	}
+
+	@Test // DATAMONGO-2545
+	void shouldAllowMethodArgumentPlaceholdersInQuotedSpelExpressions/*because this worked before*/() {
+
+		Object[] args = new Object[] { "yess", "nooo" };
+		StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT
+				.getEvaluationContext(args);
+
+		ParameterBindingJsonReader reader = new ParameterBindingJsonReader(
+				"{ 'isBatman' : \"?#{ T(" + this.getClass().getName() + ").isBatman() ? '?0' : '?1' }\" }",
+				new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext));
+		Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build());
+
+		assertThat(target).isEqualTo(new Document("isBatman", "nooo"));
+	}
+
+	@Test // DATAMONGO-2545
+	void evaluatesSpelExpressionDefiningEntireQuery() {
+
+		Object[] args = new Object[] {};
+		StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT
+				.getEvaluationContext(args);
+		evaluationContext.setRootObject(new DummySecurityObject(new DummyWithId("wonderwoman")));
+
+		String json = "?#{ T(" + this.getClass().getName()
+				+ ").isBatman() ? 
{'_class': { '$eq' : 'region' }} : { '$and' : { {'_class': { '$eq' : 'region' } }, {'user.supervisor': principal.id } } } }"; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target) + .isEqualTo(new Document("$and", Arrays.asList(new Document("_class", new Document("$eq", "region")), + new Document("user.supervisor", "wonderwoman")))); + } + + @Test // GH-3871 + public void capturingExpressionDependenciesShouldNotThrowParseErrorForSpelOnlyJson() { + + Object[] args = new Object[] { "1", "2" }; + String json = "?#{ true ? { 'name': #name } : { 'name' : #name + 'trouble' } }"; + + new ParameterBindingDocumentCodec().captureExpressionDependencies(json, (index) -> args[index], + ValueExpressionParser.create(SpelExpressionParser::new)); + } + + @Test // GH-3871, GH-4089 + public void bindEntireQueryUsingSpelExpressionWhenEvaluationResultIsDocument() { + + Object[] args = new Object[] { "expected", "unexpected" }; + String json = "?#{ true ? { 'name': ?0 } : { 'name' : ?1 } }"; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + assertThat(target).isEqualTo(new Document("name", "expected")); + } + + @Test // GH-3871, GH-4089 + public void throwsExceptionWhenBindEntireQueryUsingSpelExpressionIsMalFormatted() { + + Object[] args = new Object[] { "expected", "unexpected" }; + String json = "?#{ true ? { 'name': ?0 { } } : { 'name' : ?1 } }"; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + assertThatExceptionOfType(ParseException.class).isThrownBy(() -> { + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + + new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + }); + } + + @Test // GH-3871, GH-4089 + public void bindEntireQueryUsingSpelExpressionWhenEvaluationResultIsJsonStringContainingUUID() { + + Object[] args = new Object[] { UUID.fromString("cfbca728-4e39-4613-96bc-f920b5c37e16"), "unexpected" }; + String json = "?#{ true ? 
{ 'name': ?0 } : { 'name' : ?1 } }"; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target.get("name")).isInstanceOf(UUID.class); + } + + @Test // GH-3871 + void bindEntireQueryUsingSpelExpression() { + + Object[] args = new Object[] { "region" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + evaluationContext.setRootObject(new DummySecurityObject(new DummyWithId("wonderwoman"))); + + String json = "?#{ T(" + this.getClass().getName() + ").applyFilterByUser('?0' ,principal.id) }"; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target) + .isEqualTo(new Document("$and", Arrays.asList(new Document("_class", new Document("$eq", "region")), + new Document("user.supervisor", "wonderwoman")))); + } + + @Test // GH-3871 + void bindEntireQueryUsingParameter() { + + Object[] args = new Object[] { "{ 'itWorks' : true }" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + String json = "?0"; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target).isEqualTo(new Document("itWorks", true)); + } + + @Test // DATAMONGO-2571 + void shouldParseRegexCorrectly() { + + Document target = parse("{ $and: [{'fieldA': {$in: [/ABC.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}"); + assertThat(target) + .isEqualTo(Document.parse("{ $and: [{'fieldA': {$in: [/ABC.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}")); + } + + @Test // DATAMONGO-2571 + void shouldParseRegexWithPlaceholderCorrectly() { + + Document target = parse("{ $and: [{'fieldA': {$in: [/?0.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}", "ABC"); + assertThat(target) + .isEqualTo(Document.parse("{ $and: [{'fieldA': {$in: [/ABC.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}")); + } + + @Test // DATAMONGO-2633 + void shouldParseNestedArrays() { + + Document target = parse("{ 'stores.location' : { $geoWithin: { $centerSphere: [ [ ?0, 48.799029 ] , ?1 ] } } }", + 1.948516D, 0.004D); + assertThat(target).isEqualTo(Document + .parse("{ 'stores.location' : { $geoWithin: { $centerSphere: [ [ 1.948516, 48.799029 ] , 0.004 ] } } }")); + } + + @Test // GH-3633 + void parsesNullValue() { + + Document target = parse("{ 'parent' : null }"); + assertThat(target).isEqualTo(new Document("parent", null)); + } + + @Test // GH-4089 + void retainsSpelArgumentTypeViaArgumentIndex() { + + String source = "new java.lang.Object()"; + Document target = parse("{ arg0 : ?#{[0]} }", source); + assertThat(target.get("arg0")).isEqualTo(source); + } + + @Test // GH-4089 + void 
retainsSpelArgumentTypeViaParameterPlaceholder() {
+
+		String source = "new java.lang.Object()";
+		Document target = parse("{ arg0 : :#{?0} }", source);
+		assertThat(target.get("arg0")).isEqualTo(source);
+	}
+
+	@Test // GH-4089
+	void errorsOnNonDocument() {
+
+		String source = "new java.lang.Object()";
+		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> parse(":#{?0}", source));
+	}
+
+	@Test // GH-4089
+	void bindsFullDocument() {
+
+		Document source = new Document();
+		assertThat(parse(":#{?0}", source)).isSameAs(source);
+	}
+
+	@Test // GH-4089
+	void enforcesStringSpelArgumentTypeViaParameterPlaceholderWhenQuoted() {
+
+		Integer source = 10;
+		Document target = parse("{ arg0 : :#{'?0'} }", source);
+		assertThat(target.get("arg0")).isEqualTo("10");
+	}
+
+	@Test // GH-4089
+	void enforcesSpelArgumentTypeViaParameterPlaceholderWhenQuoted() {
+
+		String source = "new java.lang.Object()";
+		Document target = parse("{ arg0 : :#{'?0'} }", source);
+		assertThat(target.get("arg0")).isEqualTo(source);
+	}
+
+	@Test // GH-4089
+	void retainsSpelArgumentTypeViaParameterPlaceholderWhenValueContainsSingleQuotes() {
+
+		String source = "' + new java.lang.Object() + '";
+		Document target = parse("{ arg0 : :#{?0} }", source);
+		assertThat(target.get("arg0")).isEqualTo(source);
+	}
+
+	@Test // GH-4089
+	void retainsSpelArgumentTypeViaParameterPlaceholderWhenValueContainsDoubleQuotes() {
+
+		String source = "\\\" + new java.lang.Object() + \\\"";
+		Document target = parse("{ arg0 : :#{?0} }", source);
+		assertThat(target.get("arg0")).isEqualTo(source);
+	}
+
+	@Test // GH-3750
+	void shouldParseUUIDasStandardRepresentation() {
+
+		String json = "{ 'value' : UUID(\"b5f21e0c-2a0d-42d6-ad03-d827008d8ab6\") }";
+
+		BsonBinary value = parse(json).get("value", BsonBinary.class);
+		assertThat(value.getType()).isEqualTo(BsonBinarySubType.UUID_STANDARD.getValue());
+	}
+
+	@Test // GH-3750
+	public void shouldParse$uuidAsStandardRepresentation() {
+
+		String json = "{ 'value' : { '$uuid' : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\" } }";
+		BsonBinary value = parse(json).get("value", BsonBinary.class);
+		assertThat(value.getType()).isEqualTo(BsonBinarySubType.UUID_STANDARD.getValue());
+	}
+
+	private static Document parse(String json, Object... 
args) { + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, args); + return new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + } + + // DATAMONGO-2545 + public static boolean isBatman() { + return false; + } + + public static String applyFilterByUser(String _class, String username) { + switch (username) { + case "batman": + return "{'_class': { '$eq' : '" + _class + "' }}"; + default: + return "{ '$and' : [ {'_class': { '$eq' : '" + _class + "' } }, {'user.supervisor': '" + username + "' } ] }"; + } + } + + public static class DummySecurityObject { + + DummyWithId principal; + + public DummySecurityObject(DummyWithId principal) { + this.principal = principal; + } + + public DummyWithId getPrincipal() { + return this.principal; + } + + public void setPrincipal(DummyWithId principal) { + this.principal = principal; + } + + public String toString() { + return "ParameterBindingJsonReaderUnitTests.DummySecurityObject(principal=" + this.getPrincipal() + ")"; + } + } + + public static class DummyWithId { + + String id; + + public DummyWithId(String id) { + this.id = id; + } + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ParameterBindingJsonReaderUnitTests.DummyWithId(id=" + this.getId() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/BulkOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/BulkOperationExtensionsTests.kt new file mode 100644 index 0000000000..e5c08c74dc --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/BulkOperationExtensionsTests.kt @@ -0,0 +1,79 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Test +import org.springframework.data.mongodb.core.query.Criteria +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.Update +import org.springframework.data.mongodb.core.query.UpdateDefinition +import org.springframework.data.util.Pair.of + +/** + * Unit tests for BulkOperationExtensions. 
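+ * Each test verifies that the extension overload accepting kotlin.Pair delegates to
+ * its Java counterpart taking org.springframework.data.util.Pair.
+ *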
+ * @author 2tsumo-hitori
+ */
+class BulkOperationExtensionsTests {
+
+	private val bulkOperation = mockk<BulkOperations>(relaxed = true)
+
+	@Test // GH-4911
+	fun `BulkOperation#updateMulti using kotlin#Pair should call its Java counterpart`() {
+
+		val list: MutableList<Pair<Query, UpdateDefinition>> = mutableListOf()
+		list.add(where("value", "v2") to set("value", "v3"))
+
+		bulkOperation.updateMulti(list)
+
+		val expected = list.map { (query, update) -> of(query, update) }
+		verify { bulkOperation.updateMulti(expected) }
+	}
+
+	@Test // GH-4911
+	fun `BulkOperation#upsert using kotlin#Pair should call its Java counterpart`() {
+
+		val list: MutableList<Pair<Query, Update>> = mutableListOf()
+		list.add(where("value", "v2") to set("value", "v3"))
+
+		bulkOperation.upsert(list)
+
+		val expected = list.map { (query, update) -> of(query, update) }
+		verify { bulkOperation.upsert(expected) }
+	}
+
+	@Test // GH-4911
+	fun `BulkOperation#updateOne using kotlin#Pair should call its Java counterpart`() {
+
+		val list: MutableList<Pair<Query, UpdateDefinition>> = mutableListOf()
+		list.add(where("value", "v2") to set("value", "v3"))
+
+		bulkOperation.updateOne(list)
+
+		val expected = list.map { (query, update) -> of(query, update) }
+		verify { bulkOperation.updateOne(expected) }
+	}
+
+	private fun where(field: String, value: String): Query {
+		return Query().addCriteria(Criteria.where(field).`is`(value))
+	}
+
+	private fun set(field: String, value: String): Update {
+		return Update().set(field, value)
+	}
+
+}
diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/Entities.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/Entities.kt
new file mode 100644
index 0000000000..f29df408b2
--- /dev/null
+++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/Entities.kt
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2019-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core
+
+import org.springframework.data.annotation.Id
+import org.springframework.data.annotation.LastModifiedDate
+import org.springframework.data.annotation.Version
+import org.springframework.data.mongodb.core.mapping.Document
+import java.time.Instant
+
+@Document("versioned-auditable")
+data class KAuditableVersionedEntity(
+		@Id val id: String?,
+		val value: String,
+		@Version val version: Long?,
+		@LastModifiedDate val modificationDate: Instant?
+) {
+	fun withValue(value: String) = copy(value = value)
+}
diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt
new file mode 100644
index 0000000000..01804af55f
--- /dev/null
+++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core
+
+import example.first.First
+import io.mockk.mockk
+import io.mockk.verify
+import org.junit.Test
+
+/**
+ * @author Sebastien Deleuze
+ * @author Mark Paluch
+ */
+class ExecutableAggregationOperationExtensionsTests {
+
+	val operation = mockk<ExecutableAggregationOperation>(relaxed = true)
+
+	@Test // DATAMONGO-1689
+	fun `aggregateAndReturn() with reified type parameter extension should call its Java counterpart`() {
+
+		operation.aggregateAndReturn<First>()
+		verify { operation.aggregateAndReturn(First::class.java) }
+	}
+}
diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt
new file mode 100644
index 0000000000..516b01793a
--- /dev/null
+++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test + +/** + * @author Sebastien Deleuze + * @author Mark Paluch + */ +class ExecutableFindOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + val operationWithProjection = mockk>(relaxed = true) + + val distinctWithProjection = mockk(relaxed = true) + + val findDistinct = mockk(relaxed = true) + + val executableFind = mockk>(relaxed = true) + + @Test // DATAMONGO-1689 + fun `ExecutableFindOperation#query() with reified type parameter extension should call its Java counterpart`() { + + operation.query() + verify { operation.query(First::class.java) } + } + + @Test // DATAMONGO-1689, DATAMONGO-2086 + fun `ExecutableFindOperation#FindOperationWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } + + @Test // DATAMONGO-2086 + fun `ExecutableFindOperation#DistinctWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + distinctWithProjection.asType() + verify { distinctWithProjection.`as`(User::class.java) } + } + + @Test // DATAMONGO-2417 + fun `ExecutableFindOperation#distrinct() using KProperty1 should call its Java counterpart`() { + + every { operation.query(KotlinUser::class.java) } returns executableFind + + operation.distinct(KotlinUser::username) + verify { + operation.query(KotlinUser::class.java) + executableFind.distinct("username") + } + } + + @Test // DATAMONGO-2417 + fun `ExecutableFindOperation#FindDistinct#field() using KProperty should call its Java counterpart`() { + + findDistinct.distinct(KotlinUser::username) + verify { findDistinct.distinct("username") } + } + + data class KotlinUser(val username: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt new file mode 100644 index 0000000000..27b8063958 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt @@ -0,0 +1,38 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test + +/** + * @author Sebastien Deleuze + * @author Mark Paluch + */ +class ExecutableInsertOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1689 + fun `insert() with reified type parameter extension should call its Java counterpart`() { + + operation.insert() + verify { operation.insert(First::class.java) } + } + +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensionsTests.kt new file mode 100644 index 0000000000..8cda511a54 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensionsTests.kt @@ -0,0 +1,46 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test + +/** + * @author Christoph Strobl + * @author Sebastien Deleuze + */ +class ExecutableMapReduceOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + val operationWithProjection = mockk>(relaxed = true) + + @Test // DATAMONGO-1929 + fun `ExecutableMapReduceOperation#mapReduce() with reified type parameter extension should call its Java counterpart`() { + + operation.mapReduce() + verify { operation.mapReduce(First::class.java) } + } + + @Test // DATAMONGO-1929, DATAMONGO-2086 + fun `ExecutableMapReduceOperation#MapReduceWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt new file mode 100644 index 0000000000..81e1702114 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt @@ -0,0 +1,38 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test + +/** + * @author Sebastien Deleuze + * @author Mark Paluch + */ +class ExecutableRemoveOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1689 + fun `remove() with reified type parameter extension should call its Java counterpart`() { + + operation.remove() + verify { operation.remove(First::class.java) } + } + +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt new file mode 100644 index 0000000000..86bddda4c8 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt @@ -0,0 +1,39 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test + +/** + * Unit tests for `ExecutableUpdateOperationExtensions.kt`. + * + * @author Christoph Strobl + * @author Sebastien Deleuze + */ +class ExecutableUpdateOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1719 + fun `update() with reified type parameter extension should call its Java counterpart`() { + + operation.update() + verify { operation.update(First::class.java) } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt new file mode 100644 index 0000000000..1fc68c85c5 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt @@ -0,0 +1,496 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import example.second.Second +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test +import org.springframework.data.mongodb.core.BulkOperations.BulkMode +import org.springframework.data.mongodb.core.aggregation.Aggregation +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.Update + +/** + * @author Sebastien Deleuze + * @author Mark Paluch + * @author Christoph Strobl + */ +class MongoOperationsExtensionsTests { + + val operations = mockk(relaxed = true) + + @Test // DATAMONGO-1689 + fun `getCollectionName() with reified type parameter extension should call its Java counterpart`() { + + operations.getCollectionName() + verify { operations.getCollectionName(First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `execute(CollectionCallback) with reified type parameter extension should call its Java counterpart`() { + + val collectionCallback = mockk>() + operations.execute(collectionCallback) + verify { operations.execute(First::class.java, collectionCallback) } + } + + @Test // DATAMONGO-1689 + fun `stream(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + operations.stream(query) + verify { operations.stream(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `stream(Query, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val collectionName = "foo" + operations.stream(query, collectionName) + verify { operations.stream(query, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `createCollection() with reified type parameter extension should call its Java counterpart`() { + + operations.createCollection() + verify { operations.createCollection(First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `createCollection(CollectionOptions) with reified type parameter extension should call its Java counterpart`() { + + val collectionOptions = mockk() + operations.createCollection(collectionOptions) + verify { operations.createCollection(First::class.java, collectionOptions) } + } + + @Test // DATAMONGO-1689 + fun `collectionExists() with reified type parameter extension should call its Java counterpart`() { + + operations.collectionExists() + verify { operations.collectionExists(First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `dropCollection() with reified type parameter extension should call its Java counterpart`() { + + operations.dropCollection() + verify { operations.dropCollection(First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `indexOps() with reified type parameter extension should call its Java counterpart`() { + + operations.indexOps() + verify { operations.indexOps(First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `bulkOps(BulkMode) with reified type parameter extension should call its Java counterpart`() { + + val bulkMode = BulkMode.ORDERED + + operations.bulkOps(bulkMode) + verify { operations.bulkOps(bulkMode, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `bulkOps(BulkMode, String) with reified type parameter extension should call its Java counterpart`() { + + val bulkMode = BulkMode.ORDERED + val collectionName = "foo" + + operations.bulkOps(bulkMode, collectionName) + verify { operations.bulkOps(bulkMode, First::class.java, collectionName) } + } + + 
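+	// A note on the pattern used throughout this class: each reified extension is a
+	// thin forwarder to the Class-based Java API, roughly (illustrative sketch, not
+	// part of the production sources; the actual extensions may take further
+	// parameters such as a collection name):
+	//
+	//   inline fun <reified T : Any> MongoOperations.findAll(): List<T> =
+	//       findAll(T::class.java)
+	//
+	// which is why every test only needs to verify the delegated Java call.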
@Test // DATAMONGO-1689 + fun `findAll() with reified type parameter extension should call its Java counterpart`() { + + operations.findAll() + verify { operations.findAll(First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `findAll(String) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + + operations.findAll(collectionName) + verify { operations.findAll(First::class.java, collectionName) } + } + + @Test // #3508 + fun `aggregate(Aggregation) with reified type parameter extension should call its Java counterpart`() { + + val aggregation = mockk() + + operations.aggregate(aggregation) + verify { + operations.aggregate( + aggregation, + Second::class.java, + First::class.java + ) + } + } + + @Test // DATAMONGO-1689 + fun `aggregate(Aggregation, String) with reified type parameter extension should call its Java counterpart`() { + + val aggregation = mockk() + val collectionName = "foo" + + operations.aggregate(aggregation, collectionName) + verify { operations.aggregate(aggregation, collectionName, First::class.java) } + } + + @Test // #3508 + fun `aggregateStream(Aggregation) with reified type parameter extension should call its Java counterpart`() { + + val aggregation = mockk() + + operations.aggregateStream(aggregation) + verify { + operations.aggregateStream( + aggregation, + Second::class.java, + First::class.java + ) + } + } + + @Test // DATAMONGO-1689 + fun `aggregateStream(Aggregation, String) with reified type parameter extension should call its Java counterpart`() { + + val aggregation = mockk() + val collectionName = "foo" + + operations.aggregateStream(aggregation, collectionName) + verify { + operations.aggregateStream( + aggregation, + collectionName, + First::class.java + ) + } + } + + @Test // DATAMONGO-1689 + fun `mapReduce(String, String, String) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + val mapFunction = "bar" + val reduceFunction = "baz" + + operations.mapReduce(collectionName, mapFunction, reduceFunction) + verify { operations.mapReduce(collectionName, mapFunction, reduceFunction, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `mapReduce(String, String, String, MapReduceOptions) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + val mapFunction = "bar" + val reduceFunction = "baz" + val options = mockk() + + operations.mapReduce(collectionName, mapFunction, reduceFunction, options) + verify { operations.mapReduce(collectionName, mapFunction, reduceFunction, options, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `mapReduce(Query, String, String, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val collectionName = "foo" + val mapFunction = "bar" + val reduceFunction = "baz" + + operations.mapReduce(query, collectionName, mapFunction, reduceFunction) + verify { operations.mapReduce(query, collectionName, mapFunction, reduceFunction, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `mapReduce(Query, String, String, String, MapReduceOptions) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val collectionName = "foo" + val mapFunction = "bar" + val reduceFunction = "baz" + val options = mockk() + + operations.mapReduce(query, collectionName, mapFunction, reduceFunction, options) + verify { operations.mapReduce(query, collectionName, 
mapFunction, reduceFunction, options, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.findOne(query) + verify { operations.findOne(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `findOne(Query, String) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + val query = mockk() + + operations.findOne(query, collectionName) + verify { operations.findOne(query, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `exists(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.exists(query) + verify { operations.exists(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `find(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.find(query) + verify { operations.find(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `find(Query, String) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + val query = mockk() + + operations.find(query, collectionName) + verify { operations.find(query, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `findById(Any) with reified type parameter extension should call its Java counterpart`() { + + val id = 1L + + operations.findById(id) + verify { operations.findById(id, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `findById(Any, String) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + val id = 1L + + operations.findById(id, collectionName) + verify { operations.findById(id, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `findAndModify(Query, Update, FindAndModifyOptions) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val update = mockk() + val options = mockk() + + operations.findAndModify(query, update, options) + verify { operations.findAndModify(query, update, options, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `findAndModify(Query, Update, FindAndModifyOptions, String) with reified type parameter extension should call its Java counterpart`() { + + val collectionName = "foo" + val query = mockk() + val update = mockk() + val options = mockk() + + operations.findAndModify(query, update, options, collectionName) + verify { operations.findAndModify(query, update, options, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `findAndRemove(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.findAndRemove(query) + verify { operations.findAndRemove(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `findAndRemove(Query, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val collectionName = "foo" + + operations.findAndRemove(query, collectionName) + verify { operations.findAndRemove(query, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `count() with reified type parameter extension should call its Java counterpart`() { + + operations.count() + verify { operations.count(any(), eq(First::class.java)) } + } + + @Test // DATAMONGO-1689 + 
+	fun `count(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.count<First>(query)
+		verify { operations.count(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `count(Query, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val collectionName = "foo"
+
+		operations.count<First>(query, collectionName)
+		verify { operations.count(query, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-2208
+	fun `insert(Collection) with reified type parameter extension should call its Java counterpart`() {
+
+		val collection = listOf(First(), First())
+
+		operations.insert(collection)
+		verify { operations.insert(collection, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `upsert(Query, Update) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+
+		operations.upsert<First>(query, update)
+		verify { operations.upsert(query, update, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `upsert(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+		val collectionName = "foo"
+
+		operations.upsert<First>(query, update, collectionName)
+		verify { operations.upsert(query, update, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `updateFirst(Query, Update) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+
+		operations.updateFirst<First>(query, update)
+		verify { operations.updateFirst(query, update, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `updateFirst(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+		val collectionName = "foo"
+
+		operations.updateFirst<First>(query, update, collectionName)
+		verify { operations.updateFirst(query, update, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `updateMulti(Query, Update) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+
+		operations.updateMulti<First>(query, update)
+		verify { operations.updateMulti(query, update, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `updateMulti(Query, Update, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+		val collectionName = "foo"
+
+		operations.updateMulti<First>(query, update, collectionName)
+		verify { operations.updateMulti(query, update, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `remove(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.remove<First>(query)
+		verify { operations.remove(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `remove(Query, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val collectionName = "foo"
+
+		operations.remove<First>(query, collectionName)
+		verify { operations.remove(query, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAllAndRemove(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.findAllAndRemove<First>(query)
+		verify {
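+			// All of these delegation tests share one pattern: the reified Kotlin
+			// extension forwards T::class.java to the Class-based Java API, as the
+			// call below illustrates. A hedged sketch of the extension itself
+			// (the actual definition lives in MongoOperationsExtensions.kt):
+			//
+			//   inline fun <reified T : Any> MongoOperations.findAllAndRemove(query: Query): List<T> =
+			//       findAllAndRemove(query, T::class.java)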
operations.findAllAndRemove(query, First::class.java) } + } + + @Test // DATAMONGO-1761 + fun `findDistinctImplicit(Query, String) should call java counterpart`() { + + val query = mockk() + + operations.findDistinct(query, "field") + verify { operations.findDistinct(query, "field", First::class.java, String::class.java) } + } + + @Test // DATAMONGO-1761 + fun `findDistinct(Query, String, String) should call java counterpart`() { + + val query = mockk() + + operations.findDistinct(query, "field", "collection") + verify { operations.findDistinct(query, "field", "collection", First::class.java, String::class.java) } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt new file mode 100644 index 0000000000..4249b58468 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt @@ -0,0 +1,58 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import reactor.core.publisher.Flux + +/** + * @author Mark Paluch + * @author Sebastien Deleuze + */ +class ReactiveAggregationOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1719 + fun `aggregateAndReturn() with reified type parameter extension should call its Java counterpart`() { + + operation.aggregateAndReturn() + verify { operation.aggregateAndReturn(First::class.java) } + } + + @Test // DATAMONGO-2255 + fun terminatingAggregationOperationAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } + +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensionsTests.kt new file mode 100644 index 0000000000..58c071ccb6 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensionsTests.kt @@ -0,0 +1,65 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.bson.Document +import org.junit.Test +import reactor.core.publisher.Flux + +/** + * @author Christoph Strobl + * @soundtrack Rage Against The Machine - Take the Power Back + */ +class ReactiveChangeStreamOperationExtensionsTests { + + val operation = mockk(relaxed = true) + val changestream = mockk>(relaxed = true) + + @Test // DATAMONGO-2089 + fun `ReactiveChangeStreamOperation#changeStream() with reified type parameter extension should call its Java counterpart`() { + + operation.changeStream() + verify { operation.changeStream(First::class.java) } + } + + @Test // DATAMONGO-2089 + fun `TerminatingChangeStream#listen() flow extension`() { + + val doc1 = mockk>() + val doc2 = mockk>() + val doc3 = mockk>() + + val spec = mockk>() + every { spec.listen() } returns Flux.just(doc1, doc2, doc3) + + runBlocking { + assertThat(spec.flow().toList()).contains(doc1, doc2, doc3) + } + + verify { + spec.listen() + } + } + + data class Last(val id: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt new file mode 100644 index 0000000000..cbb7ae46f3 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt @@ -0,0 +1,303 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.take +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.assertj.core.api.Assertions.assertThatExceptionOfType +import org.junit.Test +import org.springframework.data.geo.Distance +import org.springframework.data.geo.GeoResult +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono + +/** + * @author Mark Paluch + * @author Sebastien Deleuze + */ +class ReactiveFindOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + val operationWithProjection = mockk>(relaxed = true) + + val distinctWithProjection = mockk(relaxed = true) + + val findDistinct = mockk(relaxed = true) + + val reactiveFind = mockk>(relaxed = true) + + @Test // DATAMONGO-1719 + fun `ReactiveFind#query() with reified type parameter extension should call its Java counterpart`() { + + operation.query() + verify { operation.query(First::class.java) } + } + + @Test // DATAMONGO-1719, DATAMONGO-2086 + fun `ReactiveFind#FindOperatorWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } + + @Test // DATAMONGO-2086 + fun `ReactiveFind#DistinctWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + distinctWithProjection.asType() + verify { distinctWithProjection.`as`(User::class.java) } + } + + @Test // DATAMONGO-2417 + fun `ReactiveFind#distrinct() using KProperty1 should call its Java counterpart`() { + + every { operation.query(KotlinUser::class.java) } returns reactiveFind + + operation.distinct(KotlinUser::username) + verify { + operation.query(KotlinUser::class.java) + reactiveFind.distinct("username") + } + } + + @Test // DATAMONGO-2417 + fun `ReactiveFind#FindDistinct#field() using KProperty should call its Java counterpart`() { + + findDistinct.distinct(KotlinUser::username) + verify { findDistinct.distinct("username") } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitOneWithValue() { + + val find = mockk>() + every { find.one() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitOne()).isEqualTo("foo") + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitOneWithNull() { + + val find = mockk>() + every { find.one() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.awaitOne() } + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitOneOrNullWithValue() { + + val find = mockk>() + every { find.one() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitOneOrNull()).isEqualTo("foo") + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitOneOrNullWithNull() { + + val find = mockk>() + every { find.one() } returns Mono.empty() + + runBlocking { + assertThat(find.awaitOneOrNull()).isNull() + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitFirstWithValue() { + + val find = mockk>() + every { find.first() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitFirst()).isEqualTo("foo") + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2247 + fun 
terminatingFindAwaitFirstWithNull() { + + val find = mockk>() + every { find.first() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.awaitFirst() } + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitFirstOrNullWithValue() { + + val find = mockk>() + every { find.first() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitFirstOrNull()).isEqualTo("foo") + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitFirstOrNullWithNull() { + + val find = mockk>() + every { find.first() } returns Mono.empty() + + runBlocking { + assertThat(find.awaitFirstOrNull()).isNull() + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitCount() { + + val find = mockk>() + every { find.count() } returns Mono.just(1) + + runBlocking { + assertThat(find.awaitCount()).isEqualTo(1) + } + + verify { + find.count() + } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitExists() { + + val find = mockk>() + every { find.exists() } returns Mono.just(true) + + runBlocking { + assertThat(find.awaitExists()).isTrue() + } + + verify { + find.exists() + } + } + + @Test // DATAMONGO-2255 + fun terminatingFindAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } + + @Test // DATAMONGO-2255 + fun terminatingFindTailAsFlow() { + + val spec = mockk>() + every { spec.tail() } returns Flux.just("foo", "bar", "baz").concatWith(Flux.never()) + + runBlocking { + assertThat(spec.tailAsFlow().take(3).toList()).contains("foo", "bar", "baz") + } + + verify { + spec.tail() + } + } + + @Test // DATAMONGO-2255 + fun terminatingFindNearAllAsFlow() { + + val spec = mockk>() + val foo = GeoResult("foo", Distance(0.0)) + val bar = GeoResult("bar", Distance(0.0)) + val baz = GeoResult("baz", Distance(0.0)) + every { spec.all() } returns Flux.just(foo, bar, baz) + + runBlocking { + assertThat(spec.flow().toList()).contains(foo, bar, baz) + } + + verify { + spec.all() + } + } + + @Test // DATAMONGO-2255 + fun terminatingDistinctAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } + + data class KotlinUser(val username: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt new file mode 100644 index 0000000000..888d890655 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt @@ -0,0 +1,74 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono + +/** + * @author Mark Paluch + * @author Sebastien Deleuze + */ +class ReactiveInsertOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1719 + fun `insert() with reified type parameter extension should call its Java counterpart`() { + + operation.insert() + verify { operation.insert(First::class.java) } + } + + @Test // DATAMONGO-2209 + fun terminatingInsertOneAndAwait() { + + val insert = mockk>() + every { insert.one("foo") } returns Mono.just("foo") + + runBlocking { + assertThat(insert.oneAndAwait("foo")).isEqualTo("foo") + } + + verify { + insert.one("foo") + } + } + + @Test // DATAMONGO-2255 + fun terminatingInsertAllAsFlow() { + + val insert = mockk>() + val list = listOf("foo", "bar") + every { insert.all(any()) } returns Flux.fromIterable(list) + + runBlocking { + assertThat(insert.flow(list).toList()).containsAll(list) + } + + verify { + insert.all(list) + } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensionsTests.kt new file mode 100644 index 0000000000..a71f9d5a70 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensionsTests.kt @@ -0,0 +1,66 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import reactor.core.publisher.Flux + +/** + * @author Christoph Strobl + * @author Sebastien Deleuze + */ +class ReactiveMapReduceOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + val operationWithProjection = mockk>(relaxed = true) + + @Test // DATAMONGO-1929 + fun `ReactiveMapReduceOperation#mapReduce() with reified type parameter extension should call its Java counterpart`() { + + operation.mapReduce() + verify { operation.mapReduce(First::class.java) } + } + + @Test // DATAMONGO-1929, DATAMONGO-2086 + fun `ReactiveMapReduceOperation#MapReduceWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } + + @Test // DATAMONGO-2255 + fun terminatingMapReduceAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt new file mode 100644 index 0000000000..386e1aae56 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt @@ -0,0 +1,411 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.data.mongodb.core
+
+import example.first.First
+import io.mockk.mockk
+import io.mockk.verify
+import org.junit.Test
+import org.springframework.data.mongodb.core.aggregation.Aggregation
+import org.springframework.data.mongodb.core.aggregation.TypedAggregation
+import org.springframework.data.mongodb.core.query.Query
+import org.springframework.data.mongodb.core.query.Update
+
+/**
+ * @author Sebastien Deleuze
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @author Wonwoo Lee
+ */
+class ReactiveMongoOperationsExtensionsTests {
+
+	val operations = mockk<ReactiveMongoOperations>(relaxed = true)
+
+	@Test // DATAMONGO-1689
+	fun `indexOps() with reified type parameter extension should call its Java counterpart`() {
+
+		operations.indexOps<First>()
+		verify { operations.indexOps(First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `execute(ReactiveCollectionCallback) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionCallback = mockk<ReactiveCollectionCallback<First>>()
+
+		operations.execute(collectionCallback)
+		verify { operations.execute(First::class.java, collectionCallback) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `createCollection() with reified type parameter extension should call its Java counterpart`() {
+
+		operations.createCollection<First>()
+		verify { operations.createCollection(First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `createCollection(CollectionOptions) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionOptions = mockk<CollectionOptions>()
+
+		operations.createCollection<First>(collectionOptions)
+		verify { operations.createCollection(First::class.java, collectionOptions) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `collectionExists() with reified type parameter extension should call its Java counterpart`() {
+
+		operations.collectionExists<First>()
+		verify { operations.collectionExists(First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `dropCollection() with reified type parameter extension should call its Java counterpart`() {
+
+		operations.dropCollection<First>()
+		verify { operations.dropCollection(First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAll() with reified type parameter extension should call its Java counterpart`() {
+
+		operations.findAll<First>()
+		verify { operations.findAll(First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAll(String) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionName = "foo"
+
+		operations.findAll<First>(collectionName)
+		verify { operations.findAll(First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.findOne<First>(query)
+		verify { operations.findOne(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findOne(Query, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionName = "foo"
+		val query = mockk<Query>()
+
+		operations.findOne<First>(query, collectionName)
+		verify { operations.findOne(query, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `exists(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.exists<First>(query)
+		verify { operations.exists(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `find(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
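+		// The reactive variant mirrors the blocking one but returns Reactor types;
+		// a hedged sketch of the extension exercised here (the actual definition
+		// lives in ReactiveMongoOperationsExtensions.kt):
+		//
+		//   inline fun <reified T : Any> ReactiveMongoOperations.find(query: Query): Flux<T> =
+		//       find(query, T::class.java)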
operations.find<First>(query)
+		verify { operations.find(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `find(Query, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionName = "foo"
+		val query = mockk<Query>()
+
+		operations.find<First>(query, collectionName)
+		verify { operations.find(query, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findById(Any) with reified type parameter extension should call its Java counterpart`() {
+
+		val id = 1L
+
+		operations.findById<First>(id)
+		verify { operations.findById(id, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findById(Any, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionName = "foo"
+		val id = 1L
+
+		operations.findById<First>(id, collectionName)
+		verify { operations.findById(id, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAndModify(Query, Update, FindAndModifyOptions) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+		val options = mockk<FindAndModifyOptions>()
+
+		operations.findAndModify<First>(query, update, options)
+		verify { operations.findAndModify(query, update, options, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAndModify(Query, Update, FindAndModifyOptions, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val collectionName = "foo"
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+		val options = mockk<FindAndModifyOptions>()
+
+		operations.findAndModify<First>(query, update, options, collectionName)
+		verify { operations.findAndModify(query, update, options, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAndRemove(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.findAndRemove<First>(query)
+		verify { operations.findAndRemove(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `findAndRemove(Query, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val collectionName = "foo"
+
+		operations.findAndRemove<First>(query, collectionName)
+		verify { operations.findAndRemove(query, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `count() with reified type parameter extension should call its Java counterpart`() {
+
+		operations.count<First>()
+		verify { operations.count(any(), eq(First::class.java)) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `count(Query) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+
+		operations.count<First>(query)
+		verify { operations.count(query, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `count(Query, String) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val collectionName = "foo"
+
+		operations.count<First>(query, collectionName)
+		verify { operations.count(query, First::class.java, collectionName) }
+	}
+
+	@Test // DATAMONGO-2208
+	fun `insert(Collection) with reified type parameter extension should call its Java counterpart`() {
+
+		val collection = listOf(First(), First())
+
+		operations.insert(collection)
+		verify { operations.insert(collection, First::class.java) }
+	}
+
+	@Test // DATAMONGO-1689
+	fun `upsert(Query, Update) with reified type parameter extension should call its Java counterpart`() {
+
+		val query = mockk<Query>()
+		val update = mockk<Update>()
+
+		operations.upsert<First>(query, update)
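+		// upsert() on the relaxed mock returns a default Mono<UpdateResult> that
+		// nothing subscribes to; MockK records the delegating invocation at call
+		// time, which is all the verification below needs. A hedged sketch of the
+		// extension being invoked (actual code in ReactiveMongoOperationsExtensions.kt):
+		//
+		//   inline fun <reified T : Any> ReactiveMongoOperations.upsert(query: Query, update: Update): Mono<UpdateResult> =
+		//       upsert(query, update, T::class.java)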
verify { operations.upsert(query, update, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `upsert(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val update = mockk() + val collectionName = "foo" + + operations.upsert(query, update, collectionName) + verify { operations.upsert(query, update, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `updateFirst(Query, Update) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val update = mockk() + + operations.updateFirst(query, update) + verify { operations.updateFirst(query, update, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `updateFirst(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val update = mockk() + val collectionName = "foo" + + operations.updateFirst(query, update, collectionName) + verify { operations.updateFirst(query, update, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `updateMulti(Query, Update) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val update = mockk() + + operations.updateMulti(query, update) + verify { operations.updateMulti(query, update, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `updateMulti(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val update = mockk() + val collectionName = "foo" + + operations.updateMulti(query, update, collectionName) + verify { operations.updateMulti(query, update, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `remove(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.remove(query) + verify { operations.remove(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `remove(Query, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val collectionName = "foo" + + operations.remove(query, collectionName) + verify { operations.remove(query, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1689 + fun `findAllAndRemove(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.findAllAndRemove(query) + verify { operations.findAllAndRemove(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `tail(Query) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + + operations.tail(query) + verify { operations.tail(query, First::class.java) } + } + + @Test // DATAMONGO-1689 + fun `tail(Query, String) with reified type parameter extension should call its Java counterpart`() { + + val query = mockk() + val collectionName = "foo" + + operations.tail(query, collectionName) + verify { operations.tail(query, First::class.java, collectionName) } + } + + @Test // DATAMONGO-1761 + fun `findDistinctImplicit(Query, String) should call java counterpart`() { + + val query = mockk() + + operations.findDistinct(query, "field") + verify { operations.findDistinct(query, "field", First::class.java, String::class.java) } + } + + @Test // DATAMONGO-1761 + fun `findDistinct(Query, String, String) should call java counterpart`() { + + val query = mockk() + + operations.findDistinct(query, 
"field", "collection") + verify { operations.findDistinct(query, "field", "collection", First::class.java, String::class.java) } + } + + @Test // #893 + fun `aggregate(TypedAggregation, String, KClass) should call java counterpart`() { + + val aggregation = mockk>() + + operations.aggregate(aggregation, "foo") + verify { operations.aggregate(aggregation, "foo", First::class.java) } + } + + @Test // #893 + fun `aggregate(TypedAggregation, KClass) should call java counterpart`() { + + val aggregation = mockk>() + + operations.aggregate(aggregation) + verify { operations.aggregate(aggregation, First::class.java) } + } + + @Test // #893 + fun `aggregate(Aggregation, KClass) should call java counterpart`() { + + val aggregation = mockk() + + operations.aggregate(aggregation) + verify { + operations.aggregate( + aggregation, + String::class.java, + First::class.java + ) + } + } + + @Test // #893 + fun `aggregate(Aggregation, String) should call java counterpart`() { + + val aggregation = mockk() + + operations.aggregate(aggregation, "foo") + verify { operations.aggregate(aggregation, "foo", First::class.java) } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt new file mode 100644 index 0000000000..c824568418 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt @@ -0,0 +1,75 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import com.mongodb.client.result.DeleteResult +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono + +/** + * @author Mark Paluch + * @author Sebastien Deleuze + */ +class ReactiveRemoveOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1719 + fun `remove() with reified type parameter extension should call its Java counterpart`() { + + operation.remove() + verify { operation.remove(First::class.java) } + } + + @Test // DATAMONGO-2209 + fun allAndAwait() { + + val remove = mockk>() + val result = mockk() + every { remove.all() } returns Mono.just(result) + + runBlocking { + assertThat(remove.allAndAwait()).isEqualTo(result) + } + + verify { + remove.all() + } + } + + @Test // DATAMONGO-2255 + fun terminatingRemoveFindAndRemoveAsFlow() { + + val spec = mockk>() + every { spec.findAndRemove() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.findAndRemoveAsFlow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.findAndRemove() + } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt new file mode 100644 index 0000000000..1c376389ea --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt @@ -0,0 +1,227 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import com.mongodb.client.result.UpdateResult +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.assertj.core.api.Assertions.assertThatExceptionOfType +import org.junit.Test +import reactor.core.publisher.Mono + +/** + * Unit tests for `ReactiveExecutableUpdateOperationExtensions.kt`. 
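+ *
+ * The await-style tests below bridge Reactor and Kotlin coroutines: each stubs a
+ * Mono-returning terminal operation with MockK and asserts that the suspending
+ * extension yields its value, e.g. (hedged illustration)
+ * `every { update.all() } returns Mono.just(result)` followed by `update.allAndAwait()`.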
+ * + * @author Mark Paluch + * @author Sebastien Deleuze + */ +class ReactiveUpdateOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + @Test // DATAMONGO-1719 + fun `update() with reified type parameter extension should call its Java counterpart`() { + + operation.update() + verify { operation.update(First::class.java) } + } + + @Test // DATAMONGO-2209 + fun findModifyAndAwaitWithValue() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findModifyAndAwait()).isEqualTo("foo") + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2247 + fun findModifyAndAwaitWithNull() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.findModifyAndAwait() } + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2247 + fun findModifyAndAwaitOrNullWithValue() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findModifyAndAwaitOrNull()).isEqualTo("foo") + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2247 + fun findModifyAndAwaitOrNullWithNull() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.empty() + + runBlocking { + assertThat(find.findModifyAndAwaitOrNull()).isNull() + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2209 + fun findReplaceAndAwaitWithValue() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findReplaceAndAwait()).isEqualTo("foo") + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2247 + fun findReplaceAndAwaitWithNull() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.findReplaceAndAwait() } + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2247 + fun findReplaceAndAwaitOrNullWithValue() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findReplaceAndAwaitOrNull()).isEqualTo("foo") + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2247 + fun findReplaceAndAwaitOrNullWithNull() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.empty() + + runBlocking { + assertThat(find.findReplaceAndAwaitOrNull()).isNull() + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2209 + fun allAndAwait() { + + val update = mockk>() + val result = mockk() + every { update.all() } returns Mono.just(result) + + runBlocking { + assertThat(update.allAndAwait()).isEqualTo(result) + } + + verify { + update.all() + } + } + + @Test // DATAMONGO-2209 + fun firstAndAwait() { + + val update = mockk>() + val result = mockk() + every { update.first() } returns Mono.just(result) + + runBlocking { + assertThat(update.firstAndAwait()).isEqualTo(result) + } + + verify { + update.first() + } + } + + @Test // DATAMONGO-2209 + fun upsertAndAwait() { + + val update = mockk>() + val result = mockk() + every { update.upsert() } returns Mono.just(result) + + runBlocking { + assertThat(update.upsertAndAwait()).isEqualTo(result) + } + + verify { + update.upsert() + } + } + + @Test // DATAMONGO-2209 + fun findAndReplaceWithProjectionAsType() { + + val update = mockk>() + val 
result = mockk>() + every { update.`as`(String::class.java) } returns result + + assertThat(update.asType()).isEqualTo(result) + + verify { + update.`as`(String::class.java) + } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/MappingMongoConverterKtUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/MappingMongoConverterKtUnitTests.kt new file mode 100644 index 0000000000..797c9d41ff --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/MappingMongoConverterKtUnitTests.kt @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert + +import org.assertj.core.api.Assertions.assertThat +import org.bson.Document +import org.junit.jupiter.api.Test +import org.springframework.data.mongodb.core.mapping.MongoMappingContext + +/** + * Kotlin unit tests for [MappingMongoConverter]. + * + * @author Mark Paluch + */ +class MappingMongoConverterKtUnitTests { + + @Test // GH-4485 + fun shouldIgnoreNonReadableProperties() { + + val document = Document.parse("{_id: 'baz', type: 'SOME_VALUE'}") + val converter = + MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoMappingContext()) + + val tx = converter.read(SpecialTransaction::class.java, document) + + assertThat(tx.id).isEqualTo("baz") + assertThat(tx.type).isEqualTo("SOME_DEFAULT_VALUE") + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/SpecialTransaction.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/SpecialTransaction.kt new file mode 100644 index 0000000000..95643dfa70 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/SpecialTransaction.kt @@ -0,0 +1,26 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert + +abstract class SomeTransaction() { + + abstract val id: String + abstract val type: String +} + +data class SpecialTransaction(override val id: String) : SomeTransaction() { + override val type: String = "SOME_DEFAULT_VALUE" +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt new file mode 100644 index 0000000000..a5e20487ff --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt @@ -0,0 +1,127 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import io.mockk.mockk +import io.mockk.verify +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import org.springframework.data.mapping.div + +/** + * @author Sebastien Deleuze + * @author Tjeu Kayim + */ +class CriteriaExtensionsTests { + + val criteria = mockk(relaxed = true) + + @Test + fun `isEqualTo() extension should call its Java counterpart`() { + + val foo = "foo" + + criteria.isEqualTo(foo) + + verify(exactly = 1) { criteria.`is`(foo) } + } + + @Test + fun `isEqualTo() extension should support nullable value`() { + + criteria.isEqualTo(null) + + verify(exactly = 1) { criteria.`is`(null) } + } + + @Test + fun `inValues(varags) extension should call its Java counterpart`() { + + val foo = "foo" + val bar = "bar" + + criteria.inValues(foo, bar) + + verify(exactly = 1) { criteria.`in`(foo, bar) } + } + + @Test + fun `inValues(varags) extension should support nullable values`() { + + criteria.inValues(null, null) + + verify(exactly = 1) { criteria.`in`(null, null) } + } + + @Test + fun `inValues(Collection) extension should call its Java counterpart`() { + + val c = listOf("foo", "bar") + + criteria.inValues(c) + + verify(exactly = 1) { criteria.`in`(c) } + } + + @Test + fun `inValues(Collection) extension should support nullable values`() { + + val c = listOf("foo", null, "bar") + + criteria.inValues(c) + + verify(exactly = 1) { criteria.`in`(c) } + } + + @Test + fun `and(KProperty) extension should call its Java counterpart`() { + + criteria.and(Book::title) + + verify(exactly = 1) { criteria.and("title") } + } + + @Test + fun `and(KProperty) extension should support nested properties`() { + + criteria.and(Book::author / Author::name) + + verify(exactly = 1) { criteria.and("author.name") } + } + + @Test + fun `where(KProperty) should equal Criteria where()`() { + + class Book(val title: String) + + val typedCriteria = where(Book::title) + val classicCriteria = Criteria.where("title") + + assertThat(typedCriteria).isEqualTo(classicCriteria) + } + + @Test + fun `where(KProperty) should support nested properties`() { + + val typedCriteria = where(Book::author / Author::name) + val 
classicCriteria = Criteria.where("author.name") + + assertThat(typedCriteria).isEqualTo(classicCriteria) + } + + class Book(val title: String, val author: Author) + class Author(val name: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt new file mode 100644 index 0000000000..88b2bcf050 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt @@ -0,0 +1,462 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.assertj.core.api.Assertions.assertThat +import org.bson.BsonRegularExpression +import org.junit.Test +import org.springframework.data.geo.Circle +import org.springframework.data.geo.Point +import org.springframework.data.mapping.div +import org.springframework.data.mongodb.core.geo.GeoJsonPoint +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type +import java.util.regex.Pattern + +/** + * Unit tests for [Criteria] extensions. 
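+ *
+ * Each test builds a [Criteria] through the KProperty-based infix DSL and asserts
+ * it equals the string-keyed equivalent, for example (hedged illustration)
+ * `Book::title isEqualTo "Moby-Dick"` versus `Criteria("title").isEqualTo("Moby-Dick")`.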
+ * + * @author Tjeu Kayim + * @author Mark Paluch + * @author Sangyong Choi + */ +class TypedCriteriaExtensionsTests { + + @Test + fun `isEqualTo() should equal expected criteria`() { + + val typed = Book::title isEqualTo "Moby-Dick" + val expected = Criteria("title").isEqualTo("Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `ne() should equal expected criteria`() { + + val typed = Book::title ne "Moby-Dick" + val expected = Criteria("title").ne("Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `lt() should equal expected criteria`() { + + val typed = Book::price lt 100 + val expected = Criteria("price").lt(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `lte() should equal expected criteria`() { + + val typed = Book::price lte 100 + val expected = Criteria("price").lte(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `gt() should equal expected criteria`() { + + val typed = Book::price gt 100 + val expected = Criteria("price").gt(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `gte() should equal expected criteria`() { + + val typed = Book::price gte 100 + val expected = Criteria("price").gte(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `inValues(vararg) should equal expected criteria`() { + + val typed = Book::price.inValues(1, 2, 3) + val expected = Criteria("price").inValues(1, 2, 3) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `inValues(list) should equal expected criteria`() { + + val typed = Book::price inValues listOf(1, 2, 3) + val expected = Criteria("price").inValues(listOf(1, 2, 3)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `nin(vararg) should equal expected criteria`() { + + val typed = Book::price.nin(1, 2, 3) + val expected = Criteria("price").nin(1, 2, 3) + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `nin(list) should equal expected criteria`() { + + val typed = Book::price nin listOf(1, 2, 3) + val expected = Criteria("price").nin(listOf(1, 2, 3)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `mod() should equal expected criteria`() { + + val typed = Book::price.mod(2, 3) + val expected = Criteria("price").mod(2, 3) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `all(vararg) should equal expected criteria`() { + + val typed = Book::categories.all(1, 2, 3) + val expected = Criteria("categories").all(1, 2, 3) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `all(list) should equal expected criteria`() { + + val typed = Book::categories all listOf(1, 2, 3) + val expected = Criteria("categories").all(listOf(1, 2, 3)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `size() should equal expected criteria`() { + + val typed = Book::categories size 4 + val expected = Criteria("categories").size(4) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `exists() should equal expected criteria`() { + + val typed = Book::title exists true + val expected = Criteria("title").exists(true) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `type(Int) should equal expected criteria`() { + + val typed = Book::title type 2 + val expected = Criteria("title").type(2) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `type(List) should equal expected criteria`() { + + val typed = Book::title type listOf(Type.STRING, Type.BOOLEAN) + val expected = Criteria("title").type(Type.STRING, Type.BOOLEAN) + + 
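+		// A hedged sketch of how such an infix extension maps a property to its
+		// field key (the production code in TypedCriteriaExtensions.kt also
+		// resolves nested KProperty paths):
+		//
+		//   infix fun KProperty<*>.type(types: List<Type>): Criteria =
+		//       Criteria(this.name).type(*types.toTypedArray())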
assertThat(typed).isEqualTo(expected) + } + + @Test + fun `type(vararg) should equal expected criteria`() { + + val typed = Book::title.type(Type.STRING, Type.BOOLEAN) + val expected = Criteria("title").type(Type.STRING, Type.BOOLEAN) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `not() should equal expected criteria`() { + + val typed = Book::price.not().lt(123) + val expected = Criteria("price").not().lt(123) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `regex(string) should equal expected criteria`() { + + val typed = Book::title regex "ab+c" + val expected = Criteria("title").regex("ab+c") + assertEqualCriteriaByJson(typed, expected) + } + + @Test + fun `regex(string, options) should equal expected criteria`() { + + val typed = Book::title.regex("ab+c", "g") + val expected = Criteria("title").regex("ab+c", "g") + + assertEqualCriteriaByJson(typed, expected) + } + + @Test + fun `regex(Regex) should equal expected criteria`() { + + val typed = Book::title regex Regex("ab+c") + val expected = Criteria("title").regex(Pattern.compile("ab+c")) + + assertEqualCriteriaByJson(typed, expected) + } + + private fun assertEqualCriteriaByJson(typed: Criteria, expected: Criteria) { + assertThat(typed.criteriaObject.toJson()).isEqualTo(expected.criteriaObject.toJson()) + } + + @Test + fun `regex(Pattern) should equal expected criteria`() { + + val value = Pattern.compile("ab+c") + val typed = Book::title regex value + val expected = Criteria("title").regex(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `regex(BsonRegularExpression) should equal expected criteria`() { + + val expression = BsonRegularExpression("ab+c") + val typed = Book::title regex expression + val expected = Criteria("title").regex(expression) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `withinSphere() should equal expected criteria`() { + + val value = Circle(Point(928.76, 28.345), 65.243) + val typed = Building::location withinSphere value + val expected = Criteria("location").withinSphere(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `within() should equal expected criteria`() { + + val value = Circle(Point(5.43421, 12.456), 52.67) + val typed = Building::location within value + val expected = Criteria("location").within(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `near() should equal expected criteria`() { + + val value = Point(57.431, 71.345) + val typed = Building::location near value + val expected = Criteria("location").near(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `nearSphere() should equal expected criteria`() { + + val value = Point(5.4321, 12.345) + val typed = Building::location nearSphere value + val expected = Criteria("location").nearSphere(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `intersects() should equal expected criteria`() { + + val value = GeoJsonPoint(5.481573, 51.451726) + val typed = Building::location intersects value + val expected = Criteria("location").intersects(value) + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria`() { + + val typed = Building::location maxDistance 3.0 + val expected = Criteria("location").maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria`() { + + val typed = Building::location minDistance 3.0 + val expected = Criteria("location").minDistance(3.0) + + 
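+		// Criteria's fluent methods mutate and return the same instance, so the
+		// infix chain reads left to right; both forms below describe one criterion
+		// (hedged usage illustration, mirroring the nearSphere tests that follow):
+		//
+		//   val point = Point(0.0, 0.0)
+		//   val javaStyle = Criteria("location").nearSphere(point).minDistance(3.0)
+		//   val kotlinDsl = Building::location nearSphere point minDistance 3.0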
assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria with nearSphere`() { + + val point = Point(0.0, 0.0) + val typed = Building::location nearSphere point maxDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with nearSphere`() { + + val point = Point(0.0, 0.0) + val typed = Building::location nearSphere point minDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria with near`() { + + val point = Point(0.0, 0.0) + val typed = Building::location near point maxDistance 3.0 + val expected = Criteria("location") + .near(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with near`() { + + val point = Point(0.0, 0.0) + val typed = Building::location near point minDistance 3.0 + val expected = Criteria("location") + .near(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `elemMatch() should equal expected criteria`() { + + val value = Criteria("price").lt(950) + val typed = Book::title elemMatch value + val expected = Criteria("title").elemMatch(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `elemMatch(TypedCriteria) should equal expected criteria`() { + + val typed = Book::title elemMatch (Book::price lt 950) + val expected = Criteria("title").elemMatch(Criteria("price").lt(950)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `bits() should equal expected criteria`() { + + val typed = Book::title bits { allClear(123) } + val expected = Criteria("title").bits().allClear(123) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `One level nested should equal expected criteria`() { + + val typed = Book::author / Author::name isEqualTo "Herman Melville" + val expected = Criteria("author.name").isEqualTo("Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `Two levels nested should equal expected criteria`() { + + data class Entity(val book: Book) + + val typed = Entity::book / Book::author / Author::name isEqualTo "Herman Melville" + val expected = Criteria("book.author.name").isEqualTo("Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `typed criteria inside orOperator() should equal expected criteria`() { + + val typed = (Book::title isEqualTo "Moby-Dick").orOperator( + Book::price lt 1200, + Book::price gt 240 + ) + val expected = Criteria("title").isEqualTo("Moby-Dick") + .orOperator( + Criteria("price").lt(1200), + Criteria("price").gt(240) + ) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `chaining gt & isEqualTo() should equal expected criteria`() { + + val typed = (Book::title isEqualTo "Moby-Dick") + .and(Book::price).lt(950) + val expected = Criteria("title").isEqualTo("Moby-Dick") + .and("price").lt(950) + + assertThat(typed).isEqualTo(expected) + } + + data class Book( + val title: String = "Moby-Dick", + val price: Int = 123, + val available: Boolean = true, + val categories: List = emptyList(), + val author: Author = Author() + ) + + data class Author( + val name: String = "Herman Melville" + ) + + data class Building( + val location: GeoJsonPoint + ) +} diff --git 
a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensionsTests.kt new file mode 100644 index 0000000000..2d2b2c6847 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensionsTests.kt @@ -0,0 +1,251 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import org.springframework.data.mapping.div +import java.time.Instant + +/** + * Unit tests for [Update] extensions. + * + * @author Pawel Matysek + */ +class TypedUpdateExtensionsTests { + + @Test // GH-3028 + fun `update() should equal expected Update`() { + + val typed = update(Book::title, "Moby-Dick") + val expected = Update.update("title", "Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `set() should equal expected Update`() { + + val typed = Update().set(Book::title, "Moby-Dick") + val expected = Update().set("title", "Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `setOnInsert() should equal expected Update`() { + + val typed = Update().setOnInsert(Book::title, "Moby-Dick") + val expected = Update().setOnInsert("title", "Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `unset() should equal expected Update`() { + + val typed = Update().unset(Book::title) + val expected = Update().unset("title") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `inc(key, inc) should equal expected Update`() { + + val typed = Update().inc(Book::price, 5) + val expected = Update().inc("price", 5) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `inc(key) should equal expected Update`() { + + val typed = Update().inc(Book::price) + val expected = Update().inc("price") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `push(key, value) should equal expected Update`() { + + val typed = Update().push(Book::categories, "someCategory") + val expected = Update().push("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `push(key) should equal expected Update`() { + + val typed = Update().push(Book::categories) + val expected = Update().push("categories") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `addToSet(key) should equal expected Update`() { + + val typed = Update().addToSet(Book::categories).each("category", "category2") + val expected = Update().addToSet("categories").each("category", "category2") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `addToSet(key, value) should equal expected Update`() { + + val typed = Update().addToSet(Book::categories, 
"someCategory") + val expected = Update().addToSet("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `pop() should equal expected Update`() { + + val typed = Update().pop(Book::categories, Update.Position.FIRST) + val expected = Update().pop("categories", Update.Position.FIRST) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `pull() should equal expected Update`() { + + val typed = Update().pull(Book::categories, "someCategory") + val expected = Update().pull("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `pullAll() should equal expected Update`() { + + val typed = Update().pullAll(Book::categories, arrayOf("someCategory", "someCategory2")) + val expected = Update().pullAll("categories", arrayOf("someCategory", "someCategory2")) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `currentDate() should equal expected Update`() { + + val typed = Update().currentDate(Book::releaseDate) + val expected = Update().currentDate("releaseDate") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `currentTimestamp() should equal expected Update`() { + + val typed = Update().currentTimestamp(Book::releaseDate) + val expected = Update().currentTimestamp("releaseDate") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `multiply() should equal expected Update`() { + + val typed = Update().multiply(Book::price, 2) + val expected = Update().multiply("price", 2) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `max() should equal expected Update`() { + + val typed = Update().max(Book::price, 200) + val expected = Update().max("price", 200) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `min() should equal expected Update`() { + + val typed = Update().min(Book::price, 100) + val expected = Update().min("price", 100) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `bitwise() should equal expected Update`() { + + val typed = Update().bitwise(Book::price).and(2) + val expected = Update().bitwise("price").and(2) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `filterArray() should equal expected Update`() { + + val typed = Update().filterArray(Book::categories, "someCategory") + val expected = Update().filterArray("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `typed modifies() should equal expected modifies()`() { + + val typed = update(Book::title, "Moby-Dick") + + assertThat(typed.modifies(Book::title)).isEqualTo(typed.modifies("title")) + assertThat(typed.modifies(Book::price)).isEqualTo(typed.modifies("price")) + } + + @Test // GH-3028 + fun `One level nested should equal expected Update`() { + + val typed = update(Book::author / Author::name, "Herman Melville") + val expected = Update.update("author.name", "Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `Two levels nested should equal expected Update`() { + + data class Entity(val book: Book) + + val typed = update(Entity::book / Book::author / Author::name, "Herman Melville") + val expected = Update.update("book.author.name", "Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + data class Book( + val title: String = "Moby-Dick", + val price: Int = 123, + val available: Boolean = true, + val categories: List = emptyList(), + val author: Author = 
Author(), + val releaseDate: Instant, + ) + + data class Author( + val name: String = "Herman Melville", + ) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/CoroutineRepositoryUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/CoroutineRepositoryUnitTests.kt new file mode 100644 index 0000000000..af3744a900 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/CoroutineRepositoryUnitTests.kt @@ -0,0 +1,68 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository + +import com.mongodb.client.result.DeleteResult +import io.mockk.every +import io.mockk.mockk +import kotlinx.coroutines.runBlocking +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.springframework.data.annotation.Id +import org.springframework.data.mongodb.core.ReactiveMongoOperations +import org.springframework.data.mongodb.core.convert.MappingMongoConverter +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver +import org.springframework.data.mongodb.core.mapping.MongoMappingContext +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory +import org.springframework.data.repository.kotlin.CoroutineCrudRepository +import reactor.core.publisher.Mono + +/** + * Unit tests for Kotlin Coroutine repositories. + * + * @author Mark Paluch + */ +class CoroutineRepositoryUnitTests { + + val operations = mockk<ReactiveMongoOperations>(relaxed = true) + lateinit var repositoryFactory: ReactiveMongoRepositoryFactory + + @BeforeEach + fun before() { + + every { operations.getConverter() } returns MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoMappingContext()) + repositoryFactory = ReactiveMongoRepositoryFactory(operations) + } + + @Test // DATAMONGO-2601 + fun `should discard result of suspended query method without result`() { + + every { operations.remove(any(), any(), any()) } returns Mono.just(DeleteResult.acknowledged(1)) + + val repository = repositoryFactory.getRepository(PersonRepository::class.java) + + runBlocking { + repository.deleteAllByName("foo") + } + } + + interface PersonRepository : CoroutineCrudRepository<Person, Long> { + + suspend fun deleteAllByName(name: String) + } + + data class Person(@Id var id: Long, var name: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/KotlinRepositoryUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/KotlinRepositoryUnitTests.kt new file mode 100644 index 0000000000..96e1b679d2 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/KotlinRepositoryUnitTests.kt @@ -0,0 +1,61 @@ +/* + * Copyright 2020-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository + +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.springframework.data.annotation.Id +import org.springframework.data.mongodb.core.MongoOperations +import org.springframework.data.mongodb.core.convert.MappingMongoConverter +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver +import org.springframework.data.mongodb.core.mapping.MongoMappingContext +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory +import org.springframework.data.repository.CrudRepository + +/** + * Unit tests for Kotlin repositories. + * + * @author Mark Paluch + */ +class KotlinRepositoryUnitTests { + + val operations = mockk<MongoOperations>(relaxed = true) + lateinit var repositoryFactory: MongoRepositoryFactory + + @BeforeEach + fun before() { + + every { operations.getConverter() } returns MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoMappingContext()) + repositoryFactory = MongoRepositoryFactory(operations) + } + + @Test // DATAMONGO-2601 + fun shouldSupportDeleteMethods() { + + val repository = repositoryFactory.getRepository(PersonRepository::class.java) + + repository.deleteAllByName("foo") + } + + interface PersonRepository : CrudRepository<Person, Long> { + + fun deleteAllByName(name: String) + } + + data class Person(@Id var id: Long, var name: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodCoroutineUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodCoroutineUnitTests.kt new file mode 100644 index 0000000000..b3e5c013b9 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodCoroutineUnitTests.kt @@ -0,0 +1,86 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.repository.query + +import kotlinx.coroutines.flow.Flow +import org.assertj.core.api.Assertions.assertThat +import org.assertj.core.api.Assertions.assertThatNoException +import org.junit.jupiter.api.Test +import org.springframework.data.mongodb.core.mapping.MongoMappingContext +import org.springframework.data.mongodb.repository.Person +import org.springframework.data.mongodb.repository.Update +import org.springframework.data.projection.SpelAwareProxyProjectionFactory +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata +import org.springframework.data.repository.kotlin.CoroutineCrudRepository +import kotlin.coroutines.Continuation + +/** + * Unit tests for [ReactiveMongoQueryMethod] using Coroutine repositories. + * + * @author Mark Paluch + */ +class ReactiveMongoQueryMethodCoroutineUnitTests { + + val projectionFactory = SpelAwareProxyProjectionFactory() + + interface PersonRepository : CoroutineCrudRepository<Person, String> { + + suspend fun findSuspendAllByName(): Flow<Person> + + fun findAllByName(): Flow<Person> + + suspend fun findSuspendByName(): List<Person> + + @Update("{ \$inc: { age: 1 } }") + suspend fun findAndIncrementAgeByName(name: String) + } + + @Test // DATAMONGO-2562 + internal fun `should consider methods returning Flow as collection queries`() { + + val method = PersonRepository::class.java.getMethod("findAllByName") + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThat(queryMethod.isCollectionQuery).isTrue() + } + + @Test // DATAMONGO-2562 + internal fun `should consider suspended methods returning Flow as collection queries`() { + + val method = PersonRepository::class.java.getMethod("findSuspendAllByName", Continuation::class.java) + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThat(queryMethod.isCollectionQuery).isTrue() + } + + @Test // DATAMONGO-2630 + internal fun `should consider suspended methods returning List as collection queries`() { + + val method = PersonRepository::class.java.getMethod("findSuspendByName", Continuation::class.java) + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThat(queryMethod.isCollectionQuery).isTrue() + } + + @Test // GH-4772 + internal fun `should consider suspended update queries`() { + + val method = PersonRepository::class.java.getMethod("findAndIncrementAgeByName", String::class.java, Continuation::class.java) + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThatNoException().isThrownBy { queryMethod.verify() } + } +} diff --git a/spring-data-mongodb/src/test/resources/META-INF/beans.xml b/spring-data-mongodb/src/test/resources/META-INF/beans.xml deleted file mode 100644 index 73ae3a2516..0000000000 --- a/spring-data-mongodb/src/test/resources/META-INF/beans.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - diff --git a/spring-data-mongodb/src/test/resources/geospatial.xml b/spring-data-mongodb/src/test/resources/geospatial.xml index 13a01ac350..8937e8f753 100644 --- a/spring-data-mongodb/src/test/resources/geospatial.xml +++ b/spring-data-mongodb/src/test/resources/geospatial.xml @@ -2,23 +2,24 @@ +
xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd + http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd"> - - - + + + - + - + + - + diff --git a/spring-data-mongodb/src/test/resources/gridfs/another-resource.xml b/spring-data-mongodb/src/test/resources/gridfs/another-resource.xml new file mode 100644 index 0000000000..7217ac4743 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/gridfs/another-resource.xml @@ -0,0 +1,2 @@ + + diff --git a/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml b/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml index 274878ec27..a7691c97ae 100644 --- a/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml +++ b/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/gridfs/reactive-gridfs.xml b/spring-data-mongodb/src/test/resources/gridfs/reactive-gridfs.xml new file mode 100644 index 0000000000..bcba3dfb38 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/gridfs/reactive-gridfs.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/infrastructure.xml b/spring-data-mongodb/src/test/resources/infrastructure.xml index 2cd73b7bd7..500c44e2bf 100644 --- a/spring-data-mongodb/src/test/resources/infrastructure.xml +++ b/spring-data-mongodb/src/test/resources/infrastructure.xml @@ -1,20 +1,21 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd"> - + - - + + + diff --git a/spring-data-mongodb/src/test/resources/logback.xml b/spring-data-mongodb/src/test/resources/logback.xml index 3c6add8152..64550c957c 100644 --- a/spring-data-mongodb/src/test/resources/logback.xml +++ b/spring-data-mongodb/src/test/resources/logback.xml @@ -7,14 +7,20 @@ - - - + + + + + + - \ No newline at end of file + diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml b/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml index 1c3e248efb..a16849877b 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml b/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml index bb1037c5f7..580fe87272 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + 
http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-default.xml b/spring-data-mongodb/src/test/resources/namespace/converter-default.xml index 6692e304db..9da3af36d7 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-default.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-default.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml b/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml index 7d72040076..e69006a0f7 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml b/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml index 1ef0063abd..7fef48d5dd 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml b/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml index 345e7d12bc..8d7415f46b 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml b/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml index 2b7e4519a0..fe6df5bae2 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter.xml b/spring-data-mongodb/src/test/resources/namespace/converter.xml index 91842765d9..1a0106d7bf 100644 --- 
a/spring-data-mongodb/src/test/resources/namespace/converter.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -13,6 +13,8 @@ + + diff --git a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml index ef7b48fbea..66dba8540a 100644 --- a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml +++ b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml @@ -2,16 +2,16 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - - + + + - + - \ No newline at end of file + diff --git a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml index bce7b0aaff..6f8c1ae0cf 100644 --- a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml +++ b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml @@ -2,23 +2,21 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - - - - - - \ No newline at end of file + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml index 5388ec8c35..478f95daf0 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml @@ -2,11 +2,15 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + + + - + + + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-details.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-details.xml deleted file mode 100644 index c9c9f622e6..0000000000 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-details.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml index 567585f1c5..4bd9158356 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-write-concern-and-details.xml 
b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-write-concern-and-details.xml deleted file mode 100644 index 9f1709b4bb..0000000000 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-write-concern-and-details.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml index 3fa5daff85..e12b585237 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-details.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-details.xml deleted file mode 100644 index 7239228e3f..0000000000 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-details.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml index 529b93ab04..4bd9158356 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml @@ -2,9 +2,9 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml index 411da82872..5969d84e9f 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml @@ -2,9 +2,9 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml index 86e1dc28f6..a279bd83e4 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml @@ -2,9 +2,9 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri.xml deleted file mode 100644 index 51175ecba3..0000000000 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - diff --git 
a/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml b/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml index 8f6a36b231..0659bfb973 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml @@ -2,17 +2,22 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml index 4f0883a6f6..76f271b4da 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml new file mode 100644 index 0000000000..79e5ac40a0 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml index 8eee01d3d4..dc86edce2f 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml @@ -3,13 +3,16 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> + + + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml 
b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml index 28003d77bd..5575248498 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml @@ -3,16 +3,20 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context-3.0.xsd"> - + + + - + + + - - - - + + - + @@ -73,7 +55,7 @@ - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml index b91883d3cf..8466692f83 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties index ef9d7c7361..a79c2c685c 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties @@ -8,5 +8,8 @@ mongo.socketTimeout=1500 mongo.threadsAllowedToBlockForConnectionMultiplier=4 mongo.socketKeepAlive=true mongo.fsync=true -mongo.slaveOk=true + +mongoSsl.ssl=true +replicaSetName=rs0 +credential=jon:warg@snow?uri.authMechanism=PLAIN diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml index 6ed28955f5..e3635826c7 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo 
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -17,7 +17,7 @@ - - + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml index 97797b9847..66d9aed368 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml @@ -3,13 +3,13 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml index d967bf8148..e9c04b79c9 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml index 7e7271d855..264d474b56 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml @@ -3,22 +3,25 @@ xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:util="http://www.springframework.org/schema/util" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> + 
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> - + + + + - + + - @@ -26,13 +29,8 @@ - - - - - - - + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml index d994229bac..2e88cda928 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml @@ -2,12 +2,15 @@ + xmlns:context="http://www.springframework.org/schema/context" + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> - + + + @@ -15,6 +18,6 @@ - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml index e2595507dc..b70efb607c 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml @@ -4,20 +4,25 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:repository="http://www.springframework.org/schema/data/repository" xmlns:util="http://www.springframework.org/schema/util" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/data/repository http://www.springframework.org/schema/data/repository/spring-repository.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/data/repository https://www.springframework.org/schema/data/repository/spring-repository.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> + + + - + + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml 
b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml index 41002c4d3a..4817cacf03 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml @@ -4,10 +4,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:repository="http://www.springframework.org/schema/data/repository" xmlns:util="http://www.springframework.org/schema/util" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/data/repository http://www.springframework.org/schema/data/repository/spring-repository.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/data/repository https://www.springframework.org/schema/data/repository/spring-repository.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/mongo.properties b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/mongo.properties new file mode 100644 index 0000000000..d784d5f1ed --- /dev/null +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/mongo.properties @@ -0,0 +1 @@ +mongo.create-query-indexes=true diff --git a/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml b/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml new file mode 100644 index 0000000000..896bb26812 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/server-jmx.xml b/spring-data-mongodb/src/test/resources/server-jmx.xml index 7b24ed7aba..54f985f4cb 100644 --- a/spring-data-mongodb/src/test/resources/server-jmx.xml +++ b/spring-data-mongodb/src/test/resources/server-jmx.xml @@ -4,9 +4,9 @@ xmlns:p="http://www.springframework.org/schema/p" xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + 
http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> diff --git a/spring-data-mongodb/src/test/resources/template-mapping.xml b/spring-data-mongodb/src/test/resources/template-mapping.xml index 2e952b4aaa..5f571f7241 100644 --- a/spring-data-mongodb/src/test/resources/template-mapping.xml +++ b/spring-data-mongodb/src/test/resources/template-mapping.xml @@ -2,12 +2,12 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd"> - - + + @@ -20,8 +20,8 @@ - - + + @@ -44,7 +44,7 @@ - + diff --git a/spring-data-mongodb/src/test/resources/zips.json b/spring-data-mongodb/src/test/resources/zips.json index 093446c455..b84c090c2a 100644 --- a/spring-data-mongodb/src/test/resources/zips.json +++ b/spring-data-mongodb/src/test/resources/zips.json @@ -4013,7 +4013,6 @@ {"city": "MONTICELLO", "loc": [-83.892454, 30.519681], "pop": 9578, "state": "FL", "_id": "32344"} {"city": "PANACEA", "loc": [-84.391212, 30.015322], "pop": 1292, "state": "FL", "_id": "32346"} {"city": "PERRY", "loc": [-83.585021, 30.097489], "pop": 15401, "state": "FL", "_id": "32347"} -{"city": "PINETTA", "loc": [-83.340463, 30.599703], "pop": 642, "state": "FL", "_id": "32350"} {"city": "QUINCY", "loc": [-84.60945, 30.586675], "pop": 25013, "state": "FL", "_id": "32351"} {"city": "SALEM", "loc": [-83.385828, 29.823815], "pop": 264, "state": "FL", "_id": "32356"} {"city": "SOPCHOPPY", "loc": [-84.454877, 30.071353], "pop": 3335, "state": "FL", "_id": "32358"} @@ -6913,7 +6912,6 @@ {"city": "WILLISVILLE", "loc": [-89.578487, 37.982142], "pop": 912, "state": "IL", "_id": "62997"} {"city": "WOLF LAKE", "loc": [-89.440761, 37.511985], "pop": 533, "state": "IL", "_id": "62998"} {"city": "ZEIGLER", "loc": [-89.06026, 37.906923], "pop": 2502, "state": "IL", "_id": "62999"} -{"city": "SAINT MARY", "loc": [-89.929268, 37.914034], "pop": 130, "state": "IL", "_id": "63673"} {"city": "ALEXANDRIA", "loc": [-85.668148, 40.256081], "pop": 11011, "state": "IN", "_id": "46001"} {"city": "ANDERSON", "loc": [-85.725305, 40.114577], "pop": 17280, "state": "IN", "_id": "46011"} {"city": "ANDERSON", "loc": [-85.653591, 40.130947], "pop": 20949, "state": "IN", "_id": "46012"} @@ -9906,7 +9904,6 @@ {"city": "CERULEAN", "loc": [-87.664848, 36.949619], "pop": 1654, "state": "KY", "_id": "42215"} {"city": "CROFTON", "loc": [-87.489072, 37.034387], "pop": 3531, "state": "KY", "_id": "42217"} {"city": "ELKTON", "loc": [-87.167833, 36.909403], "pop": 7207, "state": "KY", "_id": "42220"} -{"city": "FORT CAMPBELL", "loc": [-87.459706, 36.653584], "pop": 18861, "state": "KY", "_id": "42223"} {"city": "GRACEY", "loc": [-87.6545, 36.856393], "pop": 92, "state": "KY", "_id": "42232"} {"city": "TINY TOWN", "loc": [-87.170931, 36.664268], "pop": 2307, "state": "KY", "_id": "42234"} {"city": "HERNDON", "loc": [-87.608215, 36.708469], "pop": 810, "state": "KY", "_id": "42236"} diff --git a/spring-data-mongodb/template.mf b/spring-data-mongodb/template.mf deleted file mode 100644 index 1bad89f755..0000000000 --- a/spring-data-mongodb/template.mf +++ /dev/null @@ -1,29 +0,0 @@ -Bundle-SymbolicName: org.springframework.data.mongodb -Bundle-Name: Spring Data MongoDB Support -Bundle-Vendor: Pivotal Software, Inc. 
-Bundle-ManifestVersion: 2 -Excluded-Imports: - lombok.* -Import-Package: - sun.reflect;version="0";resolution:=optional -Export-Template: - org.springframework.data.mongodb.*;version="${project.version}" -Import-Template: - com.fasterxml.jackson.*;version="${jackson:[=.=.=,+1.0.0)}";resolution:=optional, - com.google.common.base.*;version="[11.0.0,14.0.0)";resolution:=optional, - com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}", - com.querydsl.*;version="${querydsl:[=.=.=,+1.0.0)}";resolution:=optional, - javax.annotation.processing.*;version="0", - javax.enterprise.*;version="${cdi:[=.=.=,+1.0.0)}";resolution:=optional, - javax.tools.*;version="0", - javax.net.*;version="0", - javax.validation.*;version="${validation:[=.=.=.=,+1.0.0)}";resolution:=optional, - javax.xml.bind.*;version=0, - org.aopalliance.*;version="[1.0.0, 2.0.0)";resolution:=optional, - org.bson.*;version="0", - org.objenesis.*;version="${objenesis:[=.=.=, +1.0.0)}";resolution:=optional, - org.slf4j.*;version="${slf4j:[=.=.=,+1.0.0)}", - org.springframework.*;version="${spring:[=.=.=.=,+1.0.0)}", - org.springframework.data.*;version="${springdata.commons:[=.=.=.=,+1.0.0)}", - org.springframework.data.mongodb.*;version="${project.version:[=.=.=.=,+1.0.0)}", - org.w3c.dom.*;version="0" diff --git a/src/main/antora/antora-playbook.yml b/src/main/antora/antora-playbook.yml new file mode 100644 index 0000000000..9f842fe401 --- /dev/null +++ b/src/main/antora/antora-playbook.yml @@ -0,0 +1,40 @@ +# PACKAGES antora@3.2.0-alpha.2 @antora/atlas-extension:1.0.0-alpha.1 @antora/collector-extension@1.0.0-alpha.3 @springio/antora-extensions@1.1.0-alpha.2 @asciidoctor/tabs@1.0.0-alpha.12 @opendevise/antora-release-line-extension@1.0.0-alpha.2 +# +# The purpose of this Antora playbook is to build the docs in the current branch. +antora: + extensions: + - require: '@springio/antora-extensions' + root_component_name: 'data-mongodb' +site: + title: Spring Data MongoDB + url: https://docs.spring.io/spring-data/mongo/reference +content: + sources: + - url: ./../../.. 
+ branches: HEAD + start_path: src/main/antora + worktrees: true + - url: https://github.com/spring-projects/spring-data-commons + # Refname matching: + # https://docs.antora.org/antora/latest/playbook/content-refname-matching/ + branches: [ main, 3.3.x, 3.2.x] + start_path: src/main/antora +asciidoc: + attributes: + hide-uri-scheme: '@' + tabs-sync-option: '@' + extensions: + - '@asciidoctor/tabs' + - '@springio/asciidoctor-extensions' + - '@springio/asciidoctor-extensions/javadoc-extension' + sourcemap: true +urls: + latest_version_segment: '' +runtime: + log: + failure_level: warn + format: pretty +ui: + bundle: + url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.18/ui-bundle.zip + snapshot: true diff --git a/src/main/antora/antora.yml b/src/main/antora/antora.yml new file mode 100644 index 0000000000..1a23500e21 --- /dev/null +++ b/src/main/antora/antora.yml @@ -0,0 +1,17 @@ +name: data-mongodb +version: true +title: Spring Data MongoDB +nav: + - modules/ROOT/nav.adoc +ext: + collector: + - run: + command: ./mvnw validate process-resources -pl :spring-data-mongodb-distribution -am -Pantora-process-resources + local: true + scan: + dir: spring-data-mongodb-distribution/target/classes/ + - run: + command: ./mvnw package -Pdistribute + local: true + scan: + dir: target/antora diff --git a/src/main/asciidoc/images/jconsole.png b/src/main/antora/modules/ROOT/assets/images/jconsole.png similarity index 100% rename from src/main/asciidoc/images/jconsole.png rename to src/main/antora/modules/ROOT/assets/images/jconsole.png diff --git a/src/main/antora/modules/ROOT/examples/example b/src/main/antora/modules/ROOT/examples/example new file mode 120000 index 0000000000..3195fe72aa --- /dev/null +++ b/src/main/antora/modules/ROOT/examples/example @@ -0,0 +1 @@ +../../../../../../spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example \ No newline at end of file diff --git a/src/main/antora/modules/ROOT/nav.adoc b/src/main/antora/modules/ROOT/nav.adoc new file mode 100644 index 0000000000..221f47c011 --- /dev/null +++ b/src/main/antora/modules/ROOT/nav.adoc @@ -0,0 +1,71 @@ +* xref:index.adoc[Overview] +** xref:commons/upgrade.adoc[] +** xref:migration-guides.adoc[] +*** xref:migration-guide/migration-guide-2.x-to-3.x.adoc[] +*** xref:migration-guide/migration-guide-3.x-to-4.x.adoc[] + +* xref:mongodb.adoc[] +** xref:preface.adoc[] +** xref:mongodb/getting-started.adoc[] +** xref:mongodb/configuration.adoc[] + +** xref:mongodb/template-api.adoc[] +*** xref:mongodb/template-config.adoc[] +*** xref:mongodb/template-collection-management.adoc[] +*** xref:mongodb/template-crud-operations.adoc[] +*** xref:mongodb/template-query-operations.adoc[] +*** xref:mongodb/template-document-count.adoc[] +*** xref:mongodb/aggregation-framework.adoc[] + +** xref:mongodb/template-gridfs.adoc[] +** xref:mongodb/mapping/mapping.adoc[] +*** xref:mongodb/mapping/mapping-schema.adoc[] +*** xref:mongodb/mapping/custom-conversions.adoc[Type based Converter] +*** xref:mongodb/mapping/property-converters.adoc[] +*** xref:mongodb/mapping/unwrapping-entities.adoc[] +*** xref:mongodb/mapping/document-references.adoc[Object References] +*** xref:mongodb/mapping/mapping-index-management.adoc[] + +** xref:mongodb/value-expressions.adoc[] +** xref:mongodb/lifecycle-events.adoc[] +** xref:mongodb/auditing.adoc[] +** xref:mongodb/client-session-transactions.adoc[] +** xref:mongodb/change-streams.adoc[] +** xref:mongodb/tailable-cursors.adoc[] +** xref:mongodb/sharding.adoc[] +** 
xref:mongodb/mongo-search-indexes.adoc[]
+** xref:mongodb/mongo-encryption.adoc[]
+
+// Repository
+* xref:repositories.adoc[]
+** xref:repositories/core-concepts.adoc[]
+** xref:repositories/definition.adoc[]
+** xref:mongodb/repositories/repositories.adoc[]
+** xref:repositories/core-extensions.adoc[]
+** xref:repositories/create-instances.adoc[]
+** xref:repositories/query-methods-details.adoc[]
+** xref:mongodb/repositories/query-methods.adoc[]
+** xref:mongodb/repositories/modifying-methods.adoc[]
+** xref:repositories/projections.adoc[]
+** xref:repositories/custom-implementations.adoc[]
+** xref:repositories/core-domain-events.adoc[]
+** xref:repositories/null-handling.adoc[]
+** xref:mongodb/repositories/cdi-integration.adoc[]
+** xref:repositories/query-keywords-reference.adoc[]
+** xref:repositories/query-return-types-reference.adoc[]
+
+// Observability
+* xref:observability/observability.adoc[]
+** xref:observability/conventions.adoc[]
+** xref:observability/metrics.adoc[]
+** xref:observability/spans.adoc[]
+
+* xref:kotlin.adoc[]
+** xref:kotlin/requirements.adoc[]
+** xref:kotlin/null-safety.adoc[]
+** xref:kotlin/extensions.adoc[]
+** xref:kotlin/coroutines.adoc[]
+
+* xref:attachment$api/java/index.html[Javadoc,role=link-external,window=_blank]
+* https://github.com/spring-projects/spring-data-commons/wiki[Wiki,role=link-external,window=_blank]
+
diff --git a/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc
new file mode 100644
index 0000000000..51a9189aa0
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$upgrade.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/index.adoc b/src/main/antora/modules/ROOT/pages/index.adoc
new file mode 100644
index 0000000000..2a22bd56b4
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/index.adoc
@@ -0,0 +1,22 @@
+[[spring-data-mongodb-reference-documentation]]
+= Spring Data MongoDB
+:revnumber: {version}
+:revdate: {localdate}
+:feature-scroll: true
+
+_Spring Data MongoDB provides support for the MongoDB database.
+It uses familiar Spring concepts such as template classes for core API usage and lightweight repository-style data access to ease development of applications with a consistent programming model._
+
+[horizontal]
+xref:mongodb.adoc[MongoDB] :: MongoDB support and connectivity
+xref:repositories.adoc[Repositories] :: Mongo Repositories
+xref:observability/observability.adoc[Observability] :: Observability Integration
+xref:kotlin.adoc[Kotlin] :: Kotlin support
+// xref:migration-guides.adoc[Migration] :: Migration Guides
+https://github.com/spring-projects/spring-data-commons/wiki[Wiki] :: What's New, Upgrade Notes, Supported Versions, additional cross-version information.
+
+Mark Pollack; Thomas Risberg; Oliver Gierke; Costin Leau; Jon Brisbin; Thomas Darimont; Christoph Strobl; Mark Paluch; Jay Bryant
+
+(C) 2008-{copyright-year} VMware Inc.
+
+Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically.
diff --git a/src/main/antora/modules/ROOT/pages/kotlin.adoc b/src/main/antora/modules/ROOT/pages/kotlin.adoc
new file mode 100644
index 0000000000..4f01678d84
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/kotlin.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$kotlin.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc b/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc
new file mode 100644
index 0000000000..8f578961cf
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$kotlin/coroutines.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc b/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc
new file mode 100644
index 0000000000..381a48be13
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc
@@ -0,0 +1,85 @@
+include::{commons}@data-commons::page$kotlin/extensions.adoc[]
+
+To retrieve a list of `SWCharacter` objects in Java, you would normally write the following:
+
+[source,java]
+----
+Flux<SWCharacter> characters = template.query(SWCharacter.class).inTable("star-wars").all();
+----
+
+With Kotlin and the Spring Data extensions, you can instead write the following:
+
+[source,kotlin]
+----
+val characters = template.query<SWCharacter>().inTable("star-wars").all()
+// or (both are equivalent)
+val characters : Flux<SWCharacter> = template.query<SWCharacter>().inTable("star-wars").all()
+----
+
+As in Java, `characters` in Kotlin is strongly typed, but Kotlin's clever type inference allows for shorter syntax.
+
+[[mongo.query.kotlin-support]]
+== Type-safe Queries for Kotlin
+
+Kotlin embraces domain-specific language creation through its language syntax and its extension system.
+Spring Data MongoDB ships with a Kotlin Extension for `Criteria` using https://kotlinlang.org/docs/reference/reflection.html#property-references[Kotlin property references] to build type-safe queries.
+Queries using this extension typically benefit from improved readability.
+Most keywords on `Criteria` have a matching Kotlin extension, such as `inValues` and `regex`.
+
+Consider the following example explaining Type-safe Queries:
+
+====
+[source,kotlin]
+----
+import org.springframework.data.mongodb.core.query.*
+
+mongoOperations.find<Book>(
+    Query(Book::title isEqualTo "Moby-Dick") <1>
+)
+
+mongoOperations.find<Book>(
+    Query(titlePredicate = Book::title exists true)
+)
+
+mongoOperations.find<Book>(
+    Query(
+        Criteria().andOperator(
+            Book::price gt 5,
+            Book::price lt 10
+        ))
+)
+
+// Binary operators
+mongoOperations.find<BinaryMessage>(
+    Query(BinaryMessage::payload bits { allClear(0b101) }) <2>
+)
+
+// Nested Properties (i.e. refer to "book.author")
+mongoOperations.find<Book>(
+    Query(Book::author / Author::name regex "^H") <3>
+)
+----
+<1> `isEqualTo()` is an infix extension function with receiver type `KProperty<T>` that returns `Criteria`.
+<2> For bitwise operators, pass a lambda argument where you call one of the methods of `Criteria.BitwiseCriteriaOperators`.
+<3> To construct nested properties, use the `/` character (overloaded operator `div`).
+====
+
+[[mongo.update.kotlin-support]]
+== Type-safe Updates for Kotlin
+
+A syntax similar to <<mongo.query.kotlin-support,Type-safe Queries>> can be used to update documents:
+
+====
+[source,kotlin]
+----
+mongoOperations.updateMulti<Book>(
+    Query(Book::title isEqualTo "Moby-Dick"),
+    update(Book::title, "The Whale") <1>
+        .inc(Book::price, 100) <2>
+        .addToSet(Book::authors, "Herman Melville") <3>
+)
+----
+<1> `update()` is a factory function with receiver type `KProperty<T>` that returns `Update`.
+<2> Most methods from `Update` have a matching Kotlin extension.
+<3> Functions accepting a `KProperty` can be used on collection types as well.
+====
diff --git a/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc b/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc
new file mode 100644
index 0000000000..6967ddb3f6
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$kotlin/null-safety.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc b/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc
new file mode 100644
index 0000000000..bb209ab6a4
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$kotlin/requirements.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-2.x-to-3.x.adoc b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-2.x-to-3.x.adoc
new file mode 100644
index 0000000000..c002c1fee5
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-2.x-to-3.x.adoc
@@ -0,0 +1,61 @@
+[[mongodb.migration.2.x-3.x]]
+= Migration Guide from 2.x to 3.x
+
+Spring Data MongoDB 3.x requires the MongoDB Java Driver 4.x.
+
+To learn more about driver versions, please visit the https://www.mongodb.com/docs/drivers/java/sync/current/upgrade/[MongoDB Documentation].
+
+[[dependency-changes]]
+== Dependency Changes
+
+* `org.mongodb:mongo-java-driver` (uber jar) got replaced with:
+** bson-jar
+** core-jar
+** sync-jar
+
+The change in dependencies allows usage of the reactive support without having to pull in the synchronous driver.
+NOTE: The new sync driver no longer supports `com.mongodb.DBObject`. Please use `org.bson.Document` instead.
+
+[[signature-changes]]
+== Signature Changes
+
+* `MongoTemplate` no longer supports `com.mongodb.MongoClient` and `com.mongodb.MongoClientOptions`.
+Please use `com.mongodb.client.MongoClient` and `com.mongodb.MongoClientSettings` instead.
+
+In case you are using `AbstractMongoConfiguration`, please switch to `AbstractMongoClientConfiguration`.
+
+[[namespace-changes]]
+== Namespace Changes
+
+The switch to `com.mongodb.client.MongoClient` requires an update of your configuration XML if you have one.
+The best way to provide required connection information is by using a connection string.
+Please see the https://docs.mongodb.com/manual/reference/connection-string/[MongoDB Documentation] for details.
+
+====
+[source,xml]
+----
+<mongo:mongo-client connection-string="mongodb://localhost:27017/?replicaSet=rs0"/>
+----
+====
diff --git a/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-3.x-to-4.x.adoc b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-3.x-to-4.x.adoc
new file mode 100644
index 0000000000..aa340a4f24
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-3.x-to-4.x.adoc
@@ -0,0 +1,5 @@
+[[mongodb.migration.3.x-4.x]]
+= Migration Guide from 3.x to 4.x
+
+Spring Data MongoDB 4.x requires the MongoDB Java Driver 4.8.x.
+
+To learn more about driver versions, please visit the https://www.mongodb.com/docs/drivers/java/sync/current/upgrade/[MongoDB Documentation].
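+
+A minimal Java configuration matching these driver requirements could look like the following sketch (the class name, database name, and connection string are illustrative and not prescribed by the migration guides):
+
+[source,java]
+----
+import com.mongodb.client.MongoClient;
+import com.mongodb.client.MongoClients;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
+
+@Configuration
+class MongoConfig extends AbstractMongoClientConfiguration {
+
+    @Override
+    protected String getDatabaseName() {
+        return "mydb"; // illustrative database name
+    }
+
+    @Override
+    public MongoClient mongoClient() {
+        // com.mongodb.client.MongoClient replaces the removed com.mongodb.MongoClient
+        return MongoClients.create("mongodb://localhost:27017");
+    }
+}
+----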
diff --git a/src/main/antora/modules/ROOT/pages/migration-guides.adoc b/src/main/antora/modules/ROOT/pages/migration-guides.adoc new file mode 100644 index 0000000000..3f8e783a16 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides.adoc @@ -0,0 +1,8 @@ +[[mongodb.migration]] += Migration Guides +:page-section-summary-toc: 1 + +This section contains version-specific migration guides explaining how to upgrade between two versions. + + + diff --git a/src/main/antora/modules/ROOT/pages/mongodb.adoc b/src/main/antora/modules/ROOT/pages/mongodb.adoc new file mode 100644 index 0000000000..907cbf06be --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb.adoc @@ -0,0 +1,23 @@ +[[mongodb.core]] += MongoDB Support +:page-section-summary-toc: 1 + +Spring Data support for MongoDB contains a wide range of features: + +* xref:mongodb/template-config.adoc[Spring configuration support] with Java-based `@Configuration` classes or an XML namespace for a Mongo driver instance and replica sets. +* xref:mongodb/template-api.adoc[`MongoTemplate` helper class] that increases productivity when performing common Mongo operations. +Includes integrated object mapping between documents and POJOs. +* xref:mongodb/template-api.adoc#mongo-template.exception-translation[Exception translation] into Spring's portable Data Access Exception hierarchy. +* Feature-rich xref:mongodb/mapping/mapping.adoc[Object Mapping] integrated with Spring's Conversion Service. +* xref:mongodb/mapping/mapping.adoc#mapping-usage-annotations[Annotation-based mapping metadata] that is extensible to support other metadata formats. +* xref:mongodb/lifecycle-events.adoc[Persistence and mapping lifecycle events]. +* xref:mongodb/template-query-operations.adoc[Java-based Query, Criteria, and Update DSLs]. +* Automatic implementation of xref:repositories.adoc[Repository interfaces], including support for custom query methods. +* xref:repositories/core-extensions.adoc#mongodb.repositories.queries.type-safe[QueryDSL integration] to support type-safe queries. +* xref:mongodb/client-session-transactions.adoc[Multi-Document Transactions]. +* xref:mongodb/template-query-operations.adoc#mongo.geo-json[GeoSpatial integration]. + +For most tasks, you should use `MongoTemplate` or the Repository support, which both leverage the rich mapping functionality. +`MongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. +`MongoTemplate` also provides callback methods so that it is easy for you to get the low-level API artifacts, such as `com.mongodb.client.MongoDatabase`, to communicate directly with MongoDB. +The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/aggregation-framework.adoc b/src/main/antora/modules/ROOT/pages/mongodb/aggregation-framework.adoc new file mode 100644 index 0000000000..81a00683c6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/aggregation-framework.adoc @@ -0,0 +1,691 @@ +[[mongo.aggregation]] += Aggregation Framework Support + +Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. + +For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. 
+
+[[mongo.aggregation.basic-concepts]]
+== Basic Concepts
+
+The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: javadoc:org.springframework.data.mongodb.core.aggregation.Aggregation[] and javadoc:org.springframework.data.mongodb.core.aggregation.AggregationResults[].
+
+* `Aggregation`
++
+An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregationOperation` and an optional input class.
++
+The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter.
++
+* `TypedAggregation`
++
+A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type that is used for mapping domain properties to actual document fields.
++
+At runtime, field references get checked against the given input type, considering potential `@Field` annotations.
+[NOTE]
+====
+As of version 3.2, referencing non-existent properties no longer raises an error. To restore the previous behaviour, use the `strictMapping` option of `AggregationOptions`.
+====
+* `AggregationDefinition`
++
+An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregation` class to construct an `AggregationOperation`.
++
+* `AggregationResults`
++
+`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result (in the form of a `Document`), to the mapped objects, and to other information about the aggregation.
++
+The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework:
++
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+Aggregation agg = newAggregation(
+    pipelineOP1(),
+    pipelineOP2(),
+    pipelineOPn()
+);
+
+AggregationResults<OutputType> results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class);
+List<OutputType> mappedResult = results.getMappedResults();
+----
+
+Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
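+
+The following sketch illustrates this precedence (the `User` and `UserStats` types and the `legacy_users` collection are hypothetical stand-ins, not classes from the examples below):
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+import static org.springframework.data.mongodb.core.query.Criteria.*;
+
+// The collection name is derived from the User input class.
+TypedAggregation<User> aggregation = newAggregation(User.class, match(where("age").gte(21)));
+AggregationResults<UserStats> derived = mongoTemplate.aggregate(aggregation, UserStats.class);
+
+// An explicitly given collection name takes precedence over the derived one.
+AggregationResults<UserStats> explicit = mongoTemplate.aggregate(aggregation, "legacy_users", UserStats.class);
+----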
+
+[[mongo.aggregation.supported-aggregation-operations]]
+[[aggregation-stages]]
+.Supported Aggregation Operations & Stages
+[%collapsible]
+====
+The MongoDB Aggregation Framework provides the following types of aggregation stages and operations:
+
+* addFields - `AddFieldsOperation`
+* bucket / bucketAuto - `BucketOperation` / `BucketAutoOperation`
+* count - `CountOperation`
+* densify - `DensifyOperation`
+* facet - `FacetOperation`
+* geoNear - `GeoNearOperation`
+* graphLookup - `GraphLookupOperation`
+* group - `GroupOperation`
+* limit - `LimitOperation`
+* lookup - `LookupOperation`
+* match - `MatchOperation`
+* merge - `MergeOperation`
+* project - `ProjectionOperation`
+* redact - `RedactOperation`
+* replaceRoot - `ReplaceRootOperation`
+* sample - `SampleOperation`
+* set - `SetOperation`
+* setWindowFields - `SetWindowFieldsOperation`
+* skip - `SkipOperation`
+* sort / sortByCount - `SortOperation` / `SortByCountOperation`
+* unionWith - `UnionWithOperation`
+* unset - `UnsetOperation`
+* unwind - `UnwindOperation`
+====
+
+[TIP]
+====
+Unsupported aggregation stages (like https://www.mongodb.com/docs/atlas/atlas-search/query-syntax/[$search] for MongoDB Atlas) can be provided by implementing `AggregationOperation`.
+`Aggregation.stage` is a shortcut for registering a pipeline stage by providing its JSON or `Bson` representation.
+
+[source,java]
+----
+Aggregation.stage("""
+    { $search : {
+        "near": {
+          "path": "released",
+          "origin": { "$date": { "$numberLong": "..." } } ,
+          "pivot": 7
+        }
+      }
+    }
+""");
+----
+====
+
+At the time of this writing, we provide support for the following Aggregation Operators in Spring Data MongoDB:
+
+.Aggregation Operators currently supported by Spring Data MongoDB
+[cols="2*"]
+|===
+| Set Aggregation Operators
+| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue`
+
+| Group/Accumulator Aggregation Operators
+| `addToSet`, `bottom`, `bottomN`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `firstN`, `last`, `lastN`, `max`, `maxN`, `min`, `minN`, `avg`, `push`, `sum`, `top`, `topN`, `count` (+++*+++), `median`, `percentile`, `stdDevPop`, `stdDevSamp`
+
+| Arithmetic Aggregation Operators
+| `abs`, `acos`, `acosh`, `add` (+++*+++ via `plus`), `asin`, `asinh`, `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc`
+
+| String Aggregation Operators
+| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `replaceAll`, `replaceOne`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtrim`
+
+| Comparison Aggregation Operators
+| `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne`
+
+| Array Aggregation Operators
+| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `first`, `in`, `indexOfArray`, `isArray`, `last`, `range`, `reverseArray`, `reduce`, `size`, `sortArray`, `slice`, `zip`
+
+| Literal Operators
+| `literal`
+
+| Date Aggregation Operators
+| `dateSubtract`, `dateTrunc`, `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateAdd`, `dateDiff`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear`, `tsIncrement`, `tsSecond`
+
+| Variable Operators
+| `map`
+
+| Conditional Aggregation Operators
+| `cond`, `ifNull`, `switch`
+
+| Type Aggregation Operators
+| `type`
+
+| Convert Aggregation Operators
+| `convert`, `degreesToRadians`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString`
+
+| Object Aggregation Operators
+| `objectToArray`, `mergeObjects`, `getField`, `setField`
+
+| Script Aggregation Operators
+| `function`, `accumulator`
+
+|===
+
++++*+++ The operation is mapped or added by Spring Data MongoDB.
+
+Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
+
+[[mongo.aggregation.projection]]
+== Projection Expressions
+
+Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method.
+Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expressions:
+
+.Projection expression examples
+====
+[source,java]
+----
+// generates {$project: {name: 1, netPrice: 1}}
+project("name", "netPrice")
+
+// generates {$project: {thing2: $thing1}}
+project().and("thing1").as("thing2")
+
+// generates {$project: {a: 1, b: 1, thing2: $thing1}}
+project("a","b").and("thing1").as("thing2")
+----
+====
+
+.Multi-Stage Aggregation using Projection and Sorting
+====
+[source,java]
+----
+// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}}
+project("name", "netPrice"), sort(ASC, "name")
+
+// generates {$project: {name: $firstname}}, {$sort: {name: 1}}
+project().and("firstname").as("name"), sort(ASC, "name")
+
+// does not work
+project().and("firstname").as("name"), sort(ASC, "firstname")
+----
+====
+
+More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
+
+[[mongo.aggregation.facet]]
+== Faceted Classification
+
+As of version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.
+
+[[buckets]]
+=== Buckets
+
+Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries.
Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregation` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output.
+
+`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations:
+
+.Bucket operation examples
+====
+[source,java]
+----
+// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
+bucket("price").withBoundaries(0, 100, 400);
+
+// generates {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}}
+bucket("price").withBoundaries(0, 100).withDefault("Other");
+
+// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
+bucket("price").withBoundaries(0, 100).andOutputCount().as("count");
+
+// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}}}
+bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
+----
+====
+
+`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows examples of bucket operations:
+
+.Bucket operation examples
+====
+[source,java]
+----
+// generates {$bucketAuto: {groupBy: $price, buckets: 5}}
+bucketAuto("price", 5)
+
+// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}}
+bucketAuto("price", 5).withGranularity(Granularities.E24);
+
+// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}}
+bucketAuto("price", 5).andOutput("title").push().as("titles");
+----
+====
+
+To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and xref:mongodb/aggregation-framework.adoc#mongo.aggregation.projection.expressions[SpEL expressions] through `andOutputExpression()`.
+
+Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
+https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.
+
+[[multi-faceted-aggregation]]
+=== Multi-faceted Aggregation
+
+Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is the way many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors.
+
+You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents.
+
+Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. The following listing shows facet operation examples:
+
+.Facet operation examples
+====
+[source,java]
+----
+// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
+facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")
+
+// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}}
+facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")
+
+// generates {$facet: {categorizedByYear: [
+//     { $project: { title: 1, publicationYear: { $year: "publicationDate"}}},
+//     { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}}
+// ]}}
+facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
+      bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
+    .as("categorizedByYear")
+----
+====
+
+Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.
+
+[[mongo.aggregation.sort-by-count]]
+=== Sort By Count
+
+Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using xref:mongodb/aggregation-framework.adoc#mongo.aggregation.facet[Faceted Classification]. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:
+
+.Sort by count example
+====
+[source,java]
+----
+// generates { $sortByCount: "$country" }
+sortByCount("country");
+----
+====
+
+A sort by count operation is equivalent to the following BSON (Binary JSON):
+
+----
+{ $group: { _id: <expression>, count: { $sum: 1 } } },
+{ $sort: { count: -1 } }
+----
+
+[[mongo.aggregation.projection.expressions]]
+=== Spring Expression Support in Projection Expressions
+
+We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations.
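+
+As a quick sketch (the `netPrice` field name is an assumption carried over from the examples further below), an arithmetic SpEL expression attaches to a projection like this:
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+// Translates into {$project: {name: 1, netPrice: 1, grossPrice: {$multiply: ["$netPrice", 1.19]}}}
+project("name", "netPrice")
+    .andExpression("netPrice * 1.19").as("grossPrice");
+----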
+
+[[complex-calculations-with-spel-expressions]]
+==== Complex Calculations with SpEL expressions
+
+Consider the following SpEL expression:
+
+[source,java]
+----
+1 + (q + 1) / (q - 1)
+----
+
+The preceding expression is translated into the following projection expression part:
+
+[source,javascript]
+----
+{ "$add" : [ 1, {
+    "$divide" : [ {
+        "$add":["$q", 1]}, {
+        "$subtract":[ "$q", 1]}
+    ]
+}]}
+----
+
+You can see examples in more context in xref:mongodb/aggregation-framework.adoc#mongo.aggregation.examples.example5[Aggregation Framework Example 5] and xref:mongodb/aggregation-framework.adoc#mongo.aggregation.examples.example6[Aggregation Framework Example 6].
+You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`.
+
+.Supported SpEL transformations
+[%collapsible]
+====
+[%header,cols="2"]
+|===
+| SpEL Expression
+| Mongo Expression Part
+| a == b
+| { $eq : [$a, $b] }
+| a != b
+| { $ne : [$a , $b] }
+| a > b
+| { $gt : [$a, $b] }
+| a >= b
+| { $gte : [$a, $b] }
+| a < b
+| { $lt : [$a, $b] }
+| a <= b
+| { $lte : [$a, $b] }
+| a + b
+| { $add : [$a, $b] }
+| a - b
+| { $subtract : [$a, $b] }
+| a * b
+| { $multiply : [$a, $b] }
+| a / b
+| { $divide : [$a, $b] }
+| a^b
+| { $pow : [$a, $b] }
+| a % b
+| { $mod : [$a, $b] }
+| a && b
+| { $and : [$a, $b] }
+| a \|\| b
+| { $or : [$a, $b] }
+| !a
+| { $not : [$a] }
+|===
+====
+
+In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion:
+
+[source,java]
+----
+// { $setEquals : [$a, [5, 8, 13] ] }
+.andExpression("setEquals(a, new int[]{5, 8, 13})");
+----
+
+[[mongo.aggregation.examples]]
+=== Aggregation Framework Examples
+
+The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.
+
+[[mongo.aggregation.examples.example1]]
+==== Aggregation Framework Example 1
+
+In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting).
+
+[source,java]
+----
+class TagCount {
+    String tag;
+    int n;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+Aggregation agg = newAggregation(
+    project("tags"),
+    unwind("tags"),
+    group("tags").count().as("n"),
+    project("n").and("tag").previousOperation(),
+    sort(DESC, "n")
+);
+
+AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class);
+List<TagCount> tagCount = results.getMappedResults();
+----
+
+The preceding listing uses the following algorithm:
+
+. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`.
+. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection.
+. Use the `unwind` operation to generate a new document for each tag within the `tags` array.
+. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`).
+. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`.
+. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order.
+. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument.
+
+Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method.
+
+[[mongo.aggregation.examples.example2]]
+==== Aggregation Framework Example 2
+
+This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection).
+
+[source,java]
+----
+class ZipInfo {
+    String id;
+    String city;
+    String state;
+    @Field("pop") int population;
+    @Field("loc") double[] location;
+}
+
+class City {
+    String name;
+    int population;
+}
+
+class ZipInfoStats {
+    String id;
+    String state;
+    City biggestCity;
+    City smallestCity;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class,
+    group("state", "city")
+        .sum("population").as("pop"),
+    sort(ASC, "pop", "state", "city"),
+    group("state")
+        .last("city").as("biggestCity")
+        .last("pop").as("biggestPop")
+        .first("city").as("smallestCity")
+        .first("pop").as("smallestPop"),
+    project()
+        .and("state").previousOperation()
+        .and("biggestCity")
+            .nested(bind("name", "biggestCity").and("population", "biggestPop"))
+        .and("smallestCity")
+            .nested(bind("name", "smallestCity").and("population", "smallestPop")),
+    sort(ASC, "state")
+);
+
+AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class);
+ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0);
+----
+
+Note that the `ZipInfo` class maps the structure of the given input collection. The `ZipInfoStats` class defines the structure in the desired output format.
+
+The preceding listings use the following algorithm:
+
+. Use the `group` operation to define a group from the input collection. The grouping criterion is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field.
+. Use the `sort` operation to sort the intermediate result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result.
Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handles).
+. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(…)` operators, respectively, in this second `group` operation.
+. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method.
+. Sort the resulting list of `ZipInfoStats` by their state name in ascending order in the `sort` operation.
+
+Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.
+
+[[mongo.aggregation.examples.example3]]
+==== Aggregation Framework Example 3
+
+This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering).
+
+[source,java]
+----
+class StateStats {
+    @Id String id;
+    String state;
+    @Field("totalPop") int totalPopulation;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+TypedAggregation<ZipInfo> agg = newAggregation(ZipInfo.class,
+    group("state").sum("population").as("totalPop"),
+    sort(ASC, previousOperation(), "totalPop"),
+    match(where("totalPop").gte(10 * 1000 * 1000))
+);
+
+AggregationResults<StateStats> result = mongoTemplate.aggregate(agg, StateStats.class);
+List<StateStats> stateStatsList = result.getMappedResults();
+----
+
+The preceding listings use the following algorithm:
+
+. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`.
+. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order.
+. Filter the intermediate result by using a `match` operation that accepts a `Criteria` query as an argument.
+
+Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.
+
+[[mongo.aggregation.examples.example4]]
+==== Aggregation Framework Example 4
+
+This example demonstrates the use of simple arithmetic operations in the projection operation.
+
+[source,java]
+----
+class Product {
+    String id;
+    String name;
+    double netPrice;
+    int spaceUnits;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+TypedAggregation<Product> agg = newAggregation(Product.class,
+    project("name", "netPrice")
+        .and("netPrice").plus(1).as("netPricePlus1")
+        .and("netPrice").minus(1).as("netPriceMinus1")
+        .and("netPrice").multiply(1.19).as("grossPrice")
+        .and("netPrice").divide(2).as("netPriceDiv2")
+        .and("spaceUnits").mod(2).as("spaceUnitsMod2")
+);
+
+AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
+List<Document> resultList = result.getMappedResults();
+----
+
+Note that we derive the name of the input collection from the `Product` class passed as the first parameter to the `newAggregation` method.
+
+[[mongo.aggregation.examples.example5]]
+==== Aggregation Framework Example 5
+
+This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation.
+
+[source,java]
+----
+class Product {
+    String id;
+    String name;
+    double netPrice;
+    int spaceUnits;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+TypedAggregation<Product> agg = newAggregation(Product.class,
+    project("name", "netPrice")
+        .andExpression("netPrice + 1").as("netPricePlus1")
+        .andExpression("netPrice - 1").as("netPriceMinus1")
+        .andExpression("netPrice / 2").as("netPriceDiv2")
+        .andExpression("netPrice * 1.19").as("grossPrice")
+        .andExpression("spaceUnits % 2").as("spaceUnitsMod2")
+        .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge")
+);
+
+AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
+List<Document> resultList = result.getMappedResults();
+----
+
+[[mongo.aggregation.examples.example6]]
+==== Aggregation Framework Example 6
+
+This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation.
+
+Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values.
+
+[source,java]
+----
+class Product {
+    String id;
+    String name;
+    double netPrice;
+    int spaceUnits;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+double shippingCosts = 1.2;
+
+TypedAggregation<Product> agg = newAggregation(Product.class,
+    project("name", "netPrice")
+        .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice")
+);
+
+AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
+List<Document> resultList = result.getMappedResults();
+----
+
+Note that we can also refer to other fields of the document within the SpEL expression.
+
+[[mongo.aggregation.examples.example7]]
+==== Aggregation Framework Example 7
+
+This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation].
+
+[source,java]
+----
+public class InventoryItem {
+
+    @Id int id;
+    String item;
+    String description;
+    int qty;
+}
+
+public class InventoryItemProjection {
+
+    @Id int id;
+    String item;
+    String description;
+    int qty;
+    int discount;
+}
+----
+
+[source,java]
+----
+import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
+
+TypedAggregation<InventoryItem> agg = newAggregation(InventoryItem.class,
+    project("item").and("discount")
+        .applyCondition(ConditionalOperators.when(Criteria.where("qty").gte(250))
+            .then(30)
+            .otherwise(20))
+        .and(ConditionalOperators.ifNull("description").then("Unspecified")).as("description")
+);
+
+AggregationResults<InventoryItemProjection> result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class);
+List<InventoryItemProjection> inventoryList = result.getMappedResults();
+----
+
+This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description.
+
+As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression.
+
+.Conditional aggregation projection
+====
+[source,java]
+----
+TypedAggregation<Book> agg = Aggregation.newAggregation(Book.class,
+    project("title")
+        .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1>
+                .equalToValue("")) <2>
+            .then("$$REMOVE") <3>
+            .otherwiseValueOf("author.middle") <4>
+        )
+        .as("author.middle"));
+----
+<1> If the value of the field `author.middle`
+<2> does not contain a value,
+<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field.
+<4> Otherwise, add the field value of `author.middle`.
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc b/src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc
new file mode 100644
index 0000000000..014dde02ca
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc
@@ -0,0 +1,47 @@
+[[mongo.auditing]]
+= Auditing
+
+Since Spring Data MongoDB 1.4, auditing can be enabled by annotating a configuration class with the `@EnableMongoAuditing` annotation, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@Configuration
+@EnableMongoAuditing
+class Config {
+
+    @Bean
+    public AuditorAware<String> myAuditorProvider() {
+        return new AuditorAwareImpl();
+    }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+@Configuration
+@EnableReactiveMongoAuditing
+class Config {
+
+    @Bean
+    public ReactiveAuditorAware<String> myAuditorProvider() {
+        return new ReactiveAuditorAwareImpl();
+    }
+}
+----
+
+XML::
++
+[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+<mongo:auditing/>
+----
+======
+
+If you expose a bean of type `AuditorAware` / `ReactiveAuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types.
+If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`.
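+
+A minimal `AuditorAwareImpl`, as a sketch assuming the auditor is identified by a plain `String` user name, could look like this:
+
+[source,java]
+----
+import java.util.Optional;
+import org.springframework.data.domain.AuditorAware;
+
+class AuditorAwareImpl implements AuditorAware<String> {
+
+    @Override
+    public Optional<String> getCurrentAuditor() {
+        // Resolve the current user, e.g. from Spring Security's SecurityContext.
+        return Optional.of("system");
+    }
+}
+----
+
+With such a bean in place, properties annotated with `@CreatedBy` and `@LastModifiedBy` on audited domain types are populated with the returned value.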
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/change-streams.adoc b/src/main/antora/modules/ROOT/pages/mongodb/change-streams.adoc
new file mode 100644
index 0000000000..1f999500bf
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/change-streams.adoc
@@ -0,0 +1,94 @@
+[[change-streams]]
+= Change Streams
+
+As of MongoDB 3.6, https://docs.mongodb.com/manual/changeStreams/[Change Streams] let applications get notified about changes without having to tail the oplog.
+
+NOTE: Change Stream support is only possible for replica sets or for a sharded cluster.
+
+Change Streams can be consumed with both the imperative and the reactive MongoDB Java driver. It is highly recommended to use the reactive variant, as it is less resource-intensive. However, if you cannot use the reactive API, you can still obtain change events by using the messaging concept that is already prevalent in the Spring ecosystem.
+
+It is possible to watch at both the collection and the database level; the database-level variant publishes changes from all collections within the database. When subscribing to a database change stream, make sure to use a suitable type for the event, as conversion might not apply correctly across different entity types. When in doubt, use `Document`.
+
+[[change-streams-with-messagelistener]]
+== Change Streams with `MessageListener`
+
+Listening to a https://docs.mongodb.com/manual/tutorial/change-streams-example/[Change Stream by using a Sync Driver] creates a long-running, blocking task that needs to be delegated to a separate component.
+In this case, we need to first create a javadoc:org.springframework.data.mongodb.core.messaging.MessageListenerContainer[] which will be the main entry point for running the specific `SubscriptionRequest` tasks.
+Spring Data MongoDB already ships with a default implementation that operates on `MongoTemplate` and is capable of creating and running `Task` instances for a javadoc:org.springframework.data.mongodb.core.messaging.ChangeStreamRequest[].
+
+The following example shows how to use Change Streams with `MessageListener` instances:
+
+.Change Streams with `MessageListener` instances
+====
+[source,java]
+----
+MessageListenerContainer container = new DefaultMessageListenerContainer(template);
+container.start(); <1>
+
+MessageListener<ChangeStreamDocument<Document>, User> listener = System.out::println; <2>
+ChangeStreamRequestOptions options = new ChangeStreamRequestOptions("db", "user", ChangeStreamOptions.empty()); <3>
+
+Subscription subscription = container.register(new ChangeStreamRequest<>(listener, options), User.class); <4>
+
+// ...
+
+container.stop(); <5>
+----
+<1> Starting the container initializes the resources and starts `Task` instances for already registered `SubscriptionRequest` instances. Requests added after startup are run immediately.
+<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion.
+<3> Set the collection to listen to and provide additional options through `ChangeStreamOptions`.
+<4> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel it to free resources.
+<5> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container.
+====
+
+[NOTE]
+====
+Errors while processing are passed on to an `org.springframework.util.ErrorHandler`.
Unless stated otherwise, a log-appending `ErrorHandler` is applied by default.
+
+Please use `register(request, body, errorHandler)` to provide additional functionality.
+====
+
+[[reactive-change-streams]]
+== Reactive Change Streams
+
+Subscribing to Change Streams with the reactive API is a more natural approach to work with streams. Still, the essential building blocks, such as `ChangeStreamOptions`, remain the same. The following example shows how to use Change Streams emitting ``ChangeStreamEvent``s:
+
+.Change Streams emitting `ChangeStreamEvent`
+====
+[source,java]
+----
+Flux<ChangeStreamEvent<User>> flux = reactiveTemplate.changeStream(User.class) <1>
+    .watchCollection("people")
+    .filter(where("age").gte(38)) <2>
+    .listen(); <3>
+----
+<1> The event target type the underlying document should be converted to. Leave this out to receive raw results without conversion.
+<2> Use an aggregation pipeline or just a query `Criteria` to filter events.
+<3> Obtain a `Flux` of change stream events. The `ChangeStreamEvent#getBody()` is converted to the requested domain type from (1).
+====
+
+[[resuming-change-streams]]
+== Resuming Change Streams
+
+Change Streams can be resumed and resume emitting events where you left off. To resume the stream, you need to supply either a resume token or the last known server time (in UTC). Use javadoc:org.springframework.data.mongodb.core.ChangeStreamOptions[] to set the value accordingly.
+
+The following example shows how to set the resume offset using server time:
+
+.Resume a Change Stream
+====
+[source,java]
+----
+Flux<ChangeStreamEvent<User>> resumed = template.changeStream(User.class)
+    .watchCollection("people")
+    .resumeAt(Instant.now().minusSeconds(1)) <1>
+    .listen();
+----
+<1> You may obtain the server time of a `ChangeStreamEvent` through the `getTimestamp` method or use the `resumeToken`
+exposed through `getResumeToken`.
+====
+
+TIP: In some cases an `Instant` might not be a precise enough measure when resuming a Change Stream. Use a MongoDB native
+https://docs.mongodb.com/manual/reference/bson-types/#timestamps[BsonTimestamp] for that purpose.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/client-session-transactions.adoc b/src/main/antora/modules/ROOT/pages/mongodb/client-session-transactions.adoc
new file mode 100644
index 0000000000..f825690d7a
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/client-session-transactions.adoc
@@ -0,0 +1,483 @@
+[[mongo.sessions]]
+= Sessions & Transactions
+
+As of version 3.6, MongoDB supports the concept of sessions.
+The use of sessions enables MongoDB's https://docs.mongodb.com/manual/core/read-isolation-consistency-recency/#causal-consistency[Causal Consistency] model, which guarantees running operations in an order that respects their causal relationships.
+Those are split into `ServerSession` instances and `ClientSession` instances.
+In this section, when we speak of a session, we refer to `ClientSession`.
+
+WARNING: Operations within a client session are not isolated from operations outside the session.
+
+Both `MongoOperations` and `ReactiveMongoOperations` provide gateway methods for tying a `ClientSession` to the operations.
+`MongoCollection` and `MongoDatabase` use session proxy objects that implement MongoDB's collection and database interfaces, so you need not add a session on each call.
+This means that a potential call to `MongoCollection#find()` is delegated to `MongoCollection#find(ClientSession)`.
+
+NOTE: Methods such as `(Reactive)MongoOperations#getCollection` return native MongoDB Java Driver gateway objects (such as `MongoCollection`) that themselves offer dedicated methods for `ClientSession`.
+These methods are *NOT* session-proxied.
+You should provide the `ClientSession` where needed when interacting directly with a `MongoCollection` or `MongoDatabase` and not through one of the `#execute` callbacks on `MongoOperations`.
+
+[[mongo.sessions.sync]]
+[[mongo.sessions.reactive]]
+== ClientSession support
+
+The following example shows the usage of a session:
+
+[tabs]
+======
+Imperative::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+ClientSessionOptions sessionOptions = ClientSessionOptions.builder()
+    .causallyConsistent(true)
+    .build();
+
+ClientSession session = client.startSession(sessionOptions); <1>
+
+template.withSession(() -> session)
+    .execute(action -> {
+
+        Query query = query(where("name").is("Durzo Blint"));
+        Person durzo = action.findOne(query, Person.class); <2>
+
+        Person azoth = new Person("Kylar Stern");
+        azoth.setMaster(durzo);
+
+        action.insert(azoth); <3>
+
+        return azoth;
+    });
+
+session.close(); <4>
+----
+
+<1> Obtain a new session from the server.
+<2> Use `MongoOperation` methods as before.
+The `ClientSession` gets applied automatically.
+<3> The session is propagated to write operations within the callback as well.
+<4> Make sure to close the `ClientSession` once you no longer need it.
+
+WARNING: When dealing with `DBRef` instances, especially lazily loaded ones, it is essential to *not* close the `ClientSession` before all data is loaded.
+Otherwise, lazy fetch fails.
+====
+
+Reactive::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+ClientSessionOptions sessionOptions = ClientSessionOptions.builder()
+    .causallyConsistent(true)
+    .build();
+
+Publisher<ClientSession> session = client.startSession(sessionOptions); <1>
+
+template.withSession(session)
+    .execute(action -> {
+
+        Query query = query(where("name").is("Durzo Blint"));
+        return action.findOne(query, Person.class)
+            .flatMap(durzo -> {
+
+                Person azoth = new Person("Kylar Stern");
+                azoth.setMaster(durzo);
+
+                return action.insert(azoth); <2>
+            });
+    }, ClientSession::close) <3>
+    .subscribe(); <4>
+----
+
+<1> Obtain a `Publisher` for new session retrieval.
+<2> Use `ReactiveMongoOperation` methods as before.
+The `ClientSession` is obtained and applied automatically.
+<3> Make sure to close the `ClientSession`.
+<4> Nothing happens until you subscribe.
+See https://projectreactor.io/docs/core/release/reference/#reactive.subscribe[the Project Reactor Reference Guide] for details.
+
+By using a `Publisher` that provides the actual session, you can defer session acquisition to the point of actual subscription.
+Still, you need to close the session when done, so as to not pollute the server with stale sessions.
+Use the `doFinally` hook on `execute` to call `ClientSession#close()` when you no longer need the session.
+If you prefer having more control over the session itself, you can obtain the `ClientSession` through the driver and provide it through a `Supplier`.
+
+NOTE: Reactive use of `ClientSession` is limited to Template API usage.
+There's currently no session integration with reactive repositories.
+====
+======
+
+[[mongo.transactions]]
+== MongoDB Transactions
+
+As of version 4, MongoDB supports https://www.mongodb.com/transactions[Transactions].
+Transactions are built on top of xref:mongodb/client-session-transactions.adoc[Sessions] and, consequently, require an active `ClientSession`.
NOTE: Unless you specify a `MongoTransactionManager` within your application context, transaction support is *DISABLED*.
You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions.

To get full programmatic control over transactions, you may want to use the session callback on `MongoOperations`.

The following example shows programmatic transaction control:

.Programmatic transactions
[tabs]
======
Imperative::
+
====
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
----
ClientSession session = client.startSession(options); <1>

template.withSession(session)
    .execute(action -> {

        session.startTransaction(); <2>

        try {

            Step step = // ...;
            action.insert(step);

            process(step);

            action.update(Step.class).apply(Update.set("state", // ...

            session.commitTransaction(); <3>

        } catch (RuntimeException e) {
            session.abortTransaction(); <4>
        }
    }, ClientSession::close); <5>
----

<1> Obtain a new `ClientSession`.
<2> Start the transaction.
<3> If everything works out as expected, commit the changes.
<4> Something broke, so roll back everything.
<5> Do not forget to close the session when done.

The preceding example lets you have full control over transactional behavior while using the session-scoped `MongoOperations` instance within the callback to ensure the session is passed on to every server call.
To avoid some of the overhead that comes with this approach, you can use a `TransactionTemplate` to take away some of the noise of manual transaction flow.
====

Reactive::
+
====
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
----
Mono<DeleteResult> result = Mono
    .from(client.startSession()) <1>

    .flatMap(session -> {
        session.startTransaction(); <2>

        return Mono.from(collection.deleteMany(session, ...)) <3>

            .onErrorResume(e -> Mono.from(session.abortTransaction()).then(Mono.error(e))) <4>

            .flatMap(val -> Mono.from(session.commitTransaction()).then(Mono.just(val))) <5>

            .doFinally(signal -> session.close()); <6>
    });
----

<1> First, we need to initiate the session.
<2> Once we have the `ClientSession` at hand, start the transaction.
<3> Operate within the transaction by passing on the `ClientSession` to the operation.
<4> If the operation completes exceptionally, we need to abort the transaction and preserve the error.
<5> Otherwise, commit the changes in case of success, still preserving the operation's result.
<6> Lastly, we need to make sure to close the session.

The tricky part of the above operation is keeping the main flow's `DeleteResult` instead of the transaction outcome published via either `commitTransaction()` or `abortTransaction()`, which leads to a rather complicated setup.

NOTE: Unless you specify a `ReactiveMongoTransactionManager` within your application context, transaction support is *DISABLED*.
You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions.
====
======

[[mongo.transactions.transaction-template]]
[[mongo.transactions.reactive-operator]]
== Transactions with TransactionTemplate / TransactionalOperator

Spring Data MongoDB transactions support both `TransactionTemplate` and `TransactionalOperator`.

.Transactions with `TransactionTemplate` / `TransactionalOperator`
[tabs]
======
Imperative::
+
====
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
----
template.setSessionSynchronization(ALWAYS); <1>

// ...

TransactionTemplate txTemplate = new TransactionTemplate(anyTxManager); <2>

txTemplate.execute(new TransactionCallbackWithoutResult() {

    @Override
    protected void doInTransactionWithoutResult(TransactionStatus status) { <3>

        Step step = // ...;
        template.insert(step);

        process(step);

        template.update(Step.class).apply(Update.set("state", // ...
    }
});
----

<1> Enable transaction synchronization during Template API configuration.
<2> Create the `TransactionTemplate` using the provided `PlatformTransactionManager`.
<3> Within the callback, the `ClientSession` and transaction are already registered.

CAUTION: Changing the state of `MongoTemplate` during runtime (as you might think would be possible in item 1 of the preceding listing) can cause threading and visibility issues.
====

Reactive::
+
====
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
----
template.setSessionSynchronization(ALWAYS); <1>

// ...

TransactionalOperator rxtx = TransactionalOperator.create(anyTxManager,
    new DefaultTransactionDefinition()); <2>

Step step = // ...;

Mono<Void> result = template.insert(step)
    .then(process(step))
    .then(template.update(Step.class).apply(Update.set("state", …)).first())
    .as(rxtx::transactional) <3>
    .then();
----

<1> Enable transaction synchronization for transactional participation.
<2> Create the `TransactionalOperator` using the provided `ReactiveTransactionManager`.
<3> `TransactionalOperator.transactional(…)` provides transaction management for all upstream operations.
====
======

[[mongo.transactions.tx-manager]]
[[mongo.transactions.reactive-tx-manager]]
== Transactions with MongoTransactionManager & ReactiveMongoTransactionManager

`MongoTransactionManager` / `ReactiveMongoTransactionManager` is the gateway to the well-known Spring transaction support.
It lets applications use link:{springDocsUrl}/data-access.html#transaction[the managed transaction features of Spring].
The `MongoTransactionManager` binds a `ClientSession` to the thread, whereas the `ReactiveMongoTransactionManager` uses the Reactor `Context` for this.
`MongoTemplate` detects the session and operates on the resources that are associated with the transaction accordingly.
`MongoTemplate` can also participate in other, ongoing transactions.
The following example shows how to create and use transactions with a `MongoTransactionManager`:

.Transactions with `MongoTransactionManager` / `ReactiveMongoTransactionManager`
[tabs]
======
Imperative::
+
====
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
----
@Configuration
static class Config extends AbstractMongoClientConfiguration {

  @Bean
  MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) { <1>
    return new MongoTransactionManager(dbFactory);
  }

  // ...
}

@Component
public class StateService {

  @Transactional
  void someBusinessFunction(Step step) { <2>

    template.insert(step);

    process(step);

    template.update(Step.class).apply(Update.set("state", // ...
  }
}
----

<1> Register `MongoTransactionManager` in the application context.
<2> Mark methods as transactional.

NOTE: `@Transactional(readOnly = true)` advises `MongoTransactionManager` to also start a transaction that adds the
`ClientSession` to outgoing requests.
====

Reactive::
+
====
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
----
@Configuration
public class Config extends AbstractReactiveMongoConfiguration {

  @Bean
  ReactiveMongoTransactionManager transactionManager(ReactiveMongoDatabaseFactory factory) { <1>
    return new ReactiveMongoTransactionManager(factory);
  }

  // ...
}

@Service
public class StateService {

  @Transactional
  Mono<Void> someBusinessFunction(Step step) { <2>

    return template.insert(step)
        .then(process(step))
        .then(template.update(Step.class).apply(Update.set("state", …)).first())
        .then();
  }
}
----

<1> Register `ReactiveMongoTransactionManager` in the application context.
<2> Mark methods as transactional.

NOTE: `@Transactional(readOnly = true)` advises `ReactiveMongoTransactionManager` to also start a transaction that adds the `ClientSession` to outgoing requests.
====
======

[[mongo.transaction.options]]
=== Controlling MongoDB-specific Transaction Options

Transactional service methods can require specific transaction options to run a transaction.
Spring Data MongoDB's transaction managers support evaluation of transaction labels such as `@Transactional(label = { "mongo:readConcern=available" })`.

By default, the label namespace using the `mongo:` prefix is evaluated by the pre-configured `MongoTransactionOptionsResolver`.
Transaction labels are provided by `TransactionAttribute` and available to programmatic transaction control through `TransactionTemplate` and `TransactionalOperator`.
Due to their declarative nature, `@Transactional(label = …)` provides a good starting point that can also serve as documentation.

Currently, the following options are supported:

Max Commit Time::

Controls the maximum execution time on the server for the `commitTransaction` operation.
The format of the value corresponds with the ISO-8601 duration format as used with `Duration.parse(…)`.
+
Usage:
`mongo:maxCommitTime=PT1S`

Read Concern::

Sets the read concern for the transaction.
+
Usage:
`mongo:readConcern=LOCAL|MAJORITY|LINEARIZABLE|SNAPSHOT|AVAILABLE`

Read Preference::

Sets the read preference for the transaction.
+
Usage:
`mongo:readPreference=PRIMARY|SECONDARY|SECONDARY_PREFERRED|PRIMARY_PREFERRED|NEAREST`

Write Concern::

Sets the write concern for the transaction.
+
Usage:
`mongo:writeConcern=ACKNOWLEDGED|W1|W2|W3|UNACKNOWLEDGED|JOURNALED|MAJORITY`

NOTE: Nested transactions that join the outer transaction do not affect the initial transaction options as the transaction is already started.
Transaction options are only applied when a new transaction is started.

[[mongo.transactions.behavior]]
== Special behavior inside transactions

Inside transactions, the MongoDB server behaves slightly differently.

*Connection Settings*

The MongoDB drivers offer a dedicated replica set name configuration option, turning the driver into auto-detection mode.
This option helps identify the primary replica set nodes and command routing during a transaction.

NOTE: Make sure to add `replicaSet` to the MongoDB URI.
Please refer to https://docs.mongodb.com/manual/reference/connection-string/#connections-connection-options[connection string options] for further details.

*Collection Operations*

MongoDB does *not* support collection operations, such as collection creation, within a transaction.
This also affects the on-the-fly collection creation that happens on first usage.
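One way to avoid this is to provision the required collections at startup, before any transaction runs. The following is a minimal sketch, assuming the `Step` entity from the earlier examples:

[source,java]
----
// Create the collection upfront so a later transaction never triggers
// on-the-fly collection creation on first insert.
if (!template.collectionExists(Step.class)) {
    template.createCollection(Step.class);
}
----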
Therefore, make sure to have all required structures in place before the transaction starts.

*Transient Errors*

MongoDB can add special labels to errors raised during transactional operations.
Those may indicate transient failures that might vanish by merely retrying the operation.
We highly recommend https://github.com/spring-projects/spring-retry[Spring Retry] for those purposes.
Nevertheless, you can override `MongoTransactionManager#doCommit(MongoTransactionObject)` to implement a https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation[Retry Commit Operation]
behavior as outlined in the MongoDB reference manual.

*Count*

MongoDB `count` operates upon collection statistics, which may not reflect the actual situation within a transaction.
The server responds with _error 50851_ when issuing a `count` command inside of a multi-document transaction.
Once `MongoTemplate` detects an active transaction, all exposed `count()` methods are converted and delegated to the aggregation framework using `$match` and `$count` operators, preserving `Query` settings, such as `collation`.

Restrictions apply when using geo commands inside of the aggregation count helper.
The following operators cannot be used and must be replaced with a different operator:

* `$where` -> `$expr`
* `$near` -> `$geoWithin` with `$center`
* `$nearSphere` -> `$geoWithin` with `$centerSphere`

Queries using `Criteria.near(…)` and `Criteria.nearSphere(…)` must be rewritten to `Criteria.within(…)` and `Criteria.withinSphere(…)`, respectively.
The same applies to the `near` query keyword in repository query methods, which must be changed to `within`.
See also MongoDB JIRA ticket https://jira.mongodb.org/browse/DRIVERS-518[DRIVERS-518] for further reference.

The following snippet shows `count` usage inside the session-bound closure:

====
[source,java]
----
session.startTransaction();

template.withSession(session)
    .execute(action -> {
        action.count(query(where("state").is("active")), Step.class)
        ...
----
====

The snippet above materializes in the following command:

====
[source,javascript]
----
db.collection.aggregate(
   [
      { $match: { state: "active" } },
      { $count: "totalEntityCount" }
   ]
)
----
====

instead of:

====
[source,javascript]
----
db.collection.find( { state: "active" } ).count()
----
====

diff --git a/src/main/antora/modules/ROOT/pages/mongodb/collation.adoc b/src/main/antora/modules/ROOT/pages/mongodb/collation.adoc
new file mode 100644
index 0000000000..0f99995b92
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/collation.adoc
@@ -0,0 +1,90 @@
[[mongo.collation]]
= Collations

Since version 3.4, MongoDB supports collations for collection and index creation and various query operations.
Collations define string comparison rules based on the http://userguide.icu-project.org/collation/concepts[ICU collations].
A collation document consists of various properties that are encapsulated in `Collation`, as the following listing shows:

====
[source,java]
----
Collation collation = Collation.of("fr") <1>

  .strength(ComparisonLevel.secondary() <2>
    .includeCase())

  .numericOrderingEnabled() <3>

  .alternate(Alternate.shifted().punct()) <4>

  .forwardDiacriticSort() <5>

  .normalizationEnabled(); <6>
----
<1> `Collation` requires a locale for creation. This can be either a string representation of the locale, a `Locale` (considering language, country, and variant), or a `CollationLocale`. The locale is mandatory for creation.
<2> Collation strength defines comparison levels that denote differences between characters. You can configure various options (case-sensitivity, case-ordering, and others), depending on the selected strength.
<3> Specify whether to compare numeric strings as numbers or as strings.
<4> Specify whether the collation should consider whitespace and punctuation as base characters for purposes of comparison.
<5> Specify whether strings with diacritics sort from the back of the string, such as with some French dictionary ordering.
<6> Specify whether to check whether text requires normalization and whether to perform normalization.
====

Collations can be used to create collections and indexes. If you create a collection that specifies a collation, the
collation is applied to index creation and queries unless you specify a different collation. A collation is valid for a
whole operation and cannot be specified on a per-field basis.

Like other metadata, collations can be derived from the domain type via the `collation` attribute of the `@Document`
annotation and will be applied directly when running queries, creating collections, or creating indexes.

NOTE: Annotated collations will not be used when a collection is auto-created by MongoDB on first interaction. This would
require additional store interaction, delaying the entire process. Please use `MongoOperations.createCollection` for those cases.

[source,java]
----
Collation french = Collation.of("fr");
Collation german = Collation.of("de");

template.createCollection(Person.class, CollectionOptions.just(french));

template.indexOps(Person.class).ensureIndex(new Index("name", Direction.ASC).collation(german));
----

NOTE: MongoDB uses simple binary comparison if no collation is specified (`Collation.simple()`).

Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options, as the following two examples show:

.Using collation with `find`
====
[source,java]
----
Collation collation = Collation.of("de");

Query query = new Query(Criteria.where("firstName").is("Amél")).collation(collation);

List<Person> results = template.find(query, Person.class);
----
====

.Using collation with `aggregate`
====
[source,java]
----
Collation collation = Collation.of("de");

AggregationOptions options = AggregationOptions.builder().collation(collation).build();

Aggregation aggregation = newAggregation(
  project("tags"),
  unwind("tags"),
  group("tags")
    .count().as("count")
).withOptions(options);

AggregationResults<TagCount> results = template.aggregate(aggregation, "tags", TagCount.class);
----
====

WARNING: Indexes are only used if the collation used for the operation matches the index collation.

xref:mongodb/repositories/repositories.adoc[MongoDB Repositories] support `Collations` via the `collation` attribute of the `@Query` annotation.

diff --git a/src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc b/src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc
new file mode 100644
index 0000000000..1034acdd79
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc
@@ -0,0 +1,336 @@
[[mongodb-connectors]]
= Connecting to MongoDB

One of the first tasks when using MongoDB and Spring is to create a `MongoClient` object using the IoC container.
There are two main ways to do this, either by using Java-based bean metadata or by using XML-based bean metadata.
+ +NOTE: For those not familiar with how to configure the Spring container using Java-based bean metadata instead of XML-based metadata, see the high-level introduction in the reference docs https://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration[here] as well as the detailed documentation https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#beans-java-instantiating-container[here]. + +[[mongo.mongo-java-config]] +== Registering a Mongo Instance + +The following example shows an example to register an instance of a `MongoClient`: + +.Registering `MongoClient` +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class AppConfig { + + /* + * Use the standard Mongo driver API to create a com.mongodb.client.MongoClient instance. + */ + public @Bean com.mongodb.client.MongoClient mongoClient() { + return com.mongodb.client.MongoClients.create("mongodb://localhost:27017"); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class AppConfig { + + /* + * Use the standard Mongo driver API to create a com.mongodb.client.MongoClient instance. + */ + public @Bean com.mongodb.reactivestreams.client.MongoClient mongoClient() { + return com.mongodb.reactivestreams.client.MongoClients.create("mongodb://localhost:27017"); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="third"] +---- + + + + + + + +---- +====== + +This approach lets you use the standard `MongoClient` instance, with the container using Spring's `MongoClientFactoryBean`/`ReactiveMongoClientFactoryBean`. +As compared to instantiating a `MongoClient` instance directly, the `FactoryBean` has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. +This hierarchy and the use of `@Repository` is described in link:{springDocsUrl}/data-access.html[Spring's DAO support features]. + +The following example shows an example of a Java-based bean metadata that supports exception translation on `@Repository` annotated classes: + +.Registering a `MongoClient` via `MongoClientFactoryBean` / `ReactiveMongoClientFactoryBean` +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class AppConfig { + + /* + * Factory bean that creates the com.mongodb.client.MongoClient instance + */ + public @Bean MongoClientFactoryBean mongo() { + MongoClientFactoryBean mongo = new MongoClientFactoryBean(); + mongo.setHost("localhost"); + return mongo; + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class AppConfig { + + /* + * Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance + */ + public @Bean ReactiveMongoClientFactoryBean mongo() { + ReactiveMongoClientFactoryBean mongo = new ReactiveMongoClientFactoryBean(); + mongo.setHost("localhost"); + return mongo; + } +} +---- +====== + +To access the `MongoClient` object created by the `FactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired MongoClient mongoClient;` field. 
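For example (a minimal sketch; the configuration class name and bean usage are illustrative only):

[source,java]
----
@Configuration
public class MongoConsumingConfig {

	@Autowired
	private com.mongodb.client.MongoClient mongoClient; // the instance created by MongoClientFactoryBean

	// use mongoClient when defining further beans
}
----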
[[mongo.mongo-db-factory]]
== The MongoDatabaseFactory Interface

While `MongoClient` is the entry point to the MongoDB driver API, connecting to a specific MongoDB database instance requires additional information, such as the database name and an optional username and password.
With that information, you can obtain a `MongoDatabase` object and access all the functionality of a specific MongoDB database instance.
Spring provides the `org.springframework.data.mongodb.core.MongoDatabaseFactory` & `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interfaces, shown in the following listing, to bootstrap connectivity to the database:

[tabs]
======
Imperative::
+
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
----
public interface MongoDatabaseFactory {

  MongoDatabase getDatabase() throws DataAccessException;

  MongoDatabase getDatabase(String dbName) throws DataAccessException;
}
----

Reactive::
+
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
----
public interface ReactiveMongoDatabaseFactory {

  Mono<MongoDatabase> getDatabase() throws DataAccessException;

  Mono<MongoDatabase> getDatabase(String dbName) throws DataAccessException;
}
----
======

The following sections show how you can use the container with either Java-based or XML-based metadata to configure an instance of the `MongoDatabaseFactory` interface.
In turn, you can use the `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory` instance to configure `MongoTemplate` / `ReactiveMongoTemplate`.

Instead of using the IoC container to create an instance of the template, you can use them in standard Java code, as follows:

[tabs]
======
Imperative::
+
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
----
public class MongoApplication {

  public static void main(String[] args) throws Exception {

    MongoOperations mongoOps = new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database"));

    // ...
  }
}
----
The use of `SimpleMongoClientDatabaseFactory` is the only difference between this listing and the one shown in the xref:mongodb/getting-started.adoc[getting started section].
Use `SimpleMongoClientDatabaseFactory` when choosing `com.mongodb.client.MongoClient` as the entry point of choice.

Reactive::
+
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
----
public class ReactiveMongoApplication {

  public static void main(String[] args) throws Exception {

    ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"));

    // ...
  }
}
----
======

[[mongo.mongo-db-factory-java]]
[[mongo.mongo-db-factory.config]]
== Registering a `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory`

To register a `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory` instance with the container, you write code much like what was highlighted in the previous section.
+The following listing shows a simple example: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class MongoConfiguration { + + @Bean + public MongoDatabaseFactory mongoDatabaseFactory() { + return new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database"); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class ReactiveMongoConfiguration { + + @Bean + public ReactiveMongoDatabaseFactory mongoDatabaseFactory() { + return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"); + } +} +---- +====== + +MongoDB Server generation 3 changed the authentication model when connecting to the DB. +Therefore, some of the configuration options available for authentication are no longer valid. +You should use the `MongoClient`-specific options for setting credentials through `MongoCredential` to provide authentication data, as shown in the following example: + +[tabs] +====== +Java:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class MongoAppConfig extends AbstractMongoClientConfiguration { + + @Override + public String getDatabaseName() { + return "database"; + } + + @Override + protected void configureClientSettings(Builder builder) { + + builder + .credential(MongoCredential.createCredential("name", "db", "pwd".toCharArray())) + .applyToClusterSettings(settings -> { + settings.hosts(singletonList(new ServerAddress("127.0.0.1", 27017))); + }); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + +---- +Username and password credentials used in XML-based configuration must be URL-encoded when these contain reserved characters, such as `:`, `%`, `@`, or `,`. +The following example shows encoded credentials: +`m0ng0@dmin:mo_res:bw6},Qsdxx@admin@database` -> `m0ng0%40dmin:mo_res%3Abw6%7D%2CQsdxx%40admin@database` +See https://tools.ietf.org/html/rfc3986#section-2.2[section 2.2 of RFC 3986] for further details. +====== + +If you need to configure additional options on the `com.mongodb.client.MongoClient` instance that is used to create a `SimpleMongoClientDbFactory`, you can refer to an existing bean as shown in the following example. 
To show another common usage pattern, the following listing shows the use of a property placeholder, which lets you parametrize the configuration and the creation of a `MongoTemplate`: + +[tabs] +====== +Java:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +@PropertySource("classpath:/com/myapp/mongodb/config/mongo.properties") +public class MongoAppConfig extends AbstractMongoClientConfiguration { + + @Autowired + Environment env; + + @Override + public String getDatabaseName() { + return "database"; + } + + @Override + protected void configureClientSettings(Builder builder) { + + builder.applyToClusterSettings(settings -> { + settings.hosts(singletonList( + new ServerAddress(env.getProperty("mongo.host"), env.getProperty("mongo.port", Integer.class)))); + }); + + builder.applyToConnectionPoolSettings(settings -> { + + settings.maxConnectionLifeTime(env.getProperty("mongo.pool-max-life-time", Integer.class), TimeUnit.MILLISECONDS) + .minSize(env.getProperty("mongo.pool-min-size", Integer.class)) + .maxSize(env.getProperty("mongo.pool-max-size", Integer.class)) + .maintenanceFrequency(10, TimeUnit.MILLISECONDS) + .maintenanceInitialDelay(11, TimeUnit.MILLISECONDS) + .maxConnectionIdleTime(30, TimeUnit.SECONDS) + .maxWaitTime(15, TimeUnit.MILLISECONDS); + }); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + + + + + + + + + + + +---- +====== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/converters-type-mapping.adoc b/src/main/antora/modules/ROOT/pages/mongodb/converters-type-mapping.adoc new file mode 100644 index 0000000000..ea0876aa54 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/converters-type-mapping.adoc @@ -0,0 +1,125 @@ +[[mongo-template.type-mapping]] +== Type Mapping + +MongoDB collections can contain documents that represent instances of a variety of types. +This feature can be useful if you store a hierarchy of classes or have a class with a property of type `Object`.In the latter case, the values held inside that property have to be read in correctly when retrieving the object.Thus, we need a mechanism to store type information alongside the actual document. + +To achieve that, the `MappingMongoConverter` uses a `MongoTypeMapper` abstraction with `DefaultMongoTypeMapper` as its main implementation.Its default behavior to store the fully qualified classname under `_class` inside the document.Type hints are written for top-level documents as well as for every value (if it is a complex type and a subtype of the declared property type).The following example (with a JSON representation at the end) shows how the mapping works: + +.Type mapping +==== +[source,java] +---- +class Sample { + Contact value; +} + +abstract class Contact { … } + +class Person extends Contact { … } + +Sample sample = new Sample(); +sample.value = new Person(); + +mongoTemplate.save(sample); + +{ + "value" : { "_class" : "com.acme.Person" }, + "_class" : "com.acme.Sample" +} +---- +==== + +Spring Data MongoDB stores the type information as the last field for the actual root class as well as for the nested type (because it is complex and a subtype of `Contact`).So, if you now use `mongoTemplate.findAll(Object.class, "sample")`, you can find out that the document stored is a `Sample` instance.You can also find out that the value property is actually a `Person`. 
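For instance, a short read-back sketch (reusing the classes above; variable names are illustrative):

[source,java]
----
// Read raw documents from the "sample" collection; the _class type hints
// restore the concrete Java types during conversion.
List<Object> loaded = mongoTemplate.findAll(Object.class, "sample");

Sample sample = (Sample) loaded.get(0); // root hint: com.acme.Sample
Person person = (Person) sample.value;  // nested hint: com.acme.Person
----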
[[customizing-type-mapping]]
=== Customizing Type Mapping

If you want to avoid writing the entire Java class name as type information but would rather like to use a key, you can use the `@TypeAlias` annotation on the entity class. If you need to customize the mapping even more, have a look at the `TypeInformationMapper` interface. An instance of that interface can be configured at the `DefaultMongoTypeMapper`, which can, in turn, be configured on `MappingMongoConverter`. The following example shows how to define a type alias for an entity:

.Defining a type alias for an Entity
====
[source,java]
----
@TypeAlias("pers")
class Person {

}
----
====

Note that the resulting document contains `pers` as the value in the `_class` field.

[WARNING]
====
Type aliases only work if the mapping context is aware of the actual type.
The required entity metadata is determined either on first save or has to be provided via the configuration's initial entity set.
By default, the configuration class scans the base package for potential candidates.

[source,java]
----
@Configuration
class AppConfig extends AbstractMongoClientConfiguration {

  @Override
  protected Set<Class<?>> getInitialEntitySet() {
    return Collections.singleton(Person.class);
  }

  // ...
}
----
====

[[configuring-custom-type-mapping]]
=== Configuring Custom Type Mapping

The following example shows how to configure a custom `MongoTypeMapper` in `MappingMongoConverter`:

[source,java]
----
class CustomMongoTypeMapper extends DefaultMongoTypeMapper {
  // implement custom type mapping here
}
----

.Configuring a custom `MongoTypeMapper`
====
.Java
[source,java,role="primary"]
----
@Configuration
class SampleMongoConfiguration extends AbstractMongoClientConfiguration {

  @Override
  protected String getDatabaseName() {
    return "database";
  }

  @Bean
  @Override
  public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
      MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
    MappingMongoConverter mmc = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext);
    mmc.setTypeMapper(customTypeMapper());
    return mmc;
  }

  @Bean
  public MongoTypeMapper customTypeMapper() {
    return new CustomMongoTypeMapper();
  }
}
----

.XML
[source,xml,role="secondary"]
----
<mongo:mapping-converter type-mapper-ref="customTypeMapper"/>

<bean name="customTypeMapper" class="com.example.CustomMongoTypeMapper"/>
----
====

Note that the preceding example extends the `AbstractMongoClientConfiguration` class and overrides the bean definition of the `MappingMongoConverter` where we configured our custom `MongoTypeMapper`.

diff --git a/src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc b/src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc
new file mode 100644
index 0000000000..a850604594
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc
@@ -0,0 +1,40 @@
TODO: add the following section somewhere

[[mongo.geo-json.jackson-modules]]
== GeoJSON Jackson Modules

By using the Spring Data web support, Spring Data registers additional Jackson ``Module``s to the `ObjectMapper` for de-/serializing common Spring Data domain types.
Please refer to the Spring Data web support section to learn more about the infrastructure setup of this feature.

The MongoDB module additionally registers ``JsonDeserializer``s for the following GeoJSON types via its `GeoJsonConfiguration` exposing the `GeoJsonModule`.
+---- +org.springframework.data.mongodb.core.geo.GeoJsonPoint +org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint +org.springframework.data.mongodb.core.geo.GeoJsonLineString +org.springframework.data.mongodb.core.geo.GeoJsonMultiLineString +org.springframework.data.mongodb.core.geo.GeoJsonPolygon +org.springframework.data.mongodb.core.geo.GeoJsonMultiPolygon +---- + +[NOTE] +==== +The `GeoJsonModule` only registers ``JsonDeserializer``s! + +To equip the `ObjectMapper` with a symmetric set of ``JsonSerializer``s you need to either manually configure those for the `ObjectMapper` or provide a custom `SpringDataJacksonModules` configuration exposing `GeoJsonModule.serializers()` as a Spring Bean. + +[source,java] +---- +class GeoJsonConfiguration implements SpringDataJacksonModules { + + @Bean + public Module geoJsonSerializers() { + return GeoJsonModule.serializers(); + } +} +---- +==== + +[WARNING] +==== +The next major version (`4.0`) will register both, ``JsonDeserializer``s and ``JsonSerializer``s for GeoJSON types by default. +==== + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/getting-started.adoc b/src/main/antora/modules/ROOT/pages/mongodb/getting-started.adoc new file mode 100644 index 0000000000..a52e336ea3 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/getting-started.adoc @@ -0,0 +1,62 @@ +[[mongodb-getting-started]] += Getting Started + +An easy way to bootstrap setting up a working environment is to create a Spring-based project via https://start.spring.io/#!type=maven-project&dependencies=data-mongodb[start.spring.io] or create a Spring project in https://spring.io/tools[Spring Tools]. + +[[mongo.examples-repo]] +== Examples Repository + +The GitHub https://github.com/spring-projects/spring-data-examples[spring-data-examples repository] hosts several examples that you can download and play around with to get a feel for how the library works. + +[[mongodb.hello-world]] +== Hello World + +First, you need to set up a running MongoDB server. Refer to the https://docs.mongodb.org/manual/core/introduction/[MongoDB Quick Start guide] for an explanation on how to startup a MongoDB instance. 
+Once installed, starting MongoDB is typically a matter of running the following command: `/bin/mongod` + +Then you can create a `Person` class to persist: + +==== +[source,java] +---- +include::example$example/Person.java[tags=file] +---- +==== + +You also need a main application to run: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +include::example$example/MongoApplication.java[tags=file] +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +include::example$example/ReactiveMongoApplication.java[tags=file] +---- +====== + +When you run the main program, the preceding examples produce the following output: + +[source] +---- +10:01:32,265 DEBUG o.s.data.mongodb.core.MongoTemplate - insert Document containing fields: [_class, age, name] in collection: Person +10:01:32,765 DEBUG o.s.data.mongodb.core.MongoTemplate - findOne using query: { "name" : "Joe"} in db.collection: database.Person +Person [id=4ddbba3c0be56b7e1b210166, name=Joe, age=34] +10:01:32,984 DEBUG o.s.data.mongodb.core.MongoTemplate - Dropped collection [database.person] +---- + +Even in this simple example, there are few things to notice: + +* You can instantiate the central helper class of Spring Mongo, xref:mongodb/template-api.adoc[`MongoTemplate`], by using the standard or reactive `MongoClient` object and the name of the database to use. +* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See xref:mongodb/mapping/mapping.adoc[here]). +* Conventions are used for handling the `id` field, converting it to be an `ObjectId` when stored in the database. +* Mapping conventions can use field access. Notice that the `Person` class has only getters. +* If the constructor argument names match the field names of the stored document, they are used to instantiate the object + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/jmx.adoc b/src/main/antora/modules/ROOT/pages/mongodb/jmx.adoc new file mode 100644 index 0000000000..8b98bcebd2 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/jmx.adoc @@ -0,0 +1,70 @@ +[[mongo.jmx]] += JMX support + +[NOTE] +==== +JMX support has been deprecated in 4.5 and will be removed in 5.0. + +We recommend switching to Spring Boot https://docs.spring.io/spring-boot/reference/actuator/endpoints.html[Actuator Endpoints] and expose those over JMX if needed. +==== + +The JMX support for MongoDB exposes the results of running the 'serverStatus' command on the admin database for a single MongoDB server instance. It also exposes an administrative MBean, `MongoAdmin`, that lets you perform administrative operations, such as dropping or creating a database. The JMX features build upon the JMX feature set available in the Spring Framework. See link:{springDocsUrl}/integration.html#jmx[here] for more details. 
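While the reference configuration below uses the XML namespace, a rough Java-based sketch might look as follows. This assumes the deprecated `org.springframework.data.mongodb.monitor.MongoAdmin` MBean and Spring's standard MBean export; treat the constructor details as an assumption:

[source,java]
----
@Configuration
@EnableMBeanExport // exports @ManagedResource beans, such as MongoAdmin, over JMX
class MongoJmxConfig {

	@Bean
	MongoAdmin mongoAdmin(MongoClient mongoClient) {
		return new MongoAdmin(mongoClient); // assumed constructor taking the client
	}
}
----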
+ +[[mongodb:jmx-configuration]] +== MongoDB JMX Configuration + +Spring's Mongo namespace lets you enable JMX functionality, as the following example shows: + +.XML schema to configure MongoDB +==== +[source,xml] +---- + + + + + + + + + + + + + + + + + + + + +---- +==== + +The preceding code exposes several MBeans: + +* `AssertMetrics` +* `BackgroundFlushingMetrics` +* `BtreeIndexCounters` +* `ConnectionMetrics` +* `GlobalLockMetrics` +* `MemoryMetrics` +* `OperationCounters` +* `ServerInfo` +* `MongoAdmin` + +The following screenshot from JConsole shows the resulting configuration: + +image::jconsole.png[] diff --git a/src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc b/src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc new file mode 100644 index 0000000000..e7a41a3426 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc @@ -0,0 +1,147 @@ +[[mongodb.mapping-usage.events]] += Lifecycle Events + +The MongoDB mapping framework includes several `org.springframework.context.ApplicationEvent` events that your application can respond to by registering special beans in the `ApplicationContext`. +Being based on Spring's `ApplicationContext` event infrastructure enables other products, such as Spring Integration, to easily receive these events, as they are a well known eventing mechanism in Spring-based applications. + +Entity lifecycle events can be costly and you may notice a change in the performance profile when loading large result sets. +You can disable lifecycle events on the javadoc:org.springframework.data.mongodb.core.MongoTemplate#setEntityLifecycleEventsEnabled(boolean)[Template API]. + +To intercept an object before it goes through the conversion process (which turns your domain object into a `org.bson.Document`), you can register a subclass of `AbstractMongoEventListener` that overrides the `onBeforeConvert` method. +When the event is dispatched, your listener is called and passed the domain object before it goes into the converter. +The following example shows how to do so: + +==== +[source,java] +---- +public class BeforeConvertListener extends AbstractMongoEventListener { + @Override + public void onBeforeConvert(BeforeConvertEvent event) { + ... does some auditing manipulation, set timestamps, whatever ... + } +} +---- +==== + +To intercept an object before it goes into the database, you can register a subclass of javadoc:org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener[] that overrides the `onBeforeSave` method. When the event is dispatched, your listener is called and passed the domain object and the converted `com.mongodb.Document`. The following example shows how to do so: + +==== +[source,java] +---- +public class BeforeSaveListener extends AbstractMongoEventListener { + @Override + public void onBeforeSave(BeforeSaveEvent event) { + … change values, delete them, whatever … + } +} +---- +==== + +Declaring these beans in your Spring ApplicationContext causes them to be invoked whenever the event is dispatched. + +.Callbacks on `AbstractMappingEventListener`: +[%collapsible] +==== +* `onBeforeConvert`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations before the object is converted to a `Document` by a `MongoConverter`. +* `onBeforeSave`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations *before* inserting or saving the `Document` in the database. 
* `onAfterSave`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations *after* inserting or saving the `Document` in the database.
* `onAfterLoad`: Called in `MongoTemplate` `find`, `findAndRemove`, `findOne`, and `getCollection` methods after the `Document` has been retrieved from the database.
* `onAfterConvert`: Called in `MongoTemplate` `find`, `findAndRemove`, `findOne`, and `getCollection` methods after the `Document` retrieved from the database has been converted to a POJO.
====

NOTE: Lifecycle events are only emitted for root level types.
Complex types used as properties within a document root are not subject to event publication unless they are document references annotated with `@DBRef`.

WARNING: Lifecycle events depend on an `ApplicationEventMulticaster`, which in case of the `SimpleApplicationEventMulticaster` can be configured with a `TaskExecutor` and therefore gives no guarantees as to when an event is processed.

include::{commons}@data-commons::page$entity-callbacks.adoc[leveloffset=+1]

[[mongo.entity-callbacks]]
== Store specific EntityCallbacks

Spring Data MongoDB uses the `EntityCallback` API for its auditing support and reacts on the following callbacks.

.Supported Entity Callbacks
[%header,cols="4"]
|===
| Callback
| Method
| Description
| Order

| `ReactiveBeforeConvertCallback`
`BeforeConvertCallback`
| `onBeforeConvert(T entity, String collection)`
| Invoked before a domain object is converted to `org.bson.Document`.
| `Ordered.LOWEST_PRECEDENCE`

| `ReactiveAfterConvertCallback`
`AfterConvertCallback`
| `onAfterConvert(T entity, org.bson.Document target, String collection)`
| Invoked after a domain object is loaded.

Can modify the domain object after reading it from a `org.bson.Document`.
| `Ordered.LOWEST_PRECEDENCE`

| `ReactiveAuditingEntityCallback`
`AuditingEntityCallback`
| `onBeforeConvert(Object entity, String collection)`
| Marks an auditable entity as _created_ or _modified_.
| 100

| `ReactiveBeforeSaveCallback`
`BeforeSaveCallback`
| `onBeforeSave(T entity, org.bson.Document target, String collection)`
| Invoked before a domain object is saved.

Can modify the target `Document` (to be persisted) containing all mapped entity information.
| `Ordered.LOWEST_PRECEDENCE`

| `ReactiveAfterSaveCallback`
`AfterSaveCallback`
| `onAfterSave(T entity, org.bson.Document target, String collection)`
| Invoked after a domain object is saved.

Can modify the domain object, to be returned after save, and the `Document` containing all mapped entity information.
| `Ordered.LOWEST_PRECEDENCE`

|===

=== Bean Validation

Spring Data MongoDB supports Bean Validation for MongoDB entities annotated with https://beanvalidation.org/[Jakarta Validation annotations].
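For example, an entity carrying such constraints might look like this (a brief illustrative sketch):

[source,java]
----
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;

class Person {

	@NotNull
	String name;

	@Min(0)
	int age;
}
----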
You can enable Bean Validation by registering `ValidatingEntityCallback`, or `ReactiveValidatingEntityCallback` for reactive driver usage, in your Spring `ApplicationContext` as shown in the following example:

[tabs]
======
Imperative::
+
[source,java,indent=0,subs="verbatim,quotes",role="primary"]
----
@Configuration
class Config {

  @Bean
  public ValidatingEntityCallback validatingEntityCallback(Validator validator) {
    return new ValidatingEntityCallback(validator);
  }
}
----

Reactive::
+
[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
----
@Configuration
class Config {

  @Bean
  public ReactiveValidatingEntityCallback validatingEntityCallback(Validator validator) {
    return new ReactiveValidatingEntityCallback(validator);
  }
}
----
======

If you use both the imperative and the reactive flow, you can enable both callbacks.

NOTE: When using XML-based configuration, historically, `ValidatingMongoEventListener` is registered through our namespace handlers.
If you want to use the newer Entity Callback variant, make sure not to register both; otherwise you'll end up with both the `ValidatingMongoEventListener` and the `ValidatingEntityCallback` being registered.

diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/custom-conversions.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/custom-conversions.adoc
new file mode 100644
index 0000000000..4553be1d43
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/custom-conversions.adoc
@@ -0,0 +1,114 @@
include::{commons}@data-commons::page$custom-conversions.adoc[]

[[mongo.custom-converters]]
== Type based Converter

The most trivial way of influencing the mapping result is by specifying the desired native MongoDB target type via the `@Field` annotation.
This allows you to work with non-MongoDB types like `BigDecimal` in the domain model while persisting values in native `org.bson.types.Decimal128` format.

.Explicit target type mapping
====
[source,java]
----
public class Payment {

  @Id String id; <1>

  @Field(targetType = FieldType.DECIMAL128) <2>
  BigDecimal value;

  Date date; <3>

}
----

[source,java]
----
{
  "_id" : ObjectId("5ca4a34fa264a01503b36af8"), <1>
  "value" : NumberDecimal(2.099), <2>
  "date" : ISODate("2019-04-03T12:11:01.870Z") <3>
}
----
<1> String _id_ values that represent a valid `ObjectId` are converted automatically. See xref:mongodb/template-crud-operations.adoc#mongo-template.id-handling[How the `_id` Field is Handled in the Mapping Layer]
for details.
<2> The desired target type is explicitly defined as `Decimal128`, which translates to `NumberDecimal`.
Otherwise, the `BigDecimal` value would have been turned into a `String`.
<3> `Date` values are handled by the MongoDB driver itself and are stored as `ISODate`.
====

The snippet above is handy for providing simple type hints. To gain more fine-grained control over the mapping process,
you can register Spring converters with the `MongoConverter` implementations, such as the `MappingMongoConverter`.

The `MappingMongoConverter` checks to see if any Spring converters can handle a specific class before attempting to map the object itself. To 'hijack' the normal mapping strategies of the `MappingMongoConverter`, perhaps for increased performance or other custom mapping needs, you first need to create an implementation of the Spring `Converter` interface and then register it with the `MappingMongoConverter`.
NOTE: For more information on the Spring type conversion service, see the reference docs link:{springDocsUrl}/core.html#validation[here].

[[mongo.custom-converters.writer]]
=== Writing Converter

The following example shows an implementation of the `Converter` that converts from a `Person` object to a `org.bson.Document`:

[source,java]
----
import org.springframework.core.convert.converter.Converter;

import org.bson.Document;

public class PersonWriteConverter implements Converter<Person, Document> {

  public Document convert(Person source) {
    Document document = new Document();
    document.put("_id", source.getId());
    document.put("name", source.getFirstName());
    document.put("age", source.getAge());
    return document;
  }
}
----

[[mongo.custom-converters.reader]]
=== Reading Converter

The following example shows an implementation of a `Converter` that converts from a `Document` to a `Person` object:

[source,java]
----
public class PersonReadConverter implements Converter<Document, Person> {

  public Person convert(Document source) {
    Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name"));
    p.setAge((Integer) source.get("age"));
    return p;
  }
}
----

[[mongo.custom-converters.xml]]
=== Registering Converters

[source,java]
----
class MyMongoConfiguration extends AbstractMongoClientConfiguration {

  @Override
  public String getDatabaseName() {
    return "database";
  }

  @Override
  protected void configureConverters(MongoConverterConfigurationAdapter adapter) {
    adapter.registerConverter(new com.example.PersonReadConverter());
    adapter.registerConverter(new com.example.PersonWriteConverter());
  }
}
----

[[mongo.numeric-conversion]]
== Big Number Format

MongoDB in its early days did not have support for large numeric values such as `BigDecimal`.
To persist `BigDecimal` and `BigInteger` values, Spring Data MongoDB converted such values to their `String` representation.
With MongoDB Server 3.4, `org.bson.types.Decimal128` offers a native representation for `BigDecimal` and `BigInteger`.
You can use the native representation by either annotating your properties with `@Field(targetType=DECIMAL128)` or by configuring the big decimal representation in `MongoCustomConversions` through `MongoCustomConversions.create(config -> config.bigDecimal(…))`.

diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/document-references.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/document-references.adoc
new file mode 100644
index 0000000000..1dec452dcf
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/document-references.adoc
@@ -0,0 +1,505 @@
[[mapping-usage-references]]
= Using DBRefs

The mapping framework does not have to store child objects embedded within the document.
You can also store them separately and use a `DBRef` to refer to that document.
When the object is loaded from MongoDB, those references are eagerly resolved so that you get back a mapped object that looks the same as if it had been stored embedded within your top-level document.
The following example uses a DBRef to refer to a specific document that exists independently of the object in which it is referenced (both classes are shown in-line for brevity's sake):

====
[source,java]
----
@Document
public class Account {

  @Id
  private ObjectId id;
  private Float total;
}

@Document
public class Person {

  @Id
  private ObjectId id;
  @Indexed
  private Integer ssn;
  @DBRef
  private List<Account> accounts;
}
----
====

You need not use `@OneToMany` or similar mechanisms because the `List` of objects tells the mapping framework that you want a one-to-many relationship.
When the object is stored in MongoDB, there is a list of DBRefs rather than the `Account` objects themselves.
When it comes to loading collections of ``DBRef``s, it is advisable to restrict references held in collection types to a specific MongoDB collection.
This allows bulk loading of all references, whereas references pointing to different MongoDB collections need to be resolved one by one.

IMPORTANT: The mapping framework does not handle cascading saves.
If you change an `Account` object that is referenced by a `Person` object, you must save the `Account` object separately.
Calling `save` on the `Person` object does not automatically save the `Account` objects in the `accounts` property.

``DBRef``s can also be resolved lazily.
In this case, the actual `Object` or `Collection` of references is resolved on first access of the property.
Use the `lazy` attribute of `@DBRef` to specify this.
Required properties that are defined as lazy-loading ``DBRef``s and used as constructor arguments are also decorated with the lazy-loading proxy, putting as little pressure on the database and network as possible.

TIP: Lazily loaded ``DBRef``s can be hard to debug.
Make sure tooling does not accidentally trigger proxy resolution by, for example, calling `toString()` or some inline debug rendering invoking property getters.
Please consider enabling _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution.

CAUTION: Lazy loading may require class proxies, which, in turn, might need access to JDK internals that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default].
For those cases, please consider falling back to an interface type (e.g. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument.

[[mapping-usage.document-references]]
== Using Document References

Using `@DocumentReference` offers a flexible way of referencing entities in MongoDB.
While the goal is the same as when using xref:mongodb/mapping/document-references.adoc[DBRefs], the store representation is different.
`DBRef` resolves to a document with a fixed structure as outlined in the https://docs.mongodb.com/manual/reference/database-references/[MongoDB Reference documentation].

Document references do not follow a specific format.
They can be literally anything: a single value, an entire document, basically everything that can be stored in MongoDB.
By default, the mapping layer will use the referenced entity's _id_ value for storage and retrieval, like in the sample below.
+ +==== +[source,java] +---- +@Document +class Account { + + @Id + String id; + Float total; +} + +@Document +class Person { + + @Id + String id; + + @DocumentReference <1> + List accounts; +} +---- + +[source,java] +---- +Account account = … + +template.insert(account); <2> + +template.update(Person.class) + .matching(where("id").is(…)) + .apply(new Update().push("accounts").value(account)) <3> + .first(); +---- + +[source,json] +---- +{ + "_id" : …, + "accounts" : [ "6509b9e" … ] <4> +} +---- +<1> Mark the collection of `Account` values to be referenced. +<2> The mapping framework does not handle cascading saves, so make sure to persist the referenced entity individually. +<3> Add the reference to the existing entity. +<4> Referenced `Account` entities are represented as an array of their `_id` values. +==== + +The sample above uses an ``_id``-based fetch query (`{ '_id' : ?#{#target} }`) for data retrieval and resolves linked entities eagerly. +It is possible to alter resolution defaults (listed below) using the attributes of `@DocumentReference` + +.@DocumentReference defaults +[cols="2,3,5",options="header"] +|=== +| Attribute | Description | Default + +| `db` +| The target database name for collection lookup. +| `MongoDatabaseFactory.getMongoDatabase()` + +| `collection` +| The target collection name. +| The annotated property's domain type, respectively the value type in case of `Collection` like or `Map` properties, collection name. + +| `lookup` +| The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator. +| An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value. + +| `sort` +| Used for sorting result documents on server side. +| None by default. +Result order of `Collection` like properties is restored based on the used lookup query on a best-effort basis. + +| `lazy` +| If set to `true` value resolution is delayed upon first access of the property. +| Resolves properties eagerly by default. +|=== + +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. + +`@DocumentReference(lookup)` allows defining filter queries that can be different from the `_id` field and therefore offer a flexible way of defining references between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + @Field("publisher_ac") + @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") <1> + Publisher publisher; +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; <1> + String name; + + @DocumentReference(lazy = true) <2> + List books; + +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. 
Brett"], + "publisher_ac" : "DR" +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 1a23e45, + "acronym" : "DR", + "name" : "Del Rey", + … +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> Lazy load back references to the `Book` collection. +==== + +The above snippet shows the reading side of things when working with custom referenced objects. +Writing requires a bit of additional setup as the mapping information do not express where `#target` stems from. +The mapping layer requires registration of a `Converter` between the target document and `DocumentPointer`, like the one below: + +==== +[source,java] +---- +@WritingConverter +class PublisherReferenceConverter implements Converter> { + + @Override + public DocumentPointer convert(Publisher source) { + return () -> source.getAcronym(); + } +} +---- +==== + +If no `DocumentPointer` converter is provided the target reference document can be computed based on the given lookup query. +In this case the association target properties are evaluated as shown in the following sample. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc} }") <1> <2> + Publisher publisher; +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; <1> + String name; + + // ... +} +---- + +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisher" : { + "acc" : "DOC" + } +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> The field value placeholders of the lookup query (like `acc`) is used to form the reference document. +==== + +It is also possible to model relational style _One-To-Many_ references using a combination of `@ReadonlyProperty` and `@DocumentReference`. +This approach allows link types without storing the linking values within the owning document but rather on the referencing document as shown in the example below. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + ObjectId publisherId; <1> +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; + String name; + + @ReadOnlyProperty <2> + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") <3> + List books; +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisherId" : 8cfb002 +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 8cfb002, + "acronym" : "DR", + "name" : "Del Rey" +} +---- +<1> Set up the link from `Book` (reference) to `Publisher` (owner) by storing the `Publisher.id` within the `Book` document. +<2> Mark the property holding the references to be readonly. +This prevents storing references to individual ``Book``s with the `Publisher` document. +<3> Use the `#self` variable to access values within the `Publisher` document and in this retrieve `Books` with matching `publisherId`. +==== + +With all the above in place it is possible to model all kind of associations between entities. +Have a look at the non-exhaustive list of samples below to get feeling for what is possible. 
+
+.Simple Document Reference using _id_ field
+====
+[source,java]
+----
+class Entity {
+    @DocumentReference
+    ReferencedObject ref;
+}
+----
+
+[source,json]
+----
+// entity
+{
+    "_id" : "8cfb002",
+    "ref" : "9a48e32" <1>
+}
+
+// referenced object
+{
+    "_id" : "9a48e32" <1>
+}
+----
+<1> MongoDB simple types can be used directly without further configuration.
+====
+
+.Simple Document Reference using _id_ field with explicit lookup query
+====
+[source,java]
+----
+class Entity {
+    @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") <1>
+    ReferencedObject ref;
+}
+----
+
+[source,json]
+----
+// entity
+{
+    "_id" : "8cfb002",
+    "ref" : "9a48e32" <1>
+}
+
+// referenced object
+{
+    "_id" : "9a48e32"
+}
+----
+<1> _target_ defines the reference value itself.
+====
+
+.Document Reference extracting the `refKey` field for the lookup query
+====
+[source,java]
+----
+class Entity {
+    @DocumentReference(lookup = "{ '_id' : '?#{refKey}' }") <1> <2>
+    private ReferencedObject ref;
+}
+----
+
+[source,java]
+----
+@WritingConverter
+class ToDocumentPointerConverter implements Converter<ReferencedObject, DocumentPointer<Document>> {
+    public DocumentPointer<Document> convert(ReferencedObject source) {
+        return () -> new Document("refKey", source.id); <1>
+    }
+}
+----
+
+[source,json]
+----
+// entity
+{
+    "_id" : "8cfb002",
+    "ref" : {
+        "refKey" : "9a48e32" <1>
+    }
+}
+
+// referenced object
+{
+    "_id" : "9a48e32"
+}
+----
+<1> The key used for obtaining the reference value must be the one used during write.
+<2> `refKey` is short for `target.refKey`.
+====
+
+.Document Reference with multiple values forming the lookup query
+====
+[source,java]
+----
+class Entity {
+    @DocumentReference(lookup = "{ 'firstname' : '?#{fn}', 'lastname' : '?#{ln}' }") <1> <2>
+    ReferencedObject ref;
+}
+----
+
+[source,json]
+----
+// entity
+{
+    "_id" : "8cfb002",
+    "ref" : {
+        "fn" : "Josh", <1>
+        "ln" : "Long" <1>
+    }
+}
+
+// referenced object
+{
+    "_id" : "9a48e32",
+    "firstname" : "Josh", <2>
+    "lastname" : "Long" <2>
+}
+----
+<1> Read/write the keys `fn` & `ln` from/to the linkage document based on the lookup query.
+<2> Use non _id_ fields for the lookup of the target documents.
+====
+
+.Document Reference reading from a target collection
+====
+[source,java]
+----
+class Entity {
+    @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") <2>
+    private ReferencedObject ref;
+}
+----
+
+[source,java]
+----
+@WritingConverter
+class ToDocumentPointerConverter implements Converter<ReferencedObject, DocumentPointer<Document>> {
+    public DocumentPointer<Document> convert(ReferencedObject source) {
+        return () -> new Document("id", source.id) <1>
+            .append("collection", … ); <2>
+    }
+}
+----
+
+[source,json]
+----
+// entity
+{
+    "_id" : "8cfb002",
+    "ref" : {
+        "id" : "9a48e32", <1>
+        "collection" : "…" <2>
+    }
+}
+----
+<1> Read/write the key `id` from/to the reference document to use it in the lookup query.
+<2> The collection name can be read from the reference document using its key.
+====
+
+[WARNING]
+====
+We know it is tempting to use all kinds of MongoDB query operators in the lookup query, and this is fine.
+But there are a few aspects to consider:
+
+* Make sure to have indexes in place that support your lookup.
+* Make sure to use the same data types: `@DocumentReference(lookup="{'someRef':?#{#self._id} }")` can easily fail when using `@Id String id` and `String someRef`, as ``String @Id``'s are subject to automatic ObjectId conversion (but not other `String` properties containing `ObjectId.toString()`).
+Reference lookup uses values from the resulting `Document`, and in that case it would query a String field using an `ObjectId`, yielding no results.
+* Mind that resolution requires a server roundtrip, inducing latency; consider a lazy strategy.
+* A collection of document references is bulk loaded using the `$or` operator.
+
+The original element order is restored in memory on a best-effort basis.
+Restoring the order is only possible when using equality expressions and cannot be done when using MongoDB query operators.
+In this case results will be ordered as they are received from the store or via the provided `@DocumentReference(sort)` attribute.
+
+A few more general remarks:
+
+* Do you use cyclic references?
+Ask yourself if you need them.
+* Lazy document references are hard to debug.
+Make sure tooling does not accidentally trigger proxy resolution by, for example, calling `toString()`.
+* There is no support for reading document references using reactive infrastructure.
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-index-management.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-index-management.adoc
new file mode 100644
index 0000000000..b094b925d0
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-index-management.adoc
@@ -0,0 +1,340 @@
+[[mapping.index-creation]]
+= Index Creation
+
+Spring Data MongoDB can automatically create indexes for entity types annotated with `@Document`.
+Index creation must be explicitly enabled since version 3.0 to prevent undesired effects with collection lifecycle and performance impact.
+Indexes are automatically created for the initial entity set on application startup and when accessing an entity type for the first time while the application runs.
+
+We generally recommend explicit index creation for application-based control of indexes, as Spring Data cannot automatically create indexes for collections that were recreated while the application was running.
+
+`IndexResolver` provides an abstraction for programmatic index definition creation if you want to make use of `@Indexed` annotations such as `@GeoSpatialIndexed`, `@TextIndexed`, `@CompoundIndex` and `@WildcardIndexed`.
+You can use index definitions with `IndexOperations` to create indexes.
+A good point in time for index creation is on application startup, specifically after the application context was refreshed, triggered by observing `ContextRefreshedEvent`.
+This event guarantees that the context is fully initialized.
+Note that at this time other components, especially bean factories, might have access to the MongoDB database.
+
+[WARNING]
+====
+``Map``-like properties are skipped by the `IndexResolver` unless annotated with `@WildcardIndexed`, because the _map key_ must be part of the index definition.
+Since the purpose of maps is the usage of dynamic keys and values, the keys cannot be resolved from static mapping metadata.
+====
+
+.Programmatic Index Creation for a single Domain Type
+====
+[source,java]
+----
+class MyListener {
+
+    @EventListener(ContextRefreshedEvent.class)
+    public void initIndicesAfterStartup() {
+
+        MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = mongoTemplate
+            .getConverter().getMappingContext();
+
+        IndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);
+
+        IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);
+        resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);
+    }
+}
+----
+====
+
+.Programmatic Index Creation for all Initial Entities
+====
+[source,java]
+----
+class MyListener {
+
+    @EventListener(ContextRefreshedEvent.class)
+    public void initIndicesAfterStartup() {
+
+        MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = mongoTemplate
+            .getConverter().getMappingContext();
+
+        IndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);
+
+        // consider only entities that are annotated with @Document
+        mappingContext.getPersistentEntities()
+            .stream()
+            .filter(it -> it.isAnnotationPresent(Document.class))
+            .forEach(it -> {
+
+                IndexOperations indexOps = mongoTemplate.indexOps(it.getType());
+                resolver.resolveIndexFor(it.getType()).forEach(indexOps::ensureIndex);
+            });
+    }
+}
+----
+====
+
+Alternatively, if you want to ensure index and collection presence before any component is able to access your database from your application, declare a `@Bean` method for `MongoTemplate` and include the code from above before returning the `MongoTemplate` object.
+
+[NOTE]
+====
+To turn automatic index creation _ON_, please override `autoIndexCreation()` in your configuration.
+[source,java]
+----
+@Configuration
+public class Config extends AbstractMongoClientConfiguration {
+
+    @Override
+    public boolean autoIndexCreation() {
+        return true;
+    }
+
+    // ...
+}
+----
+====
+
+IMPORTANT: Automatic index creation is turned _OFF_ by default as of version 3.0.
+
+[[mapping-usage-indexes.compound-index]]
+== Compound Indexes
+
+Compound indexes are also supported. They are defined at the class level, rather than on individual properties.
+
+NOTE: Compound indexes are very important to improve the performance of queries that involve criteria on multiple fields.
+
+Here's an example that creates a compound index of `lastName` in ascending order and `age` in descending order:
+
+.Example Compound Index Usage
+====
+[source,java]
+----
+package com.mycompany.domain;
+
+@Document
+@CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}")
+public class Person {
+
+    @Id
+    private ObjectId id;
+    private Integer age;
+    private String firstName;
+    private String lastName;
+}
+----
+====
+
+[TIP]
+====
+`@CompoundIndex` is repeatable using `@CompoundIndexes` as its container.
+
+[source,java]
+----
+@Document
+@CompoundIndex(name = "cmp-idx-one", def = "{'firstname': 1, 'lastname': -1}")
+@CompoundIndex(name = "cmp-idx-two", def = "{'address.city': -1, 'address.street': 1}")
+public class Person {
+
+    String firstname;
+    String lastname;
+
+    Address address;
+
+    // ...
+}
+----
+====
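+
+If you prefer to manage such a compound index programmatically rather than via annotations, a brief sketch using the `Index` builder, in the spirit of the programmatic examples above (assuming an available `mongoOperations` instance), could look as follows:
+
+[source,java]
+----
+mongoOperations.indexOps(Person.class)
+    .ensureIndex(new Index()
+        .named("age_idx")
+        .on("lastName", Sort.Direction.ASC)
+        .on("age", Sort.Direction.DESC));
+----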
+
+[[mapping-usage-indexes.hashed-index]]
+== Hashed Indexes
+
+Hashed indexes allow hash-based sharding within a sharded cluster.
+Using hashed field values to shard collections results in a more random distribution.
+For details, refer to the https://docs.mongodb.com/manual/core/index-hashed/[MongoDB Documentation].
+
+Here's an example that creates a hashed index for `_id`:
+
+.Example Hashed Index Usage
+====
+[source,java]
+----
+@Document
+public class DomainType {
+
+    @HashIndexed @Id String id;
+
+    // ...
+}
+----
+====
+
+Hashed indexes can be created next to other index definitions as shown below; in that case, both indexes are created:
+
+.Example Hashed Index Usage together with a simple index
+====
+[source,java]
+----
+@Document
+public class DomainType {
+
+    @Indexed
+    @HashIndexed
+    String value;
+
+    // ...
+}
+----
+====
+
+In case the example above is too verbose, a composed annotation allows reducing the number of annotations that need to be declared on a property:
+
+.Example Composed Hashed Index Usage
+====
+[source,java]
+----
+@Document
+public class DomainType {
+
+    @IndexAndHash(name = "idx...") <1>
+    String value;
+
+    // ...
+}
+
+@Indexed
+@HashIndexed
+@Retention(RetentionPolicy.RUNTIME)
+public @interface IndexAndHash {
+
+    @AliasFor(annotation = Indexed.class, attribute = "name") <1>
+    String name() default "";
+}
+----
+<1> Potentially register an alias for certain attributes of the meta annotation.
+====
+
+[NOTE]
+====
+Although index creation via annotations comes in handy for many scenarios, consider taking over more control by setting up indices manually via `IndexOperations`.
+
+[source,java]
+----
+mongoOperations.indexOpsFor(Jedi.class)
+    .ensureIndex(HashedIndex.hashed("useTheForce"));
+----
+====
+
+[[mapping-usage-indexes.wildcard-index]]
+== Wildcard Indexes
+
+A `WildcardIndex` is an index that can be used to include all fields, or specific ones based on a given (wildcard) pattern.
+For details, refer to the https://docs.mongodb.com/manual/core/index-wildcard/[MongoDB Documentation].
+
+The index can be set up programmatically using `WildcardIndex` via `IndexOperations`.
+
+.Programmatic WildcardIndex setup
+====
+[source,java]
+----
+mongoOperations
+    .indexOps(User.class)
+    .ensureIndex(new WildcardIndex("userMetadata"));
+----
+[source,javascript]
+----
+db.user.createIndex({ "userMetadata.$**" : 1 }, {})
+----
+====
+
+The `@WildcardIndexed` annotation allows a declarative index setup that can be used either with a document type or property.
+
+If placed on a type that is a root level domain entity (one annotated with `@Document`), the index resolver will create a wildcard index for it.
+
+.Wildcard index on domain type
+====
+[source,java]
+----
+@Document
+@WildcardIndexed
+public class Product {
+    // …
+}
+----
+[source,javascript]
+----
+db.product.createIndex({ "$**" : 1 },{})
+----
+====
+
+The `wildcardProjection` can be used to specify keys to include/exclude in the index.
+
+.Wildcard index with `wildcardProjection`
+====
+[source,java]
+----
+@Document
+@WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
+public class User {
+    private @Id String id;
+    private UserMetadata userMetadata;
+}
+----
+[source,javascript]
+----
+db.user.createIndex(
+    { "$**" : 1 },
+    { "wildcardProjection" :
+        { "userMetadata.age" : 0 }
+    }
+)
+----
+====
+
+Wildcard indexes can also be expressed by adding the annotation directly to the field.
+Please note that `wildcardProjection` is not allowed on nested paths such as properties.
+Projections on types annotated with `@WildcardIndexed` are omitted during index creation.
+
+.Wildcard index on property
+====
+[source,java]
+----
+@Document
+public class User {
+    private @Id String id;
+
+    @WildcardIndexed
+    private UserMetadata userMetadata;
+}
+----
+[source,javascript]
+----
+db.user.createIndex({ "userMetadata.$**" : 1 }, {})
+----
+====
+
+[[mapping-usage-indexes.text-index]]
+== Text Indexes
+
+NOTE: The text index feature is disabled by default for MongoDB v.2.4.
+
+Creating a text index allows accumulating several fields into a searchable full-text index.
+It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index.
+Properties can be weighted to influence the document score for ranking results.
+The default language for the text index is English. To change the default language, set the `language` attribute to whichever language you want (for example, `@Document(language="spanish")`).
+Using a property called `language` or `@Language` lets you define a language override on a per-document basis.
+The following example shows how to create a text index and set the language to Spanish:
+
+.Example Text Index Usage
+====
+[source,java]
+----
+@Document(language = "spanish")
+class SomeEntity {
+
+    @TextIndexed String foo;
+
+    @Language String lang;
+
+    Nested nested;
+}
+
+class Nested {
+
+    @TextIndexed(weight=5) String bar;
+    String roo;
+}
+----
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-schema.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-schema.adoc
new file mode 100644
index 0000000000..6c8e200083
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-schema.adoc
@@ -0,0 +1,466 @@
+[[mongo.jsonSchema]]
+= JSON Schema
+
+As of version 3.6, MongoDB supports collections that validate documents against a provided https://docs.mongodb.com/manual/core/schema-validation/#json-schema[JSON Schema].
+The schema itself, as well as both validation action and level, can be defined when creating the collection, as the following example shows:
+
+.Sample JSON schema
+====
+[source,json]
+----
+{
+    "type": "object", <1>
+
+    "required": [ "firstname", "lastname" ], <2>
+
+    "properties": { <3>
+
+        "firstname": { <4>
+            "type": "string",
+            "enum": [ "luke", "han" ]
+        },
+        "address": { <5>
+            "type": "object",
+            "properties": {
+                "postCode": { "type": "string", "minLength": 4, "maxLength": 5 }
+            }
+        }
+    }
+}
+----
+<1> JSON schema documents always describe a whole document from its root. A schema is a schema object itself that can contain embedded schema objects that describe properties and subdocuments.
+<2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords].
+<3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints.
+<4> `firstname` specifies constraints for the `firstname` field inside the document. Here, it is a string-based `properties` element declaring possible field values.
+<5> `address` is a subdocument defining a schema for values in its `postCode` field.
+====
+
+You can provide a schema either by specifying a schema document (that is, by using the `Document` API to parse or build a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`.
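+
+For the first of these two options, a brief sketch parsing a shortened variant of the sample schema above into a `Document` and wrapping it via `MongoJsonSchema.of(…)`:
+
+[source,java]
+----
+// shortened variant of the sample schema above, parsed from relaxed JSON
+Document schemaDocument = Document.parse("{ 'type' : 'object', 'required' : [ 'firstname', 'lastname' ] }");
+
+MongoJsonSchema schema = MongoJsonSchema.of(schemaDocument);
+----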
+`MongoJsonSchema` is the entry point for all JSON schema-related operations. The following example shows how to use `MongoJsonSchema.builder()` to create a JSON schema:
+
+.Creating a JSON schema
+====
+[source,java]
+----
+MongoJsonSchema.builder() <1>
+    .required("lastname") <2>
+
+    .properties(
+        required(string("firstname").possibleValues("luke", "han")), <3>
+
+        object("address")
+            .properties(string("postCode").minLength(4).maxLength(5)))
+
+    .build(); <4>
+----
+<1> Obtain a schema builder to configure the schema with a fluent API.
+<2> Configure required properties either directly as shown here or with more details as in 3.
+<3> Configure the required String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`.
+<4> Build the schema object.
+====
+
+There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available through static methods on the gateway interfaces.
+However, you may need to build custom property validation rules, which can be created through the builder API, as the following example shows:
+
+[source,java]
+----
+// "birthdate" : { "bsonType": "date" }
+JsonSchemaProperty.named("birthdate").ofType(Type.dateType());
+
+// "birthdate" : { "bsonType": "date", "description", "Must be a date" }
+JsonSchemaProperty.named("birthdate").with(JsonSchemaObject.of(Type.dateType()).description("Must be a date"));
+----
+
+`CollectionOptions` provides the entry point to schema support for collections, as the following example shows:
+
+.Create collection with `$jsonSchema`
+====
+[source,java]
+----
+MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
+
+template.createCollection(Person.class, CollectionOptions.empty().schema(schema));
+----
+====
+
+[[mongo.jsonSchema.generated]]
+== Generating a Schema
+
+Setting up a schema can be a time-consuming task, and we encourage everyone who decides to do so to really take the time it takes.
+This is important because schema changes can be hard.
+However, there might be times when one does not want to be bothered with it, and that is where `JsonSchemaCreator` comes into play.
+
+`JsonSchemaCreator` and its default implementation generate a `MongoJsonSchema` out of a domain type's metadata provided by the mapping infrastructure.
+This means that xref:mongodb/mapping/mapping.adoc#mapping-usage-annotations[annotated properties] as well as potential xref:mongodb/mapping/mapping.adoc#mapping-configuration[custom conversions] are considered.
+
+.Generate Json Schema from domain type
+====
+[source,java]
+----
+public class Person {
+
+    private final String firstname; <1>
+    private final int age; <2>
+    private Species species; <3>
+    private Address address; <4>
+    private @Field(fieldType=SCRIPT) String theForce; <5>
+    private @Transient Boolean useTheForce; <6>
+
+    public Person(String firstname, int age) { <1> <2>
+
+        this.firstname = firstname;
+        this.age = age;
+    }
+
+    // getter / setter omitted
+}
+
+MongoJsonSchema schema = MongoJsonSchemaCreator.create(mongoOperations.getConverter())
+    .createSchemaFor(Person.class);
+
+template.createCollection(Person.class, CollectionOptions.empty().schema(schema));
+----
+
+[source,json]
+----
+{
+    'type' : 'object',
+    'required' : ['age'], <2>
+    'properties' : {
+        'firstname' : { 'type' : 'string' }, <1>
+        'age' : { 'bsonType' : 'int' }, <2>
+        'species' : { <3>
+            'type' : 'string',
+            'enum' : ['HUMAN', 'WOOKIE', 'UNKNOWN']
+        },
+        'address' : { <4>
+            'type' : 'object',
+            'properties' : {
+                'postCode' : { 'type': 'string' }
+            }
+        },
+        'theForce' : { 'type' : 'javascript'} <5>
+    }
+}
+----
+<1> Simple object properties are considered regular properties.
+<2> Primitive types are considered required properties.
+<3> Enums are restricted to possible values.
+<4> Object type properties are inspected and represented as nested documents.
+<5> `String` type property that is converted to `Code` by the converter.
+<6> `@Transient` properties are omitted when generating the schema.
+====
+
+NOTE: `_id` properties using types that can be converted into `ObjectId`, like `String`, are mapped to `{ type : 'object' }`
+unless there is more specific information available via the `@MongoId` annotation.
+
+[cols="2,2,6", options="header"]
+.Special Schema Generation rules
+|===
+| Java
+| Schema Type
+| Notes
+
+| `Object`
+| `type : object`
+| with `properties` if metadata available.
+
+| `Collection`
+| `type : array`
+| -
+
+| `Map`
+| `type : object`
+| -
+
+| `Enum`
+| `type : string`
+| with `enum` property holding the possible enumeration values.
+
+| `array`
+| `type : array`
+| simple type array unless it's a `byte[]`
+
+| `byte[]`
+| `bsonType : binData`
+| -
+
+|===
+
+The above example demonstrated how to derive the schema from a very precisely typed source.
+Using polymorphic elements within the domain model can lead to inaccurate schema representation for `Object` and generic `<T>` types, which are likely to be represented as `{ type : 'object' }` without further specification.
+`MongoJsonSchemaCreator.property(…)` allows defining additional details such as nested document types that should be considered when rendering the schema.
+
+.Specify additional types for properties
+====
+[source,java]
+----
+class Root {
+    Object value;
+}
+
+class A {
+    String aValue;
+}
+
+class B {
+    String bValue;
+}
+
+MongoJsonSchemaCreator.create()
+    .property("value").withTypes(A.class, B.class) <1>
+----
+
+[source,json]
+----
+{
+    'type' : 'object',
+    'properties' : {
+        'value' : {
+            'type' : 'object',
+            'properties' : { <1>
+                'aValue' : { 'type' : 'string' },
+                'bValue' : { 'type' : 'string' }
+            }
+        }
+    }
+}
+----
+<1> Properties of the given types are merged into one element.
+====
+
+MongoDB's schema-free approach allows storing documents of different structure in one collection.
+Those may be modeled using a common base class.
+Regardless of the chosen approach, `MongoJsonSchemaCreator.merge(…)` can help circumvent the need to merge multiple schemas into one manually.
+
+.Merging multiple Schemas into a single Schema definition
+====
+[source,java]
+----
+abstract class Root {
+    String rootValue;
+}
+
+class A extends Root {
+    String aValue;
+}
+
+class B extends Root {
+    String bValue;
+}
+
+MongoJsonSchemaCreator.mergedSchemaFor(A.class, B.class) <1>
+----
+
+[source,json]
+----
+{
+    'type' : 'object',
+    'properties' : { <1>
+        'rootValue' : { 'type' : 'string' },
+        'aValue' : { 'type' : 'string' },
+        'bValue' : { 'type' : 'string' }
+    }
+}
+----
+<1> Properties (and their inherited ones) of the given types are combined into one schema.
+====
+
+[NOTE]
+====
+Properties with the same name need to refer to the same JSON schema in order to be combined.
+The following example shows a definition that cannot be merged automatically because of a data type mismatch.
+In this case a `ConflictResolutionFunction` must be provided to `MongoJsonSchemaCreator`.
+
+[source,java]
+----
+class A extends Root {
+    String value;
+}
+
+class B extends Root {
+    Integer value;
+}
+----
+====
+
+[[mongo.jsonSchema.encrypted-fields]]
+== Encrypted Fields
+
+MongoDB 4.2 https://docs.mongodb.com/master/core/security-client-side-encryption/[Field Level Encryption] allows encrypting individual properties directly.
+
+Properties can be wrapped within an encrypted property when setting up the JSON Schema, as shown in the example below.
+
+.Client-Side Field Level Encryption via Json Schema
+====
+[source,java]
+----
+MongoJsonSchema schema = MongoJsonSchema.builder()
+    .properties(
+        encrypted(string("ssn"))
+            .algorithm("AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+            .keyId("*key0_id")
+    ).build();
+----
+====
+
+Instead of defining encrypted fields manually, it is possible to leverage the `@Encrypted` annotation as shown in the snippet below.
+
+.Client-Side Field Level Encryption via Json Schema
+====
+[source,java]
+----
+@Document
+@Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") <1>
+static class Patient {
+
+    @Id String id;
+    String name;
+
+    @Encrypted <2>
+    String bloodType;
+
+    @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") <3>
+    Integer ssn;
+}
+----
+<1> Default encryption settings that will be set for `encryptMetadata`.
+<2> Encrypted field using default encryption settings.
+<3> Encrypted field overriding the default encryption algorithm.
+====
+
+[TIP]
+====
+The `@Encrypted` annotation supports resolving keyIds via SpEL expressions.
+To do so, additional environment metadata (via the `MappingContext`) is required and must be provided.
+
+[source,java]
+----
+@Document
+@Encrypted(keyId = "#{mongocrypt.keyId(#target)}")
+static class Patient {
+
+    @Id String id;
+    String name;
+
+    @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random")
+    String bloodType;
+
+    @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+    Integer ssn;
+}
+
+MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext);
+MongoJsonSchema patientSchema = schemaCreator
+    .filter(MongoJsonSchemaCreator.encryptedOnly())
+    .createSchemaFor(Patient.class);
+----
+
+The `mongocrypt.keyId` function is defined via an `EvaluationContextExtension` as shown in the snippet below.
+Providing a custom extension provides the most flexible way of computing keyIds.
+
+[source,java]
+----
+public class EncryptionExtension implements EvaluationContextExtension {
+
+    @Override
+    public String getExtensionId() {
+        return "mongocrypt";
+    }
+
+    @Override
+    public Map<String, Function> getFunctions() {
+        return Collections.singletonMap("keyId", new Function(getMethod("computeKeyId", String.class), this));
+    }
+
+    public String computeKeyId(String target) {
+        // ... lookup via target element name
+    }
+}
+----
+====
+
+[[mongo.jsonSchema.types]]
+== JSON Schema Types
+
+The following table shows the supported JSON schema types:
+
+[cols="3,1,6", options="header"]
+.Supported JSON schema types
+|===
+| Schema Type
+| Java Type
+| Schema Properties
+
+| `untyped`
+| -
+| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not`
+
+| `object`
+| `Object`
+| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties`
+
+| `array`
+| any array except `byte[]`
+| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems`
+
+| `string`
+| `String`
+| `minLength`, `maxLength`, `pattern`
+
+| `int`
+| `int`, `Integer`
+| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
+
+| `long`
+| `long`, `Long`
+| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
+
+| `double`
+| `float`, `Float`, `double`, `Double`
+| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
+
+| `decimal`
+| `BigDecimal`
+| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
+
+| `number`
+| `Number`
+| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
+
+| `binData`
+| `byte[]`
+| (none)
+
+| `boolean`
+| `boolean`, `Boolean`
+| (none)
+
+| `null`
+| `null`
+| (none)
+
+| `objectId`
+| `ObjectId`
+| (none)
+
+| `date`
+| `java.util.Date`
+| (none)
+
+| `timestamp`
+| `BsonTimestamp`
+| (none)
+
+| `regex`
+| `java.util.regex.Pattern`
+| (none)
+
+|===
+
+NOTE: `untyped` is a generic type that is inherited by all typed schema types. It provides all `untyped` schema properties to typed schema types.
+
+For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema].
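+
+Beyond collection validation, a schema built this way can, for instance, also be used to query for documents conforming to its structure; a brief sketch, assuming a `template` instance, a previously built `schema`, and a statically imported `query(…)`:
+
+[source,java]
+----
+// find all documents in the Person collection that match the schema
+List<Person> matching = template.find(
+    query(Criteria.matchingDocumentStructure(schema)), Person.class);
+----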
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping.adoc
new file mode 100644
index 0000000000..d76266c36a
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping.adoc
@@ -0,0 +1,717 @@
+[[mapping-chapter]]
+= Object Mapping
+
+Rich mapping support is provided by the `MappingMongoConverter`.
+The converter holds a metadata model that provides a full feature set to map domain objects to MongoDB documents.
+The mapping metadata model is populated by using annotations on your domain objects.
+However, the infrastructure is not limited to using annotations as the only source of metadata information.
+The `MappingMongoConverter` also lets you map objects to documents without providing any additional metadata, by following a set of conventions.
+
+This section describes the features of the `MappingMongoConverter`, including fundamentals, how to use conventions for mapping objects to documents, and how to override those conventions with annotation-based mapping metadata.
+
+include::{commons}@data-commons::page$object-mapping.adoc[leveloffset=+1]
+
+[[mapping-conventions]]
+== Convention-based Mapping
+
+`MappingMongoConverter` has a few conventions for mapping objects to documents when no additional mapping metadata is provided.
+The conventions are:
+
+* The short Java class name is mapped to the collection name in the following manner.
+The class `com.bigbank.SavingsAccount` maps to the `savingsAccount` collection name.
+* All nested objects are stored as nested objects in the document and *not* as DBRefs.
+* The converter uses any Spring Converters registered with it to override the default mapping of object properties to document fields and values.
+* The fields of an object are used to convert to and from fields in the document.
+Public `JavaBean` properties are not used.
+* If you have a single non-zero-argument constructor whose constructor argument names match top-level field names of the document, that constructor is used.
+Otherwise, the zero-argument constructor is used.
+If there is more than one non-zero-argument constructor, an exception will be thrown.
+
+[[mapping.conventions.id-field]]
+=== How the `_id` Field is Handled in the Mapping Layer
+
+MongoDB requires that you have an `_id` field for all documents.
+If you don't provide one, the driver will assign an `ObjectId` with a generated value.
+The `_id` field can be of any type other than arrays, so long as it is unique.
+The driver naturally supports all primitive types and Dates.
+When using the `MappingMongoConverter` there are certain rules that govern how properties from the Java class are mapped to the `_id` field.
+
+The following outlines what field will be mapped to the `_id` document field:
+
+* A field annotated with `@Id` (`org.springframework.data.annotation.Id`) will be mapped to the `_id` field.
+* A field without an annotation but named `id` will be mapped to the `_id` field.
++
+Additionally, the name of the document field can be customized via the `@Field` annotation, in which case the document will not contain a field `_id`.
+
+[cols="1,2",options="header"]
+.Examples for the translation of `_id` field definitions
+|===
+| Field definition
+| Resulting Id-Fieldname in MongoDB
+
+| `String` id
+| `_id`
+
+| `@Field` `String` id
+| `_id`
+
+| `@Field("x")` `String` id
+| `x`
+
+| `@Id` `String` x
+| `_id`
+
+| `@Field("x")` `@Id` `String` y
+| `_id` (`@Field(name)` is ignored, `@Id` takes precedence)
+|===
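+
+To make two rows of the table concrete, the following hypothetical types illustrate the implicit and the renamed variant:
+
+[source,java]
+----
+// no annotation, but the field is named 'id' -> mapped to '_id' (converted to ObjectId if possible)
+@Document
+class WithImplicitId {
+    String id;
+}
+
+// renamed via @Field and not annotated with @Id -> stored as 'x';
+// '_id' is generated by the driver and not mapped to this property
+@Document
+class WithRenamedField {
+    @Field("x")
+    String id;
+}
+----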
+
+The following outlines what type conversion, if any, will be done on the property mapped to the `_id` document field:
+
+* If a field named `id` is declared as a `String` or `BigInteger` in the Java class, it will be converted to and stored as an `ObjectId` if possible.
+`ObjectId` as a field type is also valid.
+If you specify a value for `id` in your application, the conversion to an `ObjectId` is done by the MongoDB driver.
+If the specified `id` value cannot be converted to an `ObjectId`, then the value will be stored as-is in the document's `_id` field.
+This also applies if the field is annotated with `@Id`.
+* If a field is annotated with `@MongoId` in the Java class, it will be converted to and stored using its actual type.
+No further conversion happens unless `@MongoId` declares a desired field type.
+If no value is provided for the `id` field, a new `ObjectId` will be created and converted to the property's type.
+* If a field is annotated with `@MongoId(FieldType.…)` in the Java class, an attempt will be made to convert the value to the declared `FieldType`.
+If no value is provided for the `id` field, a new `ObjectId` will be created and converted to the declared type.
+* If a field named `id` is not declared as a `String`, `BigInteger`, or `ObjectId` in the Java class, then you should assign it a value in your application so it can be stored as-is in the document's `_id` field.
+* If no field named `id` is present in the Java class, then an implicit `_id` field will be generated by the driver but not mapped to a property or field of the Java class.
+
+When querying and updating, `MongoTemplate` uses the converter to handle conversions of the `Query` and `Update` objects according to the rules above for saving documents, so that field names and types used in your queries match what is in your domain classes.
+
+[[mapping-conversion]]
+== Data Mapping and Type Conversion
+
+Spring Data MongoDB supports all types that can be represented as BSON, MongoDB's internal document format.
+In addition to these types, Spring Data MongoDB provides a set of built-in converters to map additional types.
+You can provide your own converters to adjust type conversion.
+See xref:mongodb/mapping/custom-conversions.adoc[Custom Conversions - Overriding Default Mapping] for further details.
+
+.Built in Type conversions:
+[%collapsible]
+====
+[cols="3,1,6",options="header"]
+.Type
+|===
+| Type
+| Type conversion
+| Sample
+
+| `String`
+| native
+| `{"firstname" : "Dave"}`
+
+| `double`, `Double`, `float`, `Float`
+| native
+| `{"weight" : 42.5}`
+
+| `int`, `Integer`, `short`, `Short`
+| native +
+32-bit integer
+| `{"height" : 42}`
+
+| `long`, `Long`
+| native +
+64-bit integer
+| `{"height" : 42}`
+
+| `Date`, `Timestamp`
+| native
+| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
+
+| `byte[]`
+| native
+| `{"bin" : { "$binary" : "AQIDBA==", "$type" : "00" }}`
+
+| `java.util.UUID` (Legacy UUID)
+| native
+| `{"uuid" : { "$binary" : "MEaf1CFQ6lSphaa3b9AtlA==", "$type" : "03" }}`
+
+| `Date`
+| native
+| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
+
+| `ObjectId`
+| native
+| `{"_id" : ObjectId("5707a2690364aba3136ab870")}`
+
+| Array, `List`, `BasicDBList`
+| native
+| `{"cookies" : [ … ]}`
+
+| `boolean`, `Boolean`
+| native
+| `{"active" : true}`
+
+| `null`
+| native
+| `{"value" : null}`
+
+| `Document`
+| native
+| `{"value" : { … }}`
+
+| `Decimal128`
+| native
+| `{"value" : NumberDecimal(…)}`
+
+| `AtomicInteger` +
+calling `get()` before the actual conversion
+| converter +
+32-bit integer
+| `{"value" : "741" }`
+
+| `AtomicLong` +
+calling `get()` before the actual conversion
+| converter +
+64-bit integer
+| `{"value" : "741" }`
+
+| `BigInteger`
+| converter +
+`NumberDecimal`, `String`
+| `{"value" : NumberDecimal(741) }`, `{"value" : "741" }`
+
+| `BigDecimal`
+| converter +
+`NumberDecimal`, `String`
+| `{"value" : NumberDecimal(741.99) }`, `{"value" : "741.99" }`
+
+| `URL`
+| converter
+| `{"website" : "https://spring.io/projects/spring-data-mongodb/" }`
+
+| `Locale`
+| converter
+| `{"locale" : "en_US" }`
+
+| `char`, `Character`
+| converter
+| `{"char" : "a" }`
+
+| `NamedMongoScript`
+| converter +
+`Code`
+| `{"_id" : "script name", value: (some javascript code)}`
+
+| `java.util.Currency`
+| converter
+| `{"currencyCode" : "EUR"}`
+
+| `Instant` +
+(Java 8)
+| native
+| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
+
+| `Instant` +
+(Joda, JSR310-BackPort)
+| converter
+| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
+| `LocalDate` +
+(Joda, Java 8, JSR310-BackPort)
+| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via xref:mongodb/mapping/mapping.adoc#mapping-configuration[MongoConverterConfigurationAdapter]]
+| `{"date" : ISODate("2019-11-12T00:00:00.000Z")}`
+
+| `LocalDateTime`, `LocalTime` +
+(Joda, Java 8, JSR310-BackPort)
+| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via xref:mongodb/mapping/mapping.adoc#mapping-configuration[MongoConverterConfigurationAdapter]]
+| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
+
+| `DateTime` (Joda)
+| converter
+| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}`
+
+| `ZoneId` (Java 8, JSR310-BackPort)
+| converter
+| `{"zoneId" : "ECT - Europe/Paris"}`
+
+| `Box`
+| converter
+| `{"box" : { "first" : { "x" : 1.0 , "y" : 2.0} , "second" : { "x" : 3.0 , "y" : 4.0}}`
+
+| `Polygon`
+| converter
+| `{"polygon" : { "points" : [ { "x" : 1.0 , "y" : 2.0} , { "x" : 3.0 , "y" : 4.0} , { "x" : 4.0 , "y" : 5.0}]}}`
+
+| `Circle`
+| converter
+| `{"circle" : { "center" : { "x" : 1.0 , "y" : 2.0} , "radius" : 3.0 , "metric" : "NEUTRAL"}}`
+
+| `Point`
+| converter
+| `{"point" : { "x" : 1.0 , "y" : 2.0}}`
+
+| `GeoJsonPoint`
+| converter
+| `{"point" : { "type" : "Point" , "coordinates" : [3.0 , 4.0] }}`
+
+| `GeoJsonMultiPoint`
+| converter
+| `{"geoJsonLineString" : {"type":"MultiPoint", "coordinates": [ [ 0 , 0 ], [ 0 , 1 ], [ 1 , 1 ] ] }}`
+
+| `Sphere`
+| converter
+| `{"sphere" : { "center" : { "x" : 1.0 , "y" : 2.0} , "radius" : 3.0 , "metric" : "NEUTRAL"}}`
+
+| `GeoJsonPolygon`
+| converter
+| `{"polygon" : { "type" : "Polygon", "coordinates" : [[ [ 0 , 0 ], [ 3 , 6 ], [ 6 , 1 ], [ 0 , 0 ] ]] }}`
+
+| `GeoJsonMultiPolygon`
+| converter
+| `{"geoJsonMultiPolygon" : { "type" : "MultiPolygon", "coordinates" : [
+[ [ [ -73.958 , 40.8003 ] , [ -73.9498 , 40.7968 ] ] ],
+[ [ [ -73.973 , 40.7648 ] , [ -73.9588 , 40.8003 ] ] ]
+] }}`
+
+| `GeoJsonLineString`
+| converter
+| `{ "geoJsonLineString" : { "type" : "LineString", "coordinates" : [ [ 40 , 5 ], [ 41 , 6 ] ] }}`
+
+| `GeoJsonMultiLineString`
+| converter
+| `{"geoJsonLineString" : { "type" : "MultiLineString", coordinates: [
+[ [ -73.97162 , 40.78205 ], [ -73.96374 , 40.77715 ] ],
+[ [ -73.97880 , 40.77247 ], [ -73.97036 , 40.76811 ] ]
+] }}`
+|===
+====
+
+.Collection Handling
+[NOTE]
+====
+Collection handling depends on the actual values returned by MongoDB.
+
+* If a document does **not** contain a field mapped to a collection, the mapping will not update the property.
+This means the value will remain `null`, a Java default, or any value set during object creation.
+* If a document contains a field to be mapped, but the field holds a `null` value (like: `{ 'list' : null }`), the property value is set to `null`.
+* If a document contains a field to be mapped to a collection which is **not** `null` (like: `{ 'list' : [ ... ] }`), the collection is populated with the mapped values.
+
+Generally, if you use constructor creation, you can get hold of the value to be set.
+Property population can make use of default initialization values if a property value is not provided by a query response.
+====
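+
+As a small illustration of the rules above, consider the following hypothetical type and the documents it may be read from:
+
+[source,java]
+----
+class Order {
+    List<String> items = new ArrayList<>(); // default initialization
+}
+
+// { }                       -> 'items' is not touched and stays the initialized empty list
+// { 'items' : null }        -> 'items' is set to null
+// { 'items' : ['a', 'b'] }  -> 'items' is populated with the mapped values
+----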
+
+[[mapping-configuration]]
+== Mapping Configuration
+
+Unless explicitly configured, an instance of `MappingMongoConverter` is created by default when you create a `MongoTemplate`.
+You can create your own instance of the `MappingMongoConverter`.
+Doing so lets you dictate where in the classpath your domain classes can be found, so that Spring Data MongoDB can extract metadata and construct indexes.
+Also, by creating your own instance, you can register Spring converters to map specific classes to and from the database.
+
+You can configure the `MappingMongoConverter` as well as `com.mongodb.client.MongoClient` and `MongoTemplate` by using either Java-based or XML-based metadata.
+The following example shows the configuration:
+
+[tabs]
+======
+Java::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@Configuration
+public class MongoConfig extends AbstractMongoClientConfiguration {
+
+    @Override
+    public String getDatabaseName() {
+        return "database";
+    }
+
+    // the following are optional
+
+    @Override
+    public String getMappingBasePackage() { <1>
+        return "com.bigbank.domain";
+    }
+
+    @Override
+    protected void configureConverters(MongoConverterConfigurationAdapter adapter) { <2>
+
+        adapter.registerConverter(new org.springframework.data.mongodb.test.PersonReadConverter());
+        adapter.registerConverter(new org.springframework.data.mongodb.test.PersonWriteConverter());
+    }
+
+    @Bean
+    public LoggingEventListener mappingEventsListener() {
+        return new LoggingEventListener();
+    }
+}
+----
+
+<1> The mapping base package defines the root path used to scan for entities used to pre-initialize the `MappingContext`.
+By default the configuration class's package is used.
+<2> Configure additional custom converters for specific domain types that replace the default mapping procedure for those types with your custom implementation.
+====
+
+XML::
++
+[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xmlns:mongo="http://www.springframework.org/schema/data/mongo"
+  xsi:schemaLocation="
+    http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
+    http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd">
+
+  <!-- Default bean name is 'mongo' -->
+  <mongo:mongo-client host="localhost" port="27017"/>
+
+  <mongo:db-factory dbname="database" mongo-client-ref="mongoClient"/>
+
+  <!-- by default look for a Mongo object named 'mongo' -
+    default name used for the converter is 'mappingConverter' -->
+  <mongo:mapping-converter base-package="com.bigbank.domain">
+    <mongo:custom-converters>
+      <mongo:converter ref="readConverter"/>
+      <mongo:converter>
+        <bean class="org.springframework.data.mongodb.test.PersonWriteConverter"/>
+      </mongo:converter>
+    </mongo:custom-converters>
+  </mongo:mapping-converter>
+
+  <bean id="readConverter" class="org.springframework.data.mongodb.test.PersonReadConverter"/>
+
+  <!-- set the mapping converter to be used by the MongoTemplate -->
+  <bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
+    <constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
+    <constructor-arg name="mongoConverter" ref="mappingConverter"/>
+  </bean>
+
+  <bean class="org.springframework.data.mongodb.core.mapping.event.LoggingEventListener"/>
+
+</beans>
+----
+======
+
+`AbstractMongoClientConfiguration` requires you to implement methods that define a `com.mongodb.client.MongoClient` as well as provide a database name.
+`AbstractMongoClientConfiguration` also has a method named `getMappingBasePackage(…)` that you can override to tell the converter where to scan for classes annotated with the `@Document` annotation.
+
+You can add additional converters to the converter by overriding the `customConversionsConfiguration` method.
+MongoDB's native JSR-310 support can be enabled through `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()`.
+Also shown in the preceding example is a `LoggingEventListener`, which logs `MongoMappingEvent` instances that are posted onto Spring's `ApplicationContextEvent` infrastructure.
+
+[TIP]
+====
+.Java Time Types
+We recommend using MongoDB's native JSR-310 support via `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()` as described above, as it uses a UTC-based approach.
+The default JSR-310 support for `java.time` types inherited from Spring Data Commons uses the local machine timezone as reference and should only be used for backwards compatibility.
+====
+
+NOTE: `AbstractMongoClientConfiguration` creates a `MongoTemplate` instance and registers it with the container under the name `mongoTemplate`.
+
+The `base-package` property tells it where to scan for classes annotated with the `@org.springframework.data.mongodb.core.mapping.Document` annotation.
+
+[TIP]
+====
+If you want to rely on https://spring.io/projects/spring-boot[Spring Boot] to bootstrap Data MongoDB, but still want to override certain aspects of the configuration, you may want to expose beans of that type.
+For custom conversions you may, for example, choose to register a bean of type `MongoCustomConversions` that will be picked up by the Boot infrastructure.
+To learn more about this please make sure to read the Spring Boot https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#data.nosql.mongodb[Reference Documentation].
+====
+
+[[mapping-usage]]
+== Metadata-based Mapping
+
+To take full advantage of the object mapping functionality inside the Spring Data MongoDB support, you should annotate your mapped objects with the `@Document` annotation.
+Although it is not necessary for the mapping framework to have this annotation (your POJOs are mapped correctly, even without any annotations), it lets the classpath scanner find and pre-process your domain objects to extract the necessary metadata.
+If you do not use this annotation, your application takes a slight performance hit the first time you store a domain object, because the mapping framework needs to build up its internal metadata model so that it knows about the properties of your domain object and how to persist them.
+The following example shows a domain object:
+
+.Example domain object
+====
+[source,java]
+----
+package com.mycompany.domain;
+
+@Document
+public class Person {
+
+    @Id
+    private ObjectId id;
+
+    @Indexed
+    private Integer ssn;
+
+    private String firstName;
+
+    @Indexed
+    private String lastName;
+}
+----
+====
+
+IMPORTANT: The `@Id` annotation tells the mapper which property you want to use for the MongoDB `_id` property, and the `@Indexed` annotation tells the mapping framework to call `createIndex(…)` on that property of your document, making searches faster.
+Automatic index creation is only done for types annotated with `@Document`.
+
+WARNING: Auto index creation is **disabled** by default and needs to be enabled through the configuration (see xref:mongodb/mapping/mapping.adoc#mapping.index-creation[Index Creation]).
+
+[[mapping-usage-annotations]]
+=== Mapping Annotation Overview
+
+The `MappingMongoConverter` can use metadata to drive the mapping of objects to documents.
+The following annotations are available:
+
+* `@Id`: Applied at the field level to mark the field used for identity purposes.
+* `@MongoId`: Applied at the field level to mark the field used for identity purposes.
+Accepts an optional `FieldType` to customize id conversion.
+* `@Document`: Applied at the class level to indicate this class is a candidate for mapping to the database.
+You can specify the name of the collection where the data will be stored.
+* `@DBRef`: Applied at the field level to indicate it is to be stored using a `com.mongodb.DBRef`.
+* `@DocumentReference`: Applied at the field level to indicate it is to be stored as a pointer to another document.
+This can be a single value (the _id_ by default), or a `Document` provided via a converter.
+* `@Indexed`: Applied at the field level to describe how to index the field.
+* `@CompoundIndex` (repeatable): Applied at the type level to declare Compound Indexes.
+* `@GeoSpatialIndexed`: Applied at the field level to describe how to geoindex the field.
+* `@TextIndexed`: Applied at the field level to mark the field to be included in the text index.
+* `@HashIndexed`: Applied at the field level for usage within a hashed index to partition data across a sharded cluster.
+* `@Language`: Applied at the field level to set the language override property for the text index.
+* `@Transient`: By default, all fields are mapped to the document.
+This annotation excludes the field where it is applied from being stored in the database.
+Transient properties cannot be used within a persistence constructor as the converter cannot materialize a value for the constructor argument.
+* `@PersistenceConstructor`: Marks a given constructor - even a package protected one - to use when instantiating the object from the database.
+Constructor arguments are mapped by name to the key values in the retrieved Document.
+* `@Value`: This annotation is part of the Spring Framework.
+Within the mapping framework it can be applied to constructor arguments.
+This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object.
+In order to reference a property of a given document one has to use expressions like `@Value("#root.myProperty")`, where `root` refers to the root of the given document.
+* `@Field`: Applied at the field level, it allows describing the name and type of the field as it will be represented in the MongoDB BSON document, thus allowing the name and type to be different from the field name of the class as well as the property type.
+* `@Version`: Applied at the field level, it is used for optimistic locking and checked for modification on save operations.
+The initial value is `zero` (`one` for primitive types), which is bumped automatically on every update.
+
+The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic.
+Specific subclasses are used in the MongoDB support to support annotation based metadata.
+Other strategies are also possible to put in place if there is demand.
+
+.Here is an example of a more complex mapping
+[%collapsible]
+====
+[source,java]
+----
+@Document
+@CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}")
+public class Person<T extends Address> {
+
+    @Id
+    private String id;
+
+    @Indexed(unique = true)
+    private Integer ssn;
+
+    @Field("fName")
+    private String firstName;
+
+    @Indexed
+    private String lastName;
+
+    private Integer age;
+
+    @Transient
+    private Integer accountTotal;
+
+    @DBRef
+    private List<Account> accounts;
+
+    private T address;
+
+    public Person(Integer ssn) {
+        this.ssn = ssn;
+    }
+
+    @PersistenceConstructor
+    public Person(Integer ssn, String firstName, String lastName, Integer age, T address) {
+        this.ssn = ssn;
+        this.firstName = firstName;
+        this.lastName = lastName;
+        this.age = age;
+        this.address = address;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    // no setter for Id. (getter is only exposed for some unit testing)
+
+    public Integer getSsn() {
+        return ssn;
+    }
+
+    // other getters/setters omitted
+}
+----
+====
+
+[TIP]
+====
+`@Field(targetType=...)` can come in handy when the native MongoDB type inferred by the mapping infrastructure does not match the expected one.
+This can be the case for `BigDecimal`, which is represented as `String` instead of `Decimal128` just because earlier versions of MongoDB Server did not have support for it.
+
+[source,java]
+----
+public class Balance {
+
+    @Field(targetType = DECIMAL128)
+    private BigDecimal value;
+
+    // ...
+}
+----
+
+You may even consider your own, custom annotation.
+
+[source,java]
+----
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+@Field(targetType = FieldType.DECIMAL128)
+public @interface Decimal128 { }
+
+// ...
+
+public class Balance {
+
+    @Decimal128
+    private BigDecimal value;
+
+    // ...
+}
+----
+====
+
+=== Special Field Names
+
+Generally speaking, MongoDB uses the dot (`.`) character as a path separator for nested documents or arrays.
+This means that in a query (or update statement) a key like `a.b.c` targets an object structure as outlined below:
+
+[source,json]
+----
+{
+    'a' : {
+        'b' : {
+            'c' : …
+        }
+    }
+}
+----
+
+Therefore, up until MongoDB 5.0, field names must not contain dots (`.`).
+
+Using `MappingMongoConverter#setMapKeyDotReplacement` allowed circumventing some of the limitations when storing `Map` structures by substituting dots with another character on write.
+
+[source,java]
+----
+converter.setMapKeyDotReplacement("-");
+// ...
+
+source.map = Map.of("key.with.dot", "value");
+converter.write(source, …); // -> map : { 'key-with-dot' : 'value' }
+----
+
+With the release of MongoDB 5.0 this restriction on `Document` field names containing special characters was lifted.
+We highly recommend reading more about limitations on using dots in field names in the https://www.mongodb.com/docs/manual/core/dot-dollar-considerations/[MongoDB Reference].
+
+To allow dots in `Map` structures please set `preserveMapKeys` on the `MappingMongoConverter`.
+
+Using `@Field` allows customizing the field name to consider dots in two ways.
+
+. `@Field(name = "a.b")`: The name is considered to be a path.
+Operations expect a structure of nested objects such as `{ a : { b : … } }`.
+. `@Field(name = "a.b", fieldNameType = KEY)`: The name is considered as-is.
+Operations expect a field with the given name, as in `{ 'a.b' : … }`.
+
+[WARNING]
+====
+Due to the special nature of the dot character in both MongoDB query and update statements, field names containing dots cannot be targeted directly and are therefore excluded from being used in derived query methods.
+Consider the following `Item` having a `categoryId` property that is mapped to the field named `cat.id`.
+
+[source,java]
+----
+public class Item {
+
+    @Field(name = "cat.id", fieldNameType = KEY)
+    String categoryId;
+
+    // ...
+}
+----
+
+Its raw representation will look like
+
+[source,json]
+----
+{
+    'cat.id' : "5b28b5e7-52c2",
+    ...
+}
+----
+
+Since we cannot target the `cat.id` field directly (as this would be interpreted as a path), we need the help of the xref:mongodb/aggregation-framework.adoc#mongo.aggregation[Aggregation Framework].
+
+.Query fields with a dot in their name
+[source,java]
+----
+template.query(Item.class)
+    // $expr : { $eq : [ { $getField : { input : '$$CURRENT', 'cat.id' }, '5b28b5e7-52c2' ] }
+    .matching(expr(ComparisonOperators.valueOf(ObjectOperators.getValueOf("value")).equalToValue("5b28b5e7-52c2"))) <1>
+    .all();
+----
+
+<1> The mapping layer takes care of translating the property name `value` into the actual field name.
+It is absolutely valid to use the target field name here as well.
+
+.Update fields with a dot in their name
+[source,java]
+----
+template.update(Item.class)
+    .matching(where("id").is("r2d2"))
+    // $replaceWith: { $setField : { input: '$$CURRENT', field : 'cat.id', value : 'af29-f87f4e933f97' } }
+    .apply(AggregationUpdate.newUpdate(ReplaceWithOperation.replaceWithValue(ObjectOperators.setValueTo("value", "af29-f87f4e933f97")))) <1>
+    .first();
+----
+
+<1> The mapping layer takes care of translating the property name `value` into the actual field name.
+It is absolutely valid to use the target field name here as well.
+
+The above shows a simple example where the special field is present on the top document level.
+Increased levels of nesting increase the complexity of the aggregation expression required to interact with the field.
+====
+
+[[mapping-custom-object-construction]]
+=== Customized Object Construction
+
+The mapping subsystem allows customizing the object construction by annotating a constructor with the `@PersistenceConstructor` annotation.
+The values to be used for the constructor parameters are resolved in the following way:
+
+* If a parameter is annotated with the `@Value` annotation, the given expression is evaluated and the result is used as the parameter value.
+* If the Java type has a property whose name matches the given field of the input document, then its property information is used to select the appropriate constructor parameter to pass the input field value to.
+This works only if the parameter name information is present in the Java `.class` files, which can be achieved by compiling the source with debug information or by using the `-parameters` command-line switch for `javac`.
+* Otherwise, a `MappingException` is thrown, indicating that the given constructor parameter could not be bound.
+
+[source,java]
+----
+class OrderItem {
+
+  private @Id String id;
+  private int quantity;
+  private double unitPrice;
+
+  OrderItem(String id, @Value("#root.qty ?: 0") int quantity, double unitPrice) {
+    this.id = id;
+    this.quantity = quantity;
+    this.unitPrice = unitPrice;
+  }
+
+  // getters/setters omitted
+}
+
+Document input = new Document("id", "4711");
+input.put("unitPrice", 2.5);
+input.put("qty", 5);
+OrderItem item = converter.read(OrderItem.class, input);
+----
+
+NOTE: The SpEL expression in the `@Value` annotation of the `quantity` parameter falls back to the value `0` if the given property path cannot be resolved.
+
+Additional examples for using the `@PersistenceConstructor` annotation can be found in the https://github.com/spring-projects/spring-data-mongodb/blob/master/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java[MappingMongoConverterUnitTests] test suite.
+
+[[mapping-usage-events]]
+=== Mapping Framework Events
+
+Events are fired throughout the lifecycle of the mapping process.
+This is described in the xref:mongodb/lifecycle-events.adoc[Lifecycle Events] section.
+
+Declaring lifecycle event listener beans in your Spring `ApplicationContext` causes them to be invoked whenever an event is dispatched.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/property-converters.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/property-converters.adoc
new file mode 100644
index 0000000000..fed1f4c33e
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/property-converters.adoc
@@ -0,0 +1,106 @@
+[[mongo.property-converters]]
+= Property Converters
+
+While xref:mongodb/mapping/custom-conversions.adoc[type-based conversion] already offers ways to influence the conversion and representation of certain types within the target store, it has limitations when only certain values or properties of a particular type should be considered for conversion.
+Property-based converters allow configuring conversion rules on a per-property basis, either declaratively (via `@ValueConverter`) or programmatically (by registering a `PropertyValueConverter` for a specific property).
+
+A `PropertyValueConverter` can transform a given value into its store representation (write) and back (read), as the following listing shows.
+The additional `ValueConversionContext` provides additional information, such as mapping metadata and direct `read` and `write` methods.
+
+.A simple PropertyValueConverter
+====
+[source,java]
+----
+class ReversingValueConverter implements PropertyValueConverter<String, String, ValueConversionContext<MongoPersistentProperty>> {
+
+  @Override
+  public String read(String value, ValueConversionContext<MongoPersistentProperty> context) {
+    return reverse(value);
+  }
+
+  @Override
+  public String write(String value, ValueConversionContext<MongoPersistentProperty> context) {
+    return reverse(value);
+  }
+}
+----
+====
+
+You can obtain `PropertyValueConverter` instances from `CustomConversions#getPropertyValueConverter(…)` by delegating to `PropertyValueConversions`, typically by using a `PropertyValueConverterFactory` to provide the actual converter.
+Depending on your application's needs, you can chain or decorate multiple instances of `PropertyValueConverterFactory` -- for example, to apply caching.
+By default, Spring Data MongoDB uses a caching implementation that can serve types with a default constructor or enum values.
+A set of predefined factories is available through the factory methods in `PropertyValueConverterFactory`.
+You can use `PropertyValueConverterFactory.beanFactoryAware(…)` to obtain a `PropertyValueConverter` instance from an `ApplicationContext`.
+
+You can change the default behavior through `ConverterConfiguration`.
+
+[[mongo.property-converters.declarative]]
+== Declarative Value Converter
+
+The most straightforward usage of a `PropertyValueConverter` is to annotate properties with the `@ValueConverter` annotation that defines the converter type:
+
+.Declarative PropertyValueConverter
+====
+[source,java]
+----
+class Person {
+
+  @ValueConverter(ReversingValueConverter.class)
+  String ssn;
+}
+----
+====
+
+[[mongo.property-converters.programmatic]]
+== Programmatic Value Converter Registration
+
+Programmatic registration registers `PropertyValueConverter` instances for properties within an entity model by using a `PropertyValueConverterRegistrar`, as the following example shows.
+The difference between declarative registration and programmatic registration is that programmatic registration happens entirely outside of the entity model.
+Such an approach is useful if you cannot or do not want to annotate the entity model.
+
+.Programmatic PropertyValueConverter registration
+====
+[source,java]
+----
+PropertyValueConverterRegistrar registrar = new PropertyValueConverterRegistrar();
+
+registrar.registerConverter(Address.class, "street", new PropertyValueConverter() { … }); <1>
+
+// type safe registration
+registrar.registerConverter(Person.class, Person::getSsn) <2>
+    .writing(value -> encrypt(value))
+    .reading(value -> decrypt(value));
+----
+
+<1> Register a converter for the field identified by its name.
+<2> Type-safe variant that allows registering a converter and its conversion functions.
+This method uses class proxies to determine the property.
+Make sure that neither the class nor the accessors are `final`, as otherwise this approach does not work.
+====
+
+WARNING: Dot notation (such as `registerConverter(Person.class, "address.street", …)`) for navigating across properties into subdocuments is *not* supported when registering converters.
+
+TIP: `MongoValueConverter` offers a pre-typed `PropertyValueConverter` interface that uses `MongoConversionContext`.
+
+[[mongocustomconversions-configuration]]
+== MongoCustomConversions configuration
+
+By default, `MongoCustomConversions` can handle declarative value converters, depending on the configured `PropertyValueConverterFactory`.
+`MongoConverterConfigurationAdapter` helps to set up programmatic value conversions or define the `PropertyValueConverterFactory` to be used.
+
+.Configuration Sample
+====
+[source,java]
+----
+MongoCustomConversions.create(configurationAdapter -> {
+
+  SimplePropertyValueConversions valueConversions = new SimplePropertyValueConversions();
+  valueConversions.setConverterFactory(…);
+  valueConversions.setValueConverterRegistry(new PropertyValueConverterRegistrar()
+    .registerConverter(…)
+    .buildRegistry());
+
+  configurationAdapter.setPropertyValueConversions(valueConversions);
+});
+----
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/unwrapping-entities.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/unwrapping-entities.adoc
new file mode 100644
index 0000000000..cff702f179
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/unwrapping-entities.adoc
@@ -0,0 +1,382 @@
+[[unwrapped-entities]]
+= Unwrapping Types
+
+Unwrapped entities are used to design value objects in your Java domain model whose properties are flattened out into the parent's MongoDB Document.
+
+[[unwrapped-entities.mapping]]
+== Unwrapped Types Mapping
+
+Consider the following domain model where `User.name` is annotated with `@Unwrapped`.
+The `@Unwrapped` annotation signals that all properties of `UserName` should be flattened out into the `user` document that owns the `name` property.
+
+.Sample Code of unwrapping objects
+====
+[source,java]
+----
+class User {
+
+  @Id
+  String userId;
+
+  @Unwrapped(onEmpty = USE_NULL) <1>
+  UserName name;
+}
+
+class UserName {
+
+  String firstname;
+
+  String lastname;
+}
+----
+
+[source,json]
+----
+{
+  "_id" : "1da2ba06-3ba7",
+  "firstname" : "Emma",
+  "lastname" : "Frost"
+}
+----
+<1> When loading the `name` property, its value is set to `null` if both `firstname` and `lastname` are either `null` or not present.
+By using `onEmpty=USE_EMPTY`, an empty `UserName`, with potentially `null` values for its properties, is created.
+====
+
+For less verbose embeddable type declarations, use `@Unwrapped.Nullable` and `@Unwrapped.Empty` instead of `@Unwrapped(onEmpty = USE_NULL)` and `@Unwrapped(onEmpty = USE_EMPTY)`.
+Both annotations are meta-annotated with JSR-305 `@javax.annotation.Nonnull` to aid with nullability inspections.
+
+[WARNING]
+====
+It is possible to use complex types within an unwrapped object.
+However, those must not be, nor contain, unwrapped types themselves.
+====
+
+[[unwrapped-entities.mapping.field-names]]
+== Unwrapped Types field names
+
+A value object can be unwrapped multiple times by using the optional `prefix` attribute of the `@Unwrapped` annotation.
+By doing so, the chosen prefix is prepended to each property or `@Field("…")` name in the unwrapped object.
+Please note that values will overwrite each other if multiple properties render to the same field name.
+
+.Sample Code of unwrapped object with name prefix
+====
+[source,java]
+----
+class User {
+
+  @Id
+  String userId;
+
+  @Unwrapped.Nullable(prefix = "u_") <1>
+  UserName name;
+
+  @Unwrapped.Nullable(prefix = "a_") <2>
+  UserName alias;
+}
+
+class UserName {
+
+  String firstname;
+
+  String lastname;
+}
+----
+
+[source,json]
+----
+{
+  "_id" : "a6a805bd-f95f",
+  "u_firstname" : "Jean", <1>
+  "u_lastname" : "Grey",
+  "a_firstname" : "Something", <2>
+  "a_lastname" : "Else"
+}
+----
+<1> All properties of `UserName` are prefixed with `u_`.
+<2> All properties of `UserName` are prefixed with `a_`.
+====
+
+Combining the `@Field` annotation with `@Unwrapped` on the very same property does not make sense and therefore leads to an error.
+It is, however, a totally valid approach to use `@Field` on any of the unwrapped type's properties.
+
+.Sample Code unwrapping objects with `@Field` annotation
+====
+[source,java]
+----
+public class User {
+
+  @Id
+  private String userId;
+
+  @Unwrapped.Nullable(prefix = "u-") <1>
+  UserName name;
+}
+
+public class UserName {
+
+  @Field("first-name") <2>
+  private String firstname;
+
+  @Field("last-name")
+  private String lastname;
+}
+----
+
+[source,json]
+----
+{
+  "_id" : "2647f7b9-89da",
+  "u-first-name" : "Barbara", <2>
+  "u-last-name" : "Gordon"
+}
+----
+<1> All properties of `UserName` are prefixed with `u-`.
+<2> Final field names are a result of concatenating `@Unwrapped(prefix)` and `@Field(name)`.
+====
+
+[[unwrapped-entities.queries]]
+== Query on Unwrapped Objects
+
+Defining queries on unwrapped properties is possible on type- as well as field-level, as the provided `Criteria` is matched against the domain type.
+Prefixes and potential custom field names are considered when rendering the actual query.
+Use the property name of the unwrapped object to match against all contained fields, as shown in the sample below.
+
+.Query on unwrapped object
+====
+[source,java]
+----
+UserName userName = new UserName("Carol", "Danvers");
+Query findByUserName = query(where("name").is(userName));
+User user = template.findOne(findByUserName, User.class);
+----
+
+[source,json]
+----
+db.collection.find({
+  "firstname" : "Carol",
+  "lastname" : "Danvers"
+})
+----
+====
+
+It is also possible to address any field of the unwrapped object directly using its property name, as shown in the snippet below.
+
+.Query on field of unwrapped object
+====
+[source,java]
+----
+Query findByUserFirstName = query(where("name.firstname").is("Shuri"));
+List<User> users = template.findAll(findByUserFirstName, User.class);
+----
+
+[source,json]
+----
+db.collection.find({
+  "firstname" : "Shuri"
+})
+----
+====
+
+[[unwrapped-entities.queries.sort]]
+=== Sort by unwrapped field
+
+Fields of unwrapped objects can be used for sorting via their property path, as shown in the sample below.
+
+.Sort on unwrapped field
+====
+[source,java]
+----
+Query findByUserLastName = query(where("name.lastname").is("Romanoff"));
+List<User> users = template.findAll(findByUserLastName.withSort(Sort.by("name.firstname")), User.class);
+----
+
+[source,json]
+----
+db.collection.find({
+  "lastname" : "Romanoff"
+}).sort({ "firstname" : 1 })
+----
+====
+
+[NOTE]
+====
+Though possible, using the unwrapped object itself as sort criteria includes all of its fields in unpredictable order and may result in inaccurate ordering.
+====
+
+[[unwrapped-entities.queries.project]]
+=== Field projection on unwrapped objects
+
+Fields of unwrapped objects can be subject to projection, either as a whole or via single fields, as shown in the samples below.
+
+.Project on unwrapped object
+====
+[source,java]
+----
+Query findByUserLastName = query(where("name.lastname").is("Gamora"));
+findByUserLastName.fields().include("name"); <1>
+List<User> users = template.findAll(findByUserLastName, User.class);
+----
+
+[source,json]
+----
+db.collection.find({
+  "lastname" : "Gamora"
+},
+{
+  "firstname" : 1,
+  "lastname" : 1
+})
+----
+<1> A field projection on an unwrapped object includes all of its properties.
+====
+
+.Project on a field of an unwrapped object
+====
+[source,java]
+----
+Query findByUserLastName = query(where("name.lastname").is("Smoak"));
+findByUserLastName.fields().include("name.firstname"); <1>
+List<User> users = template.findAll(findByUserLastName, User.class);
+----
+
+[source,json]
+----
+db.collection.find({
+  "lastname" : "Smoak"
+},
+{
+  "firstname" : 1
+})
+----
+<1> A projection on a single field of an unwrapped object includes only that property.
+====
+
+[[unwrapped-entities.queries.by-example]]
+=== Query By Example on unwrapped object
+
+Unwrapped objects can be used within an `Example` probe just as any other type.
+Please review the xref:mongodb/template-query-operations.adoc#mongo.query-by-example[Query By Example] section to learn more about this feature.
+
+[[unwrapped-entities.queries.repository]]
+=== Repository Queries on unwrapped objects
+
+The `Repository` abstraction allows deriving queries on fields of unwrapped objects as well as on the entire object.
+
+.Repository queries on unwrapped objects
+====
+[source,java]
+----
+interface UserRepository extends CrudRepository<User, String> {
+
+  List<User> findByName(UserName username); <1>
+
+  List<User> findByNameFirstname(String firstname); <2>
+}
+----
+<1> Matches against all fields of the unwrapped object.
+<2> Matches against the `firstname`.
+====
+
+[NOTE]
+====
+Index creation for unwrapped objects is suspended even if the repository `create-query-indexes` namespace attribute is set to `true`.
+====
+
+[[unwrapped-entities.update]]
+== Update on Unwrapped Objects
+
+Unwrapped objects can be updated as any other object that is part of the domain model.
+The mapping layer takes care of flattening structures into their surroundings.
+It is possible to update single attributes of the unwrapped object as well as the entire value, as shown in the examples below.
+
+.Update a single field of an unwrapped object
+====
+[source,java]
+----
+Update update = new Update().set("name.firstname", "Janet");
+template.update(User.class).matching(where("id").is("Wasp"))
+    .apply(update).first();
+----
+
+[source,json]
+----
+db.collection.update({
+  "_id" : "Wasp"
+},
+{
+  "$set" : { "firstname" : "Janet" }
+},
+{ ... }
+)
+----
+====
+
+.Update an unwrapped object
+====
+[source,java]
+----
+Update update = new Update().set("name", new UserName("Janet", "van Dyne"));
+template.update(User.class).matching(where("id").is("Wasp"))
+    .apply(update).first();
+----
+
+[source,json]
+----
+db.collection.update({
+  "_id" : "Wasp"
+},
+{
+  "$set" : {
+    "firstname" : "Janet",
+    "lastname" : "van Dyne"
+  }
+},
+{ ... }
+)
+----
+====
+
+[[unwrapped-entities.aggregations]]
+== Aggregations on Unwrapped Objects
+
+The xref:mongodb/aggregation-framework.adoc[Aggregation Framework] attempts to map unwrapped values of typed aggregations.
+Please make sure to work with the property path, including the wrapper object, when referencing one of its values.
+Other than that, no special action is required.
+
+[[unwrapped-entities.indexes]]
+== Index on Unwrapped Objects
+
+It is possible to attach the `@Indexed` annotation to properties of an unwrapped type, just as is done with regular objects.
+It is not possible to use `@Indexed` along with the `@Unwrapped` annotation on the owning property.
+
+====
+[source,java]
+----
+public class User {
+
+  @Id
+  private String userId;
+
+  @Unwrapped(onEmpty = USE_NULL)
+  UserName name; <1>
+
+  // Invalid -> InvalidDataAccessApiUsageException
+  @Indexed <2>
+  @Unwrapped(onEmpty = USE_EMPTY)
+  Address address;
+}
+
+public class UserName {
+
+  private String firstname;
+
+  @Indexed
+  private String lastname; <1>
+}
+----
+<1> Index created for `lastname` in `users` collection.
+<2> Invalid `@Indexed` usage along with `@Unwrapped`
+====
+
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc
new file mode 100644
index 0000000000..14e866cf14
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc
@@ -0,0 +1,313 @@
+[[mongo.encryption]]
+= Encryption
+
+Client-Side Encryption is a feature that encrypts data in your application before it is sent to MongoDB.
+We recommend getting familiar with the concepts, ideally from the https://www.mongodb.com/docs/manual/core/security-in-use-encryption/[MongoDB Documentation], to learn more about its capabilities and restrictions before you continue applying encryption through Spring Data.
+
+[NOTE]
+====
+Make sure to set the driver's `com.mongodb.AutoEncryptionSettings` to use client-side encryption.
+MongoDB does not support encryption for all field types.
+Specific data types require deterministic encryption to preserve equality comparison functionality.
+====
+
+== Client Side Field Level Encryption (CSFLE)
+
+Choosing CSFLE gives you full flexibility and allows you to use different keys for a single field, e.g. in a one-key-per-tenant scenario.
+
+Please make sure to consult the https://www.mongodb.com/docs/manual/core/csfle/[MongoDB CSFLE Documentation] before you continue reading.
+
+[[mongo.encryption.automatic]]
+=== Automatic Encryption (CSFLE)
+
+MongoDB supports https://www.mongodb.com/docs/manual/core/csfle/[Client-Side Field Level Encryption] out of the box using the MongoDB driver with its Automatic Encryption feature.
+Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows performing encrypted read and write operations without the need for an explicit en-/decryption step.
+
+Please refer to the xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema] section for more information on defining a JSON Schema that holds encryption information.
+
+To make use of the `MongoJsonSchema`, it needs to be combined with `AutoEncryptionSettings`, which can be done, for example, via a `MongoClientSettingsBuilderCustomizer`.
+
+[source,java]
+----
+@Bean
+MongoClientSettingsBuilderCustomizer customizer(MappingContext mappingContext) {
+    return (builder) -> {
+
+        // ... keyVaultCollection, kmsProvider, ...
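+        // The elided setup above is assumed to yield the values used below
+        // (names are illustrative of what the omitted code must provide):
+        //   keyVaultCollection - the key vault namespace, e.g. "encryption.__keyVault"
+        //   kmsProviders       - Map of KMS provider settings, e.g. a "local" provider with a master key
+        //   extraOpts          - Map of extra options, e.g. crypt_shared / mongocryptd settings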
+
+        MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext);
+        MongoJsonSchema patientSchema = schemaCreator
+            .filter(MongoJsonSchemaCreator.encryptedOnly())
+            .createSchemaFor(Patient.class);
+
+        AutoEncryptionSettings autoEncryptionSettings = AutoEncryptionSettings.builder()
+            .keyVaultNamespace(keyVaultCollection)
+            .kmsProviders(kmsProviders)
+            .extraOptions(extraOpts)
+            .schemaMap(Collections.singletonMap("db.patient", patientSchema.schemaDocument().toBsonDocument()))
+            .build();
+
+        builder.autoEncryptionSettings(autoEncryptionSettings);
+    };
+}
+----
+
+[[mongo.encryption.explicit]]
+=== Explicit Encryption (CSFLE)
+
+Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks.
+The `@ExplicitEncrypted` annotation is a combination of the `@Encrypted` annotation used for xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation] and a xref:mongodb/mapping/property-converters.adoc[Property Converter].
+In other words, `@ExplicitEncrypted` uses existing building blocks and combines them for simplified explicit encryption support.
+
+[NOTE]
+====
+Fields annotated with `@ExplicitEncrypted` are always encrypted as a whole.
+Consider the following example:
+
+[source,java]
+----
+@ExplicitEncrypted(…)
+String simpleValue; <1>
+
+@ExplicitEncrypted(…)
+Address address; <2>
+
+@ExplicitEncrypted(…)
+List<...> list; <3>
+
+@ExplicitEncrypted(…)
+Map<..., ...> mapOfString; <4>
+----
+
+<1> Encrypts the value of a simple type, such as a `String`, if not `null`.
+<2> Encrypts the entire `Address` object and all its nested fields as a `Document`.
+To only encrypt parts of the `Address`, like `Address#street`, the `street` field within `Address` needs to be annotated with `@ExplicitEncrypted`.
+<3> ``Collection``-like fields are encrypted as a single value and not per entry.
+<4> ``Map``-like fields are encrypted as a single value and not as key/value entries.
+====
+
+Client-Side Field Level Encryption allows you to choose between a deterministic and a randomized algorithm.
+Depending on the https://www.mongodb.com/docs/v5.0/reference/security-client-side-automatic-json-schema/#std-label-field-level-encryption-json-schema/[chosen algorithm], https://www.mongodb.com/docs/manual/core/csfle/reference/supported-operations/[different operations] may be supported.
+To pick a certain algorithm, use `@ExplicitEncrypted(algorithm=…)`; see `EncryptionAlgorithms` for algorithm constants.
+Please read the https://www.mongodb.com/docs/manual/core/csfle/fundamentals/encryption-algorithms[Encryption Types] manual for more information on algorithms and their usage.
+
+To perform the actual encryption, we require a Data Encryption Key (DEK).
+Please refer to the https://www.mongodb.com/docs/manual/core/csfle/quick-start/#create-a-data-encryption-key[MongoDB Documentation] for more information on how to set up key management and create a Data Encryption Key.
+The DEK can be referenced directly via its `id` or a defined _alternative name_.
+The `@ExplicitEncrypted` annotation only allows referencing a DEK via an alternative name.
+It is possible to provide an `EncryptionKeyResolver`, which will be discussed later, to resolve any DEK.
+
+.Reference the Data Encryption Key
+====
+[source,java]
+----
+@ExplicitEncrypted(algorithm = …, altKeyName = "secret-key") <1>
+String ssn;
+----
+
+[source,java]
+----
+@ExplicitEncrypted(algorithm = …, altKeyName = "/name") <2>
+String ssn;
+----
+
+<1> Use the DEK stored with the alternative name `secret-key`.
+<2> Uses a field reference that reads the actual field value and uses that for the key lookup.
+This always requires the full document to be present for save operations.
+Such fields cannot be used in queries/aggregations.
+====
+
+By default, the `@ExplicitEncrypted(value=…)` attribute references a `MongoEncryptionConverter`.
+It is possible to change the default implementation and exchange it with any `PropertyValueConverter` implementation by providing the according type reference.
+To learn more about custom `PropertyValueConverters` and the required configuration, please refer to the xref:mongodb/mapping/property-converters.adoc[Property Converters - Mapping specific fields] section.
+
+[[mongo.encryption.queryable]]
+== Queryable Encryption (QE)
+
+Choosing QE enables you to run different types of queries, like _range_ or _equality_, against encrypted fields.
+
+Please make sure to consult the https://www.mongodb.com/docs/manual/core/queryable-encryption/[MongoDB QE Documentation] before you continue reading to learn more about QE features and limitations.
+
+=== Collection Setup
+
+Queryable Encryption requires upfront declaration of certain aspects allowed within an actual query against an encrypted field.
+The information covers the algorithm in use as well as the allowed query types, along with their attributes, and must be provided when creating the collection.
+
+`MongoOperations#createCollection(...)` can be used to do the initial setup for collections utilizing QE.
+The configuration for QE via Spring Data uses the same building blocks (a xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation]) as CSFLE, converting the schema/properties into the configuration format required by MongoDB.
+
+[tabs]
+======
+Manual Collection Setup::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+CollectionOptions collectionOptions = CollectionOptions.encryptedCollection(options -> options
+    .queryable(encrypted(string("ssn")).algorithm("Indexed"), equality().contention(0))
+    .queryable(encrypted(int32("age")).algorithm("Range"), range().contention(8).min(0).max(150))
+    .queryable(encrypted(int64("address.sign")).algorithm("Range"), range().contention(2).min(-10L).max(10L))
+);
+
+mongoTemplate.createCollection(Patient.class, collectionOptions); <1>
+----
+<1> Using the template to create the collection may prevent capturing generated keyIds.
+In this case, render the `Document` from the options and use the `createEncryptedCollection(...)` method via the encryption library.
+====
+
+Derived Collection Setup::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+class Patient {
+
+  @Id String id;
+
+  @Encrypted(algorithm = "Indexed")
+  @Queryable(queryType = "equality", contentionFactor = 0)
+  String ssn;
+
+  @RangeEncrypted(contentionFactor = 8, rangeOptions = "{ 'min' : 0, 'max' : 150 }")
+  Integer age;
+
+  Address address;
+}
+
+MongoJsonSchema patientSchema = MongoJsonSchemaCreator.create(mappingContext)
+    .filter(MongoJsonSchemaCreator.encryptedOnly())
+    .createSchemaFor(Patient.class);
+
+CollectionOptions collectionOptions = CollectionOptions.encryptedCollection(patientSchema);
+
+mongoTemplate.createCollection(Patient.class, collectionOptions); <1>
+----
+<1> Using the template to create the collection may prevent capturing generated keyIds.
+In this case, render the `Document` from the options and use the `createEncryptedCollection(...)` method via the encryption library.
+
+The `@Queryable` annotation allows defining allowed query types for encrypted fields.
+`@RangeEncrypted` is a combination of `@Encrypted` and `@Queryable` for fields allowing `range` queries.
+It is possible to create custom annotations out of the provided ones.
+====
+
+MongoDB Collection Info::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="third"]
+----
+{
+  name: 'patient',
+  type: 'collection',
+  options: {
+    encryptedFields: {
+      escCollection: 'enxcol_.test.esc',
+      ecocCollection: 'enxcol_.test.ecoc',
+      fields: [
+        {
+          keyId: ...,
+          path: 'ssn',
+          bsonType: 'string',
+          queries: [ { queryType: 'equality', contention: Long('0') } ]
+        },
+        {
+          keyId: ...,
+          path: 'age',
+          bsonType: 'int',
+          queries: [ { queryType: 'range', contention: Long('8'), min: 0, max: 150 } ]
+        },
+        {
+          keyId: ...,
+          path: 'address.sign',
+          bsonType: 'long',
+          queries: [ { queryType: 'range', contention: Long('2'), min: Long('-10'), max: Long('10') } ]
+        }
+      ]
+    }
+  }
+}
+----
+====
+======
+
+[NOTE]
+====
+- It is not possible to use both QE and CSFLE within the same collection.
+- It is not possible to query a `range` indexed field with an `equality` operator.
+- It is not possible to query an `equality` indexed field with a `range` operator.
+- It is not possible to set `bypassAutoEncryption(true)`.
+- It is not possible to use self-maintained encryption keys via `@Encrypted` in combination with Queryable Encryption.
+- Contention is only optional on the server side; the client requires you to set the value (default is `8`).
+- Additional options such as `min` and `max` need to match the actual field type. Make sure to use `$numberLong` and the like to ensure target types when parsing BSON from a `String`.
+- Queryable Encryption adds an extra field `__safeContent__` to each of your documents.
+Unless explicitly excluded, the field is loaded into memory when retrieving results.
+====
+
+[[mongo.encryption.queryable.automatic]]
+=== Automatic Encryption (QE)
+
+MongoDB supports Queryable Encryption out of the box when using the MongoDB driver with its Automatic Encryption feature.
+Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows performing encrypted read and write operations without the need for an explicit en-/decryption step.
+
+All you need to do is create the collection according to the MongoDB documentation.
+You may use the techniques outlined in the section above to create the required configuration.
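+
+Once the collection is set up and the `MongoClient` is configured with matching `AutoEncryptionSettings`, regular template operations are encrypted and decrypted transparently.
+A minimal sketch, assuming the `Patient` type and the collection setup from above, a static import of `Criteria.where`, and an illustrative SSN value:
+
+[source,java]
+----
+// equality-queryable field: a plain query works, the driver encrypts the predicate
+Patient patient = mongoTemplate.query(Patient.class)
+    .matching(where("ssn").is("123-45-6789"))
+    .firstValue();
+----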
+
+[[mongo.encryption.queryable.manual]]
+=== Explicit Encryption (QE)
+
+Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks based on the meta information provided by annotations within the domain model.
+
+[NOTE]
+====
+There is no official support for using Explicit Queryable Encryption.
+The audacious user may combine `@Encrypted` and `@Queryable` with `@ValueConverter(MongoEncryptionConverter.class)` at their own risk.
+====
+
+[[mongo.encryption.explicit-setup]]
+[[mongo.encryption.converter-setup]]
+== MongoEncryptionConverter Setup
+
+The converter setup for `MongoEncryptionConverter` requires a few steps as several components are involved.
+The bean setup consists of the following:
+
+1. The `ClientEncryption` engine.
+2. A `MongoEncryptionConverter` instance configured with `ClientEncryption` and an `EncryptionKeyResolver`.
+3. A `PropertyValueConverterFactory` that uses the registered `MongoEncryptionConverter` bean.
+
+The `EncryptionKeyResolver` uses an `EncryptionContext` providing access to the property, allowing for dynamic DEK resolution.
+
+.Sample MongoEncryptionConverter Configuration
+====
+[source,java]
+----
+class Config extends AbstractMongoClientConfiguration {
+
+  @Autowired ApplicationContext appContext;
+
+  @Bean
+  ClientEncryption clientEncryption() { <1>
+    ClientEncryptionSettings.Builder encryptionSettings = ClientEncryptionSettings.builder();
+    // …
+
+    return ClientEncryptions.create(encryptionSettings.build());
+  }
+
+  @Bean
+  MongoEncryptionConverter encryptingConverter(ClientEncryption clientEncryption) {
+
+    Encryption encryption = MongoClientEncryption.just(clientEncryption);
+    EncryptionKeyResolver keyResolver = EncryptionKeyResolver.annotated((ctx) -> …); <2>
+
+    return new MongoEncryptionConverter(encryption, keyResolver); <3>
+  }
+
+  @Override
+  protected void configureConverters(MongoConverterConfigurationAdapter adapter) {
+
+    adapter
+        .registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(appContext)); <4>
+  }
+}
+----
+
+<1> Set up an `Encryption` engine using `com.mongodb.client.vault.ClientEncryption`.
+The instance is stateful and must be closed after usage.
+Spring takes care of this because `ClientEncryption` is ``Closeable``.
+<2> Set up an annotation-based `EncryptionKeyResolver` to determine the `EncryptionKey` from annotations.
+<3> Create the `MongoEncryptionConverter`.
+<4> Enable `PropertyValueConverter` lookup from the `BeanFactory`.
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc
new file mode 100644
index 0000000000..e8265b9837
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc
@@ -0,0 +1,97 @@
+[[mongo.group]]
+= Group Operations
+
+As an alternative to using Map-Reduce to perform data aggregation, you can use the https://www.mongodb.org/display/DOCS/Aggregation#Aggregation-Group[`group` operation], which feels similar to SQL's GROUP BY query style and may therefore feel more approachable than Map-Reduce.
+Using the group operation does have some limitations: for example, it is not supported in a sharded environment, and it returns the full result set in a single BSON object, so the result should be small, less than 10,000 keys.
+
+Spring provides integration with MongoDB's group operation by providing methods on `MongoOperations` to simplify the creation and running of group operations.
+It can convert the results of the group operation to a POJO and also integrates with Spring's https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#resources[Resource abstraction].
+This lets you place your JavaScript files on the file system, classpath, HTTP server, or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI-style syntax, e.g. 'classpath:reduce.js'.
+Externalizing JavaScript code in files is often preferable to embedding it as Java strings in your code.
+Note that you can still pass JavaScript code as Java strings if you prefer.
+
+[[mongo.group.example]]
+== Example Usage
+
+In order to understand how group operations work, the following, somewhat artificial, example is used.
+For a more realistic example consult the book 'MongoDB - The definitive guide'.
+A collection named `group_test_collection` is created with the following rows.
+
+[source]
+----
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f1"), "x" : 1 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f2"), "x" : 1 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f3"), "x" : 2 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f4"), "x" : 3 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f5"), "x" : 3 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f6"), "x" : 3 }
+----
+
+We would like to group by the only field in each row, the `x` field, and aggregate the number of times each specific value of `x` occurs.
+To do this, we need to create an initial document that contains our count variable as well as a reduce function that increments it each time it is encountered.
+The Java code to run the group operation is shown below:
+
+[source,java]
+----
+GroupByResults<XObject> results = mongoTemplate.group("group_test_collection",
+    GroupBy.key("x").initialDocument("{ count: 0 }").reduceFunction("function(doc, prev) { prev.count += 1 }"),
+    XObject.class);
+----
+
+The first argument is the name of the collection to run the group operation over; the second is a fluent API that specifies properties of the group operation via a `GroupBy` class.
+In this example, we are using just the `initialDocument` and `reduceFunction` methods.
+You can also specify a key-function, as well as a finalizer, as part of the fluent API.
+If you have multiple keys to group by, you can pass in a comma-separated list of keys.
+
+The raw results of the group operation are a JSON document that looks like this:
+
+[source]
+----
+{
+  "retval" : [ { "x" : 1.0 , "count" : 2.0} ,
+               { "x" : 2.0 , "count" : 1.0} ,
+               { "x" : 3.0 , "count" : 3.0} ] ,
+  "count" : 6.0 ,
+  "keys" : 3 ,
+  "ok" : 1.0
+}
+----
+
+The document under the "retval" field is mapped onto the third argument in the group method, in this case `XObject`, which is shown below.
+
+[source,java]
+----
+public class XObject {
+
+  private float x;
+
+  private float count;
+
+  public float getX() {
+    return x;
+  }
+
+  public void setX(float x) {
+    this.x = x;
+  }
+
+  public float getCount() {
+    return count;
+  }
+
+  public void setCount(float count) {
+    this.count = count;
+  }
+
+  @Override
+  public String toString() {
+    return "XObject [x=" + x + " count = " + count + "]";
+  }
+}
+----
+
+You can also obtain the raw result as a `Document` by calling the method `getRawResults` on the `GroupByResults` class.
+
+There is an additional method overload of the group method on `MongoOperations` that lets you specify a `Criteria` object for selecting a subset of the rows.
+The following example uses a `Criteria` object, with some syntax sugar through static imports, and references key-function and reduce-function JavaScript files via Spring Resource strings:
+
+[source]
+----
+import static org.springframework.data.mongodb.core.mapreduce.GroupBy.keyFunction;
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+
+GroupByResults<XObject> results = mongoTemplate.group(where("x").gt(0),
+    "group_test_collection",
+    keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class);
+----
+
+include::aggregation-framework.adoc[]
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc
new file mode 100644
index 0000000000..bfccec44fa
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc
@@ -0,0 +1,127 @@
+[[mongo.mapreduce]]
+= Map-Reduce Operations
+
+You can query MongoDB by using Map-Reduce, which is useful for batch processing, for data aggregation, and for when the query language does not fulfill your needs.
+
+Spring provides integration with MongoDB's Map-Reduce by providing methods on `MongoOperations` to simplify the creation and running of Map-Reduce operations.
+It can convert the results of a Map-Reduce operation to a POJO and integrates with Spring's link:{springDocsUrl}/core.html#resources[Resource abstraction].
+This lets you place your JavaScript files on the file system, classpath, HTTP server, or any other Spring Resource implementation and then reference the JavaScript resources through an easy URI-style syntax -- for example, `classpath:reduce.js`.
+Externalizing JavaScript code in files is often preferable to embedding it as Java strings in your code.
+Note that you can still pass JavaScript code as Java strings if you prefer.
+
+[[mongo.mapreduce.example]]
+== Example Usage
+
+To understand how to perform Map-Reduce operations, we use an example from the book, _MongoDB - The Definitive Guide_ footnote:[Kristina Chodorow. _MongoDB - The Definitive Guide_.
+O'Reilly Media, 2013].
+In this example, we create three documents that have the values [a,b], [b,c], and [c,d], respectively.
+The values in each document are associated with the key 'x', as the following example shows (assume these documents are in a collection named `jmr1`):
+
+[source]
+----
+{ "_id" : ObjectId("4e5ff893c0277826074ec533"), "x" : [ "a", "b" ] }
+{ "_id" : ObjectId("4e5ff893c0277826074ec534"), "x" : [ "b", "c" ] }
+{ "_id" : ObjectId("4e5ff893c0277826074ec535"), "x" : [ "c", "d" ] }
+----
+
+The following map function counts the occurrence of each letter in the array for each document:
+
+[source,javascript]
+----
+function () {
+    for (var i = 0; i < this.x.length; i++) {
+        emit(this.x[i], 1);
+    }
+}
+----
+
+The following reduce function sums up the occurrence of each letter across all the documents:
+
+[source,javascript]
+----
+function (key, values) {
+    var sum = 0;
+    for (var i = 0; i < values.length; i++)
+        sum += values[i];
+    return sum;
+}
+----
+
+Running the preceding functions results in the following collection:
+
+[source]
+----
+{ "_id" : "a", "value" : 1 }
+{ "_id" : "b", "value" : 2 }
+{ "_id" : "c", "value" : 2 }
+{ "_id" : "d", "value" : 1 }
+----
+
+Assuming that the map and reduce functions are located in `map.js` and `reduce.js` and bundled in your jar so they are available on the classpath, you can run a Map-Reduce operation as follows:
+
+[source,java]
+----
+MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class);
+for (ValueObject valueObject : results) {
+  System.out.println(valueObject);
+}
+----
+
+The preceding example produces the following output:
+
+[source]
+----
+ValueObject [id=a, value=1.0]
+ValueObject [id=b, value=2.0]
+ValueObject [id=c, value=2.0]
+ValueObject [id=d, value=1.0]
+----
+
+The `MapReduceResults` class implements `Iterable` and provides access to the raw output as well as timing and count statistics.
+The following listing shows the `ValueObject` class:
+
+[source,java]
+----
+public class ValueObject {
+
+  private String id;
+  private float value;
+
+  public String getId() {
+    return id;
+  }
+
+  public float getValue() {
+    return value;
+  }
+
+  public void setValue(float value) {
+    this.value = value;
+  }
+
+  @Override
+  public String toString() {
+    return "ValueObject [id=" + id + ", value=" + value + "]";
+  }
+}
+----
+
+By default, the output type of `INLINE` is used so that you need not specify an output collection.
+To specify additional Map-Reduce options, use an overloaded method that takes an additional `MapReduceOptions` argument.
+The class `MapReduceOptions` has a fluent API, so adding additional options can be done in a compact syntax.
+The following example sets the output collection to `jmr1_out` (note that setting only the output collection assumes a default output type of `REPLACE`):
+
+[source,java]
+----
+MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
+    new MapReduceOptions().outputCollection("jmr1_out"), ValueObject.class);
+----
+
+There is also a static import (`import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.options;`) that can be used to make the syntax slightly more compact, as the following example shows:
+
+[source,java]
+----
+MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
+    options().outputCollection("jmr1_out"), ValueObject.class);
+----
+
+You can also specify a query to reduce the set of data that is fed into
+the Map-Reduce operation.
+The following example removes the document that contains [a,b] from consideration for Map-Reduce operations:
+
+[source,java]
+----
+Query query = new Query(where("x").ne(new String[] { "a", "b" }));
+MapReduceResults<ValueObject> results = mongoOperations.mapReduce(query, "jmr1", "classpath:map.js", "classpath:reduce.js",
+    options().outputCollection("jmr1_out"), ValueObject.class);
+----
+
+Note that you can specify additional limit and sort values on the query, but you cannot skip values.
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc
new file mode 100644
index 0000000000..345b5dbb6c
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc
@@ -0,0 +1,122 @@
+[[mongo.search]]
+= MongoDB Search
+
+MongoDB enables users to do keyword or lexical search as well as vector search over their data using dedicated search indexes.
+
+[[mongo.search.vector]]
+== Vector Search
+
+MongoDB Vector Search uses the `$vectorSearch` aggregation stage to run queries against specialized indexes.
+Please refer to the MongoDB documentation to learn more about requirements and restrictions of `vectorSearch` indexes.
+
+[[mongo.search.vector.index]]
+=== Managing Vector Indexes
+
+`SearchIndexOperationsProvider`, implemented by `MongoTemplate`, is the entry point to `SearchIndexOperations`, offering various methods for managing vector indexes.
+
+The following snippet shows how to create a vector index for a collection:
+
+.Create a Vector Index
+[tabs]
+======
+Java::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+VectorIndex index = new VectorIndex("vector_index")
+    .addVector("plotEmbedding", vector -> vector.dimensions(1536).similarity(COSINE)) <1>
+    .addFilter("year"); <2>
+
+mongoTemplate.searchIndexOps(Movie.class) <3>
+    .createIndex(index);
+----
+<1> A vector index may cover multiple vector embeddings that can be added via the `addVector` method.
+<2> Vector indexes can contain additional fields to narrow down search results when running queries.
+<3> Obtain `SearchIndexOperations` bound to the `Movie` type, which is used for field name mapping.
+====
+
+Mongo Shell::
++
+====
+[source,console,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+db.movie.createSearchIndex("vector_index", "vectorSearch",
+  {
+    "fields": [
+      {
+        "type": "vector",
+        "numDimensions": 1536,
+        "path": "plot_embedding", <1>
+        "similarity": "cosine"
+      },
+      {
+        "type": "filter",
+        "path": "year"
+      }
+    ]
+  }
+)
+----
+<1> The field name `plotEmbedding` is mapped to `plot_embedding`, considering a `@Field(name = "...")` annotation.
+====
+======
+
+Once created, vector indexes are not immediately ready to use, although the `exists` check returns `true`.
+The actual status of a search index can be obtained via `SearchIndexOperations#status(...)`.
+The `READY` state indicates the index is ready to accept queries.
+
+[[mongo.search.vector.query]]
+=== Querying Vector Indexes
+
+Vector indexes can be queried by issuing an aggregation using a `VectorSearchOperation` via `MongoOperations`, as shown in the following example:
+
+.Query a Vector Index
+[tabs]
+======
+Java::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+VectorSearchOperation search = VectorSearchOperation.search("vector_index") <1>
+    .path("plotEmbedding") <2>
+    .vector( ... )
) + .numCandidates(150) + .limit(10) + .withSearchScore("score"); <3> + +AggregationResults results = mongoTemplate + .aggregate(newAggregation(Movie.class, search), MovieWithSearchScore.class); +---- +<1> Provide the name of the vector index to query since a collection may hold multiple ones. +<2> The name of the path used for comparison. +<3> Optionally add the search score with given name to the result document. +==== + +Mongo Shell:: ++ +==== +[source,console,indent=0,subs="verbatim,quotes",role="secondary"] +---- +db.embedded_movies.aggregate([ + { + "$vectorSearch": { + "index": "vector_index", + "path": "plot_embedding", <1> + "queryVector": [ ... ], + "numCandidates": 150, + "limit": 10 + } + }, + { + "$addFields": { + "score": { $meta: "vectorSearchScore" } + } + } +]) +---- +<1> Field name `plotEmbedding` got mapped to `plot_embedding` considering a `@Field(name = "...")` annotation. +==== +====== + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-server-side-scripts.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-server-side-scripts.adoc new file mode 100644 index 0000000000..0cac130b63 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-server-side-scripts.adoc @@ -0,0 +1,28 @@ +[[mongo.server-side-scripts]] += Script Operations + +[WARNING] +==== +https://docs.mongodb.com/master/release-notes/4.2-compatibility/[MongoDB 4.2] removed support for the `eval` command used +by `ScriptOperations`. + +There is no replacement for the removed functionality. +==== + +MongoDB allows running JavaScript functions on the server by either directly sending the script or calling a stored one. `ScriptOperations` can be accessed through `MongoTemplate` and provides basic abstraction for `JavaScript` usage. The following example shows how to us the `ScriptOperations` class: + +==== +[source,java] +---- +ScriptOperations scriptOps = template.scriptOps(); + +ExecutableMongoScript echoScript = new ExecutableMongoScript("function(x) { return x; }"); +scriptOps.execute(echoScript, "directly execute script"); <1> + +scriptOps.register(new NamedMongoScript("echo", echoScript)); <2> +scriptOps.call("echo", "execute script via name"); <3> +---- +<1> Run the script directly without storing the function on server side. +<2> Store the script using 'echo' as its name. The given name identifies the script and allows calling it later. +<3> Run the script with name 'echo' using the provided parameters. +==== + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/cdi-integration.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/cdi-integration.adoc new file mode 100644 index 0000000000..06b2a42dc8 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/cdi-integration.adoc @@ -0,0 +1,38 @@ +[[mongodb.repositories.misc.cdi-integration]] += CDI Integration + +Instances of the repository interfaces are usually created by a container, and Spring is the most natural choice when working with Spring Data. +As of version 1.3.0, Spring Data MongoDB ships with a custom CDI extension that lets you use the repository abstraction in CDI environments. +The extension is part of the JAR. +To activate it, drop the Spring Data MongoDB JAR into your classpath. 
+You can now set up the infrastructure by implementing a CDI Producer for the `MongoTemplate`, as the following example shows:
+
+[source,java]
+----
+class MongoTemplateProducer {
+
+  @Produces
+  @ApplicationScoped
+  public MongoOperations createMongoTemplate() {
+
+    MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database");
+    return new MongoTemplate(factory);
+  }
+}
+----
+
+The Spring Data MongoDB CDI extension picks up the `MongoTemplate` available as a CDI bean and creates a proxy for a Spring Data repository whenever a bean of a repository type is requested by the container.
+Thus, obtaining an instance of a Spring Data repository is a matter of declaring an `@Inject`-ed property, as the following example shows:
+
+[source,java]
+----
+class RepositoryClient {
+
+  @Inject
+  PersonRepository repository;
+
+  public void businessMethod() {
+    List<Person> people = repository.findAll();
+  }
+}
+----
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/modifying-methods.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/modifying-methods.adoc
new file mode 100644
index 0000000000..3d195ca0a9
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/modifying-methods.adoc
@@ -0,0 +1,100 @@
+[[mongodb.repositories.queries]]
+= MongoDB-specific Data Manipulation Methods
+
+Next to the xref:mongodb/repositories/query-methods.adoc[query methods], it is possible to update data with specialized methods.
+
+[[mongodb.repositories.queries.update]]
+== Update Methods
+
+You can use the keywords from the xref:mongodb/repositories/query-methods.adoc[query method] keyword table to create queries that identify matching documents for running updates on them.
+The actual update action is defined by the `@Update` annotation on the method itself, as the following listing shows.
+Note that the naming schema for derived queries starts with `find`.
+Using `update` (as in `updateAllByLastname(...)`) is allowed only in combination with `@Query`.
+
+The update is applied to *all* matching documents and it is *not* possible to limit the scope by passing in a `Page` or by using any of the limiting keywords.
+The return type can be either `void` or a _numeric_ type, such as `long`, to hold the number of modified documents.
+
+.Update Methods
+====
+[source,java]
+----
+public interface PersonRepository extends CrudRepository<Person, String> {
+
+  @Update("{ '$inc' : { 'visits' : 1 } }")
+  long findAndIncrementVisitsByLastname(String lastname); <1>
+
+  @Update("{ '$inc' : { 'visits' : ?1 } }")
+  void findAndIncrementVisitsByLastname(String lastname, int increment); <2>
+
+  @Update("{ '$inc' : { 'visits' : ?#{[1]} } }")
+  long findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); <3>
+
+  @Update(pipeline = {"{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }"})
+  void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); <4>
+
+  @Update("{ '$push' : { 'shippingAddresses' : ?1 } }")
+  long findAndPushShippingAddressByEmail(String email, Address address); <5>
+
+  @Query("{ 'lastname' : ?0 }")
+  @Update("{ '$inc' : { 'visits' : ?1 } }")
+  void updateAllByLastname(String lastname, int increment); <6>
+}
+----
+
+<1> The filter query for the update is derived from the method name.
+The update is "`as is`" and does not bind any parameters.
+<2> The actual increment value is defined by the `increment` method argument that is bound to the `?1` placeholder.
+<3> Use the Spring Expression Language (SpEL) for parameter binding.
+<4> Use the `pipeline` attribute to issue xref:mongodb/template-crud-operations.adoc#mongo-template.aggregation-update[aggregation pipeline updates].
+<5> The update may contain complex objects.
+<6> Combine a xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[string based query] with an update.
+====
+
+WARNING: Repository updates do not emit persistence or mapping lifecycle events.
+
+[[mongodb.repositories.queries.delete]]
+== Delete Methods
+
+The query method keywords can be used in conjunction with `delete…By` or `remove…By` to create queries that delete matching documents.
+
+.`Delete…By` Query
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  List<Person> deleteByLastname(String lastname); <1>
+
+  Long deletePersonByLastname(String lastname); <2>
+
+  @Nullable
+  Person deleteSingleByLastname(String lastname); <3>
+
+  Optional<Person> deleteByBirthdate(Date birthdate); <4>
+}
+----
+<1> Using a return type of `List` retrieves and returns all matching documents before actually deleting them.
+<2> A numeric return type directly removes the matching documents, returning the total number of documents removed.
+<3> A single domain type result retrieves and removes the first matching document.
+<4> Same as in 3 but wrapped in an `Optional` type.
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  Flux<Person> deleteByLastname(String lastname); <1>
+
+  Mono<Long> deletePersonByLastname(String lastname); <2>
+
+  Mono<Person> deleteSingleByLastname(String lastname); <3>
+}
+----
+<1> Using a return type of `Flux` retrieves and returns all matching documents before actually deleting them.
+<2> A numeric return type directly removes the matching documents, returning the total number of documents removed.
+<3> A single domain type result retrieves and removes the first matching document.
+======
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/query-methods.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/query-methods.adoc
new file mode 100644
index 0000000000..adb2392f04
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/query-methods.adoc
@@ -0,0 +1,869 @@
+[[mongodb.repositories.queries]]
+= MongoDB-specific Query Methods
+
+Most of the data access operations you usually trigger on a repository result in a query being executed against the MongoDB database.
+Defining such a query is a matter of declaring a method on the repository interface, as the following example shows:
+
+.PersonRepository with query methods
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
+
+  List<Person> findByLastname(String lastname); <1>
+
+  Page<Person> findByFirstname(String firstname, Pageable pageable); <2>
+
+  Person findByShippingAddresses(Address address); <3>
+
+  Person findFirstByLastname(String lastname); <4>
+
+  Stream<Person> findAllBy(); <5>
+}
+----
+<1> The `findByLastname` method shows a query for all people with the given last name.
+The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`.
+Thus, the method name results in a query expression of `{"lastname" : lastname}`.
+<2> Applies pagination to a query.
+You can equip your method signature with a `Pageable` parameter and let the method return a `Page` instance, and Spring Data automatically pages the query accordingly.
+<3> Shows that you can query based on properties that are not primitive types.
+Throws `IncorrectResultSizeDataAccessException` if more than one match is found.
+<4> Uses the `First` keyword to restrict the query to only the first result.
+Unlike <3>, this method does not throw an exception if more than one match is found.
+<5> Uses a Java 8 `Stream` that reads and converts individual elements while iterating the stream.
+
+Reactive::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface ReactivePersonRepository extends ReactiveSortingRepository<Person, String> {
+
+  Flux<Person> findByFirstname(String firstname); <1>
+
+  Flux<Person> findByFirstname(Publisher<String> firstname); <2>
+
+  Flux<Person> findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3>
+
+  Mono<Person> findByFirstnameAndLastname(String firstname, String lastname); <4>
+
+  Mono<Person> findFirstByLastname(String lastname); <5>
+}
+----
+<1> The method shows a query for all people with the given `lastname`. The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. Thus, the method name results in a query expression of `{"lastname" : lastname}`.
+<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`.
+<3> Use `Pageable` to pass offset and sorting parameters to the database.
+<4> Find a single entity for the given criteria. It completes with `IncorrectResultSizeDataAccessException` on non-unique results.
+<5> Unlike <4>, the first entity is always emitted even if the query yields more result documents.
+
+WARNING: The `Page` return type (as in `Mono<Page<Person>>`) is not supported by reactive repositories.
+
+It is possible to use `Pageable` in derived finder methods to pass on `sort`, `limit` and `offset` parameters to the query and reduce load and network traffic.
+The returned `Flux` will only emit data within the declared range.
+
+[source,java]
+----
+Pageable page = PageRequest.of(1, 10, Sort.by("lastname"));
+Flux<Person> persons = repository.findByFirstnameOrderByLastname("luke", page);
+----
+====
+======
+
+NOTE: We do not support referring to parameters that are mapped as `DBRef` in the domain class.
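+
+For illustration, the following sketch shows how a few of the keywords from the table below translate into MongoDB queries (a hypothetical `Person` type with `lastname` and `age` properties is assumed):
+
+[source,java]
+----
+interface PersonRepository extends CrudRepository<Person, String> {
+
+  // {"lastname" : lastname, "age" : { "$gte" : age } }
+  List<Person> findByLastnameAndAgeGreaterThanEqual(String lastname, int age);
+
+  // {"lastname" : { "$regex" : "^lastname$", "$options" : "i" } }
+  List<Person> findByLastnameIgnoreCase(String lastname);
+}
+----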
+
+.Supported keywords for query methods
+[%collapsible]
+====
+[cols="1,2,3",options="header"]
+|===
+| Keyword
+| Sample
+| Logical result
+
+| `After`
+| `findByBirthdateAfter(Date date)`
+| `{"birthdate" : {"$gt" : date}}`
+
+| `GreaterThan`
+| `findByAgeGreaterThan(int age)`
+| `{"age" : {"$gt" : age}}`
+
+| `GreaterThanEqual`
+| `findByAgeGreaterThanEqual(int age)`
+| `{"age" : {"$gte" : age}}`
+
+| `Before`
+| `findByBirthdateBefore(Date date)`
+| `{"birthdate" : {"$lt" : date}}`
+
+| `LessThan`
+| `findByAgeLessThan(int age)`
+| `{"age" : {"$lt" : age}}`
+
+| `LessThanEqual`
+| `findByAgeLessThanEqual(int age)`
+| `{"age" : {"$lte" : age}}`
+
+| `Between`
+| `findByAgeBetween(int from, int to)` +
+`findByAgeBetween(Range<Integer> range)`
+| `{"age" : {"$gt" : from, "$lt" : to}}` +
+lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range`
+
+| `In`
+| `findByAgeIn(Collection<Integer> ages)`
+| `{"age" : {"$in" : [ages...]}}`
+
+| `NotIn`
+| `findByAgeNotIn(Collection<Integer> ages)`
+| `{"age" : {"$nin" : [ages...]}}`
+
+| `IsNotNull`, `NotNull`
+| `findByFirstnameNotNull()`
+| `{"firstname" : {"$ne" : null}}`
+
+| `IsNull`, `Null`
+| `findByFirstnameNull()`
+| `{"firstname" : null}`
+
+| `Like`, `StartingWith`, `EndingWith`
+| `findByFirstnameLike(String name)`
+| `{"firstname" : name} (name as regex)`
+
+| `NotLike`, `IsNotLike`
+| `findByFirstnameNotLike(String name)`
+| `{"firstname" : { "$not" : name }} (name as regex)`
+
+| `Containing` on String
+| `findByFirstnameContaining(String name)`
+| `{"firstname" : name} (name as regex)`
+
+| `NotContaining` on String
+| `findByFirstnameNotContaining(String name)`
+| `{"firstname" : { "$not" : name}} (name as regex)`
+
+| `Containing` on Collection
+| `findByAddressesContaining(Address address)`
+| `{"addresses" : { "$in" : address}}`
+
+| `NotContaining` on Collection
+| `findByAddressesNotContaining(Address address)`
+| `{"addresses" : { "$not" : { "$in" : address}}}`
+
+| `Regex`
+| `findByFirstnameRegex(String firstname)`
+| `{"firstname" : {"$regex" : firstname }}`
+
+| `(No keyword)`
+| `findByFirstname(String name)`
+| `{"firstname" : name}`
+
+| `Not`
+| `findByFirstnameNot(String name)`
+| `{"firstname" : {"$ne" : name}}`
+
+| `Near`
+| `findByLocationNear(Point point)`
+| `{"location" : {"$near" : [x,y]}}`
+
+| `Near`
+| `findByLocationNear(Point point, Distance max)`
+| `{"location" : {"$near" : [x,y], "$maxDistance" : max}}`
+
+| `Near`
+| `findByLocationNear(Point point, Distance min, Distance max)`
+| `{"location" : {"$near" : [x,y], "$minDistance" : min, "$maxDistance" : max}}`
+
+| `Within`
+| `findByLocationWithin(Circle circle)`
+| `{"location" : {"$geoWithin" : {"$center" : [ [x, y], distance]}}}`
+
+| `Within`
+| `findByLocationWithin(Box box)`
+| `{"location" : {"$geoWithin" : {"$box" : [ [x1, y1], x2, y2]}}}`
+
+| `IsTrue`, `True`
+| `findByActiveIsTrue()`
+| `{"active" : true}`
+
+| `IsFalse`, `False`
+| `findByActiveIsFalse()`
+| `{"active" : false}`
+
+| `Exists`
+| `findByLocationExists(boolean exists)`
+| `{"location" : {"$exists" : exists }}`
+
+| `IgnoreCase`
+| `findByUsernameIgnoreCase(String username)`
+| `{"username" : {"$regex" : "^username$", "$options" : "i" }}`
+|===
+====
+
+NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters.
+
+[[mongodb.repositories.queries.geo-spatial]]
+== Geo-spatial Queries
+
+As you saw in the preceding table of keywords, a few keywords trigger geo-spatial operations within a MongoDB query.
+The `Near` keyword allows some further modification, as the next few examples show.
+
+The following example shows how to define a `near` query that finds all persons within a given distance of a given point:
+
+.Advanced `Near` queries
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
+  List<Person> findByLocationNear(Point location, Distance distance);
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
+  Flux<Person> findByLocationNear(Point location, Distance distance);
+}
+----
+======
+
+Adding a `Distance` parameter to the query method allows restricting results to those within the given distance.
+If the `Distance` was set up containing a `Metric`, we transparently use `$nearSphere` instead of `$near`, as the following example shows:
+
+.Using `Distance` with `Metrics`
+====
+[source,java]
+----
+Point point = new Point(43.7, 48.8);
+Distance distance = new Distance(200, Metrics.KILOMETERS);
+… = repository.findByLocationNear(point, distance);
+// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}}
+----
+====
+
+NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported, as they contradict the deferred result approach by pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself.
+
+Using a `Distance` with a `Metric` causes a `$nearSphere` (instead of a plain `$near`) clause to be added.
+Beyond that, the actual distance gets calculated according to the `Metrics` used.
+
+(Note that `Metric` does not refer to metric units of measure.
+It could be miles rather than kilometers.
+Rather, `metric` refers to the concept of a system of measurement, regardless of which system you use.)
+
+NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of the `$nearSphere` operator.
+
+To obtain the distance of each result to the given reference point, declare `GeoResults` as the method return type, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  // {'geoNear' : 'location', 'near' : [x, y] }
+  GeoResults<Person> findByLocationNear(Point location);
+
+  // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
+  // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
+  //          'distanceMultiplier' : metric.multiplier, 'spherical' : true }
+  GeoResults<Person> findByLocationNear(Point location, Distance distance);
+
+  // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
+  //          'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
+  //          'spherical' : true }
+  GeoResults<Person> findByLocationNear(Point location, Distance min, Distance max);
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  // {'geoNear' : 'location', 'near' : [x, y] }
+  Flux<GeoResult<Person>> findByLocationNear(Point location);
+
+  // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
+  // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
+  //          'distanceMultiplier' : metric.multiplier, 'spherical' : true }
+  Flux<GeoResult<Person>> findByLocationNear(Point location, Distance distance);
+
+  // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
+  //          'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
+  //          'spherical' : true }
+  Flux<GeoResult<Person>> findByLocationNear(Point location, Distance min, Distance max);
+}
+----
+======
+
+[[mongodb.repositories.queries.json-based]]
+== JSON-based Query Methods and Field Restriction
+
+By adding the `org.springframework.data.mongodb.repository.Query` annotation to your repository query methods, you can specify a MongoDB JSON query string to use instead of having the query be derived from the method name, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  @Query("{ 'firstname' : ?0 }")
+  List<Person> findByThePersonsFirstname(String firstname);
+
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  @Query("{ 'firstname' : ?0 }")
+  Flux<Person> findByThePersonsFirstname(String firstname);
+
+}
+----
+======
+
+The `?0` placeholder lets you substitute the value from the method arguments into the JSON query string.
+
+NOTE: `String` parameter values are escaped during the binding process, which means that it is not possible to add MongoDB specific operators through the argument.
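+
+As an illustration of that escaping, consider the following sketch (reusing the repository declared above): an operator-shaped argument is bound as a literal string rather than being interpreted as an operator.
+
+[source,java]
+----
+// The bound value is escaped and treated as a plain string literal.
+// This call matches only documents whose firstname is the literal text
+// "{ '$ne' : null }"; it cannot be abused to inject an $ne operator.
+List<Person> result = repository.findByThePersonsFirstname("{ '$ne' : null }");
+----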
+
+You can also use the filter property to restrict the set of properties that is mapped into the Java object, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  @Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}")
+  List<Person> findByThePersonsFirstname(String firstname);
+
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  @Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}")
+  Flux<Person> findByThePersonsFirstname(String firstname);
+
+}
+----
+======
+
+The query in the preceding example returns only the `firstname`, `lastname` and `id` properties of the `Person` objects.
+The `age` property, a `java.lang.Integer`, is not set and its value is therefore null.
+
+[[mongodb.repositories.queries.json-spel]]
+== JSON-based Queries with SpEL Expressions
+
+Query strings and field definitions can be used together with SpEL expressions to create dynamic queries at runtime.
+SpEL expressions can provide predicate values and can be used to extend predicates with subdocuments.
+
+Expressions expose method arguments through an array that contains all the arguments.
+The following query uses `[0]`
+to declare the predicate value for `lastname` (which is equivalent to the `?0` parameter binding):
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  @Query("{'lastname': ?#{[0]} }")
+  List<Person> findByQueryWithExpression(String param0);
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  @Query("{'lastname': ?#{[0]} }")
+  Flux<Person> findByQueryWithExpression(String param0);
+}
+----
+======
+
+Expressions can be used to invoke functions, evaluate conditionals, and construct values.
+SpEL expressions used in conjunction with JSON reveal a side-effect, because Map-like declarations inside of SpEL read like JSON, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  @Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}")
+  List<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  @Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}")
+  Flux<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
+}
+----
+======
+
+WARNING: SpEL in query strings can be a powerful way to enhance queries.
+However, they can also accept a broad range of unwanted arguments.
+Make sure to sanitize strings before passing them to the query to avoid creating vulnerabilities or unwanted changes to your query.
+
+Expression support is extensible through the Query SPI: `EvaluationContextExtension` and `ReactiveEvaluationContextExtension`.
+The Query SPI can contribute properties and functions and can customize the root object.
+Extensions are retrieved from the application context at the time of SpEL evaluation when the query is built.
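+
+For instance, a query method might reference a property contributed by such an extension.
+The following sketch assumes the `security` extension shown in the next example, which exposes a `principal` property (the `owner` field and method name are illustrative, not part of the shipped API):
+
+[source,java]
+----
+// 'principal' is contributed by the registered EvaluationContextExtension
+// and resolved when the query is built.
+@Query("{ 'owner' : ?#{ principal } }")
+List<Person> findAllOwnedByCurrentUser();
+----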
+The following example shows how to use an evaluation context extension:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public class SampleEvaluationContextExtension extends EvaluationContextExtensionSupport {
+
+  @Override
+  public String getExtensionId() {
+    return "security";
+  }
+
+  @Override
+  public Map<String, Object> getProperties() {
+    return Collections.singletonMap("principal", SecurityContextHolder.getContext().getAuthentication());
+  }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public class SampleEvaluationContextExtension implements ReactiveEvaluationContextExtension {
+
+  @Override
+  public String getExtensionId() {
+    return "security";
+  }
+
+  @Override
+  public Mono<? extends EvaluationContextExtension> getExtension() {
+    return Mono.just(new EvaluationContextExtensionSupport() { ... });
+  }
+}
+----
+======
+
+NOTE: Bootstrapping `MongoRepositoryFactory` yourself is not application context-aware and requires further configuration to pick up Query SPI extensions.
+
+NOTE: Reactive query methods can make use of `org.springframework.data.spel.spi.ReactiveEvaluationContextExtension`.
+
+[[mongodb.repositories.queries.full-text]]
+== Full-text Search Queries
+
+MongoDB's full-text search feature is store-specific and, therefore, can be found on `MongoRepository` rather than on the more general `CrudRepository`.
+We need a document with a full-text index (see "`xref:mongodb/mapping/mapping.adoc#mapping-usage-indexes.text-index[Text Indexes]`" to learn how to create a full-text index).
+
+Additional methods on `MongoRepository` take `TextCriteria` as an input parameter.
+In addition to those explicit methods, it is also possible to add a `TextCriteria`-derived repository method.
+The criteria are added as an additional `AND` criteria.
+Once the entity contains a `@TextScore`-annotated property, the document's full-text score can be retrieved.
+Furthermore, the `@TextScore` annotation also makes it possible to sort by the document's score, as the following example shows:
+
+[source,java]
+----
+@Document
+class FullTextDocument {
+
+  @Id String id;
+  @TextIndexed String title;
+  @TextIndexed String content;
+  @TextScore Float score;
+}
+
+interface FullTextRepository extends Repository<FullTextDocument, String> {
+
+  // Execute a full-text search and define sorting dynamically
+  List<FullTextDocument> findAllBy(TextCriteria criteria, Sort sort);
+
+  // Paginate over a full-text search result
+  Page<FullTextDocument> findAllBy(TextCriteria criteria, Pageable pageable);
+
+  // Combine a derived query with a full-text search
+  List<FullTextDocument> findByTitleOrderByScoreDesc(String title, TextCriteria criteria);
+}
+
+
+Sort sort = Sort.by("score");
+TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("spring", "data");
+List<FullTextDocument> result = repository.findAllBy(criteria, sort);
+
+criteria = TextCriteria.forDefaultLanguage().matching("film");
+Page<FullTextDocument> page = repository.findAllBy(criteria, PageRequest.of(1, 1, sort));
+List<FullTextDocument> result = repository.findByTitleOrderByScoreDesc("mongodb", criteria);
+----
+
+[[mongodb.repositories.queries.aggregation]]
+== Aggregation Methods
+
+The repository layer offers means to interact with xref:mongodb/aggregation-framework.adoc[the aggregation framework] via annotated repository query methods.
+Similar to the xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[JSON based queries], you can define a pipeline using the `org.springframework.data.mongodb.repository.Aggregation` annotation.
+The definition may contain simple placeholders like `?0` as well as link:{springDocsUrl}/core.html#expressions[SpEL expressions] `?#{ … }`.
+
+.Aggregating Repository Method
+====
+[source,java]
+----
+public interface PersonRepository extends CrudRepository<Person, String> {
+
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
+  List<PersonAggregate> groupByLastnameAndFirstnames(); <1>
+
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
+  List<PersonAggregate> groupByLastnameAndFirstnames(Sort sort); <2>
+
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
+  List<PersonAggregate> groupByLastnameAnd(String property); <3>
+
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
+  Slice<PersonAggregate> groupByLastnameAnd(String property, Pageable page); <4>
+
+  @Aggregation(pipeline = {
+      "{ '$match' : { 'lastname' : '?0'} }",
+      "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }"
+  })
+  List<PersonProjection> findByLastname(String lastname); <5>
+
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
+  Stream<PersonAggregate> groupByLastnameAndFirstnamesAsStream(); <6>
+
+  @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
+  SumValue sumAgeUsingValueWrapper(); <7>
+
+  @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
+  Long sumAge(); <8>
+
+  @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
+  AggregationResults<SumValue> sumAgeRaw(); <9>
+
+  @Aggregation("{ '$project': { '_id' : '$lastname' } }")
+  List<String> findAllLastnames(); <10>
+
+  @Aggregation(pipeline = {
+      "{ $group : { _id : '$author', books: { $push: '$title' } } }",
+      "{ $out : 'authors' }"
+  })
+  void groupAndOutSkippingOutput(); <11>
+}
+----
+[source,java]
+----
+public class PersonAggregate {
+
+  private @Id String lastname; <2>
+  private List<String> names;
+
+  public PersonAggregate(String lastname, List<String> names) {
+    // ...
+  }
+
+  // Getter / Setter omitted
+}
+
+public class SumValue {
+
+  private final Long total; <7> <9>
+
+  public SumValue(Long total) {
+    // ...
+  }
+
+  // Getter omitted
+}
+
+interface PersonProjection {
+  String getFirstname();
+  String getLastname();
+}
+----
+<1> Aggregation pipeline to group first names by `lastname` in the `Person` collection, returning these as `PersonAggregate`.
+<2> If a `Sort` argument is present, `$sort` is appended after the declared pipeline stages so that it only affects the order of the final results after having passed all other aggregation stages.
+Therefore, the `Sort` properties are mapped against the method's return type `PersonAggregate`, which turns `Sort.by("lastname")` into `{ $sort : { '_id', 1 } }` because `PersonAggregate.lastname` is annotated with `@Id`.
+<3> Replaces `?0` with the given value for `property` for a dynamic aggregation pipeline.
+<4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination.
+<5> Aggregation methods can return interface-based projections wrapping the resulting `org.bson.Document` behind a proxy, exposing getters delegating to fields within the document.
+<6> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`.
+<7> Map the result of an aggregation returning a single `Document` to an instance of the desired `SumValue` target type.
+<8> Aggregations resulting in a single document holding just an accumulation result (such as `$sum`) can be extracted directly from the result `Document`.
+To gain more control, you might consider `AggregationResults` as the method return type, as shown in <9>.
+<9> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`.
+<10> Like in <8>, a single value can be directly obtained from multiple result ``Document``s.
+<11> Skips the output of the `$out` stage when the return type is `void`.
+====
+
+In some scenarios, aggregations might require additional options, such as a maximum run time, additional log comments, or the permission to temporarily write data to disk.
+Use the `@Meta` annotation to set those options via `maxExecutionTimeMs`, `comment` or `allowDiskUse`.
+
+[source,java]
+----
+interface PersonRepository extends CrudRepository<Person, String> {
+
+  @Meta(allowDiskUse = true)
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
+  List<PersonAggregate> groupByLastnameAndFirstnames();
+}
+----
+
+Or use `@Meta` to create your own annotation, as shown in the sample below.
+
+[source,java]
+----
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.METHOD })
+@Meta(allowDiskUse = true)
+@interface AllowDiskUse { }
+
+interface PersonRepository extends CrudRepository<Person, String> {
+
+  @AllowDiskUse
+  @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
+  List<PersonAggregate> groupByLastnameAndFirstnames();
+}
+----
+
+[NOTE]
+====
+For simple-type single-result methods, the returned `Document` is inspected and checked against the following rules:
+
+. Only one entry in the document: return it.
+. Two entries, one of them the `_id` value: return the other.
+. Otherwise, return the first value assignable to the return type.
+. Throw an exception if none of the above is applicable.
+====
+
+WARNING: The `Page` return type is not supported for repository methods using `@Aggregation`. However, you can use a
+`Pageable` argument to add `$skip`, `$limit` and `$sort` to the pipeline and let the method return `Slice`.
+
+[[mongodb.repositories.queries.by-example]]
+include::../../repositories/query-by-example.adoc[leveloffset=+1]
+
+[[mongodb.repositories.queries.scroll]]
+include::{commons}@data-commons::page$repositories/scrolling.adoc[leveloffset=+1]
+
+[[mongodb.repositories.queries.sort]]
+== Sorting Results
+
+MongoDB repositories allow various approaches to define sorting order.
+Let's take a look at the following example:
+
+.Sorting Query Results
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  List<Person> findByFirstnameOrderByAgeDesc(String firstname); <1>
+
+  List<Person> findByFirstname(String firstname, Sort sort); <2>
+
+  @Query(sort = "{ age : -1 }")
+  List<Person> findByFirstname(String firstname); <3>
+
+  @Query(sort = "{ age : -1 }")
+  List<Person> findByLastname(String lastname, Sort sort); <4>
+}
+----
+<1> Static sorting derived from the method name. `OrderByAgeDesc` results in `{ age : -1 }` for the sort parameter.
+<2> Dynamic sorting using a method argument.
+`Sort.by(DESC, "age")` creates `{ age : -1 }` for the sort parameter.
+<3> Static sorting via the `Query` annotation.
+The sort parameter is applied as stated in the `sort` attribute.
+<4> Default sorting via the `Query` annotation combined with dynamic sorting via a method argument.
+`Sort.unsorted()` results in `{ age : -1 }`.
+Using `Sort.by(ASC, "age")` overrides the default and creates `{ age : 1 }`.
+`Sort.by(ASC, "firstname")` alters the default and results in `{ age : -1, firstname : 1 }`.
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  Flux<Person> findByFirstnameOrderByAgeDesc(String firstname);
+
+  Flux<Person> findByFirstname(String firstname, Sort sort);
+
+  @Query(sort = "{ age : -1 }")
+  Flux<Person> findByFirstname(String firstname);
+
+  @Query(sort = "{ age : -1 }")
+  Flux<Person> findByLastname(String lastname, Sort sort);
+}
+----
+======
+
+[[mongodb.repositories.index-hint]]
+== Index Hints
+
+The `@Hint` annotation allows you to override MongoDB's default index selection and force the database to use the specified index instead.
+
+.Example of index hints
+====
+[source,java]
+----
+@Hint("lastname-idx") <1>
+List<Person> findByLastname(String lastname);
+
+@Query(value = "{ 'firstname' : ?0 }", hint = "firstname-idx") <2>
+List<Person> findByFirstname(String firstname);
+----
+<1> Use the index with name `lastname-idx`.
+<2> The `@Query` annotation defines the `hint` alias, which is equivalent to adding the `@Hint` annotation.
+====
+
+For more information about index creation, please refer to the xref:mongodb/template-collection-management.adoc[Collection Management] section.
+
+[[mongo.repositories.collation]]
+== Collation Support
+
+Next to the xref:mongodb/collation.adoc[general Collation Support], repositories allow you to define the collation for various operations.
+
+====
+[source,java]
+----
+public interface PersonRepository extends MongoRepository<Person, String> {
+
+  @Query(collation = "en_US") <1>
+  List<Person> findByFirstname(String firstname);
+
+  @Query(collation = "{ 'locale' : 'en_US' }") <2>
+  List<Person> findPersonByFirstname(String firstname);
+
+  @Query(collation = "?1") <3>
+  List<Person> findByFirstname(String firstname, Object collation);
+
+  @Query(collation = "{ 'locale' : '?1' }") <4>
+  List<Person> findByFirstname(String firstname, String collation);
+
+  List<Person> findByFirstname(String firstname, Collation collation); <5>
+
+  @Query(collation = "{ 'locale' : 'en_US' }")
+  List<Person> findByFirstname(String firstname, @Nullable Collation collation); <6>
+}
+----
+<1> Static collation definition resulting in `{ 'locale' : 'en_US' }`.
+<2> Static collation definition resulting in `{ 'locale' : 'en_US' }`.
+<3> Dynamic collation depending on the 2nd method argument. Allowed types include `String` (e.g. "en_US"), `Locale` (e.g. `Locale.US`)
+and `Document` (e.g. `new Document("locale", "en_US")`).
+<4> Dynamic collation depending on the 2nd method argument.
+<5> Apply the `Collation` method parameter to the query.
+<6> The `Collation` method parameter overrides the default `collation` from `@Query` if not null.
+
+NOTE: In case you enabled automatic index creation for repository finder methods, a potential static collation definition,
+as shown in <1> and <2>, is included when creating the index.
+
+TIP: The most specific `Collation` takes precedence over all others: a method argument wins over the query method annotation, which in turn wins over the domain type annotation.
+====
+
+To streamline usage of collation attributes throughout the codebase, it is also possible to use the `@Collation` annotation, which serves as a meta annotation for the ones mentioned above.
+The same rules and locations apply; plus, direct usage of `@Collation` supersedes any collation values defined on `@Query` and other annotations.
+This means that if a collation is declared via `@Query` and additionally via `@Collation`, the one from `@Collation` is picked.
+
+.Using `@Collation`
+====
+[source,java]
+----
+@Collation("en_US") <1>
+class Game {
+  // ...
+}
+
+interface GameRepository extends Repository<Game, String> {
+
+  @Collation("en_GB") <2>
+  List<Game> findByTitle(String title);
+
+  @Collation("de_AT") <3>
+  @Query(collation="en_GB")
+  List<Game> findByDescriptionContaining(String keyword);
+}
+----
+<1> Instead of `@Document(collation=...)`.
+<2> Instead of `@Query(collation=...)`.
+<3> Favors `@Collation` over the `collation` attribute of `@Query`.
+====
+
+== Read Preferences
+
+The `@ReadPreference` annotation allows you to configure MongoDB's `ReadPreference`.
+
+.Example of read preferences
+====
+[source,java]
+----
+
+@ReadPreference("primaryPreferred") <1>
+public interface PersonRepository extends CrudRepository<Person, String> {
+
+  @ReadPreference("secondaryPreferred") <2>
+  List<Person> findWithReadPreferenceAnnotationByLastname(String lastname);
+
+  @Query(readPreference = "nearest") <3>
+  List<Person> findWithReadPreferenceAtTagByFirstname(String firstname);
+
+  List<Person> findByFirstname(String firstname); <4>
+}
+----
+<1> Configures the read preference for all repository operations (including inherited, non-custom-implementation ones) that do not have a query-level definition. In this case, the read preference mode is `primaryPreferred`.
+<2> Uses the read preference mode defined in the `@ReadPreference` annotation, in this case `secondaryPreferred`.
+<3> The `@Query` annotation defines the `readPreference` alias, which is equivalent to adding the `@ReadPreference` annotation.
+<4> This query uses the read preference mode defined on the repository interface.
+====
+
+[TIP]
+====
+The `MongoOperations` and `Query` API offer more fine-grained control over `ReadPreference`.
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/repositories.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/repositories.adoc
new file mode 100644
index 0000000000..0746a0909e
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/repositories.adoc
@@ -0,0 +1,213 @@
+[[mongo.repositories]]
+= MongoDB Repositories
+
+[[mongo-repo-intro]]
+This chapter points out the specialties of the repository support for MongoDB.
+It builds on the core repository support explained in xref:repositories/core-concepts.adoc[core concepts], so you should have a sound understanding of the basic concepts explained there.
+
+[[mongo-repo-usage]]
+== Usage
+
+To access domain entities stored in MongoDB, you can use our sophisticated repository support, which eases implementation quite significantly.
+To do so, create an interface for your repository, as the following example shows:
+
+.Sample Person entity
+====
+[source,java]
+----
+public class Person {
+
+  @Id
+  private String id;
+  private String firstname;
+  private String lastname;
+  private Address address;
+
+  // … getters and setters omitted
+}
+----
+====
+
+Note that the domain type shown in the preceding example has a property named `id` of type `String`.
+The default serialization mechanism used in `MongoTemplate` (which backs the repository support) regards properties named `id` as the document ID.
+Currently, we support `String`, `ObjectId`, and `BigInteger` as ID types.
+Please see xref:mongodb/template-crud-operations.adoc#mongo-template.id-handling[ID mapping] for more information on how the `id` field is handled in the mapping layer.
+
+Now that we have a domain object, we can define an interface that uses it, as follows:
+
+.Basic repository interface to persist Person entities
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
+
+  // additional custom query methods go here
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface PersonRepository extends ReactiveSortingRepository<Person, String> {
+
+  // additional custom query methods go here
+}
+----
+======
+
+To start using the repository, use the `@EnableMongoRepositories` annotation.
+That annotation carries the same attributes as the namespace element.
+If no base package is configured, the infrastructure scans the package of the annotated configuration class.
+The following example shows how to configure your application to use MongoDB repositories:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@Configuration
+@EnableMongoRepositories("com.acme.*.repositories")
+class ApplicationConfig extends AbstractMongoClientConfiguration {
+
+  @Override
+  protected String getDatabaseName() {
+    return "e-store";
+  }
+
+  @Override
+  protected String getMappingBasePackage() {
+    return "com.acme.*.repositories";
+  }
+}
+----
+
+Reactive::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+@Configuration
+@EnableReactiveMongoRepositories("com.acme.*.repositories")
+class ApplicationConfig extends AbstractReactiveMongoConfiguration {
+
+  @Override
+  protected String getDatabaseName() {
+    return "e-store";
+  }
+
+  @Override
+  protected String getMappingBasePackage() {
+    return "com.acme.*.repositories";
+  }
+}
+----
+
+NOTE: MongoDB uses two different drivers for imperative (synchronous/blocking) and reactive (non-blocking) data access. You must create a connection by using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support. Consequently, you must provide a separate configuration for MongoDB's Reactive Streams driver. Note that your application operates on two different connections if you use reactive and blocking Spring Data MongoDB templates and repositories.
+====
+
+XML::
++
+[source,xml,indent=0,subs="verbatim,quotes",role="third"]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xmlns:mongo="http://www.springframework.org/schema/data/mongo"
+  xsi:schemaLocation="http://www.springframework.org/schema/beans
+    https://www.springframework.org/schema/beans/spring-beans.xsd
+    http://www.springframework.org/schema/data/mongo
+    https://www.springframework.org/schema/data/mongo/spring-mongo.xsd">
+
+  <mongo:mongo-client id="mongoClient" host="localhost" port="27017"/>
+
+  <mongo:db-factory id="mongoDbFactory" dbname="e-store" mongo-client-ref="mongoClient"/>
+
+  <mongo:repositories base-package="com.acme.*.repositories"/>
+
+</beans>
+----
+======
+
+This namespace element causes the base packages to be scanned for interfaces that extend `MongoRepository` and creates Spring beans for each one found.
+By default, the repositories get a `MongoTemplate` Spring bean wired that is called `mongoTemplate`, so you only need to configure `mongo-template-ref` explicitly if you deviate from this convention.
+
+Because our domain repository extends `PagingAndSortingRepository`, it provides you with methods for paginated and sorted access to the entities.
+In the case of reactive repositories, only `ReactiveSortingRepository` is available since the notion of a `Page` is not applicable.
+However, finder methods still accept `Sort` and `Limit` parameters.
+
+[NOTE]
+====
+The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor].
+
+Spring Data MongoDB is built on top of the https://mongodb.github.io/mongo-java-driver-reactivestreams/[MongoDB Reactive Streams] driver to provide maximal interoperability by relying on the https://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs, such as `ReactiveMongoOperations`, are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa), but conversion can easily clutter your code.
+
+Spring Data's Reactive Repository abstraction is a dynamic API, mostly defined by you and your requirements as you declare query methods. Reactive MongoDB repositories can be implemented by using either RxJava or Project Reactor wrapper types by extending from one of the following library-specific repository interfaces:
+
+* `ReactiveCrudRepository`
+* `ReactiveSortingRepository`
+* `RxJava3CrudRepository`
+* `RxJava3SortingRepository`
+
+Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library.
+====
+
+In case you want to obtain methods for basic CRUD operations, also add the `CrudRepository` interface.
+Working with the repository instance is just a matter of injecting it into a client as a dependency.
+Consequently, accessing the first page of `Person` objects at a page size of 10 would resemble the following code:
+
+.Paging access to Person entities
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@ExtendWith(SpringExtension.class)
+@ContextConfiguration
+class PersonRepositoryTests {
+
+  @Autowired PersonRepository repository;
+
+  @Test
+  void readsFirstPageCorrectly() {
+
+    Page<Person> persons = repository.findAll(PageRequest.of(0, 10));
+    assertThat(persons.isFirstPage()).isTrue();
+  }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+@ExtendWith(SpringExtension.class)
+@ContextConfiguration
+class PersonRepositoryTests {
+
+  @Autowired PersonRepository repository;
+
+  @Test
+  void readsFirstPageCorrectly() {
+
+    Flux<Person> persons = repository.findAll(Sort.unsorted(), Limit.of(10));
+
+    persons.as(StepVerifier::create)
+      .expectNextCount(10)
+      .verifyComplete();
+  }
+}
+----
+======
+
+The preceding example creates an application context with Spring's unit test support, which performs annotation-based dependency injection into test cases.
+Inside the test method, we use the repository to query the datastore.
+We hand the repository a `PageRequest` instance that requests the first page of `Person` objects at a page size of 10.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/sharding.adoc b/src/main/antora/modules/ROOT/pages/mongodb/sharding.adoc
new file mode 100644
index 0000000000..8678dc2178
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/sharding.adoc
@@ -0,0 +1,76 @@
+[[sharding]]
+= Sharding
+
+MongoDB supports large data sets via sharding, a method for distributing data across multiple database servers.
+Please refer to the https://docs.mongodb.com/manual/sharding/[MongoDB Documentation] to learn how to set up a sharded cluster, its requirements, and its limitations.
+
+Spring Data MongoDB uses the `@Sharded` annotation to identify entities stored in sharded collections, as shown below.
+
+====
+[source,java]
+----
+@Document("users")
+@Sharded(shardKey = { "country", "userId" }) <1>
+public class User {
+
+  @Id
+  Long id;
+
+  @Field("userid")
+  String userId;
+
+  String country;
+}
+----
+<1> The properties of the shard key get mapped to the actual field names.
+====
+
+[[sharding.sharded-collections]]
+== Sharded Collections
+
+Spring Data MongoDB does not automatically set up sharding for collections, nor does it create the indexes required for it.
+The snippet below shows how to do so using the MongoDB client API.
+
+====
+[source,java]
+----
+MongoDatabase adminDB = template.getMongoDbFactory()
+    .getMongoDatabase("admin"); <1>
+
+adminDB.runCommand(new Document("enableSharding", "db")); <2>
+
+Document shardCmd = new Document("shardCollection", "db.users") <3>
+    .append("key", new Document("country", 1).append("userid", 1)); <4>
+
+adminDB.runCommand(shardCmd);
+----
+<1> Sharding commands need to be run against the _admin_ database.
+<2> Enable sharding for a specific database if necessary.
+<3> Shard a collection within the database having sharding enabled.
+<4> Specify the shard key.
+This example uses range-based sharding.
+====
+
+[[sharding.shard-key]]
+== Shard Key Handling
+
+The shard key consists of one or more properties that must exist in every document in the target collection.
+It is used to distribute documents across shards.
+
+Adding the `@Sharded` annotation to an entity enables Spring Data MongoDB to apply best-effort optimisations required for sharded scenarios.
+This essentially means adding required shard key information, if not already present, to `replaceOne` filter queries when upserting entities.
+This may require an additional server round trip to determine the actual value of the current shard key.
+
+TIP: By setting `@Sharded(immutableKey = true)`, Spring Data does not attempt to check whether an entity's shard key was changed.
+
+Please see the https://docs.mongodb.com/manual/reference/method/db.collection.replaceOne/#upsert[MongoDB Documentation] for further details.
+The following list contains operations that are eligible for shard key auto-inclusion:
+
+* `(Reactive)CrudRepository.save(…)`
+* `(Reactive)CrudRepository.saveAll(…)`
+* `(Reactive)MongoTemplate.save(…)`
+
+
+
+
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/tailable-cursors.adoc b/src/main/antora/modules/ROOT/pages/mongodb/tailable-cursors.adoc
new file mode 100644
index 0000000000..97433e1416
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/tailable-cursors.adoc
@@ -0,0 +1,101 @@
+// carry over the old bookmarks to prevent external links from failing
+[[tailable-cursors]]
+= Tailable Cursors
+
+By default, MongoDB automatically closes a cursor when the client exhausts all results supplied by the cursor.
+Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections],
+you can use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client
+consumed all initially returned data.
+
+TIP: Capped collections can be created with `MongoOperations.createCollection`. To do so, provide the required `CollectionOptions.empty().capped()...`.
+
+Tailable cursors can be consumed with both the imperative and the reactive MongoDB API. It is highly recommended to use the
+reactive variant, as it is less resource-intensive. However, if you cannot use the reactive API, you can still use a messaging
+concept that is already prevalent in the Spring ecosystem.
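+
+Since tailable cursors work only on capped collections, a brief sketch of creating one up front may help (the collection name and sizing values are illustrative):
+
+[source,java]
+----
+// Create a capped collection of 5 MB, limited to 10000 documents,
+// before registering a tailable cursor on it.
+template.createCollection("orders",
+    CollectionOptions.empty().capped().size(5242880).maxDocuments(10000));
+----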
+
+[[tailable-cursors.sync]]
+== Tailable Cursors with `MessageListener`
+
+Listening to a capped collection using a Sync Driver creates a long-running, blocking task that needs to be delegated to
+a separate component. In this case, we need to first create a `MessageListenerContainer`, which will be the main entry point
+for running the specific `SubscriptionRequest`. Spring Data MongoDB already ships with a default implementation that
+operates on `MongoTemplate` and is capable of creating and running `Task` instances for a `TailableCursorRequest`.
+
+The following example shows how to use tailable cursors with `MessageListener` instances:
+
+.Tailable Cursors with `MessageListener` instances
+====
+[source,java]
+----
+MessageListenerContainer container = new DefaultMessageListenerContainer(template);
+container.start(); <1>
+
+MessageListener<Document, User> listener = System.out::println; <2>
+
+TailableCursorRequest request = TailableCursorRequest.builder()
+  .collection("orders") <3>
+  .filter(query(where("value").lt(100))) <4>
+  .publishTo(listener) <5>
+  .build();
+
+container.register(request, User.class); <6>
+
+// ...
+
+container.stop(); <7>
+----
+<1> Starting the container initializes the resources and starts `Task` instances for already registered `SubscriptionRequest` instances. Requests added after startup are run immediately.
+<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion.
+<3> Set the collection to listen to.
+<4> Provide an optional filter for documents to receive.
+<5> Set the message listener to publish incoming ``Message``s to.
+<6> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel it to free resources.
+<7> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container.
+====
+
+[[tailable-cursors.reactive]]
+== Reactive Tailable Cursors
+
+Using tailable cursors with reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection.
+
+Tailable cursors may become dead, or invalid, if either the query returns no match or the cursor returns the document at the "`end`" of the collection and the application then deletes that document. The following example shows how to create and use an infinite stream query:
+
+.Infinite Stream queries with ReactiveMongoOperations
+====
+[source,java]
+----
+Flux<Person> stream = template.tail(query(where("name").is("Joe")), Person.class);
+
+Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe();
+
+// …
+
+// Later: Dispose the subscription to close the stream
+subscription.dispose();
+----
+====
+
+Spring Data MongoDB Reactive repositories support infinite streams by annotating a query method with `@Tailable`.
+This works for methods that return `Flux` and other reactive types capable of emitting multiple elements, as the following example shows:
+
+.Infinite Stream queries with ReactiveMongoRepository
+====
+[source,java]
+----
+
+public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
+
+  @Tailable
+  Flux<Person> findByFirstname(String firstname);
+
+}
+
+Flux<Person> stream = repository.findByFirstname("Joe");
+
+Disposable subscription = stream.doOnNext(System.out::println).subscribe();
+
+// …
+
+// Later: Dispose the subscription to close the stream
+subscription.dispose();
+----
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc
new file mode 100644
index 0000000000..f2a7a19bd6
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc
@@ -0,0 +1,181 @@
+[[mongo-template]]
+= Template API
+
+The javadoc:org.springframework.data.mongodb.core.MongoTemplate[] class and its javadoc:org.springframework.data.mongodb.core.ReactiveMongoTemplate[reactive] counterpart, located in the `org.springframework.data.mongodb.core` package, are the central classes of Spring's MongoDB support and provide a rich feature set for interacting with the database.
+The template offers convenience operations to create, update, delete, and query MongoDB documents and provides a mapping between your domain objects and MongoDB documents.
+
+NOTE: Once configured, `MongoTemplate` is thread-safe and can be reused across multiple instances.
+
+[[mongo-template.convenience-methods]]
+== Convenience Methods
+
+The javadoc:org.springframework.data.mongodb.core.MongoTemplate[] class implements the interface javadoc:org.springframework.data.mongodb.core.MongoOperations[].
+As much as possible, the methods on `MongoOperations` are named after methods available on the MongoDB driver `Collection` object, to make the API familiar to existing MongoDB developers who are used to the driver API.
+For example, you can find methods such as `find`, `findAndModify`, `findAndReplace`, `findOne`, `insert`, `remove`, `save`, `update`, and `updateMulti`.
+The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `MongoOperations`.
+A major difference between the two APIs is that `MongoOperations` can be passed domain objects instead of `Document`.
+Also, `MongoOperations` has fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations.
+
+For more information, please refer to the xref:mongodb/template-crud-operations.adoc[CRUD] and xref:mongodb/template-query-operations.adoc[Query] sections of the documentation.
+
+NOTE: The preferred way to reference the operations on a `MongoTemplate` instance is through its interface, `MongoOperations`.
+
+[[mongo-template.execute-callbacks]]
+== Execute Callbacks
+
+`MongoTemplate` offers many convenience methods to help you easily perform common tasks.
+However, if you need to directly access the MongoDB driver API, you can use one of several `execute` callback methods.
+The `execute` callbacks give you a reference to either a `MongoCollection` or a `MongoDatabase` object.
+
+* `<T> T` *execute* `(Class<?> entityClass, CollectionCallback<T> action)`: Runs the given `CollectionCallback` for the entity collection of the specified class.
+
+* `<T> T` *execute* `(String collectionName, CollectionCallback<T> action)`: Runs the given `CollectionCallback` on the collection of the given name.
+
+* `<T> T` *execute* `(DbCallback<T> action)`: Runs a `DbCallback`, translating any exceptions as necessary.
+
+* `<T> T` *execute* `(String collectionName, DbCallback<T> action)`: Runs a `DbCallback` on the collection of the given name, translating any exceptions as necessary.
+
+* `<T> T` *executeInSession* `(DbCallback<T> action)`: Runs the given `DbCallback` within the same connection to the database so as to ensure consistency in a write-heavy environment where you may read the data that you wrote.
+
+The following example uses the javadoc:org.springframework.data.mongodb.core.CollectionCallback[] to return information about an index:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+boolean hasIndex = template.execute("geolocation", collection ->
+  Streamable.of(collection.listIndexes(org.bson.Document.class))
+    .stream()
+    .map(document -> document.get("name"))
+    .anyMatch("location_2d"::equals)
+);
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<Boolean> hasIndex = template.execute("geolocation", collection ->
+    Flux.from(collection.listIndexes(org.bson.Document.class))
+      .map(document -> document.get("name"))
+      .filterWhen(name -> Mono.just("location_2d".equals(name)))
+      .map(it -> Boolean.TRUE)
+      .single(Boolean.FALSE)
+    ).next();
+----
+======
+
+[[mongo-template.fluent-api]]
+== Fluent API
+
+Being the central component for lower-level interaction with MongoDB, `MongoTemplate` offers a wide range of methods covering needs from collection creation, index creation, and CRUD operations to more advanced functionality, such as Map-Reduce and aggregations.
+You can find multiple overloads for each method.
+Most of them cover optional or nullable parts of the API.
+
+`FluentMongoOperations` provides a narrower interface for the common methods of `MongoOperations` and provides a more readable, fluent API.
+The entry points (`insert(…)`, `find(…)`, `update(…)`, and others) follow a natural naming schema based on the operation to be run.
+Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that invokes the actual `MongoOperations` counterpart -- the `all` method in the case of the following example:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+List<Jedi> all = template.query(SWCharacter.class) <1>
+    .inCollection("star-wars") <2>
+    .as(Jedi.class) <3>
+    .matching(query(where("jedi").is(true))) <4>
+    .all();
+----
+
+<1> The type used to map fields used in the query to.
+<2> The collection name to use if not defined on the domain type.
+<3> Result type if not using the original domain type.
+<4> The lookup query.
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Flux<Jedi> all = template.query(SWCharacter.class)
+    .inCollection("star-wars")
+    .as(Jedi.class)
+    .matching(query(where("jedi").is(true)))
+    .all();
+----
+======
+
+NOTE: Using projections allows `MongoTemplate` to optimize result mapping by limiting the actual response to fields required by the projection target type.
+This applies as long as the javadoc:org.springframework.data.mongodb.core.query.Query[] itself does not contain any field restriction and the target type is a closed interface or DTO projection.
+
+WARNING: Projections must not be applied to xref:mongodb/mapping/document-references.adoc[DBRefs].
+
+You can switch between retrieving a single entity and retrieving multiple entities as a `List` or a `Stream` through the terminating methods: `first()`, `one()`, `all()`, or `stream()`.
+
+When writing a geo-spatial query with `near(NearQuery)`, the number of terminating methods is altered to include only the methods that are valid for running a `geoNear` command in MongoDB (fetching entities as a `GeoResult` within `GeoResults`), as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+GeoResults<Jedi> results = template.query(SWCharacter.class)
+    .as(Jedi.class)
+    .near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis…
+    .all();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Flux<GeoResult<Jedi>> results = template.query(SWCharacter.class)
+    .as(Jedi.class)
+    .near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis…
+    .all();
+----
+======
+
+[[mongo-template.exception-translation]]
+== Exception Translation
+
+The Spring framework provides exception translation for a wide variety of database and mapping technologies.
+This has traditionally been for JDBC and JPA.
+The Spring support for MongoDB extends this feature to the MongoDB database by providing an implementation of the `org.springframework.dao.support.PersistenceExceptionTranslator` interface.
+
+The motivation behind mapping to Spring's link:{springDocsUrl}/data-access.html#dao-exceptions[consistent data access exception hierarchy] is that you are then able to write portable and descriptive exception handling code without resorting to coding against MongoDB error codes.
+All of Spring's data access exceptions are inherited from the root `DataAccessException` class so that you can be sure to catch all database-related exceptions within a single try-catch block.
+Note that not all exceptions thrown by the MongoDB driver inherit from the `MongoException` class.
+The inner exception and message are preserved so that no information is lost.
+
+Some of the mappings performed by the javadoc:org.springframework.data.mongodb.core.MongoExceptionTranslator[] are `com.mongodb.Network` to `DataAccessResourceFailureException` and `MongoException` error codes 1003, 12001, 12010, 12011, and 12012 to `InvalidDataAccessApiUsageException`.
+Look into the implementation for more details on the mapping.
+
+Exception translation can be configured by setting a customized javadoc:org.springframework.data.mongodb.core.MongoExceptionTranslator[] on your `MongoDatabaseFactory` or its reactive variant.
+You might also want to set the exception translator on the corresponding `MongoClientFactoryBean`.
+
+.Configuring `MongoExceptionTranslator`
+====
+[source,java]
+----
+ConnectionString uri = new ConnectionString("mongodb://username:password@localhost/database");
+SimpleMongoClientDatabaseFactory mongoDbFactory = new SimpleMongoClientDatabaseFactory(uri);
+mongoDbFactory.setExceptionTranslator(myCustomExceptionTranslator);
+----
+====
+
+A motivation for customizing exception translation can be MongoDB's behavior during transactions, where some failures (such as write conflicts) can become transient and where a retry could lead to a successful operation.
+In such a case, you could wrap exceptions with a specific MongoDB label and apply a different exception translation strategy.
+
+[[mongo-template.type-mapping]]
+== Domain Type Mapping
+
+The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the javadoc:org.springframework.data.mongodb.core.convert.MongoConverter[] interface.
+Spring provides javadoc:org.springframework.data.mongodb.core.convert.MappingMongoConverter[], but you can also write your own converter.
+While the `MappingMongoConverter` can use additional metadata to specify the mapping of objects to documents, it can also convert objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names.
+These conventions, as well as the use of mapping annotations, are explained in the xref:mongodb/mapping/mapping.adoc[Mapping] chapter.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-collection-management.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-collection-management.adoc
new file mode 100644
index 0000000000..cdd20b335e
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-collection-management.adoc
@@ -0,0 +1,209 @@
+[[mongo-template.index-and-collections]]
+= Index and Collection Management
+
+`MongoTemplate` and `ReactiveMongoTemplate` provide methods for managing indexes and collections.
+These methods are collected into the helper interfaces `IndexOperations` and `ReactiveIndexOperations`, respectively.
+You can access these operations by calling the `indexOps` method and passing in either the collection name or the `java.lang.Class` of your entity (the collection name is derived from the `.class`, either by name or from annotation metadata).
+
+The following listing shows the `IndexOperations` interface:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface IndexOperations {
+
+  String ensureIndex(IndexDefinition indexDefinition);
+
+  void alterIndex(String name, IndexOptions options);
+
+  void dropIndex(String name);
+
+  void dropAllIndexes();
+
+  List<IndexInfo> getIndexInfo();
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface ReactiveIndexOperations {
+
+  Mono<String> ensureIndex(IndexDefinition indexDefinition);
+
+  Mono<Void> alterIndex(String name, IndexOptions options);
+
+  Mono<Void> dropIndex(String name);
+
+  Mono<Void> dropAllIndexes();
+
+  Flux<IndexInfo> getIndexInfo();
+}
+----
+======
+
+[[mongo-template.index-and-collections.index]]
+== Methods for Creating an Index
+
+You can create an index on a collection to improve query performance by using the `MongoTemplate` class, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+template.indexOps(Person.class)
+    .ensureIndex(new Index().on("name",Order.ASCENDING));
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<String> createIndex = template.indexOps(Person.class)
+    .ensureIndex(new Index().on("name",Order.ASCENDING));
+----
+======
+
+`ensureIndex` makes sure that an index for the provided `IndexDefinition` exists for the collection.
+
+You can create standard, geospatial, and text indexes by using the `IndexDefinition`, `GeospatialIndex` and `TextIndexDefinition` classes.
+For example, given the `Venue` class defined in a previous section, you could declare a geospatial index, as the following example shows:
+
+[source,java]
+----
+template.indexOps(Venue.class)
+    .ensureIndex(new GeospatialIndex("location"));
+----
+
+NOTE: `Index` and `GeospatialIndex` support configuration of xref:mongodb/template-query-operations.adoc#mongo.query.collation[collations].
+
+[[mongo-template.index-and-collections.access]]
+== Accessing Index Information
+
+The `IndexOperations` interface has the `getIndexInfo` method that returns a list of `IndexInfo` objects.
+This list contains all the indexes defined on the collection.
+The following example defines an index on the `Person` class that has an `age` property:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+template.indexOps(Person.class)
+    .ensureIndex(new Index().on("age", Order.DESCENDING).unique());
+
+List<IndexInfo> indexInfoList = template.indexOps(Person.class)
+    .getIndexInfo();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<String> ageIndex = template.indexOps(Person.class)
+    .ensureIndex(new Index().on("age", Order.DESCENDING).unique());
+
+Flux<IndexInfo> indexInfo = ageIndex.thenMany(template.indexOps(Person.class)
+    .getIndexInfo());
+----
+======
+
+[[mongo-template.index-and-collections.collection]]
+== Methods for Working with a Collection
+
+The following example shows how to create a collection:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+MongoCollection<Document> collection = null;
+if (!template.getCollectionNames().contains("MyNewCollection")) {
+    collection = template.createCollection("MyNewCollection");
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<MongoCollection<Document>> collection = template.getCollectionNames().collectList()
+    .flatMap(collectionNames -> {
+        if (!collectionNames.contains("MyNewCollection")) {
+            return template.createCollection("MyNewCollection");
+        }
+        return template.getMongoDatabase().map(db -> db.getCollection("MyNewCollection"));
+    });
+----
+======
+
+NOTE: Collection creation allows customization with `CollectionOptions` and supports xref:mongodb/collation.adoc[collations].
+
+.Methods to interact with MongoCollections
+[%collapsible]
+====
+* *getCollectionNames*: Returns a set of collection names.
+* *collectionExists*: Checks to see if a collection with a given name exists.
+* *createCollection*: Creates an uncapped collection.
+* *dropCollection*: Drops the collection.
+* *getCollection*: Gets a collection by name, creating it if it does not exist.
+====
+
+[[time-series]]
+== Time Series
+
+MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections that are optimized to efficiently store documents over time, such as measurements or events.
+Those collections need to be created as such before inserting any data.
+Collections can be created either by running the `createCollection` command, by defining time series collection options, or by extracting options from a `@TimeSeries` annotation, as shown in the examples below.
+
+.Create a Time Series Collection
+====
+.Create a Time Series via the MongoDB Driver
+[source,java]
+----
+template.execute(db -> {
+
+    com.mongodb.client.model.CreateCollectionOptions options = new CreateCollectionOptions();
+    options.timeSeriesOptions(new TimeSeriesOptions("timestamp"));
+
+    db.createCollection("weather", options);
+    return "OK";
+});
+----
+
+.Create a Time Series Collection with `CollectionOptions`
+[source,java]
+----
+template.createCollection("weather", CollectionOptions.timeSeries("timestamp"));
+----
+
+.Create a Time Series Collection derived from an Annotation
+[source,java]
+----
+@TimeSeries(collection="weather", timeField = "timestamp")
+public class Measurement {
+
+    String id;
+    Instant timestamp;
+    // ...
+}
+
+template.createCollection(Measurement.class);
+----
+====
+
+The snippets above can easily be transferred to the reactive API, which offers the very same methods.
+Make sure to properly _subscribe_ to the returned publishers.
+
+[TIP]
+====
+You can use the `@TimeSeries#expireAfter` option to have MongoDB automatically remove expired buckets.
+The attribute allows different timeout formats such as `10s` and `3h`, as well as expression (`#{@mySpringBean.timeout}`) and property placeholder (`${my.property.timeout}`) syntax.
+====
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc
new file mode 100644
index 0000000000..4d920e1c52
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc
@@ -0,0 +1,167 @@
+[[mongo-template.instantiating]]
+= Configuration
+
+You can create and register an instance of `MongoTemplate`, as the following example shows:
+
+.Registering a `MongoClient` object and enabling Spring's exception translation support
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@Configuration
+class ApplicationConfiguration {
+
+  @Bean
+  MongoClient mongoClient() {
+    return MongoClients.create("mongodb://localhost:27017");
+  }
+
+  @Bean
+  MongoOperations mongoTemplate(MongoClient mongoClient) {
+    return new MongoTemplate(mongoClient, "geospatial");
+  }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+@Configuration
+class ReactiveApplicationConfiguration {
+
+  @Bean
+  MongoClient mongoClient() {
+    return MongoClients.create("mongodb://localhost:27017");
+  }
+
+  @Bean
+  ReactiveMongoOperations mongoTemplate(MongoClient mongoClient) {
+    return new ReactiveMongoTemplate(mongoClient, "geospatial");
+  }
+}
+----
+
+XML::
++
+[source,xml,indent=0,subs="verbatim,quotes",role="third"]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xmlns:mongo="http://www.springframework.org/schema/data/mongo"
+  xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
+    http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd">
+
+  <mongo:mongo-client host="localhost" port="27017"/>
+
+  <bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
+    <constructor-arg ref="mongoClient"/>
+    <constructor-arg name="databaseName" value="geospatial"/>
+  </bean>
+</beans>
+----
+======
+
+There are several overloaded constructors of javadoc:org.springframework.data.mongodb.core.MongoTemplate[] and javadoc:org.springframework.data.mongodb.core.ReactiveMongoTemplate[]:
+
+* `MongoTemplate(MongoClient mongo, String databaseName)`: Takes the `MongoClient` object and the default database name to operate against.
+* `MongoTemplate(MongoDatabaseFactory mongoDbFactory)`: Takes a `MongoDatabaseFactory` object that encapsulates the `MongoClient` object, database name, and username and password.
+* `MongoTemplate(MongoDatabaseFactory mongoDbFactory, MongoConverter mongoConverter)`: Adds a `MongoConverter` to use for mapping.
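+
+As a short sketch (connection string and database name are placeholders), the factory-based constructors can be used like this:
+
+[source,java]
+----
+MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");
+MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(mongoClient, "geospatial");
+MongoTemplate template = new MongoTemplate(factory);
+----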
+
+Other optional properties that you might like to set when creating a `MongoTemplate` / `ReactiveMongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, `ReadPreference`, and others listed below.
+
+[[mongo-template.read-preference]]
+== Default Read Preference
+
+The default read preference is applied to read operations if no other preference was defined via the xref:mongodb/template-query-operations.adoc#mongo.query.read-preference[Query].
+
+[[mongo-template.writeresultchecking]]
+== WriteResultChecking Policy
+
+When in development, it is handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error.
+It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully when, in fact, the database was not modified according to your expectations.
+You can set the `WriteResultChecking` property of `MongoTemplate` to one of the following values: `EXCEPTION` or `NONE`, to either throw an `Exception` or do nothing, respectively.
+The default is to use a `WriteResultChecking` value of `NONE`.
+
+[[mongo-template.writeconcern]]
+== Default WriteConcern
+
+If it has not yet been specified through the driver at a higher level (such as `com.mongodb.client.MongoClient`), you can set the `com.mongodb.WriteConcern` property that the `MongoTemplate` uses for write operations.
+If the `WriteConcern` property is not set, it defaults to the one set in the MongoDB driver's DB or Collection setting.
+
+[[mongo-template.writeconcernresolver]]
+== WriteConcernResolver
+
+For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert, and save operations), a strategy interface called `WriteConcernResolver` can be configured on `MongoTemplate`.
+Since `MongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value.
+The following listing shows the `WriteConcernResolver` interface:
+
+[source,java]
+----
+public interface WriteConcernResolver {
+  WriteConcern resolve(MongoAction action);
+}
+----
+
+You can use the `MongoAction` argument to determine the `WriteConcern` value or use the value of the Template itself as a default.
+`MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `Document`, the operation (`REMOVE`, `UPDATE`, `INSERT`, `INSERT_LIST`, or `SAVE`), and a few other pieces of contextual information.
+The following example shows two sets of classes getting different `WriteConcern` settings:
+
+[source,java]
+----
+public class MyAppWriteConcernResolver implements WriteConcernResolver {
+
+  @Override
+  public WriteConcern resolve(MongoAction action) {
+    if (action.getEntityType().getSimpleName().contains("Audit")) {
+      return WriteConcern.ACKNOWLEDGED;
+    } else if (action.getEntityType().getSimpleName().contains("Metadata")) {
+      return WriteConcern.JOURNALED;
+    }
+    return action.getDefaultWriteConcern();
+  }
+}
+----
+
+[[mongo-template.entity-lifecycle-events]]
+== Publish entity lifecycle events
+
+The template publishes xref:mongodb/lifecycle-events.adoc#mongodb.mapping-usage.events[lifecycle events].
+If there are no listeners present, this feature can be disabled.
+
+[source,java]
+----
+@Bean
+MongoOperations mongoTemplate(MongoClient mongoClient) {
+
+  MongoTemplate template = new MongoTemplate(mongoClient, "geospatial");
+  template.setEntityLifecycleEventsEnabled(false);
+  // ...
+  return template;
+}
+----
+
+[[mongo-template.entity-callbacks-config]]
+== Configure EntityCallbacks
+
+Next to lifecycle events, the template invokes xref:mongodb/lifecycle-events.adoc#mongo.entity-callbacks[EntityCallbacks], which can be set via the template API (if not auto-configured).
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@Bean
+MongoOperations mongoTemplate(MongoClient mongoClient) {
+
+  MongoTemplate template = new MongoTemplate(mongoClient, "...");
+  template.setEntityCallbacks(EntityCallbacks.create(...));
+  // ...
+  return template;
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+@Bean
+ReactiveMongoOperations mongoTemplate(MongoClient mongoClient) {
+
+  ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "...");
+  template.setEntityCallbacks(ReactiveEntityCallbacks.create(...));
+  // ...
+  return template;
+}
+----
+======
+
+[[mongo-template.count-documents-config]]
+== Document count configuration
+
+By setting `MongoTemplate#useEstimatedCount(...)` to `true`, `MongoTemplate#count(...)` operations that use an empty filter query will be delegated to `estimatedCount`, as long as there is no transaction active and the template is not bound to a xref:mongodb/client-session-transactions.adoc[session].
+Please refer to the xref:mongodb/template-document-count.adoc#mongo.query.count[Counting Documents] section for more information.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-crud-operations.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-crud-operations.adoc
new file mode 100644
index 0000000000..491bb4ab7d
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-crud-operations.adoc
@@ -0,0 +1,700 @@
+[[mongo-template.save-update-remove]]
+= Saving, Updating, and Removing Documents
+
+`MongoTemplate` / `ReactiveMongoTemplate` let you save, update, and delete your domain objects and map those objects to documents stored in MongoDB.
+The API signatures of the imperative and reactive API are mainly the same, differing only in their return types.
+While the synchronous API uses `void`, single `Object`, and `List`, the reactive counterpart consists of `Mono<Void>`, `Mono<Object>`, and `Flux`.
+
+Consider the following class:
+
+[source,java]
+----
+include::example$example/Person.java[tags=class]
+----
+
+Given the `Person` class in the preceding example, you can save, update, and delete the object, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public class MongoApplication {
+
+  private static final Log log = LogFactory.getLog(MongoApplication.class);
+
+  public static void main(String[] args) {
+
+    MongoOperations template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database"));
+
+    Person p = new Person("Joe", 34);
+
+    // Insert is used to initially store the object into the database.
+    template.insert(p);
+    log.info("Insert: " + p);
+
+    // Find
+    p = template.findById(p.getId(), Person.class);
+    log.info("Found: " + p);
+
+    // Update
+    template.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class);
+    p = template.findOne(query(where("name").is("Joe")), Person.class);
+    log.info("Updated: " + p);
+
+    // Delete
+    template.remove(p);
+
+    // Check that deletion worked
+    List<Person> people = template.findAll(Person.class);
+    log.info("Number of people = : " + people.size());
+
+    template.dropCollection(Person.class);
+  }
+}
+----
+
+The preceding example would produce the following log output (including debug messages from `MongoTemplate`):
+
+[source]
+----
+DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information.
+DEBUG work.data.mongodb.core.MongoTemplate: 632 - insert Document containing fields: [_class, age, name] in collection: person
+INFO org.spring.example.MongoApp: 30 - Insert: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34]
+DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "_id" : { "$oid" : "4ddc6e784ce5b1eba3ceaf5c"}} in db.collection: database.person
+INFO org.spring.example.MongoApp: 34 - Found: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34]
+DEBUG work.data.mongodb.core.MongoTemplate: 778 - calling update using query: { "name" : "Joe"} and update: { "$set" : { "age" : 35}} in collection: person
+DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "name" : "Joe"} in db.collection: database.person
+INFO org.spring.example.MongoApp: 39 - Updated: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=35]
+DEBUG work.data.mongodb.core.MongoTemplate: 823 - remove using query: { "id" : "4ddc6e784ce5b1eba3ceaf5c"} in collection: person
+INFO org.spring.example.MongoApp: 46 - Number of people = : 0
+DEBUG work.data.mongodb.core.MongoTemplate: 376 - Dropped collection [database.person]
+----
+====
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public class ReactiveMongoApplication {
+
+  private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApplication.class);
+
+  public static void main(String[] args) throws Exception {
+
+    CountDownLatch latch = new CountDownLatch(1);
+
+    ReactiveMongoTemplate template = new ReactiveMongoTemplate(MongoClients.create(), "database");
+
+    template.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person))
+      .flatMap(person -> template.findById(person.getId(), Person.class))
+      .doOnNext(person -> log.info("Found: " + person))
+      .zipWhen(person -> template.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class))
+      .flatMap(tuple -> template.remove(tuple.getT1()))
+      .flatMapMany(deleteResult -> template.findAll(Person.class))
+      .count().doOnSuccess(count -> {
+        log.info("Number of people: " + count);
+        latch.countDown();
+      })
+      .subscribe();
+
+    latch.await();
+  }
+}
+----
+======
+
+`MongoConverter` causes implicit conversion between a `String` and an `ObjectId` stored in the database by recognizing (through convention) the `Id` property name.
+
+The preceding example is meant to show the use of save, update, and remove operations on `MongoTemplate` / `ReactiveMongoTemplate` and not to show complex mapping functionality.
+The query syntax used in the preceding example is explained in more detail in the section "`xref:mongodb/template-query-operations.adoc[Querying Documents]`".
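+
+For illustration (a sketch using the `Person` type shown above), the generated `String` id is the hex form of the stored `ObjectId` and can be converted back explicitly:
+
+[source,java]
+----
+Person p = template.insert(new Person("Joe", 34));
+
+// The id assigned by the mapping layer corresponds to the ObjectId stored in MongoDB.
+ObjectId storedId = new ObjectId(p.getId());
+----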
+
+IMPORTANT: MongoDB requires that you have an `_id` field for all documents. Please refer to the xref:mongodb/template-crud-operations.adoc[ID handling] section for details on the special treatment of this field.
+
+IMPORTANT: MongoDB collections can contain documents that represent instances of a variety of types. Please refer to the xref:mongodb/converters-type-mapping.adoc[type mapping] for details.
+
+[[mongo-template.save-insert]]
+== Insert / Save
+
+There are several convenient methods on `MongoTemplate` for saving and inserting your objects.
+To have more fine-grained control over the conversion process, you can register Spring converters with the `MappingMongoConverter` -- for example `Converter<Person, Document>` and `Converter<Document, Person>`.
+
+NOTE: The difference between insert and save operations is that a save operation performs an insert if the object is not already present.
+
+The simple case of using the save operation is to save a POJO.
+In this case, the collection name is determined by the name (not fully qualified) of the class.
+You may also call the save operation with a specific collection name.
+You can use mapping metadata to override the collection in which to store the object.
+
+When inserting or saving, if the `Id` property is not set, the assumption is that its value will be auto-generated by the database.
+Consequently, for auto-generation of an `ObjectId` to succeed, the type of the `Id` property or field in your class must be a `String`, an `ObjectId`, or a `BigInteger`.
+
+The following example shows how to save a document and retrieve its contents:
+
+.Inserting and retrieving documents using the MongoTemplate
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import static org.springframework.data.mongodb.core.query.Query.query;
+
+//...
+
+template.insert(new Person("Bob", 33));
+
+Person person = template.query(Person.class)
+    .matching(query(where("age").is(33)))
+    .oneValue();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import static org.springframework.data.mongodb.core.query.Query.query;
+
+//...
+
+Mono<Person> person = mongoTemplate.insert(new Person("Bob", 33))
+    .then(mongoTemplate.query(Person.class)
+        .matching(query(where("age").is(33)))
+        .one());
+----
+======
+
+The following insert and save operations are available:
+
+* `void` *save* `(Object objectToSave)`: Save the object to the default collection.
+* `void` *save* `(Object objectToSave, String collectionName)`: Save the object to the specified collection.
+
+A similar set of insert operations is also available:
+
+* `void` *insert* `(Object objectToSave)`: Insert the object into the default collection.
+* `void` *insert* `(Object objectToSave, String collectionName)`: Insert the object into the specified collection.
+
+[[mongo-template.id-handling]]
+=== How the `_id` Field is Handled in the Mapping Layer
+
+MongoDB requires that you have an `_id` field for all documents.
+If you do not provide one, the driver assigns an `ObjectId` with a generated value without considering your domain model, as the server isn't aware of your identifier type.
+When you use the `MappingMongoConverter`, certain rules govern how properties from the Java class are mapped to this `_id` field:
+
+. A property or field annotated with `@Id` (`org.springframework.data.annotation.Id`) maps to the `_id` field.
+. A property or field without an annotation but named `id` maps to the `_id` field.
+
+The following outlines what type conversion, if any, is done on the property mapped to the `_id` document field when using the `MappingMongoConverter` (the default for `MongoTemplate`).
+
+. If possible, an `id` property or field declared as a `String` in the Java class is converted to and stored as an `ObjectId` by using a Spring `Converter<String, ObjectId>`. Valid conversion rules are delegated to the MongoDB Java driver. If it cannot be converted to an `ObjectId`, then the value is stored as a string in the database.
+. An `id` property or field declared as `BigInteger` in the Java class is converted to and stored as an `ObjectId` by using a Spring `Converter<BigInteger, ObjectId>`.
+
+If no field or property specified in the previous sets of rules is present in the Java class, an implicit `_id` field is generated by the driver but not mapped to a property or field of the Java class.
+
+When querying and updating, `MongoTemplate` uses the converter that corresponds to the preceding rules for saving documents so that field names and types used in your queries can match what is in your domain classes.
+
+Some environments require a customized approach to map `Id` values, such as data stored in MongoDB that did not run through the Spring Data mapping layer.
+Documents can contain `_id` values that can be represented either as `ObjectId` or as `String`.
+Reading documents from the store back into the domain type works just fine.
+Querying for documents via their `id` can be cumbersome, because the implicit `ObjectId` conversion may prevent documents from being retrieved that way.
+For those cases, `@MongoId` provides more control over the actual id mapping attempts.
+
+.`@MongoId` mapping
+====
+[source,java]
+----
+public class PlainStringId {
+  @MongoId String id; <1>
+}
+
+public class PlainObjectId {
+  @MongoId ObjectId id; <2>
+}
+
+public class StringToObjectId {
+  @MongoId(FieldType.OBJECT_ID) String id; <3>
+}
+----
+<1> The id is treated as `String` without further conversion.
+<2> The id is treated as `ObjectId`.
+<3> The id is treated as `ObjectId` if the given `String` is a valid `ObjectId` hex, otherwise as `String`. Corresponds to `@Id` usage.
+====
+
+[[mongo-template.save-insert.collection]]
+=== Into Which Collection Are My Documents Saved?
+
+There are two ways to manage the collection name that is used for the documents.
+The default collection name that is used is the class name changed to start with a lower-case letter.
+So a `com.test.Person` class is stored in the `person` collection.
+You can customize this by providing a different collection name with the `@Document` annotation.
+You can also override the collection name by providing your own collection name as the last parameter for the selected `MongoTemplate` method calls.
+
+[[mongo-template.save-insert.individual]]
+=== Inserting or Saving Individual Objects
+
+The following methods in the `MongoOperations` interface support inserting and saving individual objects:
+
+* *insert*: Inserts an object. If there is an existing document with the same `id`, an error is generated.
+* *insertAll*: Takes a `Collection` of objects as the first parameter. This method inspects each object and inserts it into the appropriate collection, based on the rules specified earlier.
+* *save*: Saves the object, overwriting any object that might have the same `id`.
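+
+A short usage sketch (the collection name `"users"` is an arbitrary example):
+
+[source,java]
+----
+template.save(new Person("Bob", 33));            // stored in the default collection "person"
+template.save(new Person("Alice", 40), "users"); // stored in the explicitly named collection
+----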
+
+[[mongo-template.save-insert.batch]]
+=== Inserting Several Objects in a Batch
+
+The MongoDB driver supports inserting a collection of documents in one operation.
+The following methods in the `MongoOperations` interface support this functionality via `insert` or a dedicated `BulkOperations` interface.
+
+.Batch Insert
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+Collection<Person> inserted = template.insert(List.of(...), Person.class);
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Flux<Person> inserted = template.insert(List.of(...), Person.class);
+----
+======
+
+.Bulk Insert
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+BulkWriteResult result = template.bulkOps(BulkMode.ORDERED, Person.class)
+    .insert(List.of(...))
+    .execute();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<BulkWriteResult> result = template.bulkOps(BulkMode.ORDERED, Person.class)
+    .insert(List.of(...))
+    .execute();
+----
+======
+
+[NOTE]
+====
+Server performance of batch and bulk is identical.
+However, bulk operations do not publish xref:mongodb/lifecycle-events.adoc[lifecycle events].
+====
+
+[IMPORTANT]
+====
+Any `@Version` property that has not been set prior to calling insert will be auto-initialized with `1` (in case of a simple type like `int`) or `0` for wrapper types (e.g. `Integer`).
+
+Read more in the xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section.
+====
+
+[[mongodb-template-update]]
+== Update
+
+For updates, you can update the first document found by using `MongoOperations.updateFirst` or you can update all documents that were found to match the query by using the `MongoOperations.updateMulti` method or `all` on the fluent API.
+The following example shows an update of all `SAVINGS` accounts where we are adding a one-time $50.00 bonus to the balance by using the `$inc` operator:
+
+.Updating documents by using the `MongoTemplate` / `ReactiveMongoTemplate`
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import org.springframework.data.mongodb.core.query.Update;
+
+// ...
+
+UpdateResult result = template.update(Account.class)
+    .matching(where("accounts.accountType").is(Type.SAVINGS))
+    .apply(new Update().inc("accounts.$.balance", 50.00))
+    .all();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import org.springframework.data.mongodb.core.query.Update;
+
+// ...
+
+Mono<UpdateResult> result = template.update(Account.class)
+    .matching(where("accounts.accountType").is(Type.SAVINGS))
+    .apply(new Update().inc("accounts.$.balance", 50.00))
+    .all();
+----
+======
+
+In addition to the `Query` discussed earlier, we provide the update definition by using an `Update` object.
+The `Update` class has methods that match the update modifiers available for MongoDB.
+Most methods return the `Update` object to provide a fluent style for the API.
+
+[IMPORTANT]
+====
+`@Version` properties, if not included in the `Update`, will be automatically incremented.
+Read more in the xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section.
+====
+
+[[mongodb-template-update.methods]]
+=== Methods for Running Updates for Documents
+
+* *updateFirst*: Updates the first document that matches the query document criteria with the updated document.
+* *updateMulti*: Updates all objects that match the query document criteria with the updated document.
+
+WARNING: `updateFirst` does not support ordering for MongoDB versions below 8.0. When running one of the older versions, please use xref:mongodb/template-crud-operations.adoc#mongo-template.find-and-upsert[findAndModify] to apply a `Sort`.
+
+NOTE: Index hints for the update operation can be provided via `Query.withHint(...)`.
+
+[[mongodb-template-update.update]]
+=== Methods in the `Update` Class
+
+You can use a little "'syntax sugar'" with the `Update` class, as its methods are meant to be chained together.
+Also, you can kick-start the creation of a new `Update` instance by using `public static Update update(String key, Object value)` and using static imports.
+
+The `Update` class contains the following methods:
+
+* `Update` *addToSet* `(String key, Object value)` Update using the `$addToSet` update modifier
+* `Update` *currentDate* `(String key)` Update using the `$currentDate` update modifier
+* `Update` *currentTimestamp* `(String key)` Update using the `$currentDate` update modifier with `$type` `timestamp`
+* `Update` *inc* `(String key, Number inc)` Update using the `$inc` update modifier
+* `Update` *max* `(String key, Object max)` Update using the `$max` update modifier
+* `Update` *min* `(String key, Object min)` Update using the `$min` update modifier
+* `Update` *multiply* `(String key, Number multiplier)` Update using the `$mul` update modifier
+* `Update` *pop* `(String key, Update.Position pos)` Update using the `$pop` update modifier
+* `Update` *pull* `(String key, Object value)` Update using the `$pull` update modifier
+* `Update` *pullAll* `(String key, Object[] values)` Update using the `$pullAll` update modifier
+* `Update` *push* `(String key, Object value)` Update using the `$push` update modifier
+* `Update` *pushAll* `(String key, Object[] values)` Update using the `$pushAll` update modifier
+* `Update` *rename* `(String oldName, String newName)` Update using the `$rename` update modifier
+* `Update` *set* `(String key, Object value)` Update using the `$set` update modifier
+* `Update` *setOnInsert* `(String key, Object value)` Update using the `$setOnInsert` update modifier
+* `Update` *unset* `(String key)` Update using the `$unset` update modifier
+
+Some update modifiers, such as `$push` and `$addToSet`, allow nesting of additional operators.
+
+[source,java]
+----
+// { $push : { "category" : { "$each" : [ "spring" , "data" ] } } }
+new Update().push("category").each("spring", "data");
+
+// { $push : { "key" : { "$position" : 0 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } }
+new Update().push("key").atPosition(Position.FIRST).each(Arrays.asList("Arya", "Arry", "Weasel"));
+
+// { $push : { "key" : { "$slice" : 5 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } }
+new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel"));
+
+// { $addToSet : { "values" : { "$each" : [ "spring" , "data" , "mongodb" ] } } }
+new Update().addToSet("values").each("spring", "data", "mongodb");
+----
+
+[[mongo-template.aggregation-update]]
+=== Aggregation Pipeline Updates
+
+Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an xref:mongodb/aggregation-framework.adoc[Aggregation Pipeline] via `AggregationUpdate`.
+Using `AggregationUpdate` allows leveraging https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline[MongoDB 4.2 aggregations] in an update operation.
+Using aggregations in an update allows updating one or more fields by expressing multiple stages and multiple conditions with a single operation.
+
+The update can consist of the following stages:
+
+* `AggregationUpdate.set(...).toValue(...)` -> `$set : { ... }`
+* `AggregationUpdate.unset(...)` -> `$unset : [ ... ]`
+* `AggregationUpdate.replaceWith(...)` -> `$replaceWith : { ... }`
+
+.Update Aggregation
+====
+[source,java]
+----
+AggregationUpdate update = Aggregation.newUpdate()
+    .set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) <1>
+    .set("grade").toValue(ConditionalOperators.switchCases( <2>
+        when(valueOf("average").greaterThanEqualToValue(90)).then("A"),
+        when(valueOf("average").greaterThanEqualToValue(80)).then("B"),
+        when(valueOf("average").greaterThanEqualToValue(70)).then("C"),
+        when(valueOf("average").greaterThanEqualToValue(60)).then("D"))
+        .defaultTo("F")
+    );
+
+template.update(Student.class) <3>
+    .apply(update)
+    .all(); <4>
+----
+[source,javascript]
+----
+db.students.update( <3>
+   { },
+   [
+     { $set: { average : { $avg: "$tests" } } }, <1>
+     { $set: { grade: { $switch: { <2>
+                           branches: [
+                               { case: { $gte: [ "$average", 90 ] }, then: "A" },
+                               { case: { $gte: [ "$average", 80 ] }, then: "B" },
+                               { case: { $gte: [ "$average", 70 ] }, then: "C" },
+                               { case: { $gte: [ "$average", 60 ] }, then: "D" }
+                           ],
+                           default: "F"
+     } } } }
+   ],
+   { multi: true } <4>
+)
+----
+<1> The 1st `$set` stage calculates a new field _average_ based on the average of the _tests_ field.
+<2> The 2nd `$set` stage calculates a new field _grade_ based on the _average_ field calculated by the first aggregation stage.
+<3> The pipeline is run on the _students_ collection and uses `Student` for the aggregation field mapping.
+<4> Apply the update to all matching documents in the collection.
+====
+
+[[mongo-template.upserts]]
+== Upsert
+
+Related to performing an `updateFirst` operation, you can also perform an `upsert` operation, which performs an insert if no document matching the query is found.
+The document that is inserted is a combination of the query document and the update document.
+The following example shows how to use the `upsert` method:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+UpdateResult result = template.update(Person.class)
+    .matching(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update")))
+    .apply(update("address", addr))
+    .upsert();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<UpdateResult> result = template.update(Person.class)
+    .matching(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update")))
+    .apply(update("address", addr))
+    .upsert();
+----
+======
+
+WARNING: `upsert` does not support ordering. Please use xref:mongodb/template-crud-operations.adoc#mongo-template.find-and-upsert[findAndModify] to apply a `Sort`.
+
+[IMPORTANT]
+====
+`@Version` properties, if not included in the `Update`, will be automatically initialized.
+Read more in the xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section.
+====
+
+[[mongo-template.replace]]
+=== Replacing Documents in a Collection
+
+The various `replace` methods available via `MongoTemplate` allow you to replace the first matching document.
+If no match is found, a new document can be upserted (as outlined in the previous section) by providing `ReplaceOptions` with the according configuration.
+
+====
+.Replace one
+[source,java]
+----
+Person tom = template.insert(new Person("Motte", 21)); <1>
+Query query = Query.query(Criteria.where("firstName").is(tom.getFirstName())); <2>
+tom.setFirstname("Tom"); <3>
+template.replace(query, tom, ReplaceOptions.none()); <4>
+----
+<1> Insert a new document.
+<2> The query used to identify the single document to replace.
+<3> Set up the replacement document, which must hold either the same `_id` as the existing one or no `_id` at all.
+<4> Run the replace operation.
+
+.Replace one with upsert
+[source,java]
+----
+Person tom = new Person("id-123", "Tom", 21); <1>
+Query query = Query.query(Criteria.where("firstName").is(tom.getFirstName()));
+template.replace(query, tom, ReplaceOptions.replaceOptions().upsert()); <2>
+----
+<1> The `_id` value needs to be present for upsert, otherwise MongoDB will create a new `ObjectId` that is potentially incompatible with the domain type.
+As MongoDB is not aware of your domain type, any `@Field(targetType)` hints are not considered, and the resulting `ObjectId` might not be compatible with your domain model.
+<2> Use `upsert` to insert a new document if no match is found.
+====
+
+[WARNING]
+====
+It is not possible to change the `_id` of existing documents with a replace operation.
+On `upsert`, MongoDB uses two ways of determining the new id for the entry:
+
+* The `_id` is used within the query, as in `{"_id" : 1234 }`.
+* The `_id` is present in the replacement document.
+
+If no `_id` is provided in either way, MongoDB will create a new `ObjectId` for the document.
+This may lead to mapping and data lookup malfunctions if the domain type's `id` property has a different type, such as `Long`.
+====
+
+[[mongo-template.find-and-upsert]]
+== Find and Modify
+
+The `findAndModify(…)` method on `MongoCollection` can update a document and return either the old or newly updated document in a single operation.
+`MongoTemplate` provides four `findAndModify` overloaded methods that take `Query` and `Update` classes and convert from `Document` to your POJOs:
+
+[source,java]
+----
+<T> T findAndModify(Query query, Update update, Class<T> entityClass);
+
+<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
+
+<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
+
+<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass, String collectionName);
+----
+
+The following example inserts a few `Person` objects into the container and performs a `findAndModify` operation:
+
+[source,java]
+----
+template.insert(new Person("Tom", 21));
+template.insert(new Person("Dick", 22));
+template.insert(new Person("Harry", 23));
+
+Query query = new Query(Criteria.where("firstName").is("Harry"));
+Update update = new Update().inc("age", 1);
+
+Person oldValue = template.update(Person.class)
+    .matching(query)
+    .apply(update)
+    .findAndModifyValue(); // oldValue.age == 23
+
+Person newValue = template.query(Person.class)
+    .matching(query)
+    .findOneValue(); // newValue.age == 24
+
+Person newestValue = template.update(Person.class)
+    .matching(query)
+    .apply(update)
+    .withOptions(FindAndModifyOptions.options().returnNew(true)) // Now return the newly updated document when updating
+    .findAndModifyValue(); // newestValue.age == 25
+----
+
+The `FindAndModifyOptions` class lets you set the `returnNew`, `upsert`, and `remove` options.
+An example extending from the previous code snippet follows:
+
+[source,java]
+----
+Person upserted = template.update(Person.class)
+    .matching(new Query(Criteria.where("firstName").is("Mary")))
+    .apply(update)
+    .withOptions(FindAndModifyOptions.options().upsert(true).returnNew(true))
+    .findAndModifyValue();
+----
+
+[IMPORTANT]
+====
+`@Version` properties, if not included in the `Update`, will be automatically incremented.
+Read more in the xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section.
+====
+
+[[mongo-template.find-and-replace]]
+== Find and Replace
+
+The most straightforward method of replacing an entire `Document` is via its `id`, using the `save` method.
+However, this might not always be feasible.
+`findAndReplace` offers an alternative that allows you to identify the document to replace via a simple query.
+
+.Find and Replace Documents
+====
+[source,java]
+----
+Optional<User> result = template.update(Person.class) <1>
+    .matching(query(where("firstname").is("Tom"))) <2>
+    .replaceWith(new Person("Dick"))
+    .withOptions(FindAndReplaceOptions.options().upsert()) <3>
+    .as(User.class) <4>
+    .findAndReplace(); <5>
+----
+<1> Use the fluent update API with the domain type given for mapping the query and deriving the collection name or just use `MongoOperations#findAndReplace`.
+<2> The actual match query mapped against the given domain type. Provide `sort`, `fields`, and `collation` settings via the query.
+<3> Additional optional hook to provide options other than the defaults, like `upsert`.
+<4> An optional projection type used for mapping the operation result. If none is given, the initial domain type is used.
+<5> Trigger the actual processing. Use `findAndReplaceValue` to obtain the nullable result instead of an `Optional`.
+====
+
+IMPORTANT: Please note that the replacement must not hold an `id` itself, as the `id` of the existing `Document` will be carried over to the replacement by the store itself.
+Also keep in mind that `findAndReplace` will only replace the first document matching the query criteria, depending on a potentially given sort order.
Also keep in mind that `findAndReplace` will only replace the first +document matching the query criteria depending on a potentially given sort order. + +[[mongo-template.delete]] +== Delete + +You can use one of five overloaded methods to remove an object from the database: + +==== +[source,java] +---- +template.remove(tywin, "GOT"); <1> + +template.remove(query(where("lastname").is("lannister")), "GOT"); <2> + +template.remove(new Query().limit(3), "GOT"); <3> + +template.findAllAndRemove(query(where("lastname").is("lannister"), "GOT"); <4> + +template.findAllAndRemove(new Query().limit(3), "GOT"); <5> +---- +<1> Remove a single entity specified by its `_id` from the associated collection. +<2> Remove all documents that match the criteria of the query from the `GOT` collection. +<3> Remove the first three documents in the `GOT` collection. Unlike <2>, the documents to remove are identified by their `_id`, running the given query, applying `sort`, `limit`, and `skip` options first, and then removing all at once in a separate step. +<4> Remove all documents matching the criteria of the query from the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one. +<5> Remove the first three documents in the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one. +==== + +[[mongo-template.optimistic-locking]] +== Optimistic Locking + +The `@Version` annotation provides syntax similar to that of JPA in the context of MongoDB and makes sure updates are only applied to documents with a matching version. +Therefore, the actual value of the version property is added to the update query in such a way that the update does not have any effect if another operation altered the document in the meantime. +In that case, an `OptimisticLockingFailureException` is thrown. +The following example shows these features: + +==== +[source,java] +---- +@Document +class Person { + + @Id String id; + String firstname; + String lastname; + @Version Long version; +} + +Person daenerys = template.insert(new Person("Daenerys")); <1> + +Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2> + +daenerys.setLastname("Targaryen"); +template.save(daenerys); <3> + +template.save(tmp); // throws OptimisticLockingFailureException <4> +---- +<1> Intially insert document. `version` is set to `0`. +<2> Load the just inserted document. `version` is still `0`. +<3> Update the document with `version = 0`. Set the `lastname` and bump `version` to `1`. +<4> Try to update the previously loaded document that still has `version = 0`. The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`. +==== + +Only certain CRUD operations on `MongoTemplate` do consider and alter version properties. Please consult `MongoOperations` java doc for detailed information. + +IMPORTANT: Optimistic Locking requires to set the `WriteConcern` to `ACKNOWLEDGED`. Otherwise `OptimisticLockingFailureException` can be silently swallowed. + +NOTE: As of Version 2.2 `MongoOperations` also includes the `@Version` property when removing an entity from the database. +To remove a `Document` without version check use `MongoOperations#remove(Query,...)` instead of `MongoOperations#remove(Object)`. + +NOTE: As of Version 2.2 repositories check for the outcome of acknowledged deletes when removing versioned entities. +An `OptimisticLockingFailureException` is raised if a versioned entity cannot be deleted through `CrudRepository.delete(Object)`. 
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-document-count.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-document-count.adoc
new file mode 100644
index 0000000000..9372662352
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-document-count.adoc
@@ -0,0 +1,48 @@
+[[mongo.query.count]]
+= Counting Documents
+
+The template API offers various methods to count the number of documents matching a given criteria.
+One of them is outlined below.
+
+====
+[source,java]
+----
+template.query(Person.class)
+    .matching(query(where("firstname").is("luke")))
+    .count();
+----
+====
+
+In pre-3.x versions of Spring Data MongoDB, the count operation used MongoDB's internal collection statistics.
+With the introduction of xref:mongodb/client-session-transactions.adoc#mongo.transactions[MongoDB Transactions] this was no longer possible because statistics would not correctly reflect potential changes during a transaction, requiring an aggregation-based count approach.
+So in version 2.x, `MongoOperations.count()` would use the collection statistics if no transaction was in progress, and the aggregation variant if so.
+
+As of Spring Data MongoDB 3.x, any `count` operation, regardless of the existence of filter criteria, uses the aggregation-based count approach via MongoDB's `countDocuments`.
+If the application is fine with the limitations of working on collection statistics, `MongoOperations.estimatedCount()` offers an alternative.
+
+[TIP]
+====
+By setting `MongoTemplate#useEstimatedCount(...)` to `true`, `MongoTemplate#count(...)` operations that use an empty filter query will be delegated to `estimatedCount`, as long as there is no transaction active and the template is not bound to a xref:mongodb/client-session-transactions.adoc[session].
+Exact numbers can still be obtained via `MongoTemplate#exactCount`, while the delegation may speed things up.
+====
+
+[NOTE]
+====
+MongoDB's native `countDocuments` method and the `$match` aggregation do not support `$near` and `$nearSphere` but require `$geoWithin` along with `$center` or `$centerSphere`, which does not support `$minDistance` (see https://jira.mongodb.org/browse/SERVER-37043).
+
+Therefore, a given `Query` is rewritten for `count` operations using `Reactive`-/`MongoTemplate` to bypass the issue, as shown below.
+
+[source,javascript]
+----
+{ location : { $near : [-73.99171, 40.738868], $maxDistance : 1.1 } } <1>
+{ location : { $geoWithin : { $center: [ [-73.99171, 40.738868], 1.1] } } } <2>
+
+{ location : { $near : [-73.99171, 40.738868], $minDistance : 0.1, $maxDistance : 1.1 } } <3>
+{$and :[ { $nor :[ { location :{ $geoWithin :{ $center :[ [-73.99171, 40.738868 ], 0.01] } } } ]}, { location :{ $geoWithin :{ $center :[ [-73.99171, 40.738868 ], 1.1] } } } ] } <4>
+----
+<1> Count source query using `$near`.
+<2> Rewritten query, now using `$geoWithin` with `$center`.
+<3> Count source query using `$near` with `$minDistance` and `$maxDistance`.
+<4> Rewritten query, now a combination of `$nor` and `$geoWithin` criteria, to work around the unsupported `$minDistance`.
+====
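+
+As a quick sketch of the two flavors (both methods are exposed on the template API described above):
+
+[source,java]
+----
+// Aggregation-based count via countDocuments; works inside transactions.
+long exact = template.count(new Query(), Person.class);
+
+// Statistics-based estimate; fast, but subject to the limitations described above.
+long estimate = template.estimatedCount(Person.class);
+----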
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc
new file mode 100644
index 0000000000..c7305b0a71
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc
@@ -0,0 +1,214 @@
+[[gridfs]]
+= GridFS Support
+
+MongoDB supports storing binary files inside its filesystem, GridFS.
+Spring Data MongoDB provides the javadoc:org.springframework.data.mongodb.gridfs.GridFsOperations[] and javadoc:org.springframework.data.mongodb.gridfs.ReactiveGridFsOperations[] interfaces as well as the corresponding implementations, `GridFsTemplate` and `ReactiveGridFsTemplate`, to let you interact with the filesystem.
+You can set up a template instance by handing it a `MongoDatabaseFactory`/`ReactiveMongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+class GridFsConfiguration extends AbstractMongoClientConfiguration {
+
+  // … further configuration omitted
+
+  @Bean
+  public GridFsTemplate gridFsTemplate() {
+    return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
+  }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+class ReactiveGridFsConfiguration extends AbstractReactiveMongoConfiguration {
+
+  // … further configuration omitted
+
+  @Bean
+  public ReactiveGridFsTemplate reactiveGridFsTemplate() {
+    return new ReactiveGridFsTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
+  }
+}
+----
+
+XML::
++
+[source,xml,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xmlns:mongo="http://www.springframework.org/schema/data/mongo"
+  xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
+    http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd">
+
+  <mongo:db-factory id="mongoDbFactory" dbname="database" />
+  <mongo:mapping-converter id="converter" />
+
+  <bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
+    <constructor-arg ref="mongoDbFactory" />
+    <constructor-arg ref="converter" />
+  </bean>
+</beans>
+----
+======
+
+The template can now be injected and used to perform storage and retrieval operations, as the following example shows:
+
+.Using GridFS to store files
+[tabs]
+======
+Imperative::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+class GridFsClient {
+
+  @Autowired
+  GridFsOperations operations;
+
+  @Test
+  public void storeFileToGridFs() {
+
+    FileMetadata metadata = new FileMetadata();
+    // populate metadata
+    Resource file = … // lookup File or Resource
+
+    operations.store(file.getInputStream(), "filename.txt", metadata);
+  }
+}
+----
+The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store.
+The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`.
+Alternatively, you can also provide a `Document`.
+====
+
+Reactive::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+class ReactiveGridFsClient {
+
+  @Autowired
+  ReactiveGridFsTemplate operations;
+
+  @Test
+  public Mono<ObjectId> storeFileToGridFs() {
+
+    FileMetadata metadata = new FileMetadata();
+    // populate metadata
+    Publisher<DataBuffer> file = … // lookup File or Resource
+
+    return operations.store(file, "filename.txt", metadata);
+  }
+}
+----
+The `store(…)` operations take a `Publisher<DataBuffer>`, a filename, and (optionally) metadata information about the file to store.
+The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `ReactiveGridFsTemplate`.
+Alternatively, you can also provide a `Document`.
+
+The MongoDB driver uses `AsyncInputStream` and `AsyncOutputStream` interfaces to exchange binary streams.
+Spring Data MongoDB adapts these interfaces to `Publisher<DataBuffer>`.
+Read more about `DataBuffer` in https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#databuffers[Spring's reference documentation].
+====
+======
+
+You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods.
+Let's have a look at the `find(…)` methods first.
+You can either find a single file or multiple files that match a `Query`.
+You can use the `GridFsCriteria` helper class to define queries.
+It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`.
+The following example shows how to use the template to query for files:
+
+.Using GridFsTemplate to query for files
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+class GridFsClient {
+
+  @Autowired
+  GridFsOperations operations;
+
+  @Test
+  public void findFilesInGridFs() {
+    GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
+  }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+class ReactiveGridFsClient {
+
+  @Autowired
+  ReactiveGridFsTemplate operations;
+
+  @Test
+  public Flux<GridFSFile> findFilesInGridFs() {
+    return operations.find(query(whereFilename().is("filename.txt")));
+  }
+}
+----
+======
+
+NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.
+
+The other option to read files from GridFS is to use the methods introduced by the `ResourcePatternResolver` interface.
+They allow handing an Ant path into the method and can thus retrieve files matching the given pattern.
+The following example shows how to use `GridFsTemplate` to read files:
+
+.Using GridFsTemplate to read files
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+class GridFsClient {
+
+  @Autowired
+  GridFsOperations operations;
+
+  public GridFsResource[] readFilesFromGridFs() {
+    return operations.getResources("*.txt");
+  }
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+class ReactiveGridFsClient {
+
+  @Autowired
+  ReactiveGridFsOperations operations;
+
+  public Flux<ReactiveGridFsResource> readFilesFromGridFs() {
+    return operations.getResources("*.txt");
+  }
+}
+----
+======
+
+`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB database.
+
+NOTE: By default, `GridFsTemplate` obtains `GridFSBucket` once upon the first GridFS interaction.
+After that, the template instance reuses the cached bucket.
+To use different buckets from the same template instance, use the constructor accepting `Supplier<GridFSBucket>`.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-query-operations.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-query-operations.adoc
new file mode 100644
index 0000000000..a424748205
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-query-operations.adoc
@@ -0,0 +1,945 @@
+[[mongo.query]]
+= Querying Documents
+:page-section-summary-toc: 1
+
+You can use the `Query` and `Criteria` classes to express your queries.
+They have method names that mirror the native MongoDB operator names, such as `lt`, `lte`, `is`, and others.
+The `Query` and `Criteria` classes follow a fluent API style so that you can chain together multiple method criteria and queries while having easy-to-understand code.
+To improve readability, static imports let you avoid using the 'new' keyword for creating `Query` and `Criteria` instances.
+You can also use `BasicQuery` to create `Query` instances from plain JSON Strings, as shown in the following example:
+
+.Creating a Query instance from a plain JSON String
+====
+[source,java]
+----
+BasicQuery query = new BasicQuery("{ age : { $lt : 50 }, accounts.balance : { $gt : 1000.00 }}");
+List<Person> result = mongoTemplate.find(query, Person.class);
+----
+====
+
+[[mongodb-template-query]]
+== Querying Documents in a Collection
+
+Earlier, we saw how to retrieve a single document by using the `findOne` and `findById` methods on `MongoTemplate`.
+These methods return a single domain object right away or, using the reactive API, a `Mono` emitting a single element.
+We can also query for a collection of documents to be returned as a list of domain objects.
+Assuming that we have a number of `Person` objects with name and age stored as documents in a collection and that each person has an embedded account document with a balance, we can now run a query using the following code:
+
+.Querying for documents using the MongoTemplate
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import static org.springframework.data.mongodb.core.query.Query.query;
+
+// ...
+
+List<Person> result = template.query(Person.class)
+    .matching(query(where("age").lt(50).and("accounts.balance").gt(1000.00d)))
+    .all();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import static org.springframework.data.mongodb.core.query.Query.query;
+
+// ...
+
+Flux<Person> result = template.query(Person.class)
+    .matching(query(where("age").lt(50).and("accounts.balance").gt(1000.00d)))
+    .all();
+----
+======
+
+All find methods take a `Query` object as a parameter.
+This object defines the criteria and options used to perform the query.
+The criteria are specified by using a `Criteria` object that has a static factory method named `where` to instantiate a new `Criteria` object.
+We recommend using static imports for `org.springframework.data.mongodb.core.query.Criteria.where` and `Query.query` to make the query more readable.
+
+The query should return a `List` or `Flux` of `Person` objects that meet the specified criteria.
+The rest of this section lists the methods of the `Criteria` and `Query` classes that correspond to the operators provided in MongoDB.
+Most methods return the `Criteria` object, to provide a fluent style for the API.
+
+[[mongodb-template-query.criteria]]
+.Methods of the Criteria Class
+[%collapsible]
+====
+The `Criteria` class provides the following methods, all of which correspond to operators in MongoDB:
+
+* `Criteria` *all* `(Object o)` Creates a criterion using the `$all` operator
+* `Criteria` *and* `(String key)` Adds a chained `Criteria` with the specified `key` to the current `Criteria` and returns the newly created one
+* `Criteria` *andOperator* `(Criteria... criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later)
criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later) +* `Criteria` *andOperator* `(Collection criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later) +* `Criteria` *elemMatch* `(Criteria c)` Creates a criterion using the `$elemMatch` operator +* `Criteria` *exists* `(boolean b)` Creates a criterion using the `$exists` operator +* `Criteria` *gt* `(Object o)` Creates a criterion using the `$gt` operator +* `Criteria` *gte* `(Object o)` Creates a criterion using the `$gte` operator +* `Criteria` *in* `(Object... o)` Creates a criterion using the `$in` operator for a varargs argument. +* `Criteria` *in* `(Collection collection)` Creates a criterion using the `$in` operator using a collection +* `Criteria` *is* `(Object o)` Creates a criterion using field matching (`{ key:value }`). If the specified value is a document, the order of the fields and exact equality in the document matters. +* `Criteria` *lt* `(Object o)` Creates a criterion using the `$lt` operator +* `Criteria` *lte* `(Object o)` Creates a criterion using the `$lte` operator +* `Criteria` *mod* `(Number value, Number remainder)` Creates a criterion using the `$mod` operator +* `Criteria` *ne* `(Object o)` Creates a criterion using the `$ne` operator +* `Criteria` *nin* `(Object... o)` Creates a criterion using the `$nin` operator +* `Criteria` *norOperator* `(Criteria... criteria)` Creates an nor query using the `$nor` operator for all of the provided criteria +* `Criteria` *norOperator* `(Collection criteria)` Creates an nor query using the `$nor` operator for all of the provided criteria +* `Criteria` *not* `()` Creates a criterion using the `$not` meta operator which affects the clause directly following +* `Criteria` *orOperator* `(Criteria... criteria)` Creates an or query using the `$or` operator for all of the provided criteria +* `Criteria` *orOperator* `(Collection criteria)` Creates an or query using the `$or` operator for all of the provided criteria +* `Criteria` *regex* `(String re)` Creates a criterion using a `$regex` +* `Criteria` *sampleRate* `(double sampleRate)` Creates a criterion using the `$sampleRate` operator +* `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator +* `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator +* `Criteria` *matchingDocumentStructure* `(MongoJsonSchema schema)` Creates a criterion using the `$jsonSchema` operator for xref:mongodb/mapping/mapping-schema.adoc[JSON schema criteria]. `$jsonSchema` can only be applied on the top level of a query and not property specific. Use the `properties` attribute of the schema to match against nested fields. +* `Criteria` *bits()* is the gateway to https://docs.mongodb.com/manual/reference/operator/query-bitwise/[MongoDB bitwise query operators] like `$bitsAllClear`. + +The Criteria class also provides the following methods for geospatial queries. + +* `Criteria` *within* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators. +* `Criteria` *within* `(Box box)` Creates a geospatial criterion using a `$geoWithin $box` operation. +* `Criteria` *withinSphere* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators. 
+* `Criteria` *near* `(Point point)` Creates a geospatial criterion using a `$near` operation
+* `Criteria` *nearSphere* `(Point point)` Creates a geospatial criterion using `$nearSphere` `$center` operations. This is only available for MongoDB 1.7 and higher.
+* `Criteria` *minDistance* `(double minDistance)` Creates a geospatial criterion using the `$minDistance` operation, for use with `$near`.
+* `Criteria` *maxDistance* `(double maxDistance)` Creates a geospatial criterion using the `$maxDistance` operation, for use with `$near`.
+====
+
+The `Query` class has some additional methods that allow you to select certain fields as well as limit and sort the result.
+
+[[mongodb-template-query.query]]
+.Methods of the Query class
+[%collapsible]
+====
+* `Query` *addCriteria* `(Criteria criteria)` used to add additional criteria to the query
+* `Field` *fields* `()` used to define fields to be included in the query results
+* `Query` *limit* `(int limit)` used to limit the size of the returned results to the provided limit (used for paging)
+* `Query` *skip* `(int skip)` used to skip the provided number of documents in the results (used for paging)
+* `Query` *with* `(Sort sort)` used to provide a sort definition for the results
+* `Query` *with* `(ScrollPosition position)` used to provide a scroll position (Offset- or Keyset-based pagination) to start or resume a `Scroll`
+====
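+
+Put together, these methods allow for paged and sorted queries, as the following sketch shows (reusing the `Person` type from the earlier examples; criteria and page size are illustrative assumptions):
+
+[source,java]
+----
+Query query = query(where("age").gte(18))
+    .with(Sort.by(Sort.Direction.ASC, "lastname")) // sort by last name
+    .skip(20)                                      // skip the first two pages ...
+    .limit(10);                                    // ... and read the third page of 10 documents
+
+List<Person> thirdPage = template.find(query, Person.class);
+----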
+[[mongo-template.query.result-projection]]
+
+The template API allows direct usage of result projections that enable you to map queries against a given domain type while projecting the operation result onto another one as outlined below.
+
+[source,java]
+----
+template.query(SWCharacter.class)
+    .as(Jedi.class)
+    .matching(query(where("jedi").is(true)))
+    .all();
+----
+
+For more information on result projections please refer to the xref:repositories/projections.adoc[Projections] section of the documentation.
+
+[[mongo-template.querying.field-selection]]
+== Selecting fields
+
+MongoDB supports https://docs.mongodb.com/manual/tutorial/project-fields-from-query-results/[projecting fields] returned by a query.
+A projection can include and exclude fields (the `_id` field is always included unless explicitly excluded) based on their name.
+
+.Selecting result fields
+====
+[source,java]
+----
+public class Person {
+
+    @Id String id;
+    String firstname;
+
+    @Field("last_name")
+    String lastname;
+
+    Address address;
+}
+
+query.fields().include("lastname"); <1>
+
+query.fields().exclude("id").include("lastname") <2>
+
+query.fields().include("address") <3>
+
+query.fields().include("address.city") <4>
+----
+<1> Result will contain both `_id` and `last_name` via `{ "last_name" : 1 }`.
+<2> Result will only contain the `last_name` via `{ "_id" : 0, "last_name" : 1 }`.
+<3> Result will contain the `_id` and entire `address` object via `{ "address" : 1 }`.
+<4> Result will contain the `_id` and an `address` object that only contains the `city` field via `{ "address.city" : 1 }`.
+====
+
+Starting with MongoDB 4.4 you can use aggregation expressions for field projections as shown below:
+
+.Computing result fields using expressions
+====
+[source,java]
+----
+query.fields()
+    .project(MongoExpression.create("'$toUpper' : '$last_name'")) <1>
+    .as("last_name"); <2>
+
+query.fields()
+    .project(StringOperators.valueOf("lastname").toUpper()) <3>
+    .as("last_name");
+
+query.fields()
+    .project(AggregationSpELExpression.expressionOf("toUpper(lastname)")) <4>
+    .as("last_name");
+----
+<1> Use a native expression. The used field name must refer to field names within the database document.
+<2> Assign the field name to which the expression result is projected. The resulting field name is not mapped against the domain model.
+<3> Use an `AggregationExpression`. Unlike the native `MongoExpression`, field names are mapped to the ones used in the domain model.
+<4> Use SpEL along with an `AggregationExpression` to invoke expression functions. Field names are mapped to the ones used in the domain model.
+====
+
+`@Query(fields="…")` allows usage of expression field projections at `Repository` level as described in xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[MongoDB JSON-based Query Methods and Field Restriction].
+
+[[mongo.query.additional-query-options]]
+== Additional Query Options
+
+MongoDB offers various ways of applying meta information, like a comment or a batch size, to a query.
+Using the `Query` API directly, there are several methods for those options.
+
+[[mongo.query.hints]]
+=== Hints
+
+Index hints can be applied in two ways, using the index name or its field definition.
+
+====
+[source,java]
+----
+template.query(Person.class)
+    .matching(query("...").withHint("index-to-use"));
+
+template.query(Person.class)
+    .matching(query("...").withHint("{ firstname : 1 }"));
+----
+====
+
+[[mongo.query.cursor-size]]
+=== Cursor Batch Size
+
+The cursor batch size defines the number of documents to return in each response batch.
+
+====
+[source,java]
+----
+Query query = query(where("firstname").is("luke"))
+    .cursorBatchSize(100);
+----
+====
+
+[[mongo.query.collation]]
+=== Collations
+
+Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options, as the following two examples show:
+
+====
+[source,java]
+----
+Collation collation = Collation.of("de");
+
+Query query = new Query(Criteria.where("firstName").is("Amél"))
+    .collation(collation);
+
+List<Person> results = template.find(query, Person.class);
+----
+====
+
+[[mongo.query.read-preference]]
+=== Read Preference
+
+The `ReadPreference` to use can be set directly on the `Query` object to be run as outlined below.
+
+====
+[source,java]
+----
+template.query(Person.class)
+    .matching(query(where(...)).withReadPreference(ReadPreference.secondary()))
+    .all();
+----
+====
+
+NOTE: The preference set on the `Query` instance will supersede the default `ReadPreference` of `MongoTemplate`.
+
+[[mongo.query.comment]]
+=== Comments
+
+Queries can be equipped with comments, which makes them easier to look up in server logs.
+
+====
+[source,java]
+----
+template.query(Person.class)
+    .matching(query(where(...)).comment("Use the force luke!"))
+    .all();
+----
+====
+
+[[mongo-template.query.distinct]]
+== Query Distinct Values
+
+MongoDB provides an operation to obtain the distinct values of a single field across the documents matching a query.
+Resulting values are not required to have the same data type, nor is the feature limited to simple types.
+For retrieval, the actual result type does matter for the sake of conversion and typing. The following example shows how to query for distinct values:
+
+.Retrieving distinct values
+====
+[source,java]
+----
+template.query(Person.class) <1>
+    .distinct("lastname")    <2>
+    .all();                  <3>
+----
+<1> Query the `Person` collection.
+<2> Select distinct values of the `lastname` field. The field name is mapped according to the domain type's property declaration, taking potential `@Field` annotations into account.
+<3> Retrieve all distinct values as a `List` of `Object` (due to no explicit result type being specified).
+====
+
+Retrieving distinct values into a `Collection` of `Object` is the most flexible way, as it tries to determine the property value of the domain type and to convert results to the desired type or to map `Document` structures.
+
+Sometimes, when all values of the desired field are fixed to a certain type, it is more convenient to directly obtain a correctly typed `Collection`, as shown in the following example:
+
+.Retrieving strongly typed distinct values
+====
+[source,java]
+----
+template.query(Person.class) <1>
+    .distinct("lastname")    <2>
+    .as(String.class)        <3>
+    .all();                  <4>
+----
+<1> Query the collection of `Person`.
+<2> Select distinct values of the `lastname` field. The field name is mapped according to the domain type's property declaration, taking potential `@Field` annotations into account.
+<3> Retrieved values are converted into the desired target type -- in this case, `String`. It is also possible to map the values to a more complex type if the stored field contains a document.
+<4> Retrieve all distinct values as a `List` of `String`. If the type cannot be converted into the desired target type, this method throws a `DataAccessException`.
+====
+
+[[mongo.geospatial]]
+== GeoSpatial Queries
+
+MongoDB supports GeoSpatial queries through the use of operators such as `$near`, `$within`, `$geoWithin`, and `$nearSphere`. Methods specific to geospatial queries are available on the `Criteria` class. There are also a few shape classes (`Box`, `Circle`, and `Point`) that are used in conjunction with geospatial-related `Criteria` methods.
+
+NOTE: Using GeoSpatial queries requires attention when used within MongoDB transactions, see xref:mongodb/client-session-transactions.adoc#mongo.transactions.behavior[Special behavior inside transactions].
+
+To understand how to perform GeoSpatial queries, consider the following `Venue` class (taken from the integration tests and relying on the rich `MappingMongoConverter`):
+
+.Venue.java
+[%collapsible]
+====
+[source,java]
+----
+@Document(collection="newyork")
+public class Venue {
+
+    @Id
+    private String id;
+    private String name;
+    private double[] location;
+
+    @PersistenceConstructor
+    Venue(String name, double[] location) {
+        super();
+        this.name = name;
+        this.location = location;
+    }
+
+    public Venue(String name, double x, double y) {
+        super();
+        this.name = name;
+        this.location = new double[] { x, y };
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public double[] getLocation() {
+        return location;
+    }
+
+    @Override
+    public String toString() {
+        return "Venue [id=" + id + ", name=" + name + ", location="
+            + Arrays.toString(location) + "]";
+    }
+}
+----
+====
+
+To find locations within a `Circle`, you can use the following query:
+
+[source,java]
+----
+Circle circle = new Circle(-73.99171, 40.738868, 0.01);
+List<Venue> venues =
+    template.find(new Query(Criteria.where("location").within(circle)), Venue.class);
+----
+
+To find venues within a `Circle` using spherical coordinates, you can use the following query:
+
+[source,java]
+----
+Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784);
+List<Venue> venues =
+    template.find(new Query(Criteria.where("location").withinSphere(circle)), Venue.class);
+----
+
+To find venues within a `Box`, you can use the following query:
+
+[source,java]
+----
+//lower-left then upper-right
+Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404));
+List<Venue> venues =
+    template.find(new Query(Criteria.where("location").within(box)), Venue.class);
+----
+
+To find venues near a `Point`, you can use the following queries:
+
+[source,java]
+----
+Point point = new Point(-73.99171, 40.738868);
+List<Venue> venues =
+    template.find(new Query(Criteria.where("location").near(point).maxDistance(0.01)), Venue.class);
+----
+
+[source,java]
+----
+Point point = new Point(-73.99171, 40.738868);
+List<Venue> venues =
+    template.find(new Query(Criteria.where("location").near(point).minDistance(0.01).maxDistance(100)), Venue.class);
+----
+
+To find venues near a `Point` using spherical coordinates, you can use the following query:
+
+[source,java]
+----
+Point point = new Point(-73.99171, 40.738868);
+List<Venue> venues =
+    template.find(new Query(
+        Criteria.where("location").nearSphere(point).maxDistance(0.003712240453784)),
+        Venue.class);
+----
+
+[[mongo.geo-near]]
+== Geo-near Queries
+
+[WARNING]
+====
+*Changed in 2.2!*
+
+https://docs.mongodb.com/master/release-notes/4.2-compatibility/[MongoDB 4.2] removed support for the
+`geoNear` command, which had been previously used to run the `NearQuery`.
+
+Since Spring Data MongoDB 2.2, `MongoOperations#geoNear` uses the `$geoNear` https://docs.mongodb.com/manual/reference/operator/aggregation/geoNear/[aggregation]
+instead of the `geoNear` command to run a `NearQuery`.
+
+The calculated distance (the `dis` field when using the geoNear command), previously returned within a wrapper type, is now embedded
+into the resulting document.
+If the given domain type already contains a property with that name, the calculated distance
+is named `calculated-distance` with a potentially random postfix.
+
+Target types may contain a property named after the returned distance to (additionally) read it back directly into the domain type as shown below.
+
+[source,java]
+----
+GeoResults<VenueWithDistanceField> results = template.query(Venue.class) <1>
+    .as(VenueWithDistanceField.class)                                    <2>
+    .near(NearQuery.near(new GeoJsonPoint(-73.99, 40.73), KILOMETERS))
+    .all();
+----
+<1> Domain type used to identify the target collection and potential query mapping.
+<2> Target type containing a `dis` field of type `Number`.
+====
+
+MongoDB supports querying the database for geo locations and calculating the distance from a given origin at the same time. With geo-near queries, you can express queries such as "find all restaurants in the surrounding 10 miles". To let you do so, `MongoOperations` provides `geoNear(…)` methods that take a `NearQuery` as an argument (as well as the already familiar entity type and collection), as shown in the following example:

+[source,java]
+----
+Point location = new Point(-73.99171, 40.738868);
+NearQuery query = NearQuery.near(location).maxDistance(new Distance(10, Metrics.MILES));
+
+GeoResults<Restaurant> results = operations.geoNear(query, Restaurant.class);
+----
+
+We use the `NearQuery` builder API to set up a query to return all `Restaurant` instances surrounding the given `Point` out to 10 miles.
+The `Metrics` enum used here actually implements an interface so that other metrics could be plugged into a distance as well.
+A `Metric` is backed by a multiplier to transform the distance value of the given metric into native distances.
+The sample shown here would consider the 10 to be miles. Using one of the built-in metrics (miles and kilometers) automatically triggers the spherical flag to be set on the query.
+If you want to avoid that, pass plain `double` values into `maxDistance(…)`.
+For more information, see the Javadoc of javadoc:org.springframework.data.mongodb.core.query.NearQuery[] and `Distance`.
+
+The geo-near operations return a `GeoResults` wrapper object that encapsulates `GeoResult` instances.
+Wrapping `GeoResults` allows accessing the average distance of all results.
+A single `GeoResult` object carries the entity found plus its distance from the origin.
+
+[[mongo.geo-json]]
+== GeoJSON Support
+
+MongoDB supports https://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Both formats can be used for storing as well as querying data. See the https://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions.
+
+[[mongo.geo-json.domain.classes]]
+== GeoJSON Types in Domain Classes
+
+Usage of https://geojson.org/[GeoJSON] types in domain classes is straightforward. The `org.springframework.data.mongodb.core.geo` package contains types such as `GeoJsonPoint`, `GeoJsonPolygon`, and others. These types extend the existing `org.springframework.data.geo` types. The following example uses a javadoc:org.springframework.data.mongodb.core.geo.GeoJsonPoint[]:
+
+====
+[source,java]
+----
+public class Store {
+
+    String id;
+
+    /**
+     * { "type" : "Point", "coordinates" : [ x, y ] }
+     */
+    GeoJsonPoint location;
+}
+----
+====
+
+[TIP]
+====
+If the `coordinates` of a GeoJSON object represent _latitude_ and _longitude_ pairs, the _longitude_ goes first, followed by _latitude_.
+
+`GeoJsonPoint` therefore treats `getX()` as _longitude_ and `getY()` as _latitude_.
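+
+A quick check of the coordinate order (a sketch; the coordinate values are arbitrary):
+
+[source,java]
+----
+GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); // (x = longitude, y = latitude)
+
+point.getX(); // -73.99171 -> longitude
+point.getY(); // 40.738868 -> latitude
+----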
+====
+
+[[mongo.geo-json.query-methods]]
+== GeoJSON Types in Repository Query Methods
+
+Using GeoJSON types as repository query parameters forces usage of the `$geometry` operator when creating the query, as the following example shows:
+
+====
+[source,java]
+----
+public interface StoreRepository extends CrudRepository<Store, String> {
+
+    List<Store> findByLocationWithin(Polygon polygon); <1>
+
+}
+
+/*
+ * {
+ *   "location": {
+ *     "$geoWithin": {
+ *       "$geometry": {
+ *         "type": "Polygon",
+ *         "coordinates": [
+ *           [
+ *             [-73.992514,40.758934],
+ *             [-73.961138,40.760348],
+ *             [-73.991658,40.730006],
+ *             [-73.992514,40.758934]
+ *           ]
+ *         ]
+ *       }
+ *     }
+ *   }
+ * }
+ */
+repo.findByLocationWithin( <2>
+    new GeoJsonPolygon(
+        new Point(-73.992514, 40.758934),
+        new Point(-73.961138, 40.760348),
+        new Point(-73.991658, 40.730006),
+        new Point(-73.992514, 40.758934))); <3>
+
+/*
+ * {
+ *   "location" : {
+ *     "$geoWithin" : {
+ *       "$polygon" : [ [-73.992514,40.758934] , [-73.961138,40.760348] , [-73.991658,40.730006] ]
+ *     }
+ *   }
+ * }
+ */
+repo.findByLocationWithin( <4>
+    new Polygon(
+        new Point(-73.992514, 40.758934),
+        new Point(-73.961138, 40.760348),
+        new Point(-73.991658, 40.730006)));
+----
+<1> Repository method definition using the commons type allows calling it with both the GeoJSON and the legacy format.
+<2> Use GeoJSON type to make use of `$geometry` operator.
+<3> Note that GeoJSON polygons need to define a closed ring.
+<4> Use the legacy format `$polygon` operator.
+====
+
+[[mongo.geo-json.metrics]]
+== Metrics and Distance calculation
+
+The MongoDB `$geoNear` operator allows usage of a GeoJSON `Point` or legacy coordinate pairs.
+
+====
+[source,java]
+----
+NearQuery.near(new Point(-73.99171, 40.738868))
+----
+[source,json]
+----
+{
+  "$geoNear": {
+    //...
+    "near": [-73.99171, 40.738868]
+  }
+}
+----
+====
+====
+[source,java]
+----
+NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868))
+----
+[source,json]
+----
+{
+  "$geoNear": {
+    //...
+    "near": { "type": "Point", "coordinates": [-73.99171, 40.738868] }
+  }
+}
+----
+====
+
+Though syntactically different, the server accepts both regardless of the format used by the target documents within the collection.
+
+WARNING: There is a huge difference in the distance calculation. Using the legacy format operates
+upon _Radians_ on an Earth-like sphere, whereas the GeoJSON format uses _Meters_.
+
+To avoid a serious headache, make sure to set the `Metric` to the desired unit of measure, which ensures that the
+distance is calculated correctly.
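+
+A `NearQuery` that treats distances as kilometers could be set up as follows (a minimal sketch; the 0.4 km radius is an arbitrary example value):
+
+[source,java]
+----
+// Using a Metric makes Spring Data set the distance multiplier accordingly,
+// so both the query distance and the returned distances are in kilometers.
+NearQuery query = NearQuery.near(new Point(-73.99171, 40.738868))
+    .maxDistance(new Distance(0.4, Metrics.KILOMETERS));
+----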
+
+In other words:
+
+====
+Assume you've got 5 Documents like the ones below:
+[source,json]
+----
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a5"),
+  "name" : "Penn Station",
+  "location" : { "type" : "Point", "coordinates" : [ -73.99408, 40.75057 ] }
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a6"),
+  "name" : "10gen Office",
+  "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a9"),
+  "name" : "City Bakery ",
+  "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796aa"),
+  "name" : "Splash Bar",
+  "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796ab"),
+  "name" : "Momofuku Milk Bar",
+  "location" : { "type" : "Point", "coordinates" : [ -73.985839, 40.731698 ] }
+}
+----
+====
+
+Fetching all Documents within a 400 Meter radius from `[-73.99171, 40.738868]` would look like this using
+GeoJSON:
+
+.GeoNear with GeoJSON
+====
+[source,json]
+----
+{
+  "$geoNear": {
+    "maxDistance": 400, <1>
+    "num": 10,
+    "near": { type: "Point", coordinates: [-73.99171, 40.738868] },
+    "spherical":true, <2>
+    "key": "location",
+    "distanceField": "distance"
+  }
+}
+----
+Returning the following 3 Documents:
+[source,json]
+----
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a6"),
+  "name" : "10gen Office",
+  "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] },
+  "distance" : 0.0 <3>
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a9"),
+  "name" : "City Bakery ",
+  "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] },
+  "distance" : 69.3582262492474 <3>
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796aa"),
+  "name" : "Splash Bar",
+  "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] },
+  "distance" : 69.3582262492474 <3>
+}
+----
+<1> Maximum distance from center point in _Meters_.
+<2> GeoJSON always operates upon a sphere.
+<3> Distance from center point in _Meters_.
+====
+
+Now, when using legacy coordinate pairs, one operates upon _Radians_, as discussed before. So we use `Metrics#KILOMETERS`
+when constructing the `$geoNear` command. The `Metric` makes sure the distance multiplier is set correctly.
+
+.GeoNear with Legacy Coordinate Pairs
+====
+[source,json]
+----
+{
+  "$geoNear": {
+    "maxDistance": 0.0000627142377, <1>
+    "distanceMultiplier": 6378.137, <2>
+    "num": 10,
+    "near": [-73.99171, 40.738868],
+    "spherical":true, <3>
+    "key": "location",
+    "distanceField": "distance"
+  }
+}
+----
+Returning the 3 Documents just like the GeoJSON variant:
+[source,json]
+----
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a6"),
+  "name" : "10gen Office",
+  "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] },
+  "distance" : 0.0 <4>
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796a9"),
+  "name" : "City Bakery ",
+  "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] },
+  "distance" : 0.0693586286032982 <4>
+}
+{
+  "_id" : ObjectId("5c10f3735d38908db52796aa"),
+  "name" : "Splash Bar",
+  "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] },
+  "distance" : 0.0693586286032982 <4>
+}
+----
+<1> Maximum distance from center point in _Radians_.
+<2> The distance multiplier so we get _Kilometers_ as resulting distance.
+<3> Make sure we operate on a 2dsphere index.
+<4> Distance from the center point in _Kilometers_; multiply it by 1000 to match the _Meters_ of the GeoJSON variant.
+====
+
+[[mongo.textsearch]]
+== Full-text Search
+
+Since version 2.6 of MongoDB, you can run full-text queries by using the `$text` operator. Methods and operations specific to full-text queries are available in `TextQuery` and `TextCriteria`. When doing full-text search, see the https://docs.mongodb.org/manual/reference/operator/query/text/#behavior[MongoDB reference] for its behavior and limitations.
+
+Before you can actually use full-text search, you must set up the search index correctly.
+See xref:mongodb/mapping/mapping.adoc#mapping-usage-indexes.text-index[Text Index] for more detail on how to create index structures.
+The following example shows how to set up a full-text search:
+
+[source,javascript]
+----
+db.foo.createIndex(
+{
+  title : "text",
+  content : "text"
+},
+{
+  weights : {
+    title : 3
+  }
+}
+)
+----
+
+A query searching for `coffee cake` can be defined and run as follows:
+
+.Full Text Query
+====
+[source,java]
+----
+Query query = TextQuery
+    .queryText(new TextCriteria().matchingAny("coffee", "cake"));
+
+List<Document> page = template.find(query, Document.class);
+----
+====
+
+To sort results by relevance according to the `weights`, use `TextQuery.sortByScore`.
+
+.Full Text Query - Sort by Score
+====
+[source,java]
+----
+Query query = TextQuery
+    .queryText(new TextCriteria().matchingAny("coffee", "cake"))
+    .sortByScore() <1>
+    .includeScore(); <2>
+
+List<Document> page = template.find(query, Document.class);
+----
+<1> Use the score property for sorting results by relevance, which triggers `.sort({'score': {'$meta': 'textScore'}})`.
+<2> Use `TextQuery.includeScore()` to include the calculated relevance in the resulting `Document`.
+====
+
+You can exclude search terms by prefixing the term with `-` or by using `notMatching`, as shown in the following example (note that the two lines have the same effect and are thus redundant):
+
+[source,java]
+----
+// search for 'coffee' and not 'cake'
+TextQuery.queryText(new TextCriteria().matching("coffee").matching("-cake"));
+TextQuery.queryText(new TextCriteria().matching("coffee").notMatching("cake"));
+----
+
+`TextCriteria.matching` takes the provided term as is.
+Therefore, you can define phrases by putting them between double quotation marks (for example, `\"coffee cake\"`) or by using `TextCriteria.phrase`.
+The following example shows both ways of defining a phrase:
+
+[source,java]
+----
+// search for phrase 'coffee cake'
+TextQuery.queryText(new TextCriteria().matching("\"coffee cake\""));
+TextQuery.queryText(new TextCriteria().phrase("coffee cake"));
+----
+
+You can set flags for `$caseSensitive` and `$diacriticSensitive` by using the corresponding methods on `TextCriteria`.
+Note that these two optional flags have been introduced in MongoDB 3.2 and are not included in the query unless explicitly set.
+
+[[mongo.query-by-example]]
+== Query by Example
+
+xref:mongodb/repositories/query-methods.adoc#query-by-example[Query by Example] can be used on the Template API level to run example queries.
+
+The following snippet shows how to query by example:
+
+.Typed Example Query
+[source,java]
+----
+Person probe = new Person();
+probe.lastname = "stark";
+
+Example<Person> example = Example.of(probe);
+
+Query query = new Query(new Criteria().alike(example));
+List<Person> result = template.find(query, Person.class);
+----
+
+By default, `Example` is strictly typed.
This means that the mapped query has an included type match, restricting it to types assignable to the probe's type.
+For example, when sticking with the default type key (`_class`), the query has restrictions such as (`_class : { $in : [ com.acme.Person ] }`).
+
+By using the `UntypedExampleMatcher`, it is possible to bypass the default behavior and skip the type restriction. So, as long as field names match, nearly any domain type can be used as the probe for creating the reference, as the following example shows:
+
+.Untyped Example Query
+====
+[source, java]
+----
+class JustAnArbitraryClassWithMatchingFieldName {
+    @Field("lastname") String value;
+}
+
+JustAnArbitraryClassWithMatchingFieldName probe = new JustAnArbitraryClassWithMatchingFieldName();
+probe.value = "stark";
+
+Example<?> example = Example.of(probe, UntypedExampleMatcher.matching());
+
+Query query = new Query(new Criteria().alike(example));
+List<Person> result = template.find(query, Person.class);
+----
+====
+
+[NOTE]
+====
+When including `null` values via the `ExampleMatcher`, Spring Data MongoDB uses embedded document matching instead of dot notation property matching.
+Doing so forces exact document matching for all property values and the property order in the embedded document.
+====
+
+[NOTE]
+====
+`UntypedExampleMatcher` is likely the right choice for you if you are storing different entities within a single collection or opted out of writing type hints.
+
+Also, keep in mind that using `@TypeAlias` requires eager initialization of the `MappingContext`. To do so, configure `initialEntitySet` to ensure proper alias resolution for read operations.
+====
+
+Spring Data MongoDB provides support for different matching options:
+
+.`StringMatcher` options
+[%collapsible]
+====
+[cols="1,2", options="header"]
+|===
+| Matching
+| Logical result
+
+| `DEFAULT` (case-sensitive)
+| `{"firstname" : firstname}`
+
+| `DEFAULT` (case-insensitive)
+| `{"firstname" : { $regex: firstname, $options: 'i'}}`
+
+| `EXACT` (case-sensitive)
+| `{"firstname" : { $regex: /^firstname$/}}`
+
+| `EXACT` (case-insensitive)
+| `{"firstname" : { $regex: /^firstname$/, $options: 'i'}}`
+
+| `STARTING` (case-sensitive)
+| `{"firstname" : { $regex: /^firstname/}}`
+
+| `STARTING` (case-insensitive)
+| `{"firstname" : { $regex: /^firstname/, $options: 'i'}}`
+
+| `ENDING` (case-sensitive)
+| `{"firstname" : { $regex: /firstname$/}}`
+
+| `ENDING` (case-insensitive)
+| `{"firstname" : { $regex: /firstname$/, $options: 'i'}}`
+
+| `CONTAINING` (case-sensitive)
+| `{"firstname" : { $regex: /.\*firstname.*/}}`
+
+| `CONTAINING` (case-insensitive)
+| `{"firstname" : { $regex: /.\*firstname.*/, $options: 'i'}}`
+
+| `REGEX` (case-sensitive)
+| `{"firstname" : { $regex: /firstname/}}`
+
+| `REGEX` (case-insensitive)
+| `{"firstname" : { $regex: /firstname/, $options: 'i'}}`
+
+|===
+====
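+
+For instance, a case-insensitive `CONTAINING` match on the string properties of the `Person` probe from above can be configured as follows (a sketch; the probe contents are illustrative):
+
+[source,java]
+----
+Person probe = new Person();
+probe.lastname = "tark";
+
+ExampleMatcher matcher = ExampleMatcher.matching()
+    .withStringMatcher(ExampleMatcher.StringMatcher.CONTAINING) // translates to a /.*tark.*/ regex
+    .withIgnoreCase();
+
+Example<Person> example = Example.of(probe, matcher);
+List<Person> result = template.find(new Query(new Criteria().alike(example)), Person.class);
+----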
+
+[[mongo.jsonSchema.query]]
+== Query a collection for matching JSON Schema
+
+You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows:
+
+.Query for Documents matching a `$jsonSchema`
+====
+[source,java]
+----
+MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
+
+template.find(query(matchingDocumentStructure(schema)), Person.class);
+----
+====
+
+Please refer to the xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] section to learn more about the schema support in Spring Data MongoDB.
+
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/value-expressions.adoc b/src/main/antora/modules/ROOT/pages/mongodb/value-expressions.adoc
new file mode 100644
index 0000000000..6356a46265
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/value-expressions.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$value-expressions.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/observability/conventions.adoc b/src/main/antora/modules/ROOT/pages/observability/conventions.adoc
new file mode 100644
index 0000000000..6c18493fd0
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/observability/conventions.adoc
@@ -0,0 +1,11 @@
+[[observability-conventions]]
+= Conventions
+
+Below you can find a list of all `GlobalObservationConvention` and `ObservationConvention` implementations declared by this project.
+
+.ObservationConvention implementations
+|===
+|ObservationConvention Class Name | Applicable ObservationContext Class Name
+|`org.springframework.data.mongodb.observability.DefaultMongoHandlerObservationConvention`|`MongoHandlerContext`
+|`org.springframework.data.mongodb.observability.MongoHandlerObservationConvention`|`MongoHandlerContext`
+|===
diff --git a/src/main/antora/modules/ROOT/pages/observability/metrics.adoc b/src/main/antora/modules/ROOT/pages/observability/metrics.adoc
new file mode 100644
index 0000000000..8b5ce92a64
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/observability/metrics.adoc
@@ -0,0 +1,39 @@
+[[observability-metrics]]
+= Metrics
+
+Below you can find a list of all metrics declared by this project.
+
+[[observability-metrics-mongodb-command-observation]]
+== Mongodb Command Observation
+
+____
+Timer created around a MongoDB command execution.
+____
+
+**Metric name** `spring.data.mongodb.command`. **Type** `timer`.
+
+**Metric name** `spring.data.mongodb.command.active`. **Type** `long task timer`.
+
+IMPORTANT: KeyValues that are added after starting the Observation might be missing from the *.active metrics.
+
+IMPORTANT: Micrometer internally uses `nanoseconds` for the base unit. However, each backend determines the actual base unit (for example, Prometheus uses seconds).
+
+Fully qualified name of the enclosing class `org.springframework.data.mongodb.observability.MongoObservation`.
+
+.Low cardinality Keys
+[cols="a,a"]
+|===
+|Name | Description
+|`db.connection_string` _(required)_|MongoDB connection string.
+|`db.mongodb.collection` _(required)_|MongoDB collection name.
+|`db.name` _(required)_|MongoDB database name.
+|`db.operation` _(required)_|MongoDB command value.
+|`db.system` _(required)_|MongoDB database system.
+|`db.user` _(required)_|MongoDB user.
+|`net.peer.name` _(required)_|Name of the database host.
+|`net.peer.port` _(required)_|Logical remote port number.
+|`net.sock.peer.addr` _(required)_|Mongo peer address.
+|`net.sock.peer.port` _(required)_|Mongo peer port.
+|`net.transport` _(required)_|Network transport.
+|`spring.data.mongodb.cluster_id` _(required)_|MongoDB cluster identifier.
+|===
diff --git a/src/main/antora/modules/ROOT/pages/observability/observability.adoc b/src/main/antora/modules/ROOT/pages/observability/observability.adoc
new file mode 100644
index 0000000000..8a9b0a1eeb
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/observability/observability.adoc
@@ -0,0 +1,42 @@
+:root-target: ../../../../target/
+
+[[mongodb.observability]]
+= Observability
+
+Spring Data MongoDB currently has the most up-to-date code to support Observability in your MongoDB application.
+These changes, however, haven't been picked up by Spring Boot (yet).
+Until those changes are applied, if you wish to use Spring Data MongoDB's flavor of Observability, you must carry out the following steps:
+
+. First of all, you must opt into Spring Data MongoDB's configuration settings by customizing `MongoClientSettings` through either your `@SpringBootApplication` class or one of your configuration classes.
++
+.Registering MongoDB Micrometer customizer setup
+====
+[source,java]
+----
+@Bean
+MongoClientSettingsBuilderCustomizer mongoMetricsSynchronousContextProvider(ObservationRegistry registry) {
+    return (clientSettingsBuilder) -> {
+        clientSettingsBuilder.contextProvider(ContextProviderFactory.create(registry))
+            .addCommandListener(new MongoObservationCommandListener(registry));
+    };
+}
+----
+====
++
+. Your project must include *Spring Boot Actuator*.
+. Disable Spring Boot's autoconfigured MongoDB command listener and enable tracing manually by adding the following properties to your `application.properties`:
++
+.Custom settings to apply
+====
+[source]
+----
+# Disable Spring Boot's autoconfigured tracing
+management.metrics.mongo.command.enabled=false
+# Enable it manually
+management.tracing.enabled=true
+----
+Be sure to add any other relevant settings needed to configure the tracer you are using based upon Micrometer's reference documentation.
+====
+
+This should do it! You are now running with Spring Data MongoDB's usage of Spring Observability's `Observation` API.
+See also https://opentelemetry.io/docs/reference/specification/trace/semantic_conventions/database/#mongodb[OpenTelemetry Semantic Conventions] for further reference.
diff --git a/src/main/antora/modules/ROOT/pages/observability/spans.adoc b/src/main/antora/modules/ROOT/pages/observability/spans.adoc
new file mode 100644
index 0000000000..8e79d33a86
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/observability/spans.adoc
@@ -0,0 +1,30 @@
+[[observability-spans]]
+= Spans
+
+Below you can find a list of all spans declared by this project.
+
+[[observability-spans-mongodb-command-observation]]
+== Mongodb Command Observation Span
+
+> Timer created around a MongoDB command execution.
+
+**Span name** `spring.data.mongodb.command`.
+
+Fully qualified name of the enclosing class `org.springframework.data.mongodb.observability.MongoObservation`.
+
+.Tag Keys
+|===
+|Name | Description
+|`db.connection_string` _(required)_|MongoDB connection string.
+|`db.mongodb.collection` _(required)_|MongoDB collection name.
+|`db.name` _(required)_|MongoDB database name.
+|`db.operation` _(required)_|MongoDB command value.
+|`db.system` _(required)_|MongoDB database system.
+|`db.user` _(required)_|MongoDB user.
+|`net.peer.name` _(required)_|Name of the database host.
+|`net.peer.port` _(required)_|Logical remote port number.
+|`net.sock.peer.addr` _(required)_|Mongo peer address.
+|`net.sock.peer.port` _(required)_|Mongo peer port.
+|`net.transport` _(required)_|Network transport.
+|`spring.data.mongodb.cluster_id` _(required)_|MongoDB cluster identifier.
+|===
diff --git a/src/main/antora/modules/ROOT/pages/preface.adoc b/src/main/antora/modules/ROOT/pages/preface.adoc
new file mode 100644
index 0000000000..d52509c81e
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/preface.adoc
@@ -0,0 +1,105 @@
+[[requirements]]
+= Requirements
+
+The Spring Data MongoDB 4.x binaries require JDK level 17 and above and https://spring.io/docs[Spring Framework] {springVersion} and above.
+
+In terms of database and driver, you need at least version 4.x of https://www.mongodb.org/[MongoDB] and a compatible MongoDB Java Driver (5.2.x).
+
+[[compatibility.matrix]]
+== Compatibility Matrix
+
+The following compatibility matrix maps Spring Data versions to compatible MongoDB driver and database versions.
+Database versions show server generations that pass the Spring Data test suite.
+You can use newer server versions unless your application uses functionality that is affected by xref:preface.adoc#compatibility.changes[changes in the MongoDB server].
+See also the https://www.mongodb.com/docs/drivers/java/sync/current/compatibility/[official MongoDB driver compatibility matrix] for driver and server version compatibility.
+
+====
+[cols="h,m,m,m", options="header"]
+|===
+
+|Spring Data Release Train
+|Spring Data MongoDB
+|Driver Version
+|Database Versions
+
+|2025.0
+|4.5.x
+|5.3.x
+|6.x to 8.x
+
+|2024.1
+|4.4.x
+|5.2.x
+|4.4.x to 8.x
+
+|2024.0
+|4.3.x
+|4.11.x & 5.x
+|4.4.x to 7.x
+
+|2023.1
+|4.2.x
+|4.9.x
+|4.4.x to 7.x
+
+|2023.0 (*)
+|4.1.x
+|4.9.x
+|4.4.x to 6.x
+
+|2022.0 (*)
+|4.0.x
+|4.7.x
+|4.4.x to 6.x
+
+|2021.2 (*)
+|3.4.x
+|4.6.x
+|4.4.x to 5.0.x
+
+|2021.1 (*)
+|3.3.x
+|4.4.x
+|4.4.x to 5.0.x
+
+|2021.0 (*)
+|3.2.x
+|4.1.x
+|4.4.x
+
+|2020.0 (*)
+|3.1.x
+|4.1.x
+|4.4.x
+
+|Neumann (*)
+|3.0.x
+|4.0.x
+|4.4.x
+
+|Moore (*)
+|2.2.x
+|3.11.x/Reactive Streams 1.12.x
+|4.2.x
+
+|Lovelace (*)
+|2.1.x
+|3.8.x/Reactive Streams 1.9.x
+|4.0.x
+
+|===
+(*) https://spring.io/projects/spring-data-mongodb#support[End of OSS Support]
+====
+
+[[compatibility.changes]]
+[[compatibility.changes-4.4]]
+=== Relevant Changes in MongoDB 4.4
+
+* Fields list must not contain the text search score property when no `$text` criteria is present. See also https://docs.mongodb.com/manual/reference/operator/query/text/[`$text` operator]
+* Sort must not be an empty document when running map reduce.
+
+[[compatibility.changes-4.2]]
+=== Relevant Changes in MongoDB 4.2
+
+* Removal of `geoNear` command. See also https://docs.mongodb.com/manual/release-notes/4.2-compatibility/#remove-support-for-the-geonear-command[Removal of `geoNear`]
+* Removal of `eval` command. See also https://docs.mongodb.com/manual/release-notes/4.2-compatibility/#remove-support-for-the-eval-command[Removal of `eval`]
diff --git a/src/main/antora/modules/ROOT/pages/repositories.adoc b/src/main/antora/modules/ROOT/pages/repositories.adoc
new file mode 100644
index 0000000000..01b79f6c52
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories.adoc
@@ -0,0 +1,8 @@
+[[mongodb.repositories]]
+= Repositories
+:page-section-summary-toc: 1
+
+This chapter explains the basic foundations of Spring Data repositories and MongoDB specifics.
+Before continuing to the MongoDB specifics, make sure you have a sound understanding of the basic concepts.
+
+The goal of the Spring Data repository abstraction is to significantly reduce the amount of boilerplate code required to implement data access layers for various persistence stores.
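+
+As a first impression of how little code that can be, a repository for a hypothetical `Person` document boils down to a plain interface declaration (a sketch; the domain type and query method are assumptions):
+
+[source,java]
+----
+interface PersonRepository extends MongoRepository<Person, String> {
+
+    // Spring Data derives the query from the method name at runtime
+    List<Person> findByLastname(String lastname);
+}
+----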
diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc
new file mode 100644
index 0000000000..1a4af7a60b
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc
@@ -0,0 +1,12 @@
+include::{commons}@data-commons::page$repositories/core-concepts.adoc[]
+
+[[mongodb.entity-persistence.state-detection-strategies]]
+include::{commons}@data-commons::page$is-new-state-detection.adoc[leveloffset=+1]
+
+[NOTE]
+====
+Spring Data defaults to identifier inspection to determine whether an entity is new.
+As a consequence, entities with manually assigned identifiers cannot be detected as new by inspection alone.
+If you want to use xref:mongodb/auditing.adoc[auditing], make sure to either use xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] or implement `Persistable` for proper entity state detection.
+====
diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc
new file mode 100644
index 0000000000..f84313e9da
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$repositories/core-domain-events.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc
new file mode 100644
index 0000000000..75dcea1e4f
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc
@@ -0,0 +1,242 @@
+[[core.extensions]]
+= Spring Data Extensions
+
+This section documents a set of Spring Data extensions that enable Spring Data usage in a variety of contexts.
+Currently, most of the integration is targeted towards Spring MVC.
+
+include::{commons}@data-commons::page$repositories/core-extensions-querydsl.adoc[leveloffset=1]
+
+[[mongodb.repositories.queries.type-safe]]
+=== Type-safe Query Methods with Querydsl
+
+The MongoDB repository support and its reactive counterpart integrate with the http://www.querydsl.com/[Querydsl] project, which provides a way to perform type-safe queries.
+
+[quote,Querydsl Team]
+Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API.
+
+It provides the following features:
+
+* Code completion in the IDE (all properties, methods, and operations can be expanded in your favorite Java IDE).
+* Almost no syntactically invalid queries allowed (type-safe on all levels).
+* Domain types and properties can be referenced safely -- no strings involved!
+* Adapts better to refactoring changes in domain types.
+* Incremental query definition is easier.
+
+See the http://www.querydsl.com/static/querydsl/latest/reference/html/[Querydsl documentation] for how to bootstrap your environment for APT-based code generation using Maven or Ant.
+
+Querydsl lets you write queries such as the following:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+QPerson person = QPerson.person;
+List<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
+
+Page<Person> page = repository.findAll(person.lastname.contains("a"),
+    PageRequest.of(0, 2, Direction.ASC, "lastname"));
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+QPerson person = QPerson.person;
+
+Flux<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
+----
+======
+
+`QPerson` is a class that is generated by the Java annotation processor.
+See xref:#mongodb.repositories.queries.type-safe.apt[Setting up Annotation Processing] for how to set up annotation processing with your build system.
+Expressions derived from it, such as `person.lastname.contains("a")`, are `Predicate` instances that let you write type-safe queries.
+Notice that there are no strings in the query other than the `C0123` value.
+
+You can use the generated `Predicate` class by using the `QuerydslPredicateExecutor` / `ReactiveQuerydslPredicateExecutor` interfaces, which the following listing shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface QuerydslPredicateExecutor<T> {
+
+    Optional<T> findOne(Predicate predicate);
+
+    List<T> findAll(Predicate predicate);
+
+    List<T> findAll(Predicate predicate, Sort sort);
+
+    List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
+
+    Page<T> findAll(Predicate predicate, Pageable pageable);
+
+    List<T> findAll(OrderSpecifier<?>... orders);
+
+    long count(Predicate predicate);
+
+    boolean exists(Predicate predicate);
+
+    <S extends T, R> R findBy(Predicate predicate, Function<FluentQuery.FetchableFluentQuery<S>, R> queryFunction);
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+interface ReactiveQuerydslPredicateExecutor<T> {
+
+    Mono<T> findOne(Predicate predicate);
+
+    Flux<T> findAll(Predicate predicate);
+
+    Flux<T> findAll(Predicate predicate, Sort sort);
+
+    Flux<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
+
+    Flux<T> findAll(OrderSpecifier<?>... orders);
+
+    Mono<Long> count(Predicate predicate);
+
+    Mono<Boolean> exists(Predicate predicate);
+
+    <S extends T, R, P extends Publisher<R>> P findBy(Predicate predicate,
+        Function<FluentQuery.ReactiveFluentQuery<S>, P> queryFunction);
+}
+----
+======
+
+To use this in your repository implementation, add it to the list of repository interfaces from which your interface inherits, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+interface PersonRepository extends MongoRepository<Person, String>, QuerydslPredicateExecutor<Person> {
+
+    // additional query methods go here
+}
+----
+
+Reactive::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+
+interface PersonRepository extends ReactiveMongoRepository<Person, String>, ReactiveQuerydslPredicateExecutor<Person> {
+
+    // additional query methods go here
+}
+----
+
+NOTE: Please note that joins (`DBRef`s) are not supported with Reactive MongoDB support.
+====
+======
+
+[[mongodb.repositories.queries.type-safe.apt]]
+=== Setting up Annotation Processing
+
+To use Querydsl with Spring Data MongoDB, you need to set up annotation processing in your build system that generates the `Q` classes.
+While you could write the `Q` classes by hand, it is recommended to use the Querydsl annotation processor to generate them for you to keep your `Q` classes in sync with your domain model.
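+
+For illustration, the generated `QPerson` used above roughly takes the following shape (a simplified sketch; the actual generated class contains additional metadata and paths):
+
+[source,java]
+----
+// Generated by the Querydsl annotation processor; edit the domain type instead
+public class QPerson extends EntityPathBase<Person> {
+
+    public static final QPerson person = new QPerson("person");
+
+    public final StringPath lastname = createString("lastname");
+    public final NumberPath<Integer> age = createNumber("age", Integer.class);
+
+    public QPerson(String variable) {
+        super(Person.class, variable);
+    }
+}
+----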
+
+Spring Data MongoDB ships with an annotation processor javadoc:org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor[] that isn't registered by default.
+Typically, annotation processors are registered through Java's service loader via `META-INF/services/javax.annotation.processing.Processor`, which also activates them once they are on the class path.
+Most Spring Data users do not use Querydsl, so it does not make sense to require additional mandatory dependencies for projects that would not benefit from Querydsl.
+Hence, you need to activate annotation processing in your build system.
+
+The following example shows how to set up annotation processing, showing the required dependencies and compiler configuration changes in Maven and Gradle:
+
+[tabs]
+======
+Maven::
++
+[source,xml,indent=0,subs="verbatim,quotes",role="primary"]
+----
+<dependencies>
+
+    <dependency>
+        <groupId>org.springframework.data</groupId>
+        <artifactId>spring-data-mongodb</artifactId>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-mongodb</artifactId>
+        <version>${querydslVersion}</version>
+        <exclusions>
+            <exclusion>
+                <groupId>org.mongodb</groupId>
+                <artifactId>mongo-java-driver</artifactId>
+            </exclusion>
+        </exclusions>
+    </dependency>
+
+    <dependency>
+        <groupId>com.querydsl</groupId>
+        <artifactId>querydsl-apt</artifactId>
+        <version>${querydslVersion}</version>
+        <classifier>jakarta</classifier>
+        <scope>provided</scope>
+    </dependency>
+
+</dependencies>
+
+<build>
+    <plugins>
+        <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <configuration>
+                <annotationProcessors>
+                    <annotationProcessor>org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor</annotationProcessor>
+                </annotationProcessors>
+                <generatedTestSourcesDirectory>target/generated-test-sources</generatedTestSourcesDirectory>
+                <generatedSourcesDirectory>target/generated-sources</generatedSourcesDirectory>
+            </configuration>
+        </plugin>
+    </plugins>
+</build>
+----
+
+Gradle::
++
+====
+[source,groovy,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+dependencies {
+    implementation 'com.querydsl:querydsl-mongodb:${querydslVersion}'
+
+    annotationProcessor 'com.querydsl:querydsl-apt:${querydslVersion}:jakarta'
+    annotationProcessor 'org.springframework.data:spring-data-mongodb'
+
+    testAnnotationProcessor 'com.querydsl:querydsl-apt:${querydslVersion}:jakarta'
+    testAnnotationProcessor 'org.springframework.data:spring-data-mongodb'
+}
+
+tasks.withType(JavaCompile).configureEach {
+    options.compilerArgs += [
+        "-processor",
+        "org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor"]
+}
+----
+====
+======
+
+Note that the setup above shows the simplest usage, omitting any other options or dependencies that your project might require.
+This way of configuring annotation processing disables Java's annotation processor scanning because the MongoDB annotation processor must be specified by class name.
+If you're using other annotation processors, you need to add them to the list of `-processor`/`annotationProcessors` as well.
+
+include::{commons}@data-commons::page$repositories/core-extensions-web.adoc[leveloffset=1]
+
+include::{commons}@data-commons::page$repositories/core-extensions-populators.adoc[leveloffset=1]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc
new file mode 100644
index 0000000000..2ae01801b1
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$repositories/create-instances.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc
new file mode 100644
index 0000000000..c7615191a6
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$repositories/custom-implementations.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/definition.adoc b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc
new file mode 100644
index 0000000000..bd65a8af83
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$repositories/definition.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc
new file mode 100644
index 0000000000..081bac9f61
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$repositories/null-handling.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/projections.adoc b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc
new file mode 100644
index 0000000000..9302203c56
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc
@@ -0,0 +1,2 @@
+[[mongodb.projections]]
+include::{commons}@data-commons::page$repositories/projections.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-by-example.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-by-example.adoc
new file mode 100644
index 0000000000..6c07119c72
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/query-by-example.adoc
@@ -0,0 +1,25 @@
+include::{commons}@data-commons::query-by-example.adoc[]
+
+[[query-by-example.running]]
+== Running an Example
+
+The following example shows how to query by example when using a repository (of `Person` objects, in this case):
+
+.Query by Example using a repository
+====
+[source, java]
+----
+public interface PersonRepository extends QueryByExampleExecutor<Person> {
+
+}
+
+public class PersonService {
+
+    @Autowired PersonRepository personRepository;
+
+    public Iterable<Person> findPeople(Person probe) {
+        return personRepository.findAll(Example.of(probe));
+    }
+}
+----
+====
diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc
new file mode 100644
index 0000000000..e495eddc6b
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc
@@ -0,0 +1 @@
+include::{commons}@data-commons::page$repositories/query-keywords-reference.adoc[]
diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc
b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc new file mode 100644 index 0000000000..dfe4814955 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-methods-details.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc new file mode 100644 index 0000000000..a73c3201d0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-return-types-reference.adoc[] diff --git a/src/main/antora/resources/antora-resources/antora.yml b/src/main/antora/resources/antora-resources/antora.yml new file mode 100644 index 0000000000..857dc03fbe --- /dev/null +++ b/src/main/antora/resources/antora-resources/antora.yml @@ -0,0 +1,23 @@ +version: ${antora-component.version} +prerelease: ${antora-component.prerelease} + +asciidoc: + attributes: + version: ${project.version} + copyright-year: ${current.year} + springversionshort: ${spring.short} + springversion: ${spring} + attribute-missing: 'warn' + commons: ${springdata.commons.docs} + include-xml-namespaces: false + spring-data-commons-docs-url: https://docs.spring.io/spring-data/commons/reference + spring-data-commons-javadoc-base: https://docs.spring.io/spring-data/commons/docs/${springdata.commons}/api/ + springdocsurl: https://docs.spring.io/spring-framework/reference/{springversionshort} + springjavadocurl: https://docs.spring.io/spring-framework/docs/${spring}/javadoc-api + spring-framework-docs: '{springdocsurl}' + spring-framework-javadoc: '{springjavadocurl}' + springhateoasversion: ${spring-hateoas} + releasetrainversion: ${releasetrain} + reactor: ${reactor} + mongoversion: ${mongo} + store: Mongo diff --git a/src/main/asciidoc/index.adoc b/src/main/asciidoc/index.adoc deleted file mode 100644 index aee79f8aed..0000000000 --- a/src/main/asciidoc/index.adoc +++ /dev/null @@ -1,48 +0,0 @@ -= Spring Data MongoDB - Reference Documentation -Mark Pollack; Thomas Risberg; Oliver Gierke; Costin Leau; Jon Brisbin; Thomas Darimont; Christoph Strobl; Mark Paluch -:revnumber: {version} -:revdate: {localdate} -:toc: -:toc-placement!: -:spring-data-commons-docs: ../../../../spring-data-commons/src/main/asciidoc - -(C) 2008-2015 The original authors. 
-
-NOTE: _Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically._
-
-toc::[]
-
-include::preface.adoc[]
-
-:leveloffset: +1
-include::new-features.adoc[]
-include::{spring-data-commons-docs}/dependencies.adoc[]
-include::{spring-data-commons-docs}/repositories.adoc[]
-:leveloffset: -1
-
-[[reference]]
-= Reference Documentation
-
-:leveloffset: +1
-include::reference/introduction.adoc[]
-include::reference/mongodb.adoc[]
-include::reference/mongo-repositories.adoc[]
-include::{spring-data-commons-docs}/auditing.adoc[]
-include::reference/mongo-auditing.adoc[]
-include::reference/mapping.adoc[]
-include::reference/cross-store.adoc[]
-include::reference/logging.adoc[]
-include::reference/jmx.adoc[]
-include::reference/mongo-3.adoc[]
-:leveloffset: -1
-
-[[appendix]]
-= Appendix
-
-:numbered!:
-:leveloffset: +1
-include::{spring-data-commons-docs}/repository-namespace-reference.adoc[]
-include::{spring-data-commons-docs}/repository-populator-namespace-reference.adoc[]
-include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[]
-include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[]
-:leveloffset: -1
diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc
deleted file mode 100644
index 50df5db996..0000000000
--- a/src/main/asciidoc/new-features.adoc
+++ /dev/null
@@ -1,34 +0,0 @@
-[[new-features]]
-= New & Noteworthy
-
-[[new-features.1-9-0]]
-== What's new in Spring Data MongoDB 1.9
-* The following annotations have been enabled to build own, composed annotations: `@Document`, `@Id`, `@Field`, `@Indexed`, `@CompoundIndex`, `@GeoSpatialIndexed`, `@TextIndexed`, `@Query`, `@Meta`.
-* Support for <> in repository query methods.
-* Support for <>.
-* Out-of-the-box support for `java.util.Currency` in object mapping.
-* Add support for the bulk operations introduced in MongoDB 2.6.
-* Upgrade to Querydsl 4.
-* Assert compatibility with MongoDB 3.0 and MongoDB Java Driver 3.2 (see: <>).
-
-[[new-features.1-8-0]]
-== What's new in Spring Data MongoDB 1.8
-
-* `Criteria` offers support for creating `$geoIntersects`.
-* Support http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/expressions.html[SpEL expressions] in `@Query`.
-* `MongoMappingEvents` expose the collection name they are issued for.
-* Improved support for ``.
-* Improved index creation failure error message.
-
-[[new-features.1-7-0]]
-== What's new in Spring Data MongoDB 1.7
-
-* Assert compatibility with MongoDB 3.0 and MongoDB Java Driver 3-beta3 (see: <>).
-* Support JSR-310 and ThreeTen back-port date/time types.
-* Allow `Stream` as query method return type (see: <>).
-* Added http://geojson.org/[GeoJSON] support in both domain types and queries (see: <>).
-* `QueryDslPredicateExecutor` now supports `findAll(OrderSpecifier… orders)`.
-* Support calling JavaScript functions via <>.
-* Improve support for `CONTAINS` keyword on collection-like properties.
-* Support for `$bit`, `$mul` and `$position` operators to `Update`.
-
diff --git a/src/main/asciidoc/preface.adoc b/src/main/asciidoc/preface.adoc
deleted file mode 100644
index 38a3279e62..0000000000
--- a/src/main/asciidoc/preface.adoc
+++ /dev/null
@@ -1,59 +0,0 @@
-[[preface]]
-= Preface
-
-The Spring Data MongoDB project applies core Spring concepts to the development of solutions using the MongoDB document style data store. We provide a "template" as a high-level abstraction for storing and querying documents. You will notice similarities to the JDBC support in the Spring Framework.
-
-This document is the reference guide for Spring Data - Document Support. It explains Document module concepts and semantics and the syntax for various store namespaces.
-
-This section provides some basic introduction to Spring and Document databases. The rest of the document refers only to Spring Data Document features and assumes the user is familiar with document databases such as MongoDB and CouchDB as well as Spring concepts.
-
-[[get-started:first-steps:spring]]
-== Knowing Spring
-Spring Data uses Spring framework's http://docs.spring.io/spring/docs/4.2.x/spring-framework-reference/html/spring-core.html[core] functionality, such as the http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/beans.html[IoC] container, http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/validation.html#core-convert[type conversion system], http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/expressions.html[expression language], http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/jmx.html[JMX integration], and portable http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/dao.html#dao-exceptions[DAO exception hierarchy]. While it is not important to know the Spring APIs, understanding the concepts behind them is. At a minimum, the idea behind IoC should be familiar for whatever IoC container you choose to use.
-
-The core functionality of the MongoDB and CouchDB support can be used directly, with no need to invoke the IoC services of the Spring Container. This is much like `JdbcTemplate`, which can be used 'standalone' without any other services of the Spring container. To leverage all the features of Spring Data Document, such as the repository support, you will need to configure some parts of the library using Spring.
-
-To learn more about Spring, you can refer to the comprehensive (and sometimes disarming) documentation that explains the Spring Framework in detail. There are a lot of articles, blog entries and books on the matter - take a look at the Spring framework http://spring.io/docs[home page] for more information.
-
-[[get-started:first-steps:nosql]]
-== Knowing NoSQL and Document databases
-NoSQL stores have taken the storage world by storm. It is a vast domain with a plethora of solutions, terms and patterns (to make things worse, even the term itself has multiple http://www.google.com/search?q=nosoql+acronym[meanings]). While some of the principles are common, it is crucial that the user is familiar to some degree with the stores supported by DATADOC. The best way to get acquainted with these solutions is to read their documentation and follow their examples - it usually doesn't take more than 5-10 minutes to go through them, and if you are coming from an RDBMS-only background these exercises can often be an eye-opener.
-
-The jumping off ground for learning about MongoDB is http://www.mongodb.org/[www.mongodb.org].
Here is a list of other useful resources:
-
-* The http://docs.mongodb.org/manual/[manual] introduces MongoDB and contains links to getting started guides, reference documentation and tutorials.
-* The http://try.mongodb.org/[online shell] provides a convenient way to interact with a MongoDB instance in combination with the online http://docs.mongodb.org/manual/tutorial/getting-started/[tutorial].
-* MongoDB http://docs.mongodb.org/ecosystem/drivers/java/[Java Language Center]
-* Several http://www.mongodb.org/books[books] are available for purchase
-* Karl Seguin's online book: http://openmymind.net/mongodb.pdf[The Little MongoDB Book]
-
-[[requirements]]
-== Requirements
-
-Spring Data MongoDB 1.x binaries require JDK level 6.0 and above, and http://spring.io/docs[Spring Framework] {springVersion} and above.
-
-In terms of document stores, http://www.mongodb.org/[MongoDB] version 2.6 or higher is required.
-
-== Additional Help Resources
-
-Learning a new framework is not always straightforward. In this section, we try to provide what we think is an easy-to-follow guide for getting started with the Spring Data Document module. However, if you encounter issues or you are just looking for advice, feel free to use one of the links below:
-
-[[get-started:help]]
-=== Support
-
-There are a few support options available:
-
-[[get-started:help:community]]
-==== Community Forum
-
-http://stackoverflow.com/questions/tagged/spring-data[`spring-data`] on Stack Overflow is a tag for all Spring Data (not just Document) users to share information and help each other. Note that registration is needed *only* for posting.
-
-[[get-started:help:professional]]
-==== Professional Support
-
-Professional, from-the-source support, with guaranteed response time, is available from http://pivotal.io/[Pivotal Software, Inc.], the company behind Spring Data and Spring.
-
-[[get-started:up-to-date]]
-=== Following Development
-
-For information on the Spring Data Mongo source code repository, nightly builds and snapshot artifacts please see the http://projects.spring.io/spring-data-mongodb/[Spring Data Mongo homepage]. You can help make Spring Data best serve the needs of the Spring community by interacting with developers through the Community on http://stackoverflow.com/questions/tagged/spring-data[Stackoverflow]. To follow developer activity look for the mailing list information on the Spring Data Mongo homepage. If you encounter a bug or want to suggest an improvement, please create a ticket on the Spring Data issue https://jira.spring.io/browse/DATAMONGO[tracker]. To stay up to date with the latest news and announcements in the Spring ecosystem, subscribe to the Spring Community http://spring.io[Portal]. Lastly, you can follow the SpringSource Data http://spring.io/blog[blog] or the project team on Twitter (http://twitter.com/SpringData[SpringData]).
\ No newline at end of file
diff --git a/src/main/asciidoc/reference/cross-store.adoc b/src/main/asciidoc/reference/cross-store.adoc
deleted file mode 100644
index 01a5991616..0000000000
--- a/src/main/asciidoc/reference/cross-store.adoc
+++ /dev/null
@@ -1,250 +0,0 @@
-[[mongo.cross.store]]
-= Cross Store support
-
-Sometimes you need to store data in multiple data stores and these data stores can be of different types. One might be relational while the other a document store. For this use case we have created a separate module in the MongoDB support that handles what we call cross-store support.
The current implementation is based on JPA as the driver for the relational database and we allow select fields in the Entities to be stored in a Mongo database. In addition to allowing you to store your data in two stores we also coordinate persistence operations for the non-transactional MongoDB store with the transaction life-cycle for the relational database.
-
-[[mongodb_cross-store-configuration]]
-== Cross Store Configuration
-
-Assuming that you have a working JPA application and would like to add some cross-store persistence for MongoDB, what do you have to add to your configuration?
-
-First of all you need to add a dependency on the module. Using Maven this is done by adding a dependency to your pom:
-
-.Example Maven pom.xml with spring-data-mongodb-cross-store dependency
-====
-[source,xml]
-----
-<project xmlns="http://maven.apache.org/POM/4.0.0">
-  <modelVersion>4.0.0</modelVersion>
-
-  ...
-
-  <dependencies>
-    <dependency>
-      <groupId>org.springframework.data</groupId>
-      <artifactId>spring-data-mongodb-cross-store</artifactId>
-      <version>${spring.data.mongo.version}</version>
-    </dependency>
-    ...
-  </dependencies>
-
-</project>
-----
-====
-
-Once this is done we need to enable AspectJ for the project. The cross-store support is implemented using AspectJ aspects so by enabling compile time AspectJ support the cross-store features will become available to your project. In Maven you would add an additional plugin to the `<build>` section of the pom:
-
-.Example Maven pom.xml with AspectJ plugin enabled
-====
-[source,xml]
-----
-<project xmlns="http://maven.apache.org/POM/4.0.0">
-  <modelVersion>4.0.0</modelVersion>
-
-  ...
-
-  <build>
-    <plugins>
-
-      ...
-
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>aspectj-maven-plugin</artifactId>
-        <version>1.0</version>
-        <dependencies>
-          <dependency>
-            <groupId>org.aspectj</groupId>
-            <artifactId>aspectjrt</artifactId>
-            <version>${aspectj.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.aspectj</groupId>
-            <artifactId>aspectjtools</artifactId>
-            <version>${aspectj.version}</version>
-          </dependency>
-        </dependencies>
-        <executions>
-          <execution>
-            <goals>
-              <goal>compile</goal>
-              <goal>test-compile</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <outxml>true</outxml>
-          <aspectLibraries>
-            <aspectLibrary>
-              <groupId>org.springframework</groupId>
-              <artifactId>spring-aspects</artifactId>
-            </aspectLibrary>
-            <aspectLibrary>
-              <groupId>org.springframework.data</groupId>
-              <artifactId>spring-data-mongodb-cross-store</artifactId>
-            </aspectLibrary>
-          </aspectLibraries>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-
-      ...
-
-    </plugins>
-  </build>
-
-  ...
-
-</project>
-----
-====
-
-Finally, you need to configure your project to use MongoDB and also configure the aspects that are used. The following XML snippet should be added to your application context:
-
-.Example application context with MongoDB and cross-store aspect support
-====
-[source,xml]
-----
-<beans>
-
-  ...
-
-  <!-- Mongo instance, MongoTemplate and cross-store aspect bean definitions -->
-
-  ...
-
-</beans>
-----
-====
-
-[[mongodb_cross-store-application]]
-== Writing the Cross Store Application
-
-We are assuming that you have a working JPA application so we will only cover the additional steps needed to persist part of your Entity in your Mongo database. First you need to identify the field you want persisted. It should be a domain class and follow the general rules for the Mongo mapping support covered in previous chapters. The field you want persisted in MongoDB should be annotated using the `@RelatedDocument` annotation. That is really all you need to do! The cross-store aspects take care of the rest. This includes marking the field with `@Transient` so it won't be persisted using JPA, keeping track of any changes made to the field value and writing them to the database on successful transaction completion, and loading the document from MongoDB the first time the value is used in your application. Here is an example of a simple Entity that has a field annotated with `@RelatedDocument`.
-
-.Example of Entity with @RelatedDocument
-====
-[source,java]
-----
-@Entity
-public class Customer {
-
-  @Id
-  @GeneratedValue(strategy = GenerationType.IDENTITY)
-  private Long id;
-
-  private String firstName;
-
-  private String lastName;
-
-  @RelatedDocument
-  private SurveyInfo surveyInfo;
-
-  // getters and setters omitted
-}
-----
-====
-
-.Example of domain class to be stored as document
-====
-[source,java]
-----
-public class SurveyInfo {
-
-  private Map<String, String> questionsAndAnswers;
-
-  public SurveyInfo() {
-    this.questionsAndAnswers = new HashMap<String, String>();
-  }
-
-  public SurveyInfo(Map<String, String> questionsAndAnswers) {
-    this.questionsAndAnswers = questionsAndAnswers;
-  }
-
-  public Map<String, String> getQuestionsAndAnswers() {
-    return questionsAndAnswers;
-  }
-
-  public void setQuestionsAndAnswers(Map<String, String> questionsAndAnswers) {
-    this.questionsAndAnswers = questionsAndAnswers;
-  }
-
-  public SurveyInfo addQuestionAndAnswer(String question, String answer) {
-    this.questionsAndAnswers.put(question, answer);
-    return this;
-  }
-}
-----
-====
-
-Once the SurveyInfo has been set on the Customer object above, the MongoTemplate configured earlier is used to save the SurveyInfo, along with some metadata about the JPA Entity, in a MongoDB collection named after the fully qualified name of the JPA Entity class. The following code:
-
-.Example of code using the JPA Entity configured for cross-store persistence
-====
-[source,java]
-----
-Customer customer = new Customer();
-customer.setFirstName("Sven");
-customer.setLastName("Olafsen");
-SurveyInfo surveyInfo = new SurveyInfo()
-  .addQuestionAndAnswer("age", "22")
-  .addQuestionAndAnswer("married", "Yes")
-  .addQuestionAndAnswer("citizenship", "Norwegian");
-customer.setSurveyInfo(surveyInfo);
-customerRepository.save(customer);
-----
-====
-
-Executing the code above results in the following JSON document stored in MongoDB.
-
-.Example of JSON document stored in MongoDB
-====
-[source,javascript]
-----
-{ "_id" : ObjectId( "4d9e8b6e3c55287f87d4b79e" ),
-  "_entity_id" : 1,
-  "_entity_class" : "org.springframework.data.mongodb.examples.custsvc.domain.Customer",
-  "_entity_field_name" : "surveyInfo",
-  "questionsAndAnswers" : { "married" : "Yes",
-    "age" : "22",
-    "citizenship" : "Norwegian" },
-  "_entity_field_class" : "org.springframework.data.mongodb.examples.custsvc.domain.SurveyInfo" }
-----
-====
diff --git a/src/main/asciidoc/reference/introduction.adoc b/src/main/asciidoc/reference/introduction.adoc
deleted file mode 100644
index d224c29d37..0000000000
--- a/src/main/asciidoc/reference/introduction.adoc
+++ /dev/null
@@ -1,11 +0,0 @@
-[[introduction]]
-= Introduction
-
-== Document Structure
-
-This part of the reference documentation explains the core functionality offered by Spring Data Document.
-
-<<mongo.core>> introduces the MongoDB module feature set.
-
-<<mongo.repositories>> introduces the repository support for MongoDB.
-
diff --git a/src/main/asciidoc/reference/jmx.adoc b/src/main/asciidoc/reference/jmx.adoc
deleted file mode 100644
index 9ebb6f5916..0000000000
--- a/src/main/asciidoc/reference/jmx.adoc
+++ /dev/null
@@ -1,64 +0,0 @@
-[[mongo.jmx]]
-= JMX support
-
-The JMX support for MongoDB exposes the results of executing the 'serverStatus' command on the admin database for a single MongoDB server instance. It also exposes an administrative MBean, MongoAdmin, which will let you perform administrative operations such as drop or create a database. The JMX features build upon the JMX feature set available in the Spring Framework.
See http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/jmx.html[here] for more details.
-
-[[mongodb:jmx-configuration]]
-== MongoDB JMX Configuration
-
-Spring's Mongo namespace makes it easy to enable JMX functionality:
-
-.XML schema to configure MongoDB
-====
-[source,xml]
-----
-<beans>
-
-  <!-- Mongo instance and JMX MBean exporter bean definitions -->
-
-</beans>
-----
-====
-
-This will expose several MBeans:
-
-* AssertMetrics
-* BackgroundFlushingMetrics
-* BtreeIndexCounters
-* ConnectionMetrics
-* GlobalLockMetrics
-* MemoryMetrics
-* OperationCounters
-* ServerInfo
-* MongoAdmin
-
-This is shown below in a screenshot from JConsole:
-
-image::jconsole.png[]
\ No newline at end of file
diff --git a/src/main/asciidoc/reference/logging.adoc b/src/main/asciidoc/reference/logging.adoc
deleted file mode 100644
index 5cd3c5d401..0000000000
--- a/src/main/asciidoc/reference/logging.adoc
+++ /dev/null
@@ -1,33 +0,0 @@
-[[mongo.logging]]
-= Logging support
-
-An appender for Log4j is provided in the maven module "spring-data-mongodb-log4j". Note, there is no dependency on other Spring Mongo modules, only the MongoDB driver.
-
-[[mongodb:logging-configuration]]
-== MongoDB Log4j Configuration
-
-Here is an example configuration:
-
-[source]
-----
-log4j.rootCategory=INFO, stdout
-
-log4j.appender.stdout=org.springframework.data.document.mongodb.log4j.MongoLog4jAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - <%m>%n
-log4j.appender.stdout.host = localhost
-log4j.appender.stdout.port = 27017
-log4j.appender.stdout.database = logs
-log4j.appender.stdout.collectionPattern = %X{year}%X{month}
-log4j.appender.stdout.applicationId = my.application
-log4j.appender.stdout.warnOrHigherWriteConcern = FSYNC_SAFE
-
-log4j.category.org.apache.activemq=ERROR
-log4j.category.org.springframework.batch=DEBUG
-log4j.category.org.springframework.data.document.mongodb=DEBUG
-log4j.category.org.springframework.transaction=INFO
-----
-
-The important configuration to look at aside from host and port is the database and collectionPattern. The variables year, month, day and hour are available for you to use in forming a collection name. This is to support the common convention of grouping log information in a collection that corresponds to a specific time period, for example a collection per day.
-
-There is also an applicationId which is put into the stored message. The document stored from logging has the following keys: level, name, applicationId, timestamp, properties, traceback, and message.
diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc
deleted file mode 100644
index b7873035d4..0000000000
--- a/src/main/asciidoc/reference/mapping.adoc
+++ /dev/null
@@ -1,456 +0,0 @@
-[[mapping-chapter]]
-= Mapping
-
-Rich mapping support is provided by the `MongoMappingConverter`. `MongoMappingConverter` has a rich metadata model that provides a full feature set of functionality to map domain objects to MongoDB documents. The mapping metadata model is populated using annotations on your domain objects. However, the infrastructure is not limited to using annotations as the only source of metadata information. The `MongoMappingConverter` also allows you to map objects to documents without providing any additional metadata, by following a set of conventions.
-
-In this section we will describe the features of the `MongoMappingConverter`:
how to use conventions for mapping objects to documents, and how to override those conventions with annotation-based mapping metadata.
-
-NOTE: `SimpleMongoConverter` has been deprecated in Spring Data MongoDB M3 as all of its functionality has been subsumed into `MappingMongoConverter`.
-
-[[mapping-conventions]]
-== Convention based Mapping
-
-`MongoMappingConverter` has a few conventions for mapping objects to documents when no additional mapping metadata is provided. The conventions are:
-
-* The short Java class name is mapped to the collection name in the following manner. The class '`com.bigbank.SavingsAccount`' maps to the '`savingsAccount`' collection name.
-* All nested objects are stored as nested objects in the document and *not* as DBRefs
-* The converter will use any Spring Converters registered with it to override the default mapping of object properties to document field/values.
-* The fields of an object are used to convert to and from fields in the document. Public JavaBean properties are not used.
-* If you have a single non-zero-argument constructor whose constructor argument names match top-level field names of the document, that constructor will be used. Otherwise the zero-argument constructor will be used. If there is more than one non-zero-argument constructor an exception will be thrown.
-
-[[mapping.conventions.id-field]]
-=== How the '_id' field is handled in the mapping layer
-
-MongoDB requires that you have an '_id' field for all documents. If you don't provide one the driver will assign an ObjectId with a generated value. The "_id" field can be of any type, other than arrays, so long as it is unique. The driver naturally supports all primitive types and Dates. When using the `MongoMappingConverter` there are certain rules that govern how properties from the Java class are mapped to this '_id' field.
-
-The following outlines what field will be mapped to the '_id' document field:
-
-* A field annotated with `@Id` (`org.springframework.data.annotation.Id`) will be mapped to the '_id' field.
-* A field without an annotation but named 'id' will be mapped to the '_id' field.
-* The default field name for identifiers is '_id' and can be customized via the `@Field` annotation.
-
-[cols="1,2", options="header"]
-.Examples for the translation of '_id'-field definitions
-|===
-| Field definition
-| Resulting Id-Fieldname in MongoDB
-
-| `String` id
-| `_id`
-
-| `@Field` `String` id
-| `_id`
-
-| `@Field('x')` `String` id
-| `x`
-
-| `@Id` `String` x
-| `_id`
-
-| `@Field('x')` `@Id` `String` x
-| `_id`
-|===
-
-The following outlines what type conversion, if any, will be done on the property mapped to the _id document field (see the sketch following this list).
-
-* If a field named 'id' is declared as a String or BigInteger in the Java class it will be converted to and stored as an ObjectId if possible. ObjectId as a field type is also valid. If you specify a value for 'id' in your application, the conversion to an ObjectId is delegated to the MongoDB driver. If the specified 'id' value cannot be converted to an ObjectId, then the value will be stored as is in the document's _id field.
-* If a field named 'id' is not declared as a String, BigInteger, or ObjectId in the Java class then you should assign it a value in your application so it can be stored 'as-is' in the document's _id field.
-* If no field named 'id' is present in the Java class then an implicit '_id' field will be generated by the driver but not mapped to a property or field of the Java class.
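-
-The following is a minimal sketch (a hypothetical class, not part of the original reference documentation) combining the common cases from the rules above:
-
-[source,java]
-----
-import org.bson.types.ObjectId;
-
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Document;
-import org.springframework.data.mongodb.core.mapping.Field;
-
-@Document
-public class IdMappingSample {
-
-  @Id
-  private String id;       // mapped to '_id'; the String value is converted to an ObjectId where possible
-
-  @Field("x")
-  private String shortKey; // a regular property, stored under the customized field name 'x'
-
-  private ObjectId tag;    // ObjectId is also valid as a plain field type
-}
-----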
-
-When querying and updating `MongoTemplate` will use the converter to handle conversions of the `Query` and `Update` objects that correspond to the above rules for saving documents, so field names and types used in your queries will be able to match what is in your domain classes.
-
-[[mapping-configuration]]
-== Mapping Configuration
-
-Unless explicitly configured, an instance of `MongoMappingConverter` is created by default when creating a `MongoTemplate`. You can create your own instance of the `MappingMongoConverter` so as to tell it where to scan the classpath at startup for your domain classes in order to extract metadata and construct indexes. Also, by creating your own instance you can register Spring converters to use for mapping specific classes to and from the database.
-
-You can configure the `MongoMappingConverter` as well as `com.mongodb.Mongo` and MongoTemplate either using Java or XML based metadata. Here is an example using Spring's Java based configuration:
-
-.@Configuration class to configure MongoDB mapping support
-====
-[source,java]
-----
-@Configuration
-public class GeoSpatialAppConfig extends AbstractMongoConfiguration {
-
-  @Bean
-  public Mongo mongo() throws Exception {
-    return new Mongo("localhost");
-  }
-
-  @Override
-  public String getDatabaseName() {
-    return "database";
-  }
-
-  @Override
-  public String getMappingBasePackage() {
-    return "com.bigbank.domain";
-  }
-
-  // the following are optional
-
-  @Bean
-  @Override
-  public CustomConversions customConversions() throws Exception {
-    List<Converter<?, ?>> converterList = new ArrayList<Converter<?, ?>>();
-    converterList.add(new org.springframework.data.mongodb.test.PersonReadConverter());
-    converterList.add(new org.springframework.data.mongodb.test.PersonWriteConverter());
-    return new CustomConversions(converterList);
-  }
-
-  @Bean
-  public LoggingEventListener mappingEventsListener() {
-    return new LoggingEventListener();
-  }
-}
-----
-====
-
-`AbstractMongoConfiguration` requires you to implement methods that define a `com.mongodb.Mongo` as well as provide a database name. `AbstractMongoConfiguration` also has a method you can override named '`getMappingBasePackage`' which tells the converter where to scan for classes annotated with the `@org.springframework.data.mongodb.core.mapping.Document` annotation.
-
-You can add additional converters to the converter by overriding the method afterMappingMongoConverterCreation. Also shown in the above example is a `LoggingEventListener` which logs `MongoMappingEvent`s that are posted onto Spring's `ApplicationContextEvent` infrastructure.
-
-NOTE: AbstractMongoConfiguration will create a MongoTemplate instance and register it with the container under the name 'mongoTemplate'.
-
-You can also override the method `UserCredentials getUserCredentials()` to provide the username and password information to connect to the database.
-
-Spring's MongoDB namespace makes it easy to enable mapping functionality in XML:
-
-.XML schema to configure MongoDB mapping support
-====
-[source,xml]
-----
-<beans>
-
-  <!-- Mongo and MongoTemplate bean definitions -->
-
-  <mongo:mapping-converter base-package="com.bigbank.domain"/>
-
-</beans>
-----
-====
-
-The `base-package` property tells it where to scan for classes annotated with the `@org.springframework.data.mongodb.core.mapping.Document` annotation.
-
-[[mapping-usage]]
-== Metadata based Mapping
-
-To take full advantage of the object mapping functionality inside the Spring Data/MongoDB support, you should annotate your mapped objects with the `@org.springframework.data.mongodb.core.mapping.Document` annotation.
Although it is not necessary for the mapping framework to have this annotation (your POJOs will be mapped correctly, even without any annotations), it allows the classpath scanner to find and pre-process your domain objects to extract the necessary metadata. If you don't use this annotation, your application will take a slight performance hit the first time you store a domain object because the mapping framework needs to build up its internal metadata model so it knows about the properties of your domain object and how to persist them.
-
-.Example domain object
-====
-[source,java]
-----
-package com.mycompany.domain;
-
-@Document
-public class Person {
-
-  @Id
-  private ObjectId id;
-
-  @Indexed
-  private Integer ssn;
-
-  private String firstName;
-
-  @Indexed
-  private String lastName;
-}
-----
-====
-
-IMPORTANT: The `@Id` annotation tells the mapper which property you want to use for the MongoDB `_id` property and the `@Indexed` annotation tells the mapping framework to call `createIndex(…)` on that property of your document, making searches faster.
-
-IMPORTANT: Automatic index creation is only done for types annotated with `@Document`.
-
-[[mapping-usage-annotations]]
-=== Mapping annotation overview
-
-The MappingMongoConverter can use metadata to drive the mapping of objects to documents. An overview of the annotations is provided below
-
-* `@Id` - applied at the field level to mark the field used for identity purposes.
-* `@Document` - applied at the class level to indicate this class is a candidate for mapping to the database. You can specify the name of the collection where the document will be stored.
-* `@DBRef` - applied at the field level to indicate it is to be stored using a com.mongodb.DBRef.
-* `@Indexed` - applied at the field level to describe how to index the field.
-* `@CompoundIndex` - applied at the type level to declare Compound Indexes
-* `@GeoSpatialIndexed` - applied at the field level to describe how to geoindex the field.
-* `@TextIndexed` - applied at the field level to mark the field to be included in the text index.
-* `@Language` - applied at the field level to set the language override property for text index.
-* `@Transient` - by default all private fields are mapped to the document; this annotation excludes the field where it is applied from being stored in the database
-* `@PersistenceConstructor` - marks a given constructor - even a package protected one - to use when instantiating the object from the database. Constructor arguments are mapped by name to the key values in the retrieved DBObject.
-* `@Value` - this annotation is part of the Spring Framework. Within the mapping framework it can be applied to constructor arguments. This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object. In order to reference a property of a given document one has to use expressions like: `@Value("#root.myProperty")` where `root` refers to the root of the given document.
-* `@Field` - applied at the field level; describes the name of the field as it will be represented in the MongoDB BSON document, thus allowing the name to be different from the field name of the class.
-* `@Version` - applied at the field level; used for optimistic locking and checked for modification on save operations. The initial value is `zero` which is bumped automatically on every update (see the sketch following this list).
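-
-The following is a minimal sketch (a hypothetical entity, not part of the original reference documentation) of `@Version`-based optimistic locking:
-
-[source,java]
-----
-import org.springframework.data.annotation.Id;
-import org.springframework.data.annotation.Version;
-import org.springframework.data.mongodb.core.mapping.Document;
-
-@Document
-public class VersionedAccount {
-
-  @Id
-  private String id;
-
-  // starts at zero and is bumped automatically on every update; saving an
-  // instance that carries a stale version fails instead of silently
-  // overwriting newer data
-  @Version
-  private Long version;
-
-  private double balance;
-}
-----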
-
-The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. Specific subclasses are used in the MongoDB support to support annotation based metadata. Other strategies are also possible to put in place if there is demand.
-
-Here is an example of a more complex mapping.
-
-[source,java]
-----
-@Document
-@CompoundIndexes({
-    @CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}")
-})
-public class Person<T extends Address> {
-
-  @Id
-  private String id;
-
-  @Indexed(unique = true)
-  private Integer ssn;
-
-  @Field("fName")
-  private String firstName;
-
-  @Indexed
-  private String lastName;
-
-  private Integer age;
-
-  @Transient
-  private Integer accountTotal;
-
-  @DBRef
-  private List<Account> accounts;
-
-  private T address;
-
-  public Person(Integer ssn) {
-    this.ssn = ssn;
-  }
-
-  @PersistenceConstructor
-  public Person(Integer ssn, String firstName, String lastName, Integer age, T address) {
-    this.ssn = ssn;
-    this.firstName = firstName;
-    this.lastName = lastName;
-    this.age = age;
-    this.address = address;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  // no setter for Id. (getter is only exposed for some unit testing)
-
-  public Integer getSsn() {
-    return ssn;
-  }
-
-  // other getters/setters omitted
-}
-----
-
-[[mapping-custom-object-construction]]
-=== Customized Object Construction
-
-The mapping subsystem allows the customization of the object construction by annotating a constructor with the `@PersistenceConstructor` annotation. The values to be used for the constructor parameters are resolved in the following way:
-
-* If a parameter is annotated with the `@Value` annotation, the given expression is evaluated and the result is used as the parameter value.
-* If the Java type has a property whose name matches the given field of the input document, then its property information is used to select the appropriate constructor parameter to pass the input field value to. This works only if the parameter name information is present in the java `.class` files which can be achieved by compiling the source with debug information or using the new `-parameters` command-line switch for javac in Java 8.
-* Otherwise a `MappingException` will be thrown indicating that the given constructor parameter could not be bound.
-
-[source,java]
-----
-class OrderItem {
-
-  private @Id String id;
-  private int quantity;
-  private double unitPrice;
-
-  OrderItem(String id, @Value("#root.qty ?: 0") int quantity, double unitPrice) {
-    this.id = id;
-    this.quantity = quantity;
-    this.unitPrice = unitPrice;
-  }
-
-  // getters/setters omitted
-}
-
-DBObject input = new BasicDBObject("id", "4711");
-input.put("unitPrice", 2.5);
-input.put("qty", 5);
-OrderItem item = converter.read(OrderItem.class, input);
-----
-
-NOTE: The SpEL expression in the `@Value` annotation of the `quantity` parameter falls back to the value `0` if the given property path cannot be resolved.
-
-Additional examples for using the `@PersistenceConstructor` annotation can be found in the https://github.com/spring-projects/spring-data-mongodb/blob/master/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java[MappingMongoConverterUnitTests] test suite.
-
-[[mapping-usage-indexes.compound-index]]
-=== Compound Indexes
-
-Compound indexes are also supported. They are defined at the class level, rather than on individual properties.
-
-NOTE: Compound indexes are very important to improve the performance of queries that involve criteria on multiple fields.
-
-Here's an example that creates a compound index of `lastName` in ascending order and `age` in descending order:
-
-.Example Compound Index Usage
-====
-[source,java]
-----
-package com.mycompany.domain;
-
-@Document
-@CompoundIndexes({
-    @CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}")
-})
-public class Person {
-
-  @Id
-  private ObjectId id;
-  private Integer age;
-  private String firstName;
-  private String lastName;
-
-}
-----
-====
-
-[[mapping-usage-indexes.text-index]]
-=== Text Indexes
-
-NOTE: The text index feature is disabled by default for MongoDB v2.4.
-
-Creating a text index allows you to accumulate several fields into a searchable full-text index. It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index. Properties can be weighted to influence the document score for ranking results. The default language for the text index is English. To change the default language, set `@Document(language="spanish")` to whatever language you want. Using a property called `language` or `@Language` allows you to define a language override on a per-document basis.
-
-.Example Text Index Usage
-====
-[source,java]
-----
-@Document(language = "spanish")
-class SomeEntity {
-
-  @TextIndexed String foo;
-
-  @Language String lang;
-
-  Nested nested;
-}
-
-class Nested {
-
-  @TextIndexed(weight=5) String bar;
-  String roo;
-}
-----
-====
-
-[[mapping-usage-references]]
-=== Using DBRefs
-
-The mapping framework doesn't have to store child objects embedded within the document. You can also store them separately and use a DBRef to refer to that document. When the object is loaded from MongoDB, those references will be eagerly resolved and you will get back a mapped object that looks the same as if it had been stored embedded within your master document.
-
-Here's an example of using a DBRef to refer to a specific document that exists independently of the object in which it is referenced (both classes are shown in-line for brevity's sake):
-
-====
-[source,java]
-----
-@Document
-public class Account {
-
-  @Id
-  private ObjectId id;
-  private Float total;
-}
-
-@Document
-public class Person {
-
-  @Id
-  private ObjectId id;
-  @Indexed
-  private Integer ssn;
-  @DBRef
-  private List<Account> accounts;
-}
-----
-====
-
-There's no need to use something like `@OneToMany` because the mapping framework sees that you want a one-to-many relationship, because there is a List of objects. When the object is stored in MongoDB, there will be a list of DBRefs rather than the `Account` objects themselves.
-
-IMPORTANT: The mapping framework does not handle cascading saves. If you change an `Account` object that is referenced by a `Person` object, you must save the Account object separately. Calling `save` on the `Person` object will not automatically save the `Account` objects in the property `accounts`.
-
-[[mapping-usage-events]]
-=== Mapping Framework Events
-
-Events are fired throughout the lifecycle of the mapping process. This is described in the <> section.
-
-Simply declaring these beans in your Spring ApplicationContext will cause them to be invoked whenever the event is dispatched.
-
-[[mapping-explicit-converters]]
-=== Overriding Mapping with explicit Converters
-
-When storing and querying your objects it is convenient to have a `MongoConverter` instance handle the mapping of all Java types to DBObjects.
However, sometimes you may want the `MongoConverter` to do most of the work but allow you to selectively handle the conversion for a particular type, or to optimize performance.
-
-To selectively handle the conversion yourself, register one or more `org.springframework.core.convert.converter.Converter` instances with the MongoConverter.
-
-NOTE: Spring 3.0 introduced a core.convert package that provides a general type conversion system. This is described in detail in the Spring reference documentation section entitled http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/validation.html#core-convert[Spring Type Conversion].
-
-The method `customConversions` in `AbstractMongoConfiguration` can be used to configure Converters. The examples in <<mapping-configuration>> at the beginning of this chapter show how to perform the configuration using Java and XML.
-
-Below is an example of a Spring Converter implementation that converts from a DBObject to a Person POJO.
-
-[source,java]
-----
-@ReadingConverter
-public class PersonReadConverter implements Converter<DBObject, Person> {
-
-  public Person convert(DBObject source) {
-    Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name"));
-    p.setAge((Integer) source.get("age"));
-    return p;
-  }
-}
-----
-
-Here is an example that converts from a Person to a DBObject.
-
-[source,java]
-----
-@WritingConverter
-public class PersonWriteConverter implements Converter<Person, DBObject> {
-
-  public DBObject convert(Person source) {
-    DBObject dbo = new BasicDBObject();
-    dbo.put("_id", source.getId());
-    dbo.put("name", source.getFirstName());
-    dbo.put("age", source.getAge());
-    return dbo;
-  }
-}
-----
diff --git a/src/main/asciidoc/reference/mongo-3.adoc b/src/main/asciidoc/reference/mongo-3.adoc
deleted file mode 100644
index c0aee3c949..0000000000
--- a/src/main/asciidoc/reference/mongo-3.adoc
+++ /dev/null
@@ -1,95 +0,0 @@
-[[mongo.mongo-3]]
-= MongoDB 3.0 Support
-
-Spring Data MongoDB allows usage of both MongoDB Java driver generations 2 and 3 when connecting to a MongoDB 2.6/3.0 server running _MMap.v1_ or a MongoDB server 3.0 using _MMap.v1_ or the _WiredTiger_ storage engine.
-
-NOTE: Please refer to the driver and database specific documentation for major differences between those.
-
-NOTE: Operations that are no longer valid using a 3.x MongoDB Java driver have been deprecated within Spring Data and will be removed in a subsequent release.
-
-== Using Spring Data MongoDB with MongoDB 3.0
-
-[[mongo.mongo-3.configuration]]
-=== Configuration Options
-
-Some of the configuration options have been changed / removed for the _mongo-java-driver_. The following options will be ignored using the generation 3 driver:
-
- * autoConnectRetry
- * maxAutoConnectRetryTime
- * slaveOk
-
-Generally it is recommended to use the `<mongo:mongo-client>` and `<mongo:client-options>` elements instead of `<mongo:mongo>` when doing XML based configuration, since those elements will only provide you with attributes valid for the generation 3 Java driver.
-
-[source,xml]
-----
-<mongo:mongo-client host="localhost" port="27017">
-  <mongo:client-options ... />
-</mongo:mongo-client>
-----
-
-[[mongo.mongo-3.write-concern]]
-=== WriteConcern and WriteConcernChecking
-
-The `WriteConcern.NONE`, which had been used as default by Spring Data MongoDB, was removed in 3.0. Therefore in a MongoDB 3 environment the `WriteConcern` will be defaulted to `WriteConcern.UNACKNOWLEDGED`.
In case `WriteResultChecking.EXCEPTION` is enabled the `WriteConcern` will be altered to `WriteConcern.ACKNOWLEDGED` for write operations, as otherwise errors during execution would not be thrown correctly, since they are simply not raised by the driver.
-
-[[mongo.mongo-3.authentication]]
-=== Authentication
-
-MongoDB Server generation 3 changed the authentication model when connecting to the DB. Therefore some of the configuration options available for authentication are no longer valid. Please use the `MongoClient` specific options for setting credentials via `MongoCredential` to provide authentication data.
-
-[source,java]
-----
-@Configuration
-public class ApplicationContextEventTestsAppConfig extends AbstractMongoConfiguration {
-
-  @Override
-  public String getDatabaseName() {
-    return "database";
-  }
-
-  @Override
-  @Bean
-  public Mongo mongo() throws Exception {
-    return new MongoClient(singletonList(new ServerAddress("127.0.0.1", 27017)),
-        singletonList(MongoCredential.createCredential("name", "db", "pwd".toCharArray())));
-  }
-}
-----
-
-In order to use authentication with XML configuration use the `credentials` attribute on `<mongo:mongo-client>`.
-
-[source,xml]
-----
-<mongo:mongo-client credentials="name:pwd@db"/>
-----
-
-[[mongo.mongo-3.misc]]
-=== Other things to be aware of
-
-This section covers additional things to keep in mind when using the 3.0 driver.
-
-* `IndexOperations.resetIndexCache()` is no longer supported.
-* Any `MapReduceOptions.extraOption` is silently ignored.
-* `WriteResult` no longer holds error information but throws an Exception.
-* `MongoOperations.executeInSession(…)` no longer calls `requestStart` / `requestDone`.
-* Index name generation has become a driver-internal operation; we still use the 2.x schema to generate names.
-* Some Exception messages differ between the generation 2 and 3 servers as well as between the _MMap.v1_ and _WiredTiger_ storage engines.
-
diff --git a/src/main/asciidoc/reference/mongo-auditing.adoc b/src/main/asciidoc/reference/mongo-auditing.adoc
deleted file mode 100644
index c8d01ceaf7..0000000000
--- a/src/main/asciidoc/reference/mongo-auditing.adoc
+++ /dev/null
@@ -1,33 +0,0 @@
-[[mongo.auditing]]
-== General auditing configuration
-
-Activating auditing functionality is just a matter of adding the Spring Data Mongo `auditing` namespace element to your configuration:
-
-.Activating auditing using XML configuration
-====
-[source,xml]
-----
-<mongo:auditing/>
-----
-====
-
-Since Spring Data MongoDB 1.4 auditing can be enabled by annotating a configuration class with the `@EnableMongoAuditing` annotation.
-
-.Activating auditing using JavaConfig
-====
-[source,java]
-----
-@Configuration
-@EnableMongoAuditing
-class Config {
-
-  @Bean
-  public AuditorAware<AuditableUser> myAuditorProvider() {
-    return new AuditorAwareImpl();
-  }
-}
-----
-====
-
-If you expose a bean of type `AuditorAware` to the `ApplicationContext`, the auditing infrastructure will pick it up automatically and use it to determine the current user to be set on domain types. If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`.
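-
-The `AuditorAwareImpl` referenced above is not shown in the original text; the following is a minimal sketch of what such an implementation might look like, assuming a hypothetical `AuditableUser` domain type and leaving the actual user lookup to your application:
-
-[source,java]
-----
-import org.springframework.data.domain.AuditorAware;
-
-public class AuditorAwareImpl implements AuditorAware<AuditableUser> {
-
-  public AuditableUser getCurrentAuditor() {
-    // resolve the currently acting user here, e.g. from Spring Security or a
-    // thread-local; hard-coded for the sake of the sketch
-    return new AuditableUser("admin");
-  }
-}
-----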
-
diff --git a/src/main/asciidoc/reference/mongo-repositories.adoc b/src/main/asciidoc/reference/mongo-repositories.adoc
deleted file mode 100644
index d78880fcff..0000000000
--- a/src/main/asciidoc/reference/mongo-repositories.adoc
+++ /dev/null
@@ -1,505 +0,0 @@
-[[mongo.repositories]]
-= MongoDB repositories
-
-[[mongo-repo-intro]]
-== Introduction
-
-This chapter will point out the specialties for repository support for MongoDB. This builds on the core repository support explained in <<repositories>>. So make sure you've got a sound understanding of the basic concepts explained there.
-
-[[mongo-repo-usage]]
-== Usage
-
-To access domain entities stored in a MongoDB you can leverage our sophisticated repository support that eases implementing those quite significantly. To do so, simply create an interface for your repository:
-
-.Sample Person entity
-====
-[source,java]
-----
-public class Person {
-
-  @Id
-  private String id;
-  private String firstname;
-  private String lastname;
-  private Address address;
-
-  // … getters and setters omitted
-}
-----
-====
-
-We have a quite simple domain object here. Note that it has a property named `id` of type `ObjectId`. The default serialization mechanism used in `MongoTemplate` (which is backing the repository support) regards properties named id as document id. Currently we support `String`, `ObjectId` and `BigInteger` as id-types.
-
-.Basic repository interface to persist Person entities
-====
-[source]
-----
-public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
-
-  // additional custom finder methods go here
-}
-----
-====
-
-Right now this interface simply serves typing purposes but we will add additional methods to it later. In your Spring configuration simply add:
-
-.General MongoDB repository Spring configuration
-====
-[source,xml]
-----
-<beans>
-
-  <!-- Mongo instance and MongoTemplate bean definitions -->
-
-  <mongo:repositories base-package="com.acme.*.repositories"/>
-
-</beans>
-----
-====
-
-This namespace element will cause the base packages to be scanned for interfaces extending `MongoRepository` and create Spring beans for each of them found. By default the repositories will get a `MongoTemplate` Spring bean wired that is called `mongoTemplate`, so you only need to configure `mongo-template-ref` explicitly if you deviate from this convention.
-
-If you'd rather like to go with JavaConfig use the `@EnableMongoRepositories` annotation. The annotation carries the very same attributes as the namespace element. If no base package is configured the infrastructure will scan the package of the annotated configuration class.
-
-.JavaConfig for repositories
-====
-[source,java]
-----
-@Configuration
-@EnableMongoRepositories
-class ApplicationConfig extends AbstractMongoConfiguration {
-
-  @Override
-  protected String getDatabaseName() {
-    return "e-store";
-  }
-
-  @Override
-  public Mongo mongo() throws Exception {
-    return new Mongo();
-  }
-
-  @Override
-  protected String getMappingBasePackage() {
-    return "com.oreilly.springdata.mongodb";
-  }
-}
-----
-====
-
-As our domain repository extends `PagingAndSortingRepository` it provides you with CRUD operations as well as methods for paginated and sorted access to the entities. Working with the repository instance is just a matter of dependency injecting it into a client.
So accessing the second page of `Person`s at a page size of 10 would simply look something like this:
-
-.Paging access to Person entities
-====
-[source,java]
-----
-@RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration
-public class PersonRepositoryTests {
-
-  @Autowired PersonRepository repository;
-
-  @Test
-  public void readsFirstPageCorrectly() {
-
-    Page<Person> persons = repository.findAll(new PageRequest(0, 10));
-    assertThat(persons.isFirstPage(), is(true));
-  }
-}
-----
-====
-
-The sample creates an application context with Spring's unit test support which will perform annotation based dependency injection into test cases. Inside the test method we simply use the repository to query the datastore. We hand the repository a `PageRequest` instance that requests the first page of persons at a page size of 10.
-
-[[mongodb.repositories.queries]]
-== Query methods
-
-Most of the data access operations you usually trigger on a repository result in a query being executed against the MongoDB databases. Defining such a query is just a matter of declaring a method on the repository interface, as shown in the example (and the usage sketch) below:
-
-.PersonRepository with query methods
-====
-[source,java]
-----
-public interface PersonRepository extends PagingAndSortingRepository<Person, String> {
-
-    List<Person> findByLastname(String lastname); <1>
-
-    Page<Person> findByFirstname(String firstname, Pageable pageable); <2>
-
-    Person findByShippingAddresses(Address address); <3>
-
-    Stream<Person> findAllBy(); <4>
-}
-----
-<1> The method shows a query for all people with the given lastname. The query will be derived by parsing the method name for constraints which can be concatenated with `And` and `Or`. Thus the method name will result in a query expression of `{"lastname" : lastname}`.
-<2> Applies pagination to a query. Just equip your method signature with a `Pageable` parameter and let the method return a `Page` instance and we will automatically page the query accordingly.
-<3> Shows that you can query based on properties which are not a primitive type.
-<4> Uses a Java 8 `Stream` which reads and converts individual elements while iterating the stream.
-====
-
-NOTE: Note that for version 1.0 we currently don't support referring to parameters that are mapped as `DBRef` in the domain class.
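-
-To connect the pieces, the following is a minimal client-side sketch (hypothetical code, not part of the original reference documentation) invoking the derived query methods declared above; note that `Stream`-returning methods should be closed after use, for example via try-with-resources:
-
-[source,java]
-----
-Page<Person> page = repository.findByFirstname("Sven", new PageRequest(0, 10));
-List<Person> matches = repository.findByLastname("Olafsen");
-
-// the stream reads and converts documents lazily, so close it when done
-try (Stream<Person> stream = repository.findAllBy()) {
-  stream.forEach(person -> System.out.println(person));
-}
-----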
-
-[cols="1,2,3", options="header"]
-.Supported keywords for query methods
-|===
-| Keyword
-| Sample
-| Logical result
-
-| `After`
-| `findByBirthdateAfter(Date date)`
-| `{"birthdate" : {"$gt" : date}}`
-
-| `GreaterThan`
-| `findByAgeGreaterThan(int age)`
-| `{"age" : {"$gt" : age}}`
-
-| `GreaterThanEqual`
-| `findByAgeGreaterThanEqual(int age)`
-| `{"age" : {"$gte" : age}}`
-
-| `Before`
-| `findByBirthdateBefore(Date date)`
-| `{"birthdate" : {"$lt" : date}}`
-
-| `LessThan`
-| `findByAgeLessThan(int age)`
-| `{"age" : {"$lt" : age}}`
-
-| `LessThanEqual`
-| `findByAgeLessThanEqual(int age)`
-| `{"age" : {"$lte" : age}}`
-
-| `Between`
-| `findByAgeBetween(int from, int to)`
-| `{"age" : {"$gt" : from, "$lt" : to}}`
-
-| `In`
-| `findByAgeIn(Collection ages)`
-| `{"age" : {"$in" : [ages...]}}`
-
-| `NotIn`
-| `findByAgeNotIn(Collection ages)`
-| `{"age" : {"$nin" : [ages...]}}`
-
-| `IsNotNull, NotNull`
-| `findByFirstnameNotNull()`
-| `{"firstname" : {"$ne" : null}}`
-
-| `IsNull, Null`
-| `findByFirstnameNull()`
-| `{"firstname" : null}`
-
-| `Like`, `StartingWith`, `EndingWith`
-| `findByFirstnameLike(String name)`
-| `{"firstname" : name} (name as regex)`
-
-| `Containing` on String
-| `findByFirstnameContaining(String name)`
-| `{"firstname" : name} (name as regex)`
-
-| `Containing` on Collection
-| `findByAddressesContaining(Address address)`
-| `{"addresses" : { "$in" : address}}`
-
-| `Regex`
-| `findByFirstnameRegex(String firstname)`
-| `{"firstname" : {"$regex" : firstname }}`
-
-| `(No keyword)`
-| `findByFirstname(String name)`
-| `{"firstname" : name}`
-
-| `Not`
-| `findByFirstnameNot(String name)`
-| `{"firstname" : {"$ne" : name}}`
-
-| `Near`
-| `findByLocationNear(Point point)`
-| `{"location" : {"$near" : [x,y]}}`
-
-| `Near`
-| `findByLocationNear(Point point, Distance max)`
-| `{"location" : {"$near" : [x,y], "$maxDistance" : max}}`
-
-| `Near`
-| `findByLocationNear(Point point, Distance min, Distance max)`
-| `{"location" : {"$near" : [x,y], "$minDistance" : min, "$maxDistance" : max}}`
-
-| `Within`
-| `findByLocationWithin(Circle circle)`
-| `{"location" : {"$geoWithin" : {"$center" : [ [x, y], distance]}}}`
-
-| `Within`
-| `findByLocationWithin(Box box)`
-| `{"location" : {"$geoWithin" : {"$box" : [ [x1, y1], x2, y2]}}}`
-
-| `IsTrue, True`
-| `findByActiveIsTrue()`
-| `{"active" : true}`
-
-| `IsFalse, False`
-| `findByActiveIsFalse()`
-| `{"active" : false}`
-
-| `Exists`
-| `findByLocationExists(boolean exists)`
-| `{"location" : {"$exists" : exists }}`
-|===
-
-[[mongodb.repositories.queries.delete]]
-=== Repository delete queries
-
-The above keywords can be used in conjunction with `delete…By` or `remove…By` to create queries deleting matching documents.
-
-.`Delete…By` Query
-====
-[source,java]
-----
-public interface PersonRepository extends MongoRepository<Person, String> {
-
-  List<Person> deleteByLastname(String lastname);
-
-  Long deletePersonByLastname(String lastname);
-}
-----
-====
-
-Using return type `List` will retrieve and return all matching documents before actually deleting them. A numeric return type directly removes the matching documents returning the total number of documents removed.
-
-[[mongodb.repositories.queries.geo-spatial]]
-=== Geo-spatial repository queries
-
-As you've just seen there are a few keywords triggering geo-spatial operations within a MongoDB query. The `Near` keyword allows some further modification.
Let's have a look at some examples:
-
-.Advanced `Near` queries
-====
-[source,java]
-----
-public interface PersonRepository extends MongoRepository<Person, String> {
-
-  // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
-  List<Person> findByLocationNear(Point location, Distance distance);
-}
-----
-====
-
-Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. If the `Distance` was set up containing a `Metric` we will transparently use `$nearSphere` instead of `$near`.
-
-.Using `Distance` with `Metrics`
-====
-[source,java]
-----
-Point point = new Point(43.7, 48.8);
-Distance distance = new Distance(200, Metrics.KILOMETERS);
-… = repository.findByLocationNear(point, distance);
-// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}}
-----
-====
-
-As you can see, using a `Distance` equipped with a `Metric` causes a `$nearSphere` clause to be added instead of a plain `$near`. Beyond that the actual distance gets calculated according to the `Metrics` used.
-
-NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of the `$nearSphere` operator.
-
-==== Geo-near queries
-
-[source,java]
-----
-public interface PersonRepository extends MongoRepository<Person, String> {
-
-  // {'geoNear' : 'location', 'near' : [x, y] }
-  GeoResults<Person> findByLocationNear(Point location);
-
-  // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
-  // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
-  //          'distanceMultiplier' : metric.multiplier, 'spherical' : true }
-  GeoResults<Person> findByLocationNear(Point location, Distance distance);
-
-  // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
-  //          'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
-  //          'spherical' : true }
-  GeoResults<Person> findByLocationNear(Point location, Distance min, Distance max);
-
-  // {'geoNear' : 'location', 'near' : [x, y] }
-  GeoResults<Person> findByLocationNear(Point location);
-}
-----
-
-[[mongodb.repositories.queries.json-based]]
-=== MongoDB JSON based query methods and field restriction
-
-By adding the `org.springframework.data.mongodb.repository.Query` annotation to repository finder methods you can specify a MongoDB JSON query string to use instead of having the query derived from the method name. For example
-
-[source,java]
-----
-public interface PersonRepository extends MongoRepository<Person, String> {
-
-  @Query("{ 'firstname' : ?0 }")
-  List<Person> findByThePersonsFirstname(String firstname);
-
-}
-----
-
-The placeholder ?0 lets you substitute the value from the method arguments into the JSON query string.
-
-You can also use the filter property to restrict the set of properties that will be mapped into the Java object. For example,
-
-[source,java]
-----
-public interface PersonRepository extends MongoRepository<Person, String> {
-
-  @Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}")
-  List<Person> findByThePersonsFirstname(String firstname);
-
-}
-----
-
-This will return only the firstname, lastname and Id properties of the Person objects. The age property, a java.lang.Integer, will not be set and its value will therefore be null.
-
-[[mongodb.repositories.queries.type-safe]]
-=== Type-safe Query methods
-
-MongoDB repository support integrates with the http://www.querydsl.com/[QueryDSL] project which provides a means to perform type-safe queries in Java.
To quote from the project description, "Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API." It provides the following features:
-
-* Code completion in IDE (all properties, methods and operations can be expanded in your favorite Java IDE)
-* Almost no syntactically invalid queries allowed (type-safe on all levels)
-* Domain types and properties can be referenced safely (no Strings involved!)
-* Adapts better to refactoring changes in domain types
-* Incremental query definition is easier
-
-Please refer to the http://www.querydsl.com/static/querydsl/latest/reference/html/[QueryDSL documentation] which describes how to bootstrap your environment for APT based code generation using Maven or Ant.
-
-Using QueryDSL you will be able to write queries as shown below:
-
-[source,java]
-----
-QPerson person = new QPerson("person");
-List<Person> result = repository.findAll(person.address.zipCode.eq("C0123"));
-
-Page<Person> page = repository.findAll(person.lastname.contains("a"),
-    new PageRequest(0, 2, Direction.ASC, "lastname"));
-----
-
-`QPerson` is a class that is generated via the Java annotation post-processing tool; it allows you to write type-safe queries. Notice that there are no strings in the query other than the value "C0123".
-
-You can use the generated `Predicate` class via the interface `QueryDslPredicateExecutor` which is shown below:
-
-[source,java]
-----
-public interface QueryDslPredicateExecutor<T> {
-
-  T findOne(Predicate predicate);
-
-  List<T> findAll(Predicate predicate);
-
-  List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders);
-
-  Page<T> findAll(Predicate predicate, Pageable pageable);
-
-  Long count(Predicate predicate);
-}
-----
-
-To use this in your repository implementation, simply inherit from it in addition to other repository interfaces. This is shown below:
-
-[source,java]
-----
-public interface PersonRepository extends MongoRepository<Person, String>, QueryDslPredicateExecutor<Person> {
-
-  // additional finder methods go here
-}
-----
-
-We think you will find this an extremely powerful tool for writing MongoDB queries.
-
-[[mongodb.repositories.queries.full-text]]
-=== Full-text search queries
-MongoDB's full-text search feature is very store-specific and can therefore be found on `MongoRepository` rather than on the more general `CrudRepository`. What we need is a document with a full-text index defined for it (see <<mapping-usage-indexes.text-index>> for how to create one).
-
-Additional methods on `MongoRepository` take `TextCriteria` as input parameter. In addition to those explicit methods, it is also possible to add a `TextCriteria`-derived repository method. The criteria will be added as an additional `AND` criteria. Once the entity contains a `@TextScore`-annotated property the document's full-text score will be retrieved. Furthermore the `@TextScore`-annotated property will also make it possible to sort by the document's score.
[[mongodb.repositories.queries.full-text]]
=== Full-text search queries

MongoDB's full-text search feature is store-specific and is therefore found on `MongoRepository` rather than on the more general `CrudRepository`. What we need is a document with a full-text index defined for it (please see the section on text indexes for how to create one).

Additional methods on `MongoRepository` take `TextCriteria` as an input parameter. In addition to those explicit methods, it is also possible to add a `TextCriteria`-derived repository method. The criteria will be added as an additional `AND` criteria. Once the entity contains a `@TextScore`-annotated property, the document's full-text score will be retrieved. Furthermore the `@TextScore`-annotated property also makes it possible to sort by the document's score.

[source,java]
----
@Document
class FullTextDocument {

  @Id String id;
  @TextIndexed String title;
  @TextIndexed String content;
  @TextScore Float score;
}

interface FullTextRepository extends Repository<FullTextDocument, String> {

  // Execute a full-text search and define sorting dynamically
  List<FullTextDocument> findAllBy(TextCriteria criteria, Sort sort);

  // Paginate over a full-text search result
  Page<FullTextDocument> findAllBy(TextCriteria criteria, Pageable pageable);

  // Combine a derived query with a full-text search
  List<FullTextDocument> findByTitleOrderByScoreDesc(String title, TextCriteria criteria);
}


Sort sort = new Sort("score");
TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("spring", "data");
List<FullTextDocument> result = repository.findAllBy(criteria, sort);

criteria = TextCriteria.forDefaultLanguage().matching("film");
Page<FullTextDocument> page = repository.findAllBy(criteria, new PageRequest(1, 1, sort));
List<FullTextDocument> derived = repository.findByTitleOrderByScoreDesc("mongodb", criteria);
----

include::../{spring-data-commons-docs}/repository-projections.adoc[leveloffset=+2]

[[mongodb.repositories.misc]]
== Miscellaneous

[[mongodb.repositories.misc.cdi-integration]]
=== CDI Integration

Instances of the repository interfaces are usually created by a container, and Spring is the most natural choice when working with Spring Data. As of version 1.3.0, Spring Data MongoDB ships with a custom CDI extension that allows using the repository abstraction in CDI environments. The extension is part of the JAR, so all you need to do to activate it is drop the Spring Data MongoDB JAR into your classpath. You can now set up the infrastructure by implementing a CDI Producer for the `MongoTemplate`:

[source,java]
----
class MongoTemplateProducer {

  @Produces
  @ApplicationScoped
  public MongoOperations createMongoTemplate() throws UnknownHostException, MongoException {

    MongoDbFactory factory = new SimpleMongoDbFactory(new Mongo(), "database");
    return new MongoTemplate(factory);
  }
}
----

The Spring Data MongoDB CDI extension will pick up the `MongoTemplate` available as a CDI bean and create a proxy for a Spring Data repository whenever a bean of a repository type is requested by the container. Thus obtaining an instance of a Spring Data repository is a matter of declaring an `@Inject`-ed property:

[source,java]
----
class RepositoryClient {

  @Inject
  PersonRepository repository;

  public void businessMethod() {
    List<Person> people = repository.findAll();
  }
}
----
diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc deleted file mode 100644 index 46b7a1ee17..0000000000 --- a/src/main/asciidoc/reference/mongodb.adoc +++ /dev/null @@ -1,2351 +0,0 @@
[[mongo.core]]
= MongoDB support

The MongoDB support contains a wide range of features which are summarized below.

* Spring configuration support using Java based @Configuration classes or an XML namespace for a Mongo driver instance and replica sets
* MongoTemplate helper class that increases productivity performing common Mongo operations. Includes integrated object mapping between documents and POJOs.
* Exception translation into Spring's portable Data Access Exception hierarchy
* Feature Rich Object Mapping integrated with Spring's Conversion Service
* Annotation based mapping metadata but extensible to support other metadata formats
* Persistence and mapping lifecycle events
* Java based Query, Criteria, and Update DSLs
* Automatic implementation of Repository interfaces including support for custom finder methods.
* QueryDSL integration to support type-safe queries.
* Cross-store persistence - support for JPA Entities with fields transparently persisted/retrieved using MongoDB
* Log4j log appender
* GeoSpatial integration

For most tasks you will find yourself using `MongoTemplate` or the Repository support, which both leverage the rich mapping functionality. MongoTemplate is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. MongoTemplate also provides callback methods so that it is easy for you to get hold of the low level API artifacts, such as `com.mongodb.DB`, to communicate directly with MongoDB. The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs.

[[mongodb-getting-started]]
== Getting Started

Spring MongoDB support requires MongoDB 2.6 or higher and Java SE 6 or higher. An easy way to bootstrap setting up a working environment is to create a Spring based project in http://spring.io/tools/sts[STS].

First you need to set up a running MongoDB server. Refer to the http://docs.mongodb.org/manual/core/introduction/[MongoDB Quick Start guide] for an explanation on how to start up a MongoDB instance. Once installed, starting MongoDB is typically a matter of executing the following command: `MONGO_HOME/bin/mongod`

To create a Spring project in STS go to File -> New -> Spring Template Project -> Simple Spring Utility Project -> press Yes when prompted. Then enter a project and a package name such as `org.spring.mongodb.example`.

Then add the following to the pom.xml dependencies section.

[source,xml]
----
<dependencies>

  <!-- other dependency elements omitted -->

  <dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb</artifactId>
    <version>{version}</version>
  </dependency>

</dependencies>
----

Also change the version of Spring in the pom.xml to be

[source,xml]
----
<spring.framework.version>{springVersion}</spring.framework.version>
----

You will also need to add the location of the Spring Milestone repository for Maven to your pom.xml, at the same level as your `<dependencies/>` element:

[source,xml]
----
<repositories>
  <repository>
    <id>spring-milestone</id>
    <name>Spring Maven MILESTONE Repository</name>
    <url>http://repo.spring.io/libs-milestone</url>
  </repository>
</repositories>
----

The repository is also http://repo.spring.io/milestone/org/springframework/data/[browseable here].
You may also want to set the logging level to `DEBUG` to see some additional information; edit the log4j.properties file to have

[source]
----
log4j.category.org.springframework.data.document.mongodb=DEBUG
log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %40.40c:%4L - %m%n
----

Create a simple Person class to persist:

[source,java]
----
package org.spring.mongodb.example;

public class Person {

  private String id;
  private String name;
  private int age;

  public Person(String name, int age) {
    this.name = name;
    this.age = age;
  }

  public String getId() {
    return id;
  }
  public String getName() {
    return name;
  }
  public int getAge() {
    return age;
  }

  @Override
  public String toString() {
    return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
  }
}
----

And a main application to run:

[source,java]
----
package org.spring.mongodb.example;

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.Mongo;

public class MongoApp {

  private static final Log log = LogFactory.getLog(MongoApp.class);

  public static void main(String[] args) throws Exception {

    MongoOperations mongoOps = new MongoTemplate(new Mongo(), "database");
    mongoOps.insert(new Person("Joe", 34));

    log.info(mongoOps.findOne(new Query(where("name").is("Joe")), Person.class));

    mongoOps.dropCollection("person");
  }
}
----

This will produce the following output:

[source]
----
10:01:32,062 DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information.
10:01:32,265 DEBUG ramework.data.mongodb.core.MongoTemplate: 631 - insert DBObject containing fields: [_class, age, name] in collection: Person
10:01:32,765 DEBUG ramework.data.mongodb.core.MongoTemplate:1243 - findOne using query: { "name" : "Joe"} in db.collection: database.Person
10:01:32,953 INFO org.spring.mongodb.example.MongoApp: 25 - Person [id=4ddbba3c0be56b7e1b210166, name=Joe, age=34]
10:01:32,984 DEBUG ramework.data.mongodb.core.MongoTemplate: 375 - Dropped collection [database.person]
----

Even in this simple example, there are a few things to take note of:

* You can instantiate the central helper class of Spring Mongo, `MongoTemplate`, using the standard `com.mongodb.Mongo` object and the name of the database to use.
* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information; see the chapter on mapping).
* Conventions are used for handling the id field, converting it to an `ObjectId` when stored in the database.
* Mapping conventions can use field access. Notice that the Person class has only getters.
* If the constructor argument names match the field names of the stored document, they will be used to instantiate the object.

[[mongo.examples-repo]]
== Examples Repository

There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works.
[[mongodb-connectors]]
== Connecting to MongoDB with Spring

One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.Mongo` object using the IoC container. There are two main ways to do this: using Java based bean metadata or XML based bean metadata. These are discussed in the following sections.

NOTE: For those not familiar with how to configure the Spring container using Java based bean metadata instead of XML based metadata, see the high level introduction in the reference docs http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration[here] as well as the detailed documentation http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/beans.html#beans-java-instantiating-container[here].

[[mongo.mongo-java-config]]
=== Registering a Mongo instance using Java based metadata

An example of using Java based bean metadata to register an instance of a `com.mongodb.Mongo` is shown below:

.Registering a com.mongodb.Mongo object using Java based bean metadata
====
[source,java]
----
@Configuration
public class AppConfig {

  /*
   * Use the standard Mongo driver API to create a com.mongodb.Mongo instance.
   */
  public @Bean Mongo mongo() throws UnknownHostException {
    return new Mongo("localhost");
  }
}
----
====

This approach allows you to use the standard `com.mongodb.Mongo` API that you may already be used to, but it also pollutes the code with the `UnknownHostException` checked exception. The use of the checked exception is not desirable as Java based bean metadata uses methods as a means to set object dependencies, making the calling code cluttered.

An alternative is to register an instance of `com.mongodb.Mongo` with the container using Spring's `MongoClientFactoryBean`. As compared to instantiating a `com.mongodb.Mongo` instance directly, the FactoryBean approach does not throw a checked exception and has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and the use of `@Repository` is described in http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/dao.html[Spring's DAO support features].

An example of Java based bean metadata that supports exception translation on `@Repository` annotated classes is shown below:

.Registering a com.mongodb.Mongo object using Spring's MongoClientFactoryBean and enabling Spring's exception translation support
====
[source,java]
----
@Configuration
public class AppConfig {

  /*
   * Factory bean that creates the com.mongodb.Mongo instance
   */
  public @Bean MongoClientFactoryBean mongo() {
    MongoClientFactoryBean mongo = new MongoClientFactoryBean();
    mongo.setHost("localhost");
    return mongo;
  }
}
----
====

To access the `com.mongodb.Mongo` object created by the `MongoClientFactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired Mongo mongo;` field.

[[mongo.mongo-xml-config]]
=== Registering a Mongo instance using XML based metadata

While you can use Spring's traditional `<beans/>` XML namespace to register an instance of `com.mongodb.Mongo` with the container, the XML can be quite verbose as it is general purpose.
XML namespaces are a better alternative for configuring commonly used objects such as the Mongo instance. The mongo namespace allows you to create a Mongo instance with a server location, replica sets, and options.

To use the Mongo namespace elements you will need to reference the Mongo schema (the XML listings in this section are reconstructed to the usual namespace layout):

.XML schema to configure MongoDB
====
[source,xml]
----
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:mongo="http://www.springframework.org/schema/data/mongo"
       xsi:schemaLocation="http://www.springframework.org/schema/data/mongo
                           http://www.springframework.org/schema/data/mongo/spring-mongo.xsd
                           http://www.springframework.org/schema/beans
                           http://www.springframework.org/schema/beans/spring-beans.xsd">

  <!-- Default bean name is 'mongo' -->
  <mongo:mongo host="localhost" port="27017"/>

</beans>
----
====

A more advanced configuration with MongoOptions is shown below (note these are not recommended values):

.XML schema to configure a com.mongodb.Mongo object with MongoOptions
====
[source,xml]
----
<beans>

  <mongo:mongo host="localhost" port="27017">
    <mongo:options connections-per-host="8"
                   threads-allowed-to-block-for-connection-multiplier="4"
                   connect-timeout="1000"
                   max-wait-time="1500"
                   auto-connect-retry="true"
                   socket-keep-alive="true"
                   socket-timeout="1500"
                   slave-ok="true"
                   write-number="1"
                   write-timeout="0"
                   write-fsync="true"/>
  </mongo:mongo>

</beans>
----
====

A configuration using replica sets is shown below.

.XML schema to configure com.mongodb.Mongo object with Replica Sets
====
[source,xml]
----
<mongo:mongo id="replicaSetMongo" replica-set="127.0.0.1:27017,localhost:27018"/>
----
====

[[mongo.mongo-db-factory]]
=== The MongoDbFactory interface

While `com.mongodb.Mongo` is the entry point to the MongoDB driver API, connecting to a specific MongoDB database instance requires additional information such as the database name and an optional username and password. With that information you can obtain a `com.mongodb.DB` object and access all the functionality of a specific MongoDB database instance. Spring provides the `org.springframework.data.mongodb.core.MongoDbFactory` interface shown below to bootstrap connectivity to the database.

[source,java]
----
public interface MongoDbFactory {

  DB getDb() throws DataAccessException;

  DB getDb(String dbName) throws DataAccessException;
}
----

The following sections show how you can use the container with either Java or the XML based metadata to configure an instance of the `MongoDbFactory` interface. In turn, you can use the `MongoDbFactory` instance to configure MongoTemplate.

The class `org.springframework.data.mongodb.core.SimpleMongoDbFactory` implements the `MongoDbFactory` interface and is created with a standard `com.mongodb.Mongo` instance, the database name and an optional `org.springframework.data.authentication.UserCredentials` constructor argument.

Instead of using the IoC container to create an instance of MongoTemplate, you can just use it in standard Java code as shown below.

[source,java]
----
public class MongoApp {

  private static final Log log = LogFactory.getLog(MongoApp.class);

  public static void main(String[] args) throws Exception {

    MongoOperations mongoOps = new MongoTemplate(*new SimpleMongoDbFactory(new Mongo(), "database")*);

    mongoOps.insert(new Person("Joe", 34));

    log.info(mongoOps.findOne(new Query(where("name").is("Joe")), Person.class));

    mongoOps.dropCollection("person");
  }
}
----

The code in bold highlights the use of SimpleMongoDbFactory and is the only difference from the listing shown in the <<mongodb-getting-started,getting started section>>.

[[mongo.mongo-db-factory-java]]
=== Registering a MongoDbFactory instance using Java based metadata

To register a MongoDbFactory instance with the container, you write code much like what was highlighted in the previous code listing. A simple example is shown below:

[source,java]
----
@Configuration
public class MongoConfiguration {

  public @Bean MongoDbFactory mongoDbFactory() throws Exception {
    return new SimpleMongoDbFactory(new Mongo(), "database");
  }
}
----

To define the username and password, create an instance of `org.springframework.data.authentication.UserCredentials` and pass it into the constructor as shown below. This listing also shows using `MongoDbFactory` to register an instance of MongoTemplate with the container.
[source,java]
----
@Configuration
public class MongoConfiguration {

  public @Bean MongoDbFactory mongoDbFactory() throws Exception {
    UserCredentials userCredentials = new UserCredentials("joe", "secret");
    return new SimpleMongoDbFactory(new Mongo(), "database", userCredentials);
  }

  public @Bean MongoTemplate mongoTemplate() throws Exception {
    return new MongoTemplate(mongoDbFactory());
  }
}
----


[[mongo.mongo-db-factory-xml]]
=== Registering a MongoDbFactory instance using XML based metadata

The mongo namespace provides a convenient way to create a `SimpleMongoDbFactory`, as compared to using the `<beans/>` namespace. Simple usage is shown below:

[source,xml]
----
<mongo:db-factory dbname="database"/>
----

In the above example a `com.mongodb.Mongo` instance is created using the default host and port number. The `SimpleMongoDbFactory` registered with the container is identified by the id 'mongoDbFactory' unless a value for the id attribute is specified.

You can also provide the host and port for the underlying `com.mongodb.Mongo` instance as shown below, in addition to the username and password for the database.

[source,xml]
----
<mongo:db-factory id="anotherMongoDbFactory"
                  host="localhost"
                  port="27017"
                  dbname="database"
                  username="joe"
                  password="secret"/>
----

If you need to configure additional options on the `com.mongodb.Mongo` instance that is used to create a `SimpleMongoDbFactory`, you can refer to an existing bean using the `mongo-ref` attribute as shown below. To show another common usage pattern, this listing shows the use of a property placeholder to parameterize the configuration and the creation of a `MongoTemplate`.

[source,xml]
----
<context:property-placeholder location="classpath:/com/myapp/mongodb/config/mongo.properties"/>

<mongo:mongo host="${mongo.host}" port="${mongo.port}">
  <mongo:options connections-per-host="${mongo.connectionsPerHost}"
                 threads-allowed-to-block-for-connection-multiplier="${mongo.threadsAllowedToBlockForConnectionMultiplier}"
                 connect-timeout="${mongo.connectTimeout}"
                 max-wait-time="${mongo.maxWaitTime}"
                 auto-connect-retry="${mongo.autoConnectRetry}"
                 socket-keep-alive="${mongo.socketKeepAlive}"
                 socket-timeout="${mongo.socketTimeout}"
                 slave-ok="${mongo.slaveOk}"
                 write-number="1"
                 write-timeout="0"
                 write-fsync="true"/>
</mongo:mongo>

<mongo:db-factory dbname="database" mongo-ref="mongo"/>

<bean id="anotherMongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
  <constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
</bean>
----

[[mongo-template]]
== Introduction to MongoTemplate

The class `MongoTemplate`, located in the package `org.springframework.data.mongodb.core`, is the central class of Spring's MongoDB support, providing a rich feature set to interact with the database. The template offers convenience operations to create, update, delete and query for MongoDB documents and provides a mapping between your domain objects and MongoDB documents.

NOTE: Once configured, `MongoTemplate` is thread-safe and can be reused across multiple instances.

The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the interface `MongoConverter`. Spring provides two implementations, `SimpleMappingConverter` and `MongoMappingConverter`, but you can also write your own converter. Please refer to the section on MongoConverters for more detailed information.

The `MongoTemplate` class implements the interface `MongoOperations`. As much as possible, the methods on `MongoOperations` are named after methods available on the MongoDB driver `Collection` object, so as to make the API familiar to existing MongoDB developers who are used to the driver API. For example, you will find methods such as "find", "findAndModify", "findOne", "insert", "remove", "save", "update" and "updateMulti". The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `MongoOperations`. A major difference between the two APIs is that `MongoOperations` can be passed domain objects instead of `DBObject`, and there are fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `DBObject` to specify the parameters for those operations.

NOTE: The preferred way to reference the operations on a `MongoTemplate` instance is via its interface `MongoOperations`.

The default converter implementation used by `MongoTemplate` is `MongoMappingConverter`.
While the `MongoMappingConverter` can make use of additional metadata to specify the mapping of objects to documents, it is also capable of converting objects that contain no additional metadata by using conventions for the mapping of IDs and collection names. These conventions, as well as the use of mapping annotations, are explained in the chapter on mapping.

NOTE: In the M2 release, `SimpleMappingConverter` was the default; this class is now deprecated, as its functionality has been subsumed by the `MongoMappingConverter`.

Another central feature of MongoTemplate is exception translation of exceptions thrown in the MongoDB Java driver into Spring's portable Data Access Exception hierarchy. Refer to the section on exception translation for more information.

While there are many convenience methods on `MongoTemplate` to help you easily perform common tasks, if you need to access the MongoDB driver API directly to use functionality not explicitly exposed by the MongoTemplate, you can use one of several execute callback methods to access the underlying driver APIs. The execute callbacks will give you a reference to either a `com.mongodb.DBCollection` or a `com.mongodb.DB` object. Please see the section <<mongo.executioncallback,Execution Callbacks>> for more information.
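As a small illustration of that callback style, consider the following sketch; the "person" collection name is carried over from the earlier examples, and the count logic is purely illustrative:

[source,java]
----
long count = mongoOps.execute("person", new CollectionCallback<Long>() {

  // doInCollection hands you the driver's DBCollection directly.
  public Long doInCollection(DBCollection collection) throws MongoException, DataAccessException {
    return collection.count();
  }
});
----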
Now let's look at some examples of how to work with the `MongoTemplate` in the context of the Spring container.

[[mongo-template.instantiating]]
=== Instantiating MongoTemplate

You can use Java to create and register an instance of MongoTemplate as shown below.

.Registering a com.mongodb.Mongo object and enabling Spring's exception translation support
====
[source,java]
----
@Configuration
public class AppConfig {

  public @Bean Mongo mongo() throws Exception {
    return new Mongo("localhost");
  }

  public @Bean MongoTemplate mongoTemplate() throws Exception {
    return new MongoTemplate(mongo(), "mydatabase");
  }
}
----
====

There are several overloaded constructors of MongoTemplate. These are:

* `MongoTemplate(Mongo mongo, String databaseName)` - takes the `com.mongodb.Mongo` object and the default database name to operate against.
* `MongoTemplate(Mongo mongo, String databaseName, UserCredentials userCredentials)` - adds the username and password for authenticating with the database.
* `MongoTemplate(MongoDbFactory mongoDbFactory)` - takes a MongoDbFactory object that encapsulates the `com.mongodb.Mongo` object, database name, and username and password.
* `MongoTemplate(MongoDbFactory mongoDbFactory, MongoConverter mongoConverter)` - adds a MongoConverter to use for mapping.

You can also configure a MongoTemplate using Spring's XML schema; the listing below is reconstructed to mirror the Java configuration above:

[source,xml]
----
<mongo:mongo host="localhost" port="27017"/>

<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
  <constructor-arg ref="mongo"/>
  <constructor-arg name="databaseName" value="mydatabase"/>
</bean>
----

Other optional properties that you might like to set when creating a `MongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, and `ReadPreference`.

NOTE: The preferred way to reference the operations on a `MongoTemplate` instance is via its interface `MongoOperations`.

[[mongo-template.writeresultchecking]]
=== WriteResultChecking Policy

When in development it is very handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully when, in fact, the database was not modified according to your expectations. Set the `WriteResultChecking` property of MongoTemplate to one of the enum values `LOG`, `EXCEPTION`, or `NONE` to either log the error, throw an exception, or do nothing. The default is a `WriteResultChecking` value of `NONE`.

[[mongo-template.writeconcern]]
=== WriteConcern

You can set the `com.mongodb.WriteConcern` property that the `MongoTemplate` will use for write operations if it has not yet been specified via the driver at a higher level such as `com.mongodb.Mongo`. If MongoTemplate's `WriteConcern` property is not set, it will default to the one set in the MongoDB driver's DB or Collection setting.

[[mongo-template.writeconcernresolver]]
=== WriteConcernResolver

For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert and save operations), a strategy interface called `WriteConcernResolver` can be configured on `MongoTemplate`. Since `MongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The `WriteConcernResolver` interface is shown below.

[source,java]
----
public interface WriteConcernResolver {
  WriteConcern resolve(MongoAction action);
}
----

The passed-in argument, `MongoAction`, is what you use to determine the `WriteConcern` value to be used, or to use the value of the Template itself as a default. `MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `DBObject`, the operation as an enumeration (`MongoActionOperation`: REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE) and a few other pieces of contextual information. For example,

[source,java]
----
private class MyAppWriteConcernResolver implements WriteConcernResolver {

  public WriteConcern resolve(MongoAction action) {
    if (action.getEntityClass().getSimpleName().contains("Audit")) {
      return WriteConcern.NONE;
    } else if (action.getEntityClass().getSimpleName().contains("Metadata")) {
      return WriteConcern.JOURNAL_SAFE;
    }
    return action.getDefaultWriteConcern();
  }
}
----
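Wiring such a resolver into the template is then a single setter call; a minimal sketch, assuming the `MyAppWriteConcernResolver` from above and an existing `MongoDbFactory`:

[source,java]
----
MongoTemplate template = new MongoTemplate(mongoDbFactory);

// All subsequent write operations consult the resolver for their WriteConcern.
template.setWriteConcernResolver(new MyAppWriteConcernResolver());
----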
[[mongo-template.save-update-remove]]
== Saving, Updating, and Removing Documents

`MongoTemplate` provides a simple way for you to save, update, and delete your domain objects and map those objects to documents stored in MongoDB.

Given a simple class such as Person:

[source,java]
----
public class Person {

  private String id;
  private String name;
  private int age;

  public Person(String name, int age) {
    this.name = name;
    this.age = age;
  }

  public String getId() {
    return id;
  }
  public String getName() {
    return name;
  }
  public int getAge() {
    return age;
  }

  @Override
  public String toString() {
    return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
  }

}
----

You can save, update and delete the object as shown below.

NOTE: `MongoOperations` is the interface that `MongoTemplate` implements.

[source,java]
----
package org.spring.example;

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Update.update;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

import com.mongodb.Mongo;

public class MongoApp {

  private static final Log log = LogFactory.getLog(MongoApp.class);

  public static void main(String[] args) throws Exception {

    MongoOperations mongoOps = new MongoTemplate(new SimpleMongoDbFactory(new Mongo(), "database"));

    Person p = new Person("Joe", 34);

    // Insert is used to initially store the object into the database.
    mongoOps.insert(p);
    log.info("Insert: " + p);

    // Find
    p = mongoOps.findById(p.getId(), Person.class);
    log.info("Found: " + p);

    // Update
    mongoOps.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class);
    p = mongoOps.findOne(query(where("name").is("Joe")), Person.class);
    log.info("Updated: " + p);

    // Delete
    mongoOps.remove(p);

    // Check that deletion worked
    List<Person> people = mongoOps.findAll(Person.class);
    log.info("Number of people = : " + people.size());

    mongoOps.dropCollection(Person.class);
  }
}
----

This would produce the following log output (including debug messages from `MongoTemplate` itself):

[source]
----
DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information.
DEBUG work.data.mongodb.core.MongoTemplate: 632 - insert DBObject containing fields: [_class, age, name] in collection: person
INFO org.spring.example.MongoApp: 30 - Insert: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34]
DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "_id" : { "$oid" : "4ddc6e784ce5b1eba3ceaf5c"}} in db.collection: database.person
INFO org.spring.example.MongoApp: 34 - Found: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34]
DEBUG work.data.mongodb.core.MongoTemplate: 778 - calling update using query: { "name" : "Joe"} and update: { "$set" : { "age" : 35}} in collection: person
DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "name" : "Joe"} in db.collection: database.person
INFO org.spring.example.MongoApp: 39 - Updated: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=35]
DEBUG work.data.mongodb.core.MongoTemplate: 823 - remove using query: { "id" : "4ddc6e784ce5b1eba3ceaf5c"} in collection: person
INFO org.spring.example.MongoApp: 46 - Number of people = : 0
DEBUG work.data.mongodb.core.MongoTemplate: 376 - Dropped collection [database.person]
----

Note the implicit conversion, performed by the `MongoConverter`, between the `String` id in the class and the `ObjectId` stored in the database, based on the convention of recognizing the property named `id`.

NOTE: This example is meant to show the use of save, update and remove operations on MongoTemplate and not to show complex mapping functionality.

The query syntax used in the example is explained in more detail in the section <<mongo.query,Querying Documents>>.

[[mongo-template.id-handling]]
=== How the '_id' field is handled in the mapping layer

MongoDB requires that you have an '_id' field for all documents.
If you don't provide one, the driver will assign an `ObjectId` with a generated value. When using the `MongoMappingConverter` there are certain rules that govern how properties from the Java class are mapped to this '_id' field.

The following outlines what property will be mapped to the '_id' document field:

* A property or field annotated with `@Id` (`org.springframework.data.annotation.Id`) will be mapped to the '_id' field.
* A property or field without an annotation but named `id` will be mapped to the '_id' field.

The following outlines what type conversion, if any, will be done on the property mapped to the _id document field when using the `MappingMongoConverter`, the default for `MongoTemplate`.

* An id property or field declared as a String in the Java class will be converted to and stored as an `ObjectId`, if possible, using a Spring `Converter<String, ObjectId>`. Valid conversion rules are delegated to the MongoDB Java driver. If it cannot be converted to an ObjectId, then the value will be stored as a string in the database.
* An id property or field declared as `BigInteger` in the Java class will be converted to and stored as an `ObjectId` using a Spring `Converter<BigInteger, ObjectId>`.

If no field or property specified above is present in the Java class, then an implicit '_id' field will be generated by the driver but not mapped to a property or field of the Java class.

When querying and updating, `MongoTemplate` will use the converter to handle conversions of the `Query` and `Update` objects that correspond to the above rules for saving documents, so field names and types used in your queries will be able to match what is in your domain classes.

[[mongo-template.type-mapping]]
=== Type mapping

MongoDB collections can contain documents that represent instances of a variety of types. A great example here is if you store a hierarchy of classes or simply have a class with a property of type `Object`. In the latter case, the values held inside that property have to be read in correctly when retrieving the object. Thus we need a mechanism to store type information alongside the actual document.

To achieve that, the `MappingMongoConverter` uses a `MongoTypeMapper` abstraction with `DefaultMongoTypeMapper` as its main implementation. Its default behavior is to store the fully qualified classname under `_class` inside the document, for the top-level document as well as for every value that is a complex type and a subtype of the declared property type.

.Type mapping
====
[source,java]
----
public class Sample {
  Contact value;
}

public abstract class Contact { … }

public class Person extends Contact { … }

Sample sample = new Sample();
sample.value = new Person();

mongoTemplate.save(sample);

{ "_class" : "com.acme.Sample",
  "value" : { "_class" : "com.acme.Person" }
}
----
====

As you can see, we store the type information for the actual root class persisted, as well as for the nested type, as it is complex and a subtype of `Contact`. So if you're now using `mongoTemplate.findAll(Object.class, "sample")`, we are able to find out that the stored document is a `Sample` instance, and that the value property is actually a `Person`.

==== Customizing type mapping

In case you want to avoid writing the entire Java class name as type information, and would rather use some key, you can use the `@TypeAlias` annotation on the entity class being persisted.
If you need to customize the mapping even more, have a look at the `TypeInformationMapper` interface. An instance of that interface can be configured on the `DefaultMongoTypeMapper`, which in turn can be configured on `MappingMongoConverter`.

.Defining a TypeAlias for an Entity
====
[source,java]
----
@TypeAlias("pers")
class Person {

}
----
====

Note that the resulting document will contain `"pers"` as the value in the `_class` field.

==== Configuring custom type mapping

The following example demonstrates how to configure a custom `MongoTypeMapper` in `MappingMongoConverter`.

.Configuring a custom MongoTypeMapper via Spring Java Config
====
[source,java]
----
class CustomMongoTypeMapper extends DefaultMongoTypeMapper {
  //implement custom type mapping here
}
----
====

[source,java]
----
@Configuration
class SampleMongoConfiguration extends AbstractMongoConfiguration {

  @Override
  protected String getDatabaseName() {
    return "database";
  }

  @Override
  public Mongo mongo() throws Exception {
    return new Mongo();
  }

  @Bean
  @Override
  public MappingMongoConverter mappingMongoConverter() throws Exception {
    MappingMongoConverter mmc = super.mappingMongoConverter();
    mmc.setTypeMapper(customTypeMapper());
    return mmc;
  }

  @Bean
  public MongoTypeMapper customTypeMapper() {
    return new CustomMongoTypeMapper();
  }
}
----

Note that we are extending the `AbstractMongoConfiguration` class and overriding the bean definition of the `MappingMongoConverter`, where we configure our custom `MongoTypeMapper`. The XML listing below is reconstructed; the bean class name is illustrative:

.Configuring a custom MongoTypeMapper via XML
====
[source,xml]
----
<mongo:mapping-converter type-mapper-ref="customMongoTypeMapper"/>

<bean name="customMongoTypeMapper" class="com.acme.CustomMongoTypeMapper"/>
----
====

[[mongo-template.save-insert]]
=== Methods for saving and inserting documents

There are several convenient methods on `MongoTemplate` for saving and inserting your objects. To have more fine-grained control over the conversion process, you can register Spring converters with the `MappingMongoConverter`, for example `Converter<Person, DBObject>` and `Converter<DBObject, Person>`.

NOTE: The difference between insert and save operations is that a save operation will perform an insert if the object is not already present.

The simple case of using the save operation is to save a POJO. In this case the collection name will be determined by the name (not fully qualified) of the class. You may also call the save operation with a specific collection name. The collection to store the object in can be overridden using mapping metadata.

When inserting or saving, if the Id property is not set, the assumption is that its value will be auto-generated by the database. As such, for auto-generation of an ObjectId to succeed, the type of the Id property/field in your class must be either a `String`, `ObjectId`, or `BigInteger`.

Here is a basic example of using the save operation and retrieving its contents.

.Inserting and retrieving documents using the MongoTemplate
====
[source,java]
----
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;
…

Person p = new Person("Bob", 33);
mongoTemplate.insert(p);

Person qp = mongoTemplate.findOne(query(where("age").is(33)), Person.class);
----
====

The insert/save operations available to you are listed below.

* `void` *save* `(Object objectToSave)` Save the object to the default collection.
* `void` *save* `(Object objectToSave, String collectionName)` Save the object to the specified collection.
A similar set of insert operations is listed below:

* `void` *insert* `(Object objectToSave)` Insert the object to the default collection.
* `void` *insert* `(Object objectToSave, String collectionName)` Insert the object to the specified collection.

[[mongo-template.save-insert.collection]]
==== Which collection will my documents be saved into?

There are two ways to manage the collection name that is used for operating on the documents. The default collection name that is used is the class name changed to start with a lower-case letter. So a `com.test.Person` class would be stored in the "person" collection. You can customize this by providing a different collection name using the `@Document` annotation. You can also override the collection name by providing your own collection name as the last parameter for the selected MongoTemplate method calls.

[[mongo-template.save-insert.individual]]
==== Inserting or saving individual objects

The methods in the MongoOperations interface that support inserting or saving individual objects are listed below:

* *insert* inserts an object. If there is an existing document with the same id then an error is generated.
* *insertAll* takes a `Collection` of objects as the first parameter. This method inspects each object and inserts it into the appropriate collection, based on the rules specified above.
* *save* saves the object, overwriting any object that might exist with the same id.

[[mongo-template.save-insert.batch]]
==== Inserting several objects in a batch

The MongoDB driver supports inserting a collection of documents in one operation. The methods in the MongoOperations interface that support this functionality are listed below:

* *insert* methods that take a `Collection` as the first argument. This inserts a list of objects in a single batch write to the database.

[[mongodb-template-update]]
=== Updating documents in a collection

For updates we can elect to update the first document found using `MongoOperation`'s method `updateFirst`, or we can update all documents that were found to match the query using the method `updateMulti`. Here is an example of an update of all SAVINGS accounts where we are adding a one-time $50.00 bonus to the balance using the `$inc` operator.

.Updating documents using the MongoTemplate
====
[source,java]
----
import static org.springframework.data.mongodb.core.query.Criteria.where;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

...

WriteResult wr = mongoTemplate.updateMulti(new Query(where("accounts.accountType").is(Account.Type.SAVINGS)),
    new Update().inc("accounts.$.balance", 50.00), Account.class);
----
====

In addition to the `Query` discussed above, we provide the update definition using an `Update` object. The `Update` class has methods that match the update modifiers available for MongoDB.

As you can see, most methods return the `Update` object to provide a fluent style for the API.

[[mongodb-template-update.methods]]
==== Methods for executing updates for documents

* *updateFirst* Updates the first document that matches the query document criteria with the provided updated document (see the sketch after this list).
* *updateMulti* Updates all objects that match the query document criteria with the provided updated document.
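For contrast with the `updateMulti` example above, here is a minimal `updateFirst` sketch reusing the `Person` class and the query style from the earlier examples:

[source,java]
----
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;
import static org.springframework.data.mongodb.core.query.Update.update;

// Only the first document matching the query has its age set to 35.
WriteResult wr = mongoTemplate.updateFirst(
    query(where("name").is("Joe")), update("age", 35), Person.class);
----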
[[mongodb-template-update.update]]
==== Methods for the Update class

The Update class can be used with a little 'syntax sugar', as its methods are meant to be chained together, and you can kick-start the creation of a new Update instance via the static method `public static Update update(String key, Object value)` and using static imports.

Here is a listing of methods on the Update class; a short chaining sketch follows the list:

* `Update` *addToSet* `(String key, Object value)` Update using the `$addToSet` update modifier
* `Update` *inc* `(String key, Number inc)` Update using the `$inc` update modifier
* `Update` *pop* `(String key, Update.Position pos)` Update using the `$pop` update modifier
* `Update` *pull* `(String key, Object value)` Update using the `$pull` update modifier
* `Update` *pullAll* `(String key, Object[] values)` Update using the `$pullAll` update modifier
* `Update` *push* `(String key, Object value)` Update using the `$push` update modifier
* `Update` *pushAll* `(String key, Object[] values)` Update using the `$pushAll` update modifier
* `Update` *rename* `(String oldName, String newName)` Update using the `$rename` update modifier
* `Update` *set* `(String key, Object value)` Update using the `$set` update modifier
* `Update` *unset* `(String key)` Update using the `$unset` update modifier
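Because every method returns the `Update` instance, modifiers chain naturally; a short sketch with assumed field names:

[source,java]
----
Update update = new Update()
    .set("lastname", "Targaryen")    // $set
    .inc("age", 1)                   // $inc
    .addToSet("nicknames", "Dany");  // $addToSet
----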
[[mongo-template.upserts]]
=== Upserting documents in a collection

Related to performing an `updateFirst` operation, you can also perform an upsert operation, which will perform an insert if no document is found that matches the query. The document that is inserted is a combination of the query document and the update document. Here is an example:

[source,java]
----
template.upsert(query(where("ssn").is(1111).and("firstName").is("Joe").and("lastName").is("Fraizer")), update("address", addr), Person.class);
----

[[mongo-template.find-and-upsert]]
=== Finding and Upserting documents in a collection

The `findAndModify(…)` method on `DBCollection` can update a document and return either the old or newly updated document in a single operation. `MongoTemplate` provides a `findAndModify` method that takes `Query` and `Update` classes and converts from `DBObject` to your POJOs. Here are the methods:

[source,java]
----
<T> T findAndModify(Query query, Update update, Class<T> entityClass);

<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);

<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);

<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass, String collectionName);
----

As an example usage, we will insert a few `Person` objects into the container and perform a simple `findAndModify` operation:

[source,java]
----
mongoTemplate.insert(new Person("Tom", 21));
mongoTemplate.insert(new Person("Dick", 22));
mongoTemplate.insert(new Person("Harry", 23));

Query query = new Query(Criteria.where("firstName").is("Harry"));
Update update = new Update().inc("age", 1);
Person p = mongoTemplate.findAndModify(query, update, Person.class); // returns the old person object

assertThat(p.getFirstName(), is("Harry"));
assertThat(p.getAge(), is(23));
p = mongoTemplate.findOne(query, Person.class);
assertThat(p.getAge(), is(24));

// Now return the newly updated document when updating
p = mongoTemplate.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class);
assertThat(p.getAge(), is(25));
----

The `FindAndModifyOptions` lets you set the options of `returnNew`, `upsert`, and `remove`. An example extending the previous code snippet is shown below:

[source,java]
----
Query query2 = new Query(Criteria.where("firstName").is("Mary"));
p = mongoTemplate.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class);
assertThat(p.getFirstName(), is("Mary"));
assertThat(p.getAge(), is(1));
----

[[mongo-template.delete]]
=== Methods for removing documents

You can use several overloaded methods to remove an object from the database.

* *remove* Remove the given document based on one of the following: a specific object instance, a query document criteria combined with a class, or a query document criteria combined with a specific collection name.

[[mongo-template.optimistic-locking]]
=== Optimistic locking

The `@Version` annotation provides semantics similar to JPA in the context of MongoDB and makes sure updates are only applied to documents with a matching version. Therefore the actual value of the version property is added to the update query in such a way that the update won't have any effect if another operation altered the document in between. In that case an `OptimisticLockingFailureException` is thrown.

====
[source,java]
----
@Document
class Person {

  @Id String id;
  String firstname;
  String lastname;
  @Version Long version;
}

Person daenerys = template.insert(new Person("Daenerys")); <1>

Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2>

daenerys.setLastname("Targaryen");
template.save(daenerys); <3>

template.save(tmp); // throws OptimisticLockingFailureException <4>
----
<1> Initially insert the document. `version` is set to `0`.
<2> Load the just-inserted document. `version` is still `0`.
<3> Update the document with `version = 0`. Set the `lastname` and bump `version` to `1`.
<4> Trying to update the previously loaded document, which still has `version = 0`, fails with an `OptimisticLockingFailureException`, as the current `version` is `1`.
====

IMPORTANT: Using MongoDB driver version 3 requires setting the `WriteConcern` to `ACKNOWLEDGED`. Otherwise `OptimisticLockingFailureException` can be silently swallowed.
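One common way to react to the exception is to re-read the current document and retry. This pattern is not part of the API, just an illustrative sketch; the `id` value and the `setLastname` setter on `Person` are assumed:

[source,java]
----
for (int attempts = 0; attempts < 3; attempts++) {
  try {
    Person person = template.findOne(query(where("id").is(id)), Person.class);
    person.setLastname("Targaryen");
    template.save(person);  // succeeds only if 'version' is still unchanged
    break;
  } catch (OptimisticLockingFailureException e) {
    // another writer bumped 'version' in the meantime; reload and retry
  }
}
----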
[[mongo.query]]
== Querying Documents

You can express your queries using the `Query` and `Criteria` classes, which have method names that mirror the native MongoDB operator names, such as `lt`, `lte`, `is`, and others. The `Query` and `Criteria` classes follow a fluent API style so that you can easily chain together multiple method criteria and queries while having easy to understand code. Static imports in Java are used to help remove the need to see the 'new' keyword for creating `Query` and `Criteria` instances, so as to improve readability. If you'd like to create `Query` instances from a plain JSON String, use `BasicQuery`.

.Creating a Query instance from a plain JSON String
====
[source,java]
----
BasicQuery query = new BasicQuery("{ age : { $lt : 50 }, accounts.balance : { $gt : 1000.00 }}");
List<Person> result = mongoTemplate.find(query, Person.class);
----
====

GeoSpatial queries are also supported and are described more in the section <<mongo.geospatial,GeoSpatial Queries>>.

Map-Reduce operations are also supported and are described more in the section <<mongo.mapreduce,Map-Reduce Operations>>.

[[mongodb-template-query]]
=== Querying documents in a collection

We saw how to retrieve a single document using the findOne and findById methods on MongoTemplate in previous sections; these return a single domain object. We can also query for a collection of documents to be returned as a list of domain objects. Assuming that we have a number of Person objects with name and age stored as documents in a collection, and that each person has an embedded account document with a balance, we can now run a query using the following code.

.Querying for documents using the MongoTemplate
====
[source,java]
----
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

…

List<Person> result = mongoTemplate.find(query(where("age").lt(50)
    .and("accounts.balance").gt(1000.00d)), Person.class);
----
====

All find methods take a `Query` object as a parameter. This object defines the criteria and options used to perform the query. The criteria is specified using a `Criteria` object that has a static factory method named `where` used to instantiate a new `Criteria` object. We recommend using a static import for `org.springframework.data.mongodb.core.query.Criteria.where` and `Query.query` to make the query more readable.

This query should return a list of `Person` objects that meet the specified criteria. The `Criteria` class has the following methods that correspond to the operators provided in MongoDB.

As you can see, most methods return the `Criteria` object to provide a fluent style for the API.

[[mongodb-template-query.criteria]]
==== Methods for the Criteria class

* `Criteria` *all* `(Object o)` Creates a criterion using the `$all` operator
* `Criteria` *and* `(String key)` Adds a chained `Criteria` with the specified `key` to the current `Criteria` and returns the newly created one
* `Criteria` *andOperator* `(Criteria... criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later)
* `Criteria` *elemMatch* `(Criteria c)` Creates a criterion using the `$elemMatch` operator
* `Criteria` *exists* `(boolean b)` Creates a criterion using the `$exists` operator
* `Criteria` *gt* `(Object o)` Creates a criterion using the `$gt` operator
* `Criteria` *gte* `(Object o)` Creates a criterion using the `$gte` operator
* `Criteria` *in* `(Object... o)` Creates a criterion using the `$in` operator for a varargs argument.
* `Criteria` *in* `(Collection<?> collection)` Creates a criterion using the `$in` operator using a collection
* `Criteria` *is* `(Object o)` Creates a criterion using field matching (`{ key : value }`)
* `Criteria` *lt* `(Object o)` Creates a criterion using the `$lt` operator
* `Criteria` *lte* `(Object o)` Creates a criterion using the `$lte` operator
* `Criteria` *mod* `(Number value, Number remainder)` Creates a criterion using the `$mod` operator
* `Criteria` *ne* `(Object o)` Creates a criterion using the `$ne` operator
* `Criteria` *nin* `(Object... o)` Creates a criterion using the `$nin` operator
* `Criteria` *norOperator* `(Criteria... criteria)` Creates a nor query using the `$nor` operator for all of the provided criteria
* `Criteria` *not* `()` Creates a criterion using the `$not` meta operator which affects the clause directly following
* `Criteria` *orOperator* `(Criteria... criteria)` Creates an or query using the `$or` operator for all of the provided criteria
* `Criteria` *regex* `(String re)` Creates a criterion using a `$regex`
* `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator
* `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator

There are also methods on the Criteria class for geospatial queries. Here is a listing, but look at the section on <<mongo.geospatial,GeoSpatial Queries>> to see them in action.

* `Criteria` *within* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators.
* `Criteria` *within* `(Box box)` Creates a geospatial criterion using a `$geoWithin $box` operation.
* `Criteria` *withinSphere* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $centerSphere` operators.
* `Criteria` *near* `(Point point)` Creates a geospatial criterion using a `$near` operation
* `Criteria` *nearSphere* `(Point point)` Creates a geospatial criterion using a `$nearSphere` operation. This is only available for MongoDB 1.7 and higher.
* `Criteria` *minDistance* `(double minDistance)` Creates a geospatial criterion using the `$minDistance` operation, for use with `$near`.
* `Criteria` *maxDistance* `(double maxDistance)` Creates a geospatial criterion using the `$maxDistance` operation, for use with `$near`.

The `Query` class has some additional methods used to provide options for the query; a combined sketch follows the list.

[[mongodb-template-query.query]]
==== Methods for the Query class

* `Query` *addCriteria* `(Criteria criteria)` used to add additional criteria to the query
* `Field` *fields* `()` used to define fields to be included in the query results
* `Query` *limit* `(int limit)` used to limit the size of the returned results to the provided limit (used for paging)
* `Query` *skip* `(int skip)` used to skip the provided number of documents in the results (used for paging)
* `Query` *with* `(Sort sort)` used to provide a sort definition for the results
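Combining these options, a typical paging-style query can be assembled fluently; a minimal sketch reusing the `Person` type and an assumed `age` property:

[source,java]
----
Query query = query(where("age").gte(21))
    .with(new Sort(Direction.ASC, "age"))  // sort ascending by age
    .skip(10)                              // skip the first ten matches
    .limit(5);                             // return at most five documents

List<Person> result = mongoTemplate.find(query, Person.class);
----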
[[mongo-template.querying]]
=== Methods for querying for documents

The query methods need to specify the target type T that will be returned, and they are also overloaded with an explicit collection name for queries that should operate on a collection other than the one indicated by the return type.

* *findAll* Query for a list of objects of type T from the collection.
* *findOne* Map the results of an ad-hoc query on the collection to a single instance of an object of the specified type.
* *findById* Return an object of the given id and target class.
* *find* Map the results of an ad-hoc query on the collection to a List of the specified type.
* *findAndRemove* Map the results of an ad-hoc query on the collection to a single instance of an object of the specified type. The first document that matches the query is returned and also removed from the collection in the database.

[[mongo.geospatial]]
=== GeoSpatial Queries

MongoDB supports GeoSpatial queries through the use of operators such as `$near`, `$within`, `$geoWithin` and `$nearSphere`. Methods specific to geospatial queries are available on the `Criteria` class. There are also a few shape classes, `Box`, `Circle`, and `Point`, that are used in conjunction with geospatial related `Criteria` methods.

To understand how to perform GeoSpatial queries we will use the following Venue class taken from the integration tests, which relies on using the rich `MappingMongoConverter`.

[source,java]
----
@Document(collection="newyork")
public class Venue {

  @Id
  private String id;
  private String name;
  private double[] location;

  @PersistenceConstructor
  Venue(String name, double[] location) {
    super();
    this.name = name;
    this.location = location;
  }

  public Venue(String name, double x, double y) {
    super();
    this.name = name;
    this.location = new double[] { x, y };
  }

  public String getName() {
    return name;
  }

  public double[] getLocation() {
    return location;
  }

  @Override
  public String toString() {
    return "Venue [id=" + id + ", name=" + name + ", location="
        + Arrays.toString(location) + "]";
  }
}
----

To find locations within a `Circle`, the following query can be used:

[source,java]
----
Circle circle = new Circle(-73.99171, 40.738868, 0.01);
List<Venue> venues =
    template.find(new Query(Criteria.where("location").within(circle)), Venue.class);
----

To find venues within a `Circle` using spherical coordinates, the following query can be used:

[source,java]
----
Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784);
List<Venue> venues =
    template.find(new Query(Criteria.where("location").withinSphere(circle)), Venue.class);
----

To find venues within a `Box`, the following query can be used:

[source,java]
----
//lower-left then upper-right
Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404));
List<Venue> venues =
    template.find(new Query(Criteria.where("location").within(box)), Venue.class);
----

To find venues near a `Point`, the following queries can be used:

[source,java]
----
Point point = new Point(-73.99171, 40.738868);
List<Venue> venues =
    template.find(new Query(Criteria.where("location").near(point).maxDistance(0.01)), Venue.class);
----

[source,java]
----
Point point = new Point(-73.99171, 40.738868);
List<Venue> venues =
    template.find(new Query(Criteria.where("location").near(point).minDistance(0.01).maxDistance(100)), Venue.class);
----

To find venues near a `Point` using spherical coordinates, the following query can be used:

[source,java]
----
Point point = new Point(-73.99171, 40.738868);
List<Venue> venues =
    template.find(new Query(
        Criteria.where("location").nearSphere(point).maxDistance(0.003712240453784)),
        Venue.class);
----

[[mongo.geo-near]]
==== Geo near queries

MongoDB supports querying the database for geo locations and calculating the distance from a given origin at the very same time. With geo-near queries it's possible to express queries like: "find all restaurants in the surrounding 10 miles".
To do so, `MongoOperations` provides `geoNear(…)` methods taking a `NearQuery` as an argument, as well as the already familiar entity type and collection:

[source,java]
----
Point location = new Point(-73.99171, 40.738868);
NearQuery query = NearQuery.near(location).maxDistance(new Distance(10, Metrics.MILES));

GeoResults<Restaurant> results = operations.geoNear(query, Restaurant.class);
----

As you can see, we use the `NearQuery` builder API to set up a query to return all `Restaurant` instances surrounding the given `Point` by 10 miles at most. The `Metrics` enum used here actually implements an interface so that other metrics could be plugged into a distance as well. A `Metric` is backed by a multiplier to transform the distance value of the given metric into native distances. The sample shown here would consider the 10 to be miles. Using one of the built-in metrics (miles and kilometers) automatically triggers the spherical flag to be set on the query. If you want to avoid that, simply hand plain `double` values into `maxDistance(…)`. For more information see the JavaDoc of `NearQuery` and `Distance`.

The geo near operations return a `GeoResults` wrapper object that encapsulates `GeoResult` instances. The wrapping `GeoResults` allows accessing the average distance of all results. A single `GeoResult` object simply carries the entity found plus its distance from the origin.
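Consuming the wrapper is straightforward; a short sketch using the names from the previous example:

[source,java]
----
GeoResults<Restaurant> results = operations.geoNear(query, Restaurant.class);

// Average distance of all matches, in the metric of the query.
Distance averageDistance = results.getAverageDistance();

for (GeoResult<Restaurant> result : results) {
  Restaurant restaurant = result.getContent();  // the matched entity
  Distance distance = result.getDistance();     // its distance from the origin
}
----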
-
-====
-[source,java]
----
-public interface StoreRepository extends CrudRepository<Store, String> {
-
-  List<Store> findByLocationWithin(Polygon polygon);  <1>
-
-}
-
-/*
- * {
- *   "location": {
- *     "$geoWithin": {
- *       "$geometry": {
- *         "type": "Polygon",
- *         "coordinates": [
- *           [
- *             [-73.992514,40.758934],
- *             [-73.961138,40.760348],
- *             [-73.991658,40.730006],
- *             [-73.992514,40.758934]
- *           ]
- *         ]
- *       }
- *     }
- *   }
- * }
- */
-repo.findByLocationWithin(  <2>
-  new GeoJsonPolygon(
-    new Point(-73.992514, 40.758934),
-    new Point(-73.961138, 40.760348),
-    new Point(-73.991658, 40.730006),
-    new Point(-73.992514, 40.758934)));  <3>
-
-/*
- * {
- *   "location" : {
- *     "$geoWithin" : {
- *       "$polygon" : [ [-73.992514,40.758934] , [-73.961138,40.760348] , [-73.991658,40.730006] ]
- *     }
- *   }
- * }
- */
-repo.findByLocationWithin(  <4>
-  new Polygon(
-    new Point(-73.992514, 40.758934),
-    new Point(-73.961138, 40.760348),
-    new Point(-73.991658, 40.730006)));
----
-<1> Repository method definition using the commons type allows calling it with both GeoJSON and legacy format.
-<2> Use the GeoJSON type to make use of the `$geometry` operator.
-<3> Please note that GeoJSON polygons need to define a closed ring.
-<4> Use the legacy format `$polygon` operator.
-====
-
-[[mongo.textsearch]]
-=== Full Text Queries
-
-Since MongoDB 2.6 full text queries can be executed using the `$text` operator. Methods and operations specific to full text queries are available in `TextQuery` and `TextCriteria`. When doing full text search please refer to the http://docs.mongodb.org/manual/reference/operator/query/text/#behavior[MongoDB reference] for its behavior and limitations.
-
-==== Full Text Search
-
-Before we are actually able to use full text search, we have to make sure the search index is set up correctly. Please refer to section <> for creating index structures.
-
-[source,javascript]
----
-db.foo.createIndex(
-{
-  title : "text",
-  content : "text"
-},
-{
-  weights : {
-    title : 3
-  }
-}
-)
----
-
-A query searching for `coffee cake`, sorted by relevance according to the `weights`, can be defined and executed as:
-
-[source,java]
----
-Query query = TextQuery.searching(new TextCriteria().matchingAny("coffee", "cake")).sortByScore();
-List<Document> page = template.find(query, Document.class);
----
-
-Exclusion of search terms can directly be done by prefixing the term with `-` or using `notMatching`.
-
-[source,java]
----
-// search for 'coffee' and not 'cake'
-TextQuery.searching(new TextCriteria().matching("coffee").matching("-cake"));
-TextQuery.searching(new TextCriteria().matching("coffee").notMatching("cake"));
----
-
-`TextCriteria.matching` takes the provided term as is. Therefore, phrases can be defined by putting them between double quotes (e.g. `\"coffee cake\"`) or using `TextCriteria.phrase`.
-
-[source,java]
----
-// search for phrase 'coffee cake'
-TextQuery.searching(new TextCriteria().matching("\"coffee cake\""));
-TextQuery.searching(new TextCriteria().phrase("coffee cake"));
----
-
-include::../{spring-data-commons-docs}/query-by-example.adoc[leveloffset=+1]
-include::query-by-example.adoc[leveloffset=+1]
-
-[[mongo.mapreduce]]
-== Map-Reduce Operations
-
-You can query MongoDB using Map-Reduce, which is useful for batch processing, data aggregation, and for when the query language doesn't fulfill your needs.
-
-Spring provides integration with MongoDB's map-reduce by providing methods on MongoOperations to simplify the creation and execution of Map-Reduce operations.
-It can convert the results of a Map-Reduce operation to a POJO and also integrates with Spring's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/resources.html[Resource abstraction]. This will let you place your JavaScript files on the file system, classpath, http server or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI style syntax, e.g. 'classpath:reduce.js'. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer.
-
-[[mongo.mapreduce.example]]
-=== Example Usage
-
-To understand how to perform Map-Reduce operations, an example from the book 'MongoDB - The definitive guide' is used. In this example we will create three documents that have the values [a,b], [b,c], and [c,d] respectively. The values in each document are associated with the key 'x' as shown below. For this example assume these documents are in the collection named "jmr1".
-
-[source]
----
-{ "_id" : ObjectId("4e5ff893c0277826074ec533"), "x" : [ "a", "b" ] }
-{ "_id" : ObjectId("4e5ff893c0277826074ec534"), "x" : [ "b", "c" ] }
-{ "_id" : ObjectId("4e5ff893c0277826074ec535"), "x" : [ "c", "d" ] }
----
-
-A map function that will count the occurrence of each letter in the array for each document is shown below.
-
-[source,javascript]
----
-function () {
-  for (var i = 0; i < this.x.length; i++) {
-    emit(this.x[i], 1);
-  }
-}
----
-
-The reduce function that will sum up the occurrence of each letter across all the documents is shown below.
-
-[source,javascript]
----
-function (key, values) {
-  var sum = 0;
-  for (var i = 0; i < values.length; i++)
-    sum += values[i];
-  return sum;
-}
----
-
-Executing this will result in a collection as shown below.
-
-[source]
----
-{ "_id" : "a", "value" : 1 }
-{ "_id" : "b", "value" : 2 }
-{ "_id" : "c", "value" : 2 }
-{ "_id" : "d", "value" : 1 }
----
-
-Assuming that the map and reduce functions are located in map.js and reduce.js and bundled in your jar so they are available on the classpath, you can execute a map-reduce operation and obtain the results as shown below.
-
-[source,java]
----
-MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class);
-for (ValueObject valueObject : results) {
-  System.out.println(valueObject);
-}
----
-
-The output of the above code is
-
-[source]
----
-ValueObject [id=a, value=1.0]
-ValueObject [id=b, value=2.0]
-ValueObject [id=c, value=2.0]
-ValueObject [id=d, value=1.0]
----
-
-The MapReduceResults class implements `Iterable` and provides access to the raw output, as well as timing and count statistics. The `ValueObject` class is simply
-
-[source,java]
----
-public class ValueObject {
-
-  private String id;
-  private float value;
-
-  public String getId() {
-    return id;
-  }
-
-  public float getValue() {
-    return value;
-  }
-
-  public void setValue(float value) {
-    this.value = value;
-  }
-
-  @Override
-  public String toString() {
-    return "ValueObject [id=" + id + ", value=" + value + "]";
-  }
-}
----
-
-By default the output type of INLINE is used so you don't have to specify an output collection. To specify additional map-reduce options, use an overloaded method that takes an additional `MapReduceOptions` argument. The class `MapReduceOptions` has a fluent API, so adding additional options can be done in a very compact syntax.
-Here is an example that sets the output collection to "jmr1_out". Note that setting only the output collection assumes a default output type of REPLACE.
-
-[source,java]
----
-MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
-    new MapReduceOptions().outputCollection("jmr1_out"), ValueObject.class);
----
-
-There is also a static import `import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.options;` that can be used to make the syntax slightly more compact.
-
-[source,java]
----
-MapReduceResults<ValueObject> results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
-    options().outputCollection("jmr1_out"), ValueObject.class);
----
-
-You can also specify a query to reduce the set of data that will be fed into the map-reduce operation. The following will remove the document that contains [a,b] from consideration for map-reduce operations.
-
-[source,java]
----
-Query query = new Query(where("x").ne(new String[] { "a", "b" }));
-MapReduceResults<ValueObject> results = mongoOperations.mapReduce(query, "jmr1", "classpath:map.js", "classpath:reduce.js",
-    options().outputCollection("jmr1_out"), ValueObject.class);
----
-
-Note that you can specify additional limit and sort values as well on the query, but not skip values.
-
-[[mongo.server-side-scripts]]
-== Script Operations
-
-MongoDB allows executing JavaScript functions on the server by either directly sending the script or calling a stored one. `ScriptOperations` can be accessed via `MongoTemplate` and provides a basic abstraction for JavaScript usage.
-
-=== Example Usage
-
-====
-[source,java]
----
-ScriptOperations scriptOps = template.scriptOps();
-
-ExecutableMongoScript echoScript = new ExecutableMongoScript("function(x) { return x; }");
-scriptOps.execute(echoScript, "directly execute script");  <1>
-
-scriptOps.register(new NamedMongoScript("echo", echoScript));  <2>
-scriptOps.call("echo", "execute script via name");  <3>
----
-<1> Execute the script directly without storing the function on server side.
-<2> Store the script using 'echo' as its name. The given name identifies the script and allows calling it later.
-<3> Execute the script with name 'echo' using the provided parameters.
-====
-
-[[mongo.group]]
-== Group Operations
-
-As an alternative to using Map-Reduce to perform data aggregation, you can use the http://www.mongodb.org/display/DOCS/Aggregation#Aggregation-Group[`group` operation], which feels similar to SQL's GROUP BY query style and may therefore feel more approachable than Map-Reduce. Using the group operation does have some limitations: for example, it is not supported in a sharded environment and it returns the full result set in a single BSON object, so the result should be small, less than 10,000 keys.
-
-Spring provides integration with MongoDB's group operation by providing methods on MongoOperations to simplify the creation and execution of group operations. It can convert the results of the group operation to a POJO and also integrates with Spring's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/resources.html[Resource abstraction]. This will let you place your JavaScript files on the file system, classpath, http server or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI style syntax, e.g. 'classpath:reduce.js'. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code.
-Note that you can still pass JavaScript code as Java strings if you prefer.
-
-[[mongo.group.example]]
-=== Example Usage
-
-In order to understand how group operations work, the following example is used, which is somewhat artificial. For a more realistic example consult the book 'MongoDB - The definitive guide'. A collection named "group_test_collection" was created with the following rows.
-
-[source]
----
-{ "_id" : ObjectId("4ec1d25d41421e2015da64f1"), "x" : 1 }
-{ "_id" : ObjectId("4ec1d25d41421e2015da64f2"), "x" : 1 }
-{ "_id" : ObjectId("4ec1d25d41421e2015da64f3"), "x" : 2 }
-{ "_id" : ObjectId("4ec1d25d41421e2015da64f4"), "x" : 3 }
-{ "_id" : ObjectId("4ec1d25d41421e2015da64f5"), "x" : 3 }
-{ "_id" : ObjectId("4ec1d25d41421e2015da64f6"), "x" : 3 }
----
-
-We would like to group by the only field in each row, the 'x' field, and aggregate the number of times each specific value of 'x' occurs. To do this we need to create an initial document that contains our count variable and also a reduce function which will increment it each time it is encountered. The Java code to execute the group operation is shown below.
-
-[source,java]
----
-GroupByResults<XObject> results = mongoTemplate.group("group_test_collection",
-    GroupBy.key("x").initialDocument("{ count: 0 }").reduceFunction("function(doc, prev) { prev.count += 1 }"),
-    XObject.class);
----
-
-The first argument is the name of the collection to run the group operation over, the second is a fluent API that specifies properties of the group operation via a `GroupBy` class. In this example we are using just the `initialDocument` and `reduceFunction` methods. You can also specify a key-function, as well as a finalizer as part of the fluent API. If you have multiple keys to group by, you can pass in a comma separated list of keys.
-
-The raw result of the group operation is a JSON document that looks like this
-
-[source]
----
-{
-  "retval" : [ { "x" : 1.0 , "count" : 2.0} ,
-               { "x" : 2.0 , "count" : 1.0} ,
-               { "x" : 3.0 , "count" : 3.0} ] ,
-  "count" : 6.0 ,
-  "keys" : 3 ,
-  "ok" : 1.0
-}
----
-
-The document under the "retval" field is mapped onto the third argument in the group method, in this case XObject, which is shown below.
-
-[source,java]
----
-public class XObject {
-
-  private float x;
-
-  private float count;
-
-
-  public float getX() {
-    return x;
-  }
-
-  public void setX(float x) {
-    this.x = x;
-  }
-
-  public float getCount() {
-    return count;
-  }
-
-  public void setCount(float count) {
-    this.count = count;
-  }
-
-  @Override
-  public String toString() {
-    return "XObject [x=" + x + " count = " + count + "]";
-  }
-}
----
-
-You can also obtain the raw result as a `DBObject` by calling the method `getRawResults` on the `GroupByResults` class.
-
-There is an additional method overload of the group method on `MongoOperations` which lets you specify a `Criteria` object for selecting a subset of the rows. An example which uses a `Criteria` object, with some syntax sugar using static imports, as well as referencing a key-function and reduce function JavaScript files via a Spring Resource string, is shown below.
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.mapreduce.GroupBy.keyFunction;
-import static org.springframework.data.mongodb.core.query.Criteria.where;
-
-GroupByResults<XObject> results = mongoTemplate.group(where("x").gt(0),
-    "group_test_collection",
-    keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class);
----
-
-[[mongo.aggregation]]
-== Aggregation Framework Support
-
-Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.
-
-The MongoDB Documentation describes the http://docs.mongodb.org/manual/core/aggregation/[Aggregation Framework] in detail.
-
-For further information see the full http://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB.
-
-[[mongo.aggregation.basic-concepts]]
-=== Basic Concepts
-
-The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationOperation` and `AggregationResults`.
-
-* `Aggregation`
-+
-An Aggregation represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregationOperation` as a parameter, next to the optional input class.
-+
-The actual aggregate operation is executed by the `aggregate` method of the `MongoTemplate`, which also takes the desired output class as parameter.
-+
-* `AggregationOperation`
-+
-An `AggregationOperation` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although one could manually create an `AggregationOperation`, the recommended way to construct one is to use the static factory methods provided by the `Aggregation` class.
-+
-* `AggregationResults`
-+
-`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result in the form of a `DBObject`, to the mapped objects, and to additional information about the performed aggregation.
-+
-The canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework looks as follows:
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-Aggregation agg = newAggregation(
-    pipelineOP1(),
-    pipelineOP2(),
-    pipelineOPn()
-);
-
-AggregationResults<OutputType> results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class);
-List<OutputType> mappedResult = results.getMappedResults();
----
-
-Note that if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` will derive the name of the input collection from this class. Otherwise, if you don't specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
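-
-The following is a minimal sketch contrasting the two variants; the `Product` class and `products` collection name are hypothetical and only serve the illustration:
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-import static org.springframework.data.mongodb.core.query.Criteria.*;
-
-// input collection derived from the Product input class
-TypedAggregation<Product> agg = newAggregation(Product.class, match(where("netPrice").gt(10)));
-AggregationResults<Product> derived = mongoTemplate.aggregate(agg, Product.class);
-
-// input collection named explicitly; the explicit name takes precedence over the input class
-AggregationResults<Product> explicit = mongoTemplate.aggregate(agg, "products", Product.class);
----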
-
-[[mongo.aggregation.supported-aggregation-operations]]
-=== Supported Aggregation Operations
-
-The MongoDB Aggregation Framework provides the following types of Aggregation Operations:
-
-* Pipeline Aggregation Operators
-* Group Aggregation Operators
-* Boolean Aggregation Operators
-* Comparison Aggregation Operators
-* Arithmetic Aggregation Operators
-* String Aggregation Operators
-* Date Aggregation Operators
-* Conditional Aggregation Operators
-* Lookup Aggregation Operators
-
-At the time of this writing we provide support for the following Aggregation Operations in Spring Data MongoDB.
-
-.Aggregation Operations currently supported by Spring Data MongoDB
-[cols="2*"]
-|===
-| Pipeline Aggregation Operators
-| project, skip, limit, lookup, unwind, group, sort, geoNear
-
-| Group Aggregation Operators
-| addToSet, first, last, max, min, avg, push, sum, (*count)
-
-| Arithmetic Aggregation Operators
-| add (*via plus), subtract (*via minus), multiply, divide, mod
-
-| Comparison Aggregation Operators
-| eq (*via: is), gt, gte, lt, lte, ne
-|===
-
-Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
-
-*) The operation is mapped or added by Spring Data MongoDB.
-
-[[mongo.aggregation.projection]]
-=== Projection Expressions
-
-Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined via the `project` method of the `Aggregation` class, either by passing a list of `String`s or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API via the `and(String)` method and aliased via the `as(String)` method.
-Note that one can also define fields with aliases via the static factory method `Fields.field` of the aggregation framework that can then be used to construct a new `Fields` instance.
-
-.Projection expression examples
-====
-[source,java]
----
-project("name", "netPrice") // will generate {$project: {name: 1, netPrice: 1}}
-project().and("foo").as("bar") // will generate {$project: {bar: $foo}}
-project("a","b").and("foo").as("bar") // will generate {$project: {a: 1, b: 1, bar: $foo}}
----
-====
-
-Note that more examples for project operations can be found in the `AggregationTests` class.
-
-Note that further details regarding the projection expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
-
-[[mongo.aggregation.projection.expressions]]
-==== Spring Expression Support in Projection Expressions
-
-As of Version 1.4.0 we support the use of SpEL expressions in projection expressions via the `andExpression` method of the `ProjectionOperation` class. This allows you to define the desired expression as a SpEL expression which is translated into a corresponding MongoDB projection expression part on query execution. This makes it much easier to express complex calculations.
-
-===== Complex calculations with SpEL expressions
-
-The following SpEL expression:
-
-[source,java]
----
-1 + (q + 1) / (q - 1)
----
-
-will be translated into the following projection expression part:
-
-[source,javascript]
----
-{ "$add" : [ 1, {
-    "$divide" : [ {
-        "$add":["$q", 1]}, {
-        "$subtract":[ "$q", 1]}
-    ]
-}]}
----
-
-Have a look at an example in more context in <> and <>.
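-
-As a short sketch, assuming `q` is a numeric field of the input documents, the expression above is attached to a projection stage via `andExpression` like this:
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-// translated into the $add/$divide/$subtract document shown above
-Aggregation agg = newAggregation(
-    project().andExpression("1 + (q + 1) / (q - 1)").as("result")
-);
----
-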
-You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`.
-
-[[mongo.aggregation.examples]]
-==== Aggregation Framework Examples
-
-The following examples demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.
-
-[[mongo.aggregation.examples.example1]]
-.Aggregation Framework Example 1
-
-In this introductory example we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection called `"tags"`, sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection) and unwinding (result splitting).
-
-[source,java]
----
-class TagCount {
-  String tag;
-  int n;
-}
----
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-Aggregation agg = newAggregation(
-    project("tags"),
-    unwind("tags"),
-    group("tags").count().as("n"),
-    project("n").and("tag").previousOperation(),
-    sort(DESC, "n")
-);
-
-AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class);
-List<TagCount> tagCount = results.getMappedResults();
----
-
-* In order to do this we first create a new aggregation via the `newAggregation` static factory method to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`.
-* As a second step we select the `"tags"` field (which is an array of strings) from the input collection with the `project` operation.
-* In a third step we use the `unwind` operation to generate a new document for each tag within the `"tags"` array.
-* In the fourth step we use the `group` operation to define a group for each `"tags"`-value for which we aggregate the occurrence count via the `count` aggregation operator and collect the result in a new field called `"n"`.
-* As a fifth step we select the field `"n"` and create an alias for the id-field generated from the previous group operation (hence the call to `previousOperation()`) with the name `"tag"`.
-* As the sixth step we sort the resulting list of tags by their occurrence count in descending order via the `sort` operation.
-* Finally we call the `aggregate` method on the MongoTemplate in order to let MongoDB perform the actual aggregation operation with the created `Aggregation` as an argument.
-
-Note that the input collection is explicitly specified as the `"tags"` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as first parameter to the `newAggregation` method.
-
-[[mongo.aggregation.examples.example2]]
-.Aggregation Framework Example 2
-
-This example is based on the http://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state, using the aggregation framework. This example demonstrates the usage of grouping, sorting and projections (selection).
-
-[source,java]
----
-class ZipInfo {
-  String id;
-  String city;
-  String state;
-  @Field("pop") int population;
-  @Field("loc") double[] location;
-}
-
-class City {
-  String name;
-  int population;
-}
-
-class ZipInfoStats {
-  String id;
-  String state;
-  City biggestCity;
-  City smallestCity;
-}
----
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class,
-    group("state", "city")
-        .sum("population").as("pop"),
-    sort(ASC, "pop", "state", "city"),
-    group("state")
-        .last("city").as("biggestCity")
-        .last("pop").as("biggestPop")
-        .first("city").as("smallestCity")
-        .first("pop").as("smallestPop"),
-    project()
-        .and("state").previousOperation()
-        .and("biggestCity")
-            .nested(bind("name", "biggestCity").and("population", "biggestPop"))
-        .and("smallestCity")
-            .nested(bind("name", "smallestCity").and("population", "smallestPop")),
-    sort(ASC, "state")
-);
-
-AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class);
-ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0);
----
-
-* The class `ZipInfo` maps the structure of the given input collection. The class `ZipInfoStats` defines the structure in the desired output format.
-* As a first step we use the `group` operation to define a group from the input collection. The grouping criteria is the combination of the fields `"state"` and `"city"`, which forms the id structure of the group. We aggregate the value of the `"population"` property from the grouped elements by using the `sum` operator, saving the result in the field `"pop"`.
-* In a second step we use the `sort` operation to sort the intermediate result by the fields `"pop"`, `"state"` and `"city"` in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `"state"` and `"city"` is implicitly performed against the group id fields, which Spring Data MongoDB takes care of.
-* In the third step we use a `group` operation again to group the intermediate result by `"state"`. Note that `"state"` again implicitly references a group-id field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(…)` operator respectively.
-* As the fourth step we select the `"state"` field from the previous `group` operation via the `project` operation. Note that `"state"` again implicitly references a group-id field. As we do not want an implicitly generated id to appear, we exclude the id from the previous operation via `and(previousOperation()).exclude()`. As we want to populate the nested `City` structures in our output class accordingly, we have to emit appropriate sub-documents with the nested method.
-* Finally as the fifth step we sort the resulting list of `ZipInfoStats` by their state name in ascending order via the `sort` operation.
-
-Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method.
-
-[[mongo.aggregation.examples.example3]]
-.Aggregation Framework Example 3
-
-This example is based on the http://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions.
-Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates the usage of grouping, sorting and matching (filtering).
-
-[source,java]
----
-class StateStats {
-  @Id String id;
-  String state;
-  @Field("totalPop") int totalPopulation;
-}
----
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-TypedAggregation<ZipInfo> agg = newAggregation(ZipInfo.class,
-    group("state").sum("population").as("totalPop"),
-    sort(ASC, previousOperation(), "totalPop"),
-    match(where("totalPop").gte(10 * 1000 * 1000))
-);
-
-AggregationResults<StateStats> result = mongoTemplate.aggregate(agg, StateStats.class);
-List<StateStats> stateStatsList = result.getMappedResults();
----
-
-* As a first step we group the input collection by the `"state"` field and calculate the sum of the `"population"` field and store the result in the new field `"totalPop"`.
-* In the second step we sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order.
-* Finally in the third step we filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument.
-
-Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method.
-
-[[mongo.aggregation.examples.example4]]
-.Aggregation Framework Example 4
-
-This example demonstrates the use of simple arithmetic operations in the projection operation.
-
-[source,java]
----
-class Product {
-  String id;
-  String name;
-  double netPrice;
-  int spaceUnits;
-}
----
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-TypedAggregation<Product> agg = newAggregation(Product.class,
-    project("name", "netPrice")
-        .and("netPrice").plus(1).as("netPricePlus1")
-        .and("netPrice").minus(1).as("netPriceMinus1")
-        .and("netPrice").multiply(1.19).as("grossPrice")
-        .and("netPrice").divide(2).as("netPriceDiv2")
-        .and("spaceUnits").mod(2).as("spaceUnitsMod2")
-);
-
-AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, DBObject.class);
-List<DBObject> resultList = result.getMappedResults();
----
-
-Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method.
-
-[[mongo.aggregation.examples.example5]]
-.Aggregation Framework Example 5
-
-This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation.
-
-[source,java]
----
-class Product {
-  String id;
-  String name;
-  double netPrice;
-  int spaceUnits;
-}
----
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-TypedAggregation<Product> agg = newAggregation(Product.class,
-    project("name", "netPrice")
-        .andExpression("netPrice + 1").as("netPricePlus1")
-        .andExpression("netPrice - 1").as("netPriceMinus1")
-        .andExpression("netPrice / 2").as("netPriceDiv2")
-        .andExpression("netPrice * 1.19").as("grossPrice")
-        .andExpression("spaceUnits % 2").as("spaceUnitsMod2")
-        .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge")
-);
-
-AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, DBObject.class);
-List<DBObject> resultList = result.getMappedResults();
----
-
-[[mongo.aggregation.examples.example6]]
-.Aggregation Framework Example 6
-
-This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation.
-
-Note: The additional parameters passed to the `andExpression` method can be referenced via indexer expressions according to their position. In this example we reference the first parameter of the parameters array via `[0]`. External parameter expressions are replaced with their respective values when the SpEL expression is transformed into a MongoDB aggregation framework expression.
-
-[source,java]
----
-class Product {
-  String id;
-  String name;
-  double netPrice;
-  int spaceUnits;
-}
----
-
-[source,java]
----
-import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
-
-double shippingCosts = 1.2;
-
-TypedAggregation<Product> agg = newAggregation(Product.class,
-    project("name", "netPrice")
-        .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice")
-);
-
-AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, DBObject.class);
-List<DBObject> resultList = result.getMappedResults();
----
-
-Note that we can also refer to other fields of the document within the SpEL expression.
-
-[[mongo.custom-converters]]
-== Overriding default mapping with custom converters
-
-In order to have more fine-grained control over the mapping process you can register Spring converters with the `MongoConverter` implementations such as the `MappingMongoConverter`.
-
-The `MappingMongoConverter` checks to see if there are any Spring converters that can handle a specific class before attempting to map the object itself. To 'hijack' the normal mapping strategies of the `MappingMongoConverter`, perhaps for increased performance or other custom mapping needs, you first need to create an implementation of the Spring `Converter` interface and then register it with the `MappingMongoConverter`.
-
-NOTE: For more information on the Spring type conversion service see the reference docs http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/validation.html#core-convert[here].
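-
-Besides the XML-based registration shown later in this chapter, converters can also be registered in Java configuration. The following is a minimal sketch, assuming the Spring Data MongoDB 1.x `AbstractMongoConfiguration` base class and converter implementations like the `PersonReadConverter`/`PersonWriteConverter` shown in the next sections:
-
-[source,java]
----
-@Configuration
-public class ConverterConfiguration extends AbstractMongoConfiguration {
-
-  @Override
-  protected String getDatabaseName() {
-    return "database"; // illustrative database name
-  }
-
-  @Override
-  public Mongo mongo() throws Exception {
-    return new MongoClient();
-  }
-
-  // converters registered here take precedence over the default mapping
-  @Override
-  public CustomConversions customConversions() {
-    List<Converter<?, ?>> converters = new ArrayList<Converter<?, ?>>();
-    converters.add(new PersonReadConverter());
-    converters.add(new PersonWriteConverter());
-    return new CustomConversions(converters);
-  }
-}
----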
-
-[[mongo.custom-converters.writer]]
-=== Saving using a registered Spring Converter
-
-An example implementation of the `Converter` that converts from a Person object to a `com.mongodb.DBObject` is shown below.
-
-[source,java]
----
-import org.springframework.core.convert.converter.Converter;
-
-import com.mongodb.BasicDBObject;
-import com.mongodb.DBObject;
-
-public class PersonWriteConverter implements Converter<Person, DBObject> {
-
-  public DBObject convert(Person source) {
-    DBObject dbo = new BasicDBObject();
-    dbo.put("_id", source.getId());
-    dbo.put("name", source.getFirstName());
-    dbo.put("age", source.getAge());
-    return dbo;
-  }
-}
----
-
-[[mongo.custom-converters.reader]]
-=== Reading using a Spring Converter
-
-An example implementation of a Converter that converts from a `DBObject` to a Person object is shown below.
-
-[source,java]
----
-public class PersonReadConverter implements Converter<DBObject, Person> {
-
-  public Person convert(DBObject source) {
-    Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name"));
-    p.setAge((Integer) source.get("age"));
-    return p;
-  }
-}
----
-
-[[mongo.custom-converters.xml]]
-=== Registering Spring Converters with the MongoConverter
-
-The Mongo Spring namespace provides a convenient way to register Spring `Converter`s with the `MappingMongoConverter`. The configuration snippet below shows how to manually register converter beans as well as configuring the wrapping `MappingMongoConverter` into a `MongoTemplate`.
-
-[source,xml]
----
-<!-- illustrative converter registration; adjust bean classes and ids to your setup -->
-<mongo:db-factory dbname="database"/>
-
-<mongo:mapping-converter>
-  <mongo:custom-converters>
-    <mongo:converter ref="readConverter"/>
-    <mongo:converter>
-      <bean class="org.springframework.data.mongodb.test.PersonWriteConverter"/>
-    </mongo:converter>
-  </mongo:custom-converters>
-</mongo:mapping-converter>
-
-<bean id="readConverter" class="org.springframework.data.mongodb.test.PersonReadConverter"/>
-
-<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
-  <constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
-  <constructor-arg name="mongoConverter" ref="mappingConverter"/>
-</bean>
----
-
-You can also use the base-package attribute of the custom-converters element to enable classpath scanning for all `Converter` and `GenericConverter` implementations below the given package.
-
-[source,xml]
----
-<mongo:mapping-converter>
-  <mongo:custom-converters base-package="com.bigbank.converters"/>
-</mongo:mapping-converter>
----
-
-[[mongo.converter-disambiguation]]
-=== Converter disambiguation
-
-Generally we inspect the `Converter` implementations for the source and target types they convert from and to. Depending on whether one of those is a type MongoDB can handle natively, we will register the converter instance as a reading or a writing one. Have a look at the following samples:
-
-[source,java]
----
-// Write converter as only the target type is one Mongo can handle natively
-class MyConverter implements Converter<Person, String> { … }
-
-// Read converter as only the source type is one Mongo can handle natively
-class MyConverter implements Converter<String, Person> { … }
----
-
-In case you write a `Converter` whose source and target type are native Mongo types, there's no way for us to determine whether we should consider it as a reading or a writing converter. Registering the converter instance as both might lead to unwanted results then. E.g. a `Converter<String, Long>` is ambiguous, although it probably does not make sense to try to convert all `String` instances into `Long` instances when writing. To be generally able to force the infrastructure to register a converter for one way only, we provide `@ReadingConverter` as well as `@WritingConverter` to be used at the converter implementation.
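-
-As a short sketch (the `StringToLongConverter` name is purely illustrative), forcing one-way registration with these annotations looks like this:
-
-[source,java]
----
-import org.springframework.core.convert.converter.Converter;
-import org.springframework.data.convert.WritingConverter;
-
-// registered for the write path only, resolving the String/Long ambiguity
-@WritingConverter
-public class StringToLongConverter implements Converter<String, Long> {
-
-  public Long convert(String source) {
-    return Long.valueOf(source);
-  }
-}
----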
-
-The `IndexOperations` interface is shown below.
-
-[source,java]
----
-public interface IndexOperations {
-
-  void ensureIndex(IndexDefinition indexDefinition);
-
-  void dropIndex(String name);
-
-  void dropAllIndexes();
-
-  void resetIndexCache();
-
-  List<IndexInfo> getIndexInfo();
-}
----
-
-[[mongo-template.index-and-collections.index]]
-=== Methods for creating an Index
-
-We can create an index on a collection to improve query performance.
-
-==== Creating an index using the MongoTemplate
-
-[source,java]
----
-mongoTemplate.indexOps(Person.class).ensureIndex(new Index().on("name", Order.ASCENDING));
----
-
-* *ensureIndex* Ensure that an index for the provided IndexDefinition exists for the collection.
-
-You can create standard, geospatial and text indexes using the classes `IndexDefinition`, `GeospatialIndex` and `TextIndexDefinition`. For example, given the Venue class defined in a previous section, you would declare a geospatial index as shown below.
-
-[source,java]
----
-mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location"));
----
-
-[[mongo-template.index-and-collections.access]]
-=== Accessing index information
-
-The IndexOperations interface has the method getIndexInfo that returns a list of IndexInfo objects. This contains all the indexes defined on the collection. Here is an example that defines an index on the Person class that has an age property.
-
-[source,java]
----
-template.indexOps(Person.class).ensureIndex(new Index().on("age", Order.DESCENDING).unique(Duplicates.DROP));
-
-List<IndexInfo> indexInfoList = template.indexOps(Person.class).getIndexInfo();
-
-// Contains
-// [IndexInfo [fieldSpec={_id=ASCENDING}, name=_id_, unique=false, dropDuplicates=false, sparse=false],
-//  IndexInfo [fieldSpec={age=DESCENDING}, name=age_-1, unique=true, dropDuplicates=true, sparse=false]]
----
-
-[[mongo-template.index-and-collections.collection]]
-=== Methods for working with a Collection
-
-It's time to look at some code examples showing how to use the `MongoTemplate`. First we look at creating our first collection.
-
-.Working with collections using the MongoTemplate
-====
-[source,java]
----
-DBCollection collection = null;
-if (!mongoTemplate.getCollectionNames().contains("MyNewCollection")) {
-  collection = mongoTemplate.createCollection("MyNewCollection");
-}
-
-mongoTemplate.dropCollection("MyNewCollection");
----
-====
-
-* *getCollectionNames* Returns a set of collection names.
-* *collectionExists* Check to see if a collection with a given name exists.
-* *createCollection* Create an uncapped collection.
-* *dropCollection* Drop the collection.
-* *getCollection* Get a collection by name, creating it if it doesn't exist.
-
-[[mongo-template.commands]]
-== Executing Commands
-
-You can also get at the MongoDB driver's `DB.command()` method using the `executeCommand(…)` methods on `MongoTemplate`. These will also perform exception translation into Spring's `DataAccessException` hierarchy.
-
-[[mongo-template.commands.execution]]
-=== Methods for executing commands
-
-* `CommandResult` *executeCommand* `(DBObject command)` Execute a MongoDB command.
-* `CommandResult` *executeCommand* `(String jsonCommand)` Execute a MongoDB command expressed as a JSON string.
-
-[[mongodb.mapping-usage.events]]
-== Lifecycle Events
-
-Built into the MongoDB mapping framework are several `org.springframework.context.ApplicationEvent` events that your application can respond to by registering special beans in the `ApplicationContext`.
-Being based on Spring's `ApplicationContext` event infrastructure enables other products, such as Spring Integration, to easily receive these events, as they are a well-known eventing mechanism in Spring-based applications.
-
-To intercept an object before it goes through the conversion process (which turns your domain object into a `com.mongodb.DBObject`), you'd register a subclass of `AbstractMongoEventListener` that overrides the `onBeforeConvert` method. When the event is dispatched, your listener will be called and passed the domain object before it goes into the converter.
-
-====
-[source,java]
----
-public class BeforeConvertListener extends AbstractMongoEventListener<Person> {
-  @Override
-  public void onBeforeConvert(BeforeConvertEvent<Person> event) {
-    ... does some auditing manipulation, set timestamps, whatever ...
-  }
-}
----
-====
-
-To intercept an object before it goes into the database, you'd register a subclass of `org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener` that overrides the `onBeforeSave` method. When the event is dispatched, your listener will be called and passed the domain object and the converted `com.mongodb.DBObject`.
-
-====
-[source,java]
----
-public class BeforeSaveListener extends AbstractMongoEventListener<Person> {
-  @Override
-  public void onBeforeSave(BeforeSaveEvent<Person> event) {
-    … change values, delete them, whatever …
-  }
-}
----
-====
-
-Simply declaring these beans in your Spring ApplicationContext will cause them to be invoked whenever the event is dispatched.
-
-The list of callback methods that are present in `AbstractMongoEventListener` is:
-
-* `onBeforeConvert` - called in MongoTemplate insert, insertList and save operations before the object is converted to a DBObject using a MongoConverter.
-* `onBeforeSave` - called in MongoTemplate insert, insertList and save operations *before* inserting/saving the DBObject in the database.
-* `onAfterSave` - called in MongoTemplate insert, insertList and save operations *after* inserting/saving the DBObject in the database.
-* `onAfterLoad` - called in MongoTemplate find, findAndRemove, findOne and getCollection methods after the DBObject is retrieved from the database.
-* `onAfterConvert` - called in MongoTemplate find, findAndRemove, findOne and getCollection methods after the DBObject retrieved from the database was converted to a POJO.
-
-[[mongo.exception]]
-== Exception Translation
-
-The Spring framework provides exception translation for a wide variety of database and mapping technologies. This has traditionally been for JDBC and JPA. The Spring support for MongoDB extends this feature to the MongoDB Database by providing an implementation of the `org.springframework.dao.support.PersistenceExceptionTranslator` interface.
-
-The motivation behind mapping to Spring's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/dao.html#dao-exceptions[consistent data access exception hierarchy] is that you are then able to write portable and descriptive exception handling code without resorting to coding against http://www.mongodb.org/about/contributors/error-codes/[MongoDB error codes]. All of Spring's data access exceptions are inherited from the root `DataAccessException` class, so you can be sure that you will be able to catch all database related exceptions within a single try-catch block. Note that not all exceptions thrown by the MongoDB driver inherit from the MongoException class. The inner exception and message are preserved so no information is lost.
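-
-As a short illustrative sketch (the `person` object and template setup are assumed to exist), portable exception handling then looks like this:
-
-[source,java]
----
-try {
-  mongoTemplate.insert(person);
-} catch (DataAccessException e) {
-  // all translated exceptions derive from DataAccessException, so a single
-  // catch block covers any MongoDB-related failure without driver error codes
-}
----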
-
-Some of the mappings performed by the `MongoExceptionTranslator` are: `com.mongodb.Network` to `DataAccessResourceFailureException` and `MongoException` error codes 1003, 12001, 12010, 12011, 12012 to `InvalidDataAccessApiUsageException`. Look into the implementation for more details on the mapping.
-
-[[mongo.executioncallback]]
-== Execution callbacks
-
-One common design feature of all Spring template classes is that all functionality is routed into one of the template's execute callback methods. This helps ensure that exceptions and any resource management that may be required are performed consistently. While this was of much greater need in the case of JDBC and JMS than with MongoDB, it still offers a single spot for exception translation and logging to occur. As such, using these execute callbacks is the preferred way to access the MongoDB driver's `DB` and `DBCollection` objects to perform uncommon operations that are not exposed as methods on `MongoTemplate`.
-
-Here is a list of execute callback methods.
-
-* `<T> T` *execute* `(Class<?> entityClass, CollectionCallback<T> action)` Executes the given CollectionCallback for the entity collection of the specified class.
-
-* `<T> T` *execute* `(String collectionName, CollectionCallback<T> action)` Executes the given CollectionCallback on the collection of the given name.
-
-* `<T> T` *execute* `(DbCallback<T> action)` Executes a DbCallback translating any exceptions as necessary.
-
-* `<T> T` *execute* `(String collectionName, DbCallback<T> action)` Executes a DbCallback on the collection of the given name translating any exceptions as necessary.
-
-* `<T> T` *executeInSession* `(DbCallback<T> action)` Executes the given DbCallback within the same connection to the database so as to ensure consistency in a write heavy environment where you may read the data that you wrote.
-
-Here is an example that uses the `CollectionCallback` to return information about an index.
-
-[source,java]
----
-boolean hasIndex = template.execute("geolocation", new CollectionCallback<Boolean>() {
-  public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException {
-    List<DBObject> indexes = collection.getIndexInfo();
-    for (DBObject dbo : indexes) {
-      if ("location_2d".equals(dbo.get("name"))) {
-        return true;
-      }
-    }
-    return false;
-  }
-});
----
-
-[[gridfs]]
-== GridFS support
-
-MongoDB supports storing binary files inside its filesystem GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation `GridFsTemplate` to easily interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDbFactory` as well as a `MongoConverter`:
-
-.JavaConfig setup for a GridFsTemplate
-====
-[source,java]
----
-class GridFsConfiguration extends AbstractMongoConfiguration {
-
-  // … further configuration omitted
-
-  @Bean
-  public GridFsTemplate gridFsTemplate() {
-    return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
-  }
-}
----
-====
-
-A corresponding XML configuration looks like this:
-
-.XML configuration for a GridFsTemplate
-====
-[source,xml]
----
-<!-- illustrative configuration; adjust ids and database name to your setup -->
-<mongo:db-factory id="mongoDbFactory" dbname="database"/>
-<mongo:mapping-converter id="converter"/>
-
-<bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
-  <constructor-arg ref="mongoDbFactory"/>
-  <constructor-arg ref="converter"/>
-</bean>
----
-====
-
-The template can now be injected and used to perform storage and retrieval operations.
-
-.Using GridFsTemplate to store files
-====
-[source,java]
----
-class GridFsClient {
-
-  @Autowired
-  GridFsOperations operations;
-
-  @Test
-  public void storeFileToGridFs() {
-
-    FileMetadata metadata = new FileMetadata();
-    // populate metadata
-    Resource file = … // lookup File or Resource
-
-    operations.store(file.getInputStream(), "filename.txt", metadata);
-  }
-}
----
-====
-
-The `store(…)` operations take an `InputStream`, a filename and optionally metadata information about the file to store. The metadata can be an arbitrary object, which will be marshalled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `DBObject`.
-
-Reading files from the filesystem can either be achieved through the `find(…)` or `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file matching a `Query` or multiple ones. To easily define file queries we provide the `GridFsCriteria` helper class. It provides static factory methods to encapsulate default metadata fields (e.g. `whereFilename()`, `whereContentType()`) or custom ones through `whereMetaData()`.
-
-.Using GridFsTemplate to query for files
-====
-[source,java]
----
-class GridFsClient {
-
-  @Autowired
-  GridFsOperations operations;
-
-  @Test
-  public void findFilesInGridFs() {
-    List<GridFSDBFile> result = operations.find(query(whereFilename().is("filename.txt")));
-  }
-}
----
-====
-
-NOTE: Currently MongoDB does not support defining sort criteria when retrieving files from GridFS. Thus any sort criteria defined on the `Query` instance handed into the `find(…)` method will be disregarded.
-
-The other option to read files from GridFS is using the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and thus retrieving files matching the given pattern.
-
-.Using GridFsTemplate to read files
-====
-[source,java]
----
-class GridFsClient {
-
-  @Autowired
-  GridFsOperations operations;
-
-  @Test
-  public void readFilesFromGridFs() {
-    GridFsResource[] txtFiles = operations.getResources("*.txt");
-  }
-}
----
-====
-
-`GridFsOperations` extending `ResourcePatternResolver` allows the `GridFsTemplate` e.g. to be plugged into an `ApplicationContext` to read Spring config files from MongoDB.
diff --git a/src/main/asciidoc/reference/query-by-example.adoc b/src/main/asciidoc/reference/query-by-example.adoc
deleted file mode 100644
index d8c5fdfa21..0000000000
--- a/src/main/asciidoc/reference/query-by-example.adoc
+++ /dev/null
@@ -1,71 +0,0 @@
-[[query-by-example.execution]]
-== Executing an example
-
-.Query by Example using a Repository
-====
-[source, java]
----
-public interface PersonRepository extends QueryByExampleExecutor<Person> {
-
-}
-
-public class PersonService {
-
-  @Autowired PersonRepository personRepository;
-
-  public List<Person> findPeople(Person probe) {
-    return personRepository.findAll(Example.of(probe));
-  }
-}
----
-====
-
-An `Example` containing an untyped `ExampleSpec` uses the Repository type and its collection name. Typed `ExampleSpec` use their type as result type and the collection name from the Repository.
-
-NOTE: When including `null` values in the `ExampleSpec` Spring Data Mongo uses embedded document matching instead of dot notation property matching. This forces exact document matching for all property values and the property order in the embedded document.
- -Spring Data MongoDB provides support for the following matching options: - -[cols="1,2", options="header"] -.`StringMatcher` options -|=== -| Matching -| Logical result - -| `DEFAULT` (case-sensitive) -| `{"firstname" : firstname}` - -| `DEFAULT` (case-insensitive) -| `{"firstname" : { $regex: firstname, $options: 'i'}}` - -| `EXACT` (case-sensitive) -| `{"firstname" : { $regex: /^firstname$/}}` - -| `EXACT` (case-insensitive) -| `{"firstname" : { $regex: /^firstname$/, $options: 'i'}}` - -| `STARTING` (case-sensitive) -| `{"firstname" : { $regex: /^firstname/}}` - -| `STARTING` (case-insensitive) -| `{"firstname" : { $regex: /^firstname/, $options: 'i'}}` - -| `ENDING` (case-sensitive) -| `{"firstname" : { $regex: /firstname$/}}` - -| `ENDING` (case-insensitive) -| `{"firstname" : { $regex: /firstname$/, $options: 'i'}}` - -| `CONTAINING` (case-sensitive) -| `{"firstname" : { $regex: /.\*firstname.*/}}` - -| `CONTAINING` (case-insensitive) -| `{"firstname" : { $regex: /.\*firstname.*/, $options: 'i'}}` - -| `REGEX` (case-sensitive) -| `{"firstname" : { $regex: /firstname/}}` - -| `REGEX` (case-insensitive) -| `{"firstname" : { $regex: /firstname/, $options: 'i'}}` - -|=== diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt deleted file mode 100644 index 58c3f31c37..0000000000 --- a/src/main/resources/changelog.txt +++ /dev/null @@ -1,1593 +0,0 @@ -Spring Data MongoDB Changelog -============================= - -Changes in version 1.9.1.RELEASE (2016-04-06) ---------------------------------------------- -* DATAMONGO-1408 - Release 1.9.1 (Hopper SR1). - - -Changes in version 1.9.0.RELEASE (2016-04-06) ---------------------------------------------- -* DATAMONGO-1407 - Add pull request template. -* DATAMONGO-1405 - Release 1.9 GA (Hopper). -* DATAMONGO-1401 - GeoJsonPoint error on update. -* DATAMONGO-1398 - Update documentation for Spring Data MongoDB 1.9. -* DATAMONGO-1396 - Exception when creating geo within Criteria using Aggregation. - - -Changes in version 1.9.0.RC1 (2016-03-18) ------------------------------------------ -* DATAMONGO-1400 - Adapt to rename of Spring Data Commons' Tuple to Pair. -* DATAMONGO-1397 - MongoTemplate.geoNear() do not log the Query. -* DATAMONGO-1392 - Release 1.9 RC1 (Hopper). -* DATAMONGO-1389 - Adapt test case to changes made for improved type prediction infrastructure. -* DATAMONGO-1387 - BasicQuery.fields().include() doesn't stick, even though Query.fields().include() does. -* DATAMONGO-1373 - Problem with custom annotations with AliasFor annotated attributes. -* DATAMONGO-1326 - Add support for $lookup to aggregation. -* DATAMONGO-1245 - Add support for Query-By-Example. - - -Changes in version 1.8.4.RELEASE (2016-02-23) ---------------------------------------------- -* DATAMONGO-1381 - Release 1.8.4 (Gosling SR4). -* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister. -* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort). -* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories. -* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE. -* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc. -* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results. -* DATAMONGO-1360 - Cannot query with JSR310. -* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean. 
- - -Changes in version 1.9.0.M1 (2016-02-12) ----------------------------------------- -* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister. -* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort). -* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories. -* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE. -* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc. -* DATAMONGO-1372 - Add converter for Currency. -* DATAMONGO-1371 - Add code of conduct. -* DATAMONGO-1366 - Release 1.9 M1 (Hopper). -* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results. -* DATAMONGO-1360 - Cannot query with JSR310. -* DATAMONGO-1349 - Upgrade to mongo-java-driver 2.14.0. -* DATAMONGO-1346 - Cannot add two pullAll to an Update. -* DATAMONGO-1345 - Add support for projections on repository query methods. -* DATAMONGO-1342 - Potential NullPointerException in MongoQueryCreator.nextAsArray(…). -* DATAMONGO-1341 - Remove package cycle between core and core.index. -* DATAMONGO-1337 - General code quality improvements. -* DATAMONGO-1335 - DBObjectAccessor doesn't write properties correctly if multiple ones are nested. -* DATAMONGO-1334 - MapResultOptions limit not implemented. -* DATAMONGO-1324 - StringToObjectIdConverter not properly registered causing drop in performance on identifier conversion. -* DATAMONGO-1317 - Assert compatibility with MongoDB Java driver 3.2. -* DATAMONGO-1314 - Fix typo in Exception message. -* DATAMONGO-1312 - Cannot convert generic sub-document fields. -* DATAMONGO-1303 - Add build profile for MongoDB 3.1 driver. -* DATAMONGO-1302 - CustomConversions should allow registration of ConverterFactory. -* DATAMONGO-1297 - Unique Index on DBRef. -* DATAMONGO-1293 - MongoDbFactoryParser should allow id attribute in addition to client-uri. -* DATAMONGO-1291 - Allow @Document to be used as meta-annotation. -* DATAMONGO-1290 - @Query annotation with byte[] parameter does not work. -* DATAMONGO-1289 - NullPointerException when saving an object with no "id" field or @Id annotation. -* DATAMONGO-1288 - Update.inc(String, Number) method fails to work with AtomicInteger. -* DATAMONGO-1287 - MappingMongoConverter eagerly fetches and converts lazy DbRef to change them afterwards by proxies. -* DATAMONGO-1276 - MongoTemplate.CloseableIterableCursorAdapter does not null check return values from PersistenceExceptionTranslator. -* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean. -* DATAMONGO-1238 - Support for Querydsl 4. -* DATAMONGO-1204 - ObjectPath equality check breaks due to changes MongoDB V3. -* DATAMONGO-1163 - Allow @Indexed to be used as meta-annotation. -* DATAMONGO-934 - Add support for the bulk operations introduced in MongoDB 2.6. - - -Changes in version 1.8.2.RELEASE (2015-12-18) ---------------------------------------------- -* DATAMONGO-1355 - Release 1.8.2 (Gosling). -* DATAMONGO-1346 - Cannot add two pullAll to an Update. -* DATAMONGO-1342 - Potential NullPointerException in MongoQueryCreator.nextAsArray(…). -* DATAMONGO-1337 - General code quality improvements. -* DATAMONGO-1335 - DBObjectAccessor doesn't write properties correctly if multiple ones are nested. -* DATAMONGO-1334 - MapResultOptions limit not implemented. -* DATAMONGO-1324 - StringToObjectIdConverter not properly registered causing drop in performance on identifier conversion. -* DATAMONGO-1317 - Assert compatibility with MongoDB Java driver 3.2. 
-* DATAMONGO-1290 - @Query annotation with byte[] parameter does not work.
-* DATAMONGO-1289 - NullPointerException when saving an object with no "id" field or @Id annotation.
-* DATAMONGO-1287 - MappingMongoConverter eagerly fetches and converts lazy DbRef to change them afterwards by proxies.
-* DATAMONGO-1204 - ObjectPath equality check breaks due to changes MongoDB V3.
-
-
-Changes in version 1.8.1.RELEASE (2015-11-15)
---------------------------------------------- 
-* DATAMONGO-1316 - Release 1.8.1 (Gosling).
-* DATAMONGO-1312 - Cannot convert generic sub-document fields.
-* DATAMONGO-1302 - CustomConversions should allow registration of ConverterFactory.
-* DATAMONGO-1297 - Unique Index on DBRef.
-* DATAMONGO-1293 - MongoDbFactoryParser should allow id attribute in addition to client-uri.
-* DATAMONGO-1276 - MongoTemplate.CloseableIterableCursorAdapter does not null check return values from PersistenceExceptionTranslator.
-
-
-Changes in version 1.6.4.RELEASE (2015-10-14)
---------------------------------------------- 
-* DATAMONGO-1304 - Release 1.6.4 (Evans).
-
-
-Changes in version 1.8.0.RELEASE (2015-09-01)
---------------------------------------------- 
-* DATAMONGO-1282 - Release 1.8 GA (Gosling).
-* DATAMONGO-1280 - Add what's new section to reference documentation.
-* DATAMONGO-1275 - Reference documentation should mention support for optimistic locking.
-* DATAMONGO-1269 - QueryMapper drops numeric keys in Maps.
-* DATAMONGO-1256 - Provide a collectionName in MongoMappingEvents.
-
-
-Changes in version 1.8.0.RC1 (2015-08-04)
------------------------------------------ 
-* DATAMONGO-1268 - Release 1.8 RC1 (Gosling).
-* DATAMONGO-1266 - Repository query methods returning a primitive do not detect domain type correctly.
-* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
-* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
-* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name.
-* DATAMONGO-1251 - update / findAndModify throws NullPointerException.
-* DATAMONGO-1250 - Custom converter implementation not used in updates.
-* DATAMONGO-1244 - StringBasedMongoQuery handles complex expression parameters incorrectly.
-* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
-* DATAMONGO-1236 - MongoOperations findAndModify and updateFirst do not include the _class in Map values.
-* DATAMONGO-1234 - Fix typos in JavaDoc.
-* DATAMONGO-1232 - IgnoreCase should escape queries.
-* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
-* DATAMONGO-1166 - ReadPreference not used for Aggregations.
-* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
-* DATAMONGO-1125 - Specify collection that triggers CommandFailureException.
-
-
-Changes in version 1.7.2.RELEASE (2015-07-28)
--------------------------------------------- 
-* DATAMONGO-1261 - Release 1.7.2 (Fowler).
-* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
-* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
-* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name.
-* DATAMONGO-1251 - update / findAndModify throws NullPointerException.
-* DATAMONGO-1250 - Custom converter implementation not used in updates.
- - -Changes in version 1.5.6.RELEASE (2015-07-01) ---------------------------------------------- -* DATAMONGO-1246 - Release 1.5.6 (Dijkstra). -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0. - - -Changes in version 1.6.3.RELEASE (2015-07-01) ---------------------------------------------- -* DATAMONGO-1247 - Release 1.6.3 (Evans). -* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile. -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation. -* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release. -* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types. -* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0. -* DATAMONGO-1153 - Fix documentation build. -* DATAMONGO-1133 - Field aliasing is not honored in Aggregation operations. -* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO. -* DATAMONGO-1081 - Improve documentation on field mapping semantics. - - -Changes in version 1.7.1.RELEASE (2015-06-30) ---------------------------------------------- -* DATAMONGO-1248 - Release 1.7.1 (Fowler). -* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile. -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1. -* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation. -* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality. -* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. 
-* DATAMONGO-1202 - Indexed annotation problems under generics. -* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release. -* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types. - - -Changes in version 1.8.0.M1 (2015-06-02) ----------------------------------------- -* DATAMONGO-1228 - Release 1.8 M1 (Gosling). -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1218 - Deprecate non-MongoClient related configuration options in XML namespace. -* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1. -* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation. -* DATAMONGO-1211 - Adapt API changes in Spring Data Commons to simplify custom repository base class registration. -* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality. -* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1202 - Indexed annotation problems under generics. -* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release. -* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver. -* DATAMONGO-1192 - Switch back to Spring 4.1's CollectionFactory. -* DATAMONGO-1134 - Add support for $geoIntersects. -* DATAMONGO-990 - Add support for SpEL expressions in @Query. - - -Changes in version 1.7.0.RELEASE (2015-03-23) ---------------------------------------------- -* DATAMONGO-1189 - Release 1.7 GA. -* DATAMONGO-1181 - Add Jackson Module for GeoJSON types. -* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery. -* DATAMONGO-1179 - Update reference documentation. -* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO. -* DATAMONGO-979 - Add support for $size expression in project and group aggregation pipeline. - - -Changes in version 1.7.0.RC1 (2015-03-05) ------------------------------------------ -* DATAMONGO-1173 - Release 1.7 RC1. -* DATAMONGO-1167 - Add 'findAll' method to QueryDslMongoRepository which accepts a querydsl Predicate and a Sort. -* DATAMONGO-1165 - Add support for Java 8 Stream as return type in repositories. -* DATAMONGO-1162 - Adapt test cases to semantic changes in Spring Data Commons AuditingHandler API. -* DATAMONGO-1158 - Assert compatibility with MongoDB 3.0. -* DATAMONGO-1154 - Upgrade to MongoDB Java driver 2.13.0. -* DATAMONGO-1153 - Fix documentation build. -* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR. -* DATAMONGO-1147 - Remove manual array copy. -* DATAMONGO-1146 - Add 'exists' method to QueryDslMongoRepository which accepts a querydsl Predicate. -* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5. -* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance. -* DATAMONGO-1136 - Use $geoWithin instead of $within for geo queries. -* DATAMONGO-1135 - Add support for $geometry to support GeoJSON queries. -* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation. 
-* DATAMONGO-1131 - Register converters for ThreeTen back port by default. -* DATAMONGO-1129 - Upgrade to latest MongoDB Java driver. -* DATAMONGO-1127 - Add support for geoNear queries with distance information. -* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly. -* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents. -* DATAMONGO-1121 - "Cycle found" false positive. -* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts. -* DATAMONGO-1118 - Custom converters not used for map keys. -* DATAMONGO-1110 - Add support for $minDistance to NearQuery. -* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support. -* DATAMONGO-1081 - Improve documentation on field mapping semantics. -* DATAMONGO-712 - Another round of potential performance improvements. -* DATAMONGO-479 - Support calling of MongoDB stored javascripts. - - -Changes in version 1.6.2.RELEASE (2015-01-28) ---------------------------------------------- -* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR. -* DATAMONGO-1147 - Remove manual array copy. -* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5. -* DATAMONGO-1144 - Release 1.6.2. -* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance. -* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation. -* DATAMONGO-1127 - Add support for geoNear queries with distance information. -* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly. -* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents. -* DATAMONGO-1121 - "Cycle found" false positive. -* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts. -* DATAMONGO-1118 - Custom converters not used for map keys. -* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation. -* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate. -* DATAMONGO-1094 - Wrong reference to @DocumentField in error message. -* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods. -* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field... -* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL. -* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support. -* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure. -* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties. -* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible. -* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions. -* DATAMONGO-712 - Another round of potential performance improvements. - - -Changes in version 1.5.5.RELEASE (2015-01-27) ---------------------------------------------- -* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR. -* DATAMONGO-1147 - Remove manual array copy. -* DATAMONGO-1143 - Release 1.5.5. -* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance. -* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents. -* DATAMONGO-1121 - "Cycle found" false positive. 
-* DATAMONGO-1118 - Custom converters not used for map keys.
-* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
-* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
-* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field...
-* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
-* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
-* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
-* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
-* DATAMONGO-1063 - IllegalStateException using any().in().
-* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
-* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
-* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
-* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
-* DATAMONGO-1040 - deleteAll repository query doesn't use EntityMetadata collection name.
-* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
-* DATAMONGO-712 - Another round of potential performance improvements.
-
-
-Changes in version 1.7.0.M1 (2014-12-01)
----------------------------------------- 
-* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation.
-* DATAMONGO-1106 - Release 1.7 M1.
-* DATAMONGO-1105 - Add implementation for new QueryDslPredicateExecutor.findAll(OrderSpecifier... orders).
-* DATAMONGO-1102 - Auto-register JSR-310 converters to support JDK 8 date/time types.
-* DATAMONGO-1101 - Add support for $bit to Update.
-* DATAMONGO-1100 - Adapt to new PersistentPropertyAccessor API.
-* DATAMONGO-1097 - Add support for $mul to Update.
-* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
-* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
-* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods.
-* DATAMONGO-1092 - Ensure compatibility with MongoDB 2.8.0.rc0 and java driver 2.13.0-rc0.
-* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field…
-* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL.
-* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
-* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
-* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
-* DATAMONGO-1076 - Finalizer hit db on lazy dbrefs.
-* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
-* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
-* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
-* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
-* DATAMONGO-1063 - IllegalStateException using any().in().
-* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
-* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
-* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
-* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible.
-* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
-* DATAMONGO-1050 - SimpleMongoRepository.findById(id, class) doesn't return ids for nested documents.
-* DATAMONGO-1049 - Reserved field name 'language' causes trouble.
-* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
-* DATAMONGO-943 - Add support for $position to Update $push $each.
-
-
-Changes in version 1.6.1.RELEASE (2014-10-30)
---------------------------------------------- 
-* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
-* DATAMONGO-1079 - Release 1.6.1.
-* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
-* DATAMONGO-1076 - Finalizer hit db on lazy dbrefs.
-* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
-* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
-* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
-* DATAMONGO-1063 - IllegalStateException using any().in().
-* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
-* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
-* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
-* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
-* DATAMONGO-1049 - Reserved field name 'language' causes trouble.
-
-
-Changes in version 1.6.0.RELEASE (2014-09-05)
---------------------------------------------- 
-* DATAMONGO-1046 - Release 1.6 GA.
-* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
-* DATAMONGO-1040 - deleteAll repository query doesn't use EntityMetadata collection name.
-* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
-* DATAMONGO-1038 - Assert Mongo instances cleaned up properly after test runs.
-* DATAMONGO-1036 - Custom repository implementations are not picked up when using CDI.
-* DATAMONGO-1034 - Improve error message when trying to convert incompatible types.
-* DATAMONGO-1032 - Polish Asciidoctor documentation.
-* DATAMONGO-1030 - Query methods returning a single entity do not work with projecting types.
-* DATAMONGO-1027 - Collection inherits complex index from embedded class/object.
-* DATAMONGO-1025 - Duplicate index creation on embedded documents.
-
-
-Changes in version 1.5.4.RELEASE (2014-08-27)
---------------------------------------------- 
-* DATAMONGO-1038 - Assert Mongo instances cleaned up properly after test runs.
-* DATAMONGO-1034 - Improve error message when trying to convert incompatible types.
-* DATAMONGO-1033 - Release 1.5.4.
-* DATAMONGO-1030 - Query methods returning a single entity do not work with projecting types.
-* DATAMONGO-1027 - Collection inherits complex index from embedded class/object.
-* DATAMONGO-1025 - Duplicate index creation on embedded documents.
-* DATAMONGO-1020 - LimitOperator should be a public class.
-* DATAMONGO-1008 - IndexOperations fail, when "2dsphere" index is present.
-
-
-Changes in version 1.6.0.RC1 (2014-08-13)
----------------------------------------- 
-* DATAMONGO-1024 - Upgrade to Java driver 2.12.3.
-* DATAMONGO-1021 - Release 1.6 RC1.
-* DATAMONGO-1020 - LimitOperator should be a public class.
-* DATAMONGO-1019 - Correct examples in reference documentation.
-* DATAMONGO-1017 - Add support for custom implementations in CDI repositories.
-* DATAMONGO-1016 - Remove deprecations in geospatial area. -* DATAMONGO-1015 - Move to Asciidoctor for reference documentation. -* DATAMONGO-1012 - Proxies for lazy DBRefs with field access should have their id values resolved eagerly. -* DATAMONGO-1009 - Adapt to new multi-store configuration detection. -* DATAMONGO-1008 - IndexOperations fail, when "2dsphere" index is present. -* DATAMONGO-1005 - Improve cycle-detection for DbRef's. -* DATAMONGO-1002 - Update.toString(…) might throw exception. -* DATAMONGO-1001 - Can't save/update lazy load object. -* DATAMONGO-999 - Multiple Mongo Instances always have the same MongoOption Reference - MongoOptionsFactoryBean has a static instance of MongoOptions. -* DATAMONGO-996 - Pagination broken after introduction of the support for top/first. -* DATAMONGO-995 - Parameter binding in String-based query does not bind all parameters. -* DATAMONGO-993 - The system variables $$CURRENT and $$ROOT not handled correctly. -* DATAMONGO-992 - Entity can't be deserialized if @TypeAlias is used. -* DATAMONGO-991 - Adapt to deprecation removals in Spring Data Commons. -* DATAMONGO-989 - MatchOperation should accept CriteriaDefinition. -* DATAMONGO-987 - Problem with lazy loading in @DBRef when getting data using MongoTemplate. -* DATAMONGO-974 - synthetic field target's name is returned instead of the alias name. -* DATAMONGO-973 - Add support for deriving full text queries. -* DATAMONGO-957 - Add support for query modifiers. -* DATAMONGO-420 - Extra quotes being added to @Query values and fields. - - -Changes in version 1.5.2.RELEASE (2014-07-28) ---------------------------------------------- -* DATAMONGO-1007 - Release 1.5.2. -* DATAMONGO-1002 - Update.toString(…) might throw exception. -* DATAMONGO-1001 - Can't save/update lazy load object. -* DATAMONGO-999 - Multiple Mongo Instances always have the same MongoOption Reference - MongoOptionsFactoryBean has a static instance of MongoOptions. -* DATAMONGO-995 - Parameter binding in String-based query does not bind all parameters. -* DATAMONGO-992 - Entity can't be deserialized if @TypeAlias is used. -* DATAMONGO-989 - MatchOperation should accept CriteriaDefinition. -* DATAMONGO-987 - Problem with lazy loading in @DBRef when getting data using MongoTemplate. -* DATAMONGO-983 - Remove links to forum.spring.io. -* DATAMONGO-982 - Assure compatibility with upcoming MongoDB driver versions. -* DATAMONGO-978 - deleteBy/removeBy repository methods don't set type information in Before/AfterDeleteEvent. -* DATAMONGO-972 - References are not handled properly in Querydsl integration. -* DATAMONGO-969 - String @id field is not mapped to ObjectId when using QueryDSL .id.in(Collection). -* DATAMONGO-420 - Extra quotes being added to @Query values and fields. - - -Changes in version 1.6.0.M1 (2014-07-10) ----------------------------------------- -* DATAMONGO-983 - Remove links to forum.spring.io. -* DATAMONGO-982 - Assure compatibility with upcoming MongoDB driver versions. -* DATAMONGO-981 - Release 1.6 M1. -* DATAMONGO-980 - Use meta annotations from spring data commons for @Score. -* DATAMONGO-978 - deleteBy/removeBy repository methods don't set type information in Before/AfterDeleteEvent. -* DATAMONGO-977 - Adapt to Spring 4 upgrade. -* DATAMONGO-976 - Add support for reading $meta projection on textScore into document. -* DATAMONGO-975 - Add support for date/time operators in aggregation framework. -* DATAMONGO-973 - Add support for deriving full text queries. 
-* DATAMONGO-972 - References are not handled properly in Querydsl integration. -* DATAMONGO-970 - Id query cannot be created if object to remove is DBObject. -* DATAMONGO-969 - String @id field is not mapped to ObjectId when using QueryDSL .id.in(Collection). -* DATAMONGO-968 - Add support for $meta projections and sorting for textScore metadata. -* DATAMONGO-963 - Compound index with expireAfterSeconds causes repeating error on mongodb server. -* DATAMONGO-962 - “Cycle found” with Spring Data Mongo 1.5. -* DATAMONGO-960 - Allow to pass options to the Aggregation Pipeline. -* DATAMONGO-958 - Move to FieldNamingStrategy SPI in Spring Data Commons. -* DATAMONGO-954 - Add support for System Variables in Aggregations. -* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString. -* DATAMONGO-952 - @Query annotation does not work with only field restrictions. -* DATAMONGO-950 - Add support for limiting the query result in the query derivation mechanism. -* DATAMONGO-949 - CyclicPropertyReferenceException in versions 1.5.0 + for MongoDB. -* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject. -* DATAMONGO-944 - Add support $currentDate to Update. -* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce. -* DATAMONGO-937 - Add support for creating text index. -* DATAMONGO-850 - Add support for text search using $text. -* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0. - - -Changes in version 1.4.3.RELEASE (2014-06-18) ---------------------------------------------- -* DATAMONGO-955 - Release 1.4.3. -* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString. -* DATAMONGO-952 - @Query annotation does not work with only field restrictions. -* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject. -* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce. -* DATAMONGO-924 - Aggregation not working with as() method in project() pipeline operator. -* DATAMONGO-920 - Fix debug messages for delete events in AbstractMongoEventListener. -* DATAMONGO-917 - DefaultDbRefResolver throws NPE when bundled into an uberjar. -* DATAMONGO-914 - Improve resolving of LazyLoading proxies for classes that override equals/hashcode. -* DATAMONGO-913 - Can't query using lazy DBRef objects. -* DATAMONGO-912 - Aggregation#project followed by Aggregation#match with custom converter causes IllegalArgumentException. -* DATAMONGO-898 - MapReduce seems not to work when javascript not being escaped. -* DATAMONGO-847 - Allow usage of Criteria within Update. -* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0. -* DATAMONGO-647 - Using "OrderBy" in "query by method name" ignores the @Field annotation for field alias. - - -Changes in version 1.5.0.RELEASE (2014-05-20) ---------------------------------------------- -* DATAMONGO-936 - Release 1.5 GA. -* DATAMONGO-929 - Index key should be the properties dot path when creating index using @Indexed / @CompoundIndex. -* DATAMONGO-928 - Error when using field-naming-strategy-ref. -* DATAMONGO-926 - Stack Overflow Error with 1.5.0.RC1 Release. -* DATAMONGO-925 - MappingMongoConverterParser is incorrectly rejecting field-naming-strategy-ref XML configuration. -* DATAMONGO-647 - Using "OrderBy" in "query by method name" ignores the @Field annotation for field alias. -* DATAMONGO-367 - @Indexed field in embedded Object creates new collection. 
-
-
-Changes in version 1.5.0.RC1 (2014-05-02)
----------------------------------------- 
-* DATAMONGO-924 - Aggregation not working with as() method in project() pipeline operator.
-* DATAMONGO-921 - Upgrade to MongoDB Java driver 2.12.1.
-* DATAMONGO-920 - Fix debug messages for delete events in AbstractMongoEventListener.
-* DATAMONGO-919 - Release 1.5 RC1.
-* DATAMONGO-917 - DefaultDbRefResolver throws NPE when bundled into an uberjar.
-* DATAMONGO-914 - Improve resolving of LazyLoading proxies for classes that override equals/hashcode.
-* DATAMONGO-913 - Can't query using lazy DBRef objects.
-* DATAMONGO-912 - Aggregation#project followed by Aggregation#match with custom converter causes IllegalArgumentException.
-* DATAMONGO-910 - Upgrade to latest MongoDB Java driver (2.12).
-* DATAMONGO-909 - @CompoundIndex on inherited entity classes.
-* DATAMONGO-908 - Nested field references in group operations broken.
-* DATAMONGO-907 - Assert compatibility with mongodb 2.6.0.
-* DATAMONGO-905 - Remove obsolete CGLib dependency from cross store module.
-* DATAMONGO-901 - MongoRepositoryConfigurationExtension fails to invoke super method.
-* DATAMONGO-899 - Overhaul automatic index creation.
-* DATAMONGO-898 - MapReduce seems not to work when javascript not being escaped.
-* DATAMONGO-897 - FindAndUpdate broken when using @DbRef and interface as target.
-* DATAMONGO-896 - Assert compatibility with latest MongoDB Java driver.
-* DATAMONGO-895 - Use most specific type for checks against values in DBObjects.
-* DATAMONGO-893 - Mapping Converter does not remove "_class" property on collection of embedded objects.
-* DATAMONGO-892 - MappingMongoConverter can't be configured as nested bean definition.
-* DATAMONGO-888 - Mapping is not applied to SortObject during queries.
-* DATAMONGO-866 - Add new field naming strategy and make it configurable through XML/Java config.
-* DATAMONGO-847 - Allow usage of Criteria within Update.
-* DATAMONGO-827 - @Indexed and @CompoundIndex cannot be created without giving index name.
-
-
-Changes in version 1.4.2.RELEASE (2014-04-15)
--------------------------------------------- 
-** Fix
- * [DATAMONGO-880] - Improved handling of persistence of lazy-loaded DBRefs.
- * [DATAMONGO-884] - Improved handling for Object methods in LazyLoadingInterceptor.
- * [DATAMONGO-887] - Added unit tests to verify TreeMaps can be converted.
- * [DATAMONGO-888] - Sorting now considers mapping information.
- * [DATAMONGO-890] - Fixed Point.toString().
- * [DATAMONGO-892] - Reject nested MappingMongoConverter declarations in XML.
- * [DATAMONGO-893] - Converter must not write "_class" information for known types.
- * [DATAMONGO-897] - Fixed potential NullPointerException in QueryMapper.
- * [DATAMONGO-908] - Support for nested field references in group operations.
-
-** Improvement
- * [DATAMONGO-881] - Allow custom conversions to override default conversions.
-
-** Task
- * [DATAMONGO-895] - Use most specific type for checks against values in DBObjects.
- * [DATAMONGO-896] - Assert compatibility with latest MongoDB Java driver.
- * [DATAMONGO-905] - Removed obsolete dependency to CGLib from cross-store support.
- * [DATAMONGO-907] - Assert compatibility with mongodb 2.6.
- * [DATAMONGO-911] - Release 1.4.2
-
-Changes in version 1.5.0.M1 (2014-03-31)
---------------------------------------- 
-** Fix
- * [DATAMONGO-471] - Update operation $addToSet does not support adding a list with $each.
- * [DATAMONGO-773] - Spring Data MongoDB projection search on @DBref fields.
- * [DATAMONGO-821] - MappingException for $size queries on subcollections containing dbrefs. - * [DATAMONGO-829] - NearQuery, when used in conjunction with a Query, it sets num=0, unless Query specifies otherwise. - * [DATAMONGO-833] - EnumSet is not handled correctly. - * [DATAMONGO-843] - Unable to use @EnableMongoAuditing annotation in Java config. - * [DATAMONGO-862] - Update Array Field Using Positional Operator ($) Does Not Work. - * [DATAMONGO-863] - QueryMapper.getMappedValue Fails To Handle Arrays Mapped To $in. - * [DATAMONGO-868] - findAndModify method does not increment @Version field. - * [DATAMONGO-871] - Declarative query method with array return type causes NPE. - * [DATAMONGO-877] - AbstractMongoConfiguration.getMappingBasePackage() throws NullPointerException if config class resides in default package. - * [DATAMONGO-880] - Error when trying to persist an object containing a DBRef which was lazy loaded. - * [DATAMONGO-884] - Potential NullPointerException for lazy DBRefs. - * [DATAMONGO-887] - Repository not instantiated when entity contains field of type TreeMap. - * [DATAMONGO-890] - Point class toString method is confusing. - -** Improvement - * [DATAMONGO-809] - Make filename optional in GridFsOperations doc and GridFsTemplate implementation. - * [DATAMONGO-858] - Add support for common geospatial structures. - * [DATAMONGO-865] - Adjust test dependencies to avoid ClassNotFoundException during test runs. - * [DATAMONGO-881] - Cannot override default converters in CustomConversions. - * [DATAMONGO-882] - Adapt to removal of obsolete generics in BeanWrapper. - -** New Feature - * [DATAMONGO-566] - Provide support for removeBy… / deleteBy… methods like for findBy… on repository interfaces. - * [DATAMONGO-870] - Add support for sliced query method execution. - -** Task - * [DATAMONGO-876] - Adapt to changes introduced for property access configuration. - * [DATAMONGO-883] - Update auditing configuration to enable auditing annotations on accessors. - * [DATAMONGO-859] - Release 1.5 M1. - -Changes in version 1.4.1.RELEASE (2014-03-13) ---------------------------------------------- -** Fix - * [DATAMONGO-773] - Verify that @DBRef fields can be included in query. - * [DATAMONGO-821] - Fixed handling of keyword expressions for DBRefs. - * [DATAMONGO-829] - NearQuery should not default 'num' to zero. - * [DATAMONGO-833] - Support for EnumSet and EnumMap in MappingMongoConverter. - * [DATAMONGO-843] - Back-port of defaulting of the MappingContext for auditing. - * [DATAMONGO-862] - Fixed handling of unmapped paths for updates. - * [DATAMONGO-863] - UpdateMapper doesn't convert raw DBObjects anymore. - * [DATAMONGO-868] - MongoTemplate.findAndModify(…) increases version if not handled manually. - * [DATAMONGO-871] - Add support for arrays as query method return types. - * [DATAMONGO-877] - Added guard against null-package in AbstractMappingConfiguration. - -** Improvement - * [DATAMONGO-865] - Adjust test dependencies to avoid ClassNotFoundException during test runs. - -Changes in version 1.3.5.RELEASE (2014-03-10) ---------------------------------------------- -** Fix - * [DATAMONGO-829] - NearQuery, when used in conjunction with a Query, no longer sets num=0, unless Query specifies otherwise. - * [DATAMONGO-871] - Repository queries support array return type. - -** Improvement - * [DATAMONGO-865] - Avoid ClassNotFoundException during test runs. 
-
-Changes in version 1.4.0.RELEASE (2014-02-24)
--------------------------------------------- 
-
-** Fix
- * [DATAMONGO-354] - MongoTemplate should support multiple $pushAll in one update.
- * [DATAMONGO-404] - Removing a DBRef using pull does not work.
- * [DATAMONGO-410] - Update with pushAll should recognize defined Converter.
- * [DATAMONGO-812] - $pushAll is deprecated since mongodb 2.4, move to $push $each.
- * [DATAMONGO-830] - Fix NPE during cache warmup in CustomConversions.
- * [DATAMONGO-838] - Support for referring to expression based field in group operation.
- * [DATAMONGO-840] - Support for nested MongoDB field references in SpEL expressions within Projections.
- * [DATAMONGO-842] - Fix documentation error in GRIDFS section.
- * [DATAMONGO-852] - Increase version for update should traverse DBObject correctly in order to find version property.
-
-** Improvement
- * [DATAMONGO-468] - Simplification for updates of DBRef fields with mongoTemplate.
- * [DATAMONGO-849] - Documentation on github should not reference invalid class.
-
-** Task
- * [DATAMONGO-848] - Ensure compatibility with Mongo Java driver 2.12.
- * [DATAMONGO-853] - Update no longer allows null keys.
- * [DATAMONGO-856] - Update documentation.
-
-Changes in version 1.3.4.RELEASE (2014-02-17)
--------------------------------------------- 
-** Bug
- * [DATAMONGO-407] - Collection with generics losing element type after $set update
- * [DATAMONGO-410] - Update with pushAll doesn't recognize defined Converter
- * [DATAMONGO-686] - ClassCastException while reusing Query object
- * [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
- * [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
- * [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
- * [DATAMONGO-811] - updateFirst methods do not increment @Version field
- * [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
- * [DATAMONGO-828] - UpdateFirst throws OptimisticLockingFailureException when updating document that does not exist
- * [DATAMONGO-830] - NPE during cache warmup in CustomConversions
- * [DATAMONGO-842] - Documentation error in GRIDFS section
-** Improvement
- * [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if don't find file
-** Task
- * [DATAMONGO-824] - Add contribution guidelines
- * [DATAMONGO-846] - Release 1.3.4
-
-Changes in version 1.4.0.RC1 (2014-01-29)
----------------------------------------- 
-
-** Bug
- * [DATAMONGO-407] - Collection with generics losing element type after $set update
- * [DATAMONGO-686] - ClassCastException while reusing Query object
- * [DATAMONGO-726] - References to non existing classes in namespace XSD
- * [DATAMONGO-804] - EnableMongoRepositories repositoryImplementationPostfix() default is empty String instead of "Impl"
- * [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
- * [DATAMONGO-806] - Spring Data MongoDB - Aggregation Framework - No property _id found for type com.entity.User
- * [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
- * [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
- * [DATAMONGO-811] - updateFirst methods do not increment @Version field
- * [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
-** Improvement - * [DATAMONGO-778] - Create geospatial index of type other than 2d with @GeoSpatialIndexed - * [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types - * [DATAMONGO-787] - Guard against SpEL issue in Spring 3.2.4 - * [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x - * [DATAMONGO-802] - Change AbstractMongoConfiguration.mongoDbFactory() to return MongoDbFactory - * [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if don't find file - * [DATAMONGO-822] - Add support for eager CDI repository instantiation - * [DATAMONGO-823] - Add bucket attribute to - * [DATAMONGO-837] - Upgrade MongoDB Java driver to 2.11.4 -** Task - * [DATAMONGO-790] - Ensure compatibility with Spring Framework 4.0 - * [DATAMONGO-824] - Add contribution guidelines - * [DATAMONGO-826] - Release Spring Data MongoDB 1.4.0.RC1 - * [DATAMONGO-835] - Code cleanups - -Changes in version 1.3.3.RELEASE (2013-12-11) ---------------------------------------------- -** Bug - * [DATAMONGO-726] - Fixed classname references in namespace XSDs. - * [DATAMONGO-788] - Projection operations do not render synthetic fields properly. - * [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour - * [DATAMONGO-804] - Fix default annotation attribute value for repositoryImplementationPostfix(). - * [DATAMONGO-806] - Fixed invalid rendering of id field references. - * [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor - -** Improvement - * [DATAMONGO-791] - Added newAggregation(…) overloads to accept a List. - * [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x - * [DATAMONGO-800] - Improved AuditingIntegrationTests. -** Task - * [DATAMONGO-810] - Release 1.3.3 - -Changes in version 1.4.0.M1 (2013-11-19) ---------------------------------------------- -** Bug - * [DATAMONGO-534] - The GridFs query execution does not return sorted resources, when the sorting fields are defined in the query definition - * [DATAMONGO-630] - Add support of $setOnInsert modifier for upsert - * [DATAMONGO-746] - IndexInfo cannot be read for indices created via mongo shell - * [DATAMONGO-752] - QueryMapper prevents searching for values that start with a $ [dollarsign] - * [DATAMONGO-753] - Add support for nested field references in group operations - * [DATAMONGO-758] - Reject excludes other than _id in projection operations - * [DATAMONGO-759] - Render group operation without non synthetic fields correctly. - * [DATAMONGO-761] - ClassCastException in SpringDataMongodbSerializer.getKeyForPath - * [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor - * [DATAMONGO-788] - Projection operations do not render synthetic fields properly. - * [DATAMONGO-789] - Support login via different (e.g. admin) authentication database - * [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour -** Improvement - * [DATAMONGO-757] - Projections should follow mongodb conventions more precisely. - * [DATAMONGO-764] - Add support for SSL connections to Mongo - * [DATAMONGO-766] - Allow nested field references on properties through e.g. 
@Field("a.b") - * [DATAMONGO-769] - Support arithmetic operators for properties - * [DATAMONGO-770] - Repository - findByIgnoreCase doesnt work - * [DATAMONGO-771] - Saving raw JSON through MongoTemplate.insert(…) fails - * [DATAMONGO-774] - Support SpEL expressions to define arithmetical projection operations in the aggregation framework - * [DATAMONGO-776] - TypeBasedAggregationOperationContext should use MappingContext.getPersistentPropertyPath(String, Class) - * [DATAMONGO-780] - Add support for nested repositories - * [DATAMONGO-782] - Typo in reference documentation - * [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types - * [DATAMONGO-787] - Upgrade to Spring 3.2.4 - * [DATAMONGO-791] - make newAggregation() method to accept list - * [DATAMONGO-793] - Adapt to changes in Spring Data Commons triggered by repository initialization changes - * [DATAMONGO-800] - AuditingIntegrationTests fail on fast machines -** New Feature - * [DATAMONGO-348] - Lazy Load for DbRef - * [DATAMONGO-653] - Support for index operations in GridFsOperations - * [DATAMONGO-760] - Add support for custom findAll Queries - * [DATAMONGO-792] - Add support to configure Auditing via JavaConfig. -** Task - * [DATAMONGO-777] - Upgrade to Mongo Java Driver in 2.11 - -Changes in version 1.3.2.RELEASE (2013-10-25) ---------------------------------------------- -** Bug - * [DATAMONGO-746] IndexInfo cannot be read for indices created via mongo shell - * [DATAMONGO-752] QueryMapper prevents searching for values that start with a $ [dollarsign] - * [DATAMONGO-753] Add support for nested field references in group operations - * [DATAMONGO-758] Reject excludes other than _id in projection operations - * [DATAMONGO-759] Render group operation without non synthetic fields correctly. - * [DATAMONGO-761] ClassCastException in SpringDataMongodbSerializer.getKeyForPath - * [DATAMONGO-768] Improve documentation of how to use @PersistenceConstructor - -** Improvement - * [DATAMONGO-757] - Projections should follow mongodb conventions more precisely. - * [DATAMONGO-769] - Support arithmetic operators for properties - * [DATAMONGO-771] - Saving raw JSON through MongoTemplate.insert(…) fails -** Task - * [DATAMONGO-772] - Release 1.3.2 - -Changes in version 1.3.1.RELEASE (2013-09-09) ---------------------------------------------- -** Task - * [DATAMONGO-751] Upgraded to Spring Data Commons 1.6.1. - -Changes in version 1.3.0.RELEASE (2013-09-09) ---------------------------------------------- -** Bug - * [DATAMONGO-540] MongoTemplate upsert and findOne handle id queries differently. - * [DATAMONGO-445] GeoNear Query Doesn't Work with Pageable. - * [DATAMONGO-507] Criteria not() is not working. - * [DATAMONGO-602] Querying with $in operator on the id field of type BigInteger returns zero results. - -** Improvement - * [DATAMONGO-725] Improve configurability and documentation of TypeMapper on MappingMongoConverter. - * [DATAMONGO-738] Add methods to MongoTemplate and MongoOperations to allow calling class to pass both the entityClass and the collectionName for the update and upsert methods. - * [DATAMONGO-737] Extra MongoSynchronizations cause TransactionSynchronizationManager to throw IllegalStateException on transaction complete. - * [DATAMONGO-743] Support returning raw json from a query. - -** Task - * [DATAMONGO-742] Document CDI integration in reference documentation. 
-
-Changes in version 1.3.0.RC1 (2013-08-05)
---------------------------------------- 
-** Bug
- * [DATAMONGO-392] - Updating an object does not write type information for objects to be updated
- * [DATAMONGO-685] - JMX ServerInfo bean may return wrong info
- * [DATAMONGO-688] - There is no precedence between @Id annotation and field named "id" - both are attempted to be used
- * [DATAMONGO-693] - MongoFactoryBean should create a mongo instance with host/port if replicaset is null or empty
- * [DATAMONGO-702] - Spring Data MongoDB projection search, is not properly configured with respective Java Pojo
- * [DATAMONGO-704] - Remove references to SimpleMongoConverter from JavaDoc.
- * [DATAMONGO-705] - QueryMapper doesn't handle exists query with DBRef field
- * [DATAMONGO-706] - QueryMapper does not transform DBRefs in nested keywords correctly
- * [DATAMONGO-709] - Polymorphic query on documents in same collection
- * [DATAMONGO-717] - Application context is not properly distributed to persistent entities
- * [DATAMONGO-721] - Polymorphic attribute type not persisted on update operations
-
-** Improvement
- * [DATAMONGO-701] - Improve performance of indexed starts-with queries
- * [DATAMONGO-713] - Typos in readme.md
-
-** New Feature
- * [DATAMONGO-544] - Support for TTL collection via Indexed annotation
- * [DATAMONGO-586] - Add support for new Aggregation Framework
-
-** Task
- * [DATAMONGO-714] - Add latest formatter to project sources
- * [DATAMONGO-723] - Clean up test cases
- * [DATAMONGO-728] - Add missing package-info.java files
- * [DATAMONGO-731] - Adapt refactorings in Spring Data Commons
- * [DATAMONGO-732] - Release 1.3 RC1
-
-
-Changes in version 1.2.3.GA (2013-07-24)
--------------------------------------- 
-** Task
- * [DATAMONGO-728] - Add missing package-info.java files
- * [DATAMONGO-729] - Release 1.2.3.
-
-
-Changes in version 1.2.2.GA (2013-07-19)
--------------------------------------- 
-** Bug
- * [DATAMONGO-663] - org.springframework.data.mongodb.core.query.Field needs an equals method
- * [DATAMONGO-677] - QueryMapper does not correctly handle Map with DBRef value
- * [DATAMONGO-679] - MongoTemplate.doSave(…) passed a JSON String doesn't save it.
- * [DATAMONGO-683] - QueryMapper does not handle default _id when no MappingMetadata is present
- * [DATAMONGO-685] - JMX ServerInfo bean may return wrong info
- * [DATAMONGO-693] - MongoFactoryBean should create a mongo instance with host/port if replicaset is null or empty
- * [DATAMONGO-704] - Remove references to SimpleMongoConverter from JavaDoc.
- * [DATAMONGO-705] - QueryMapper doesn't handle exists query with DBRef field
- * [DATAMONGO-706] - QueryMapper does not transform DBRefs in nested keywords correctly
- * [DATAMONGO-717] - Application context is not properly distributed to persistent entities
-
-** Improvement
- * [DATAMONGO-682] - Remove performance hotspots
- * [DATAMONGO-701] - Improve performance of indexed starts-with queries
-
-** Task
- * [DATAMONGO-658] - Minor formatting changes to README.md
- * [DATAMONGO-678] - Performance improvements in CustomConversions
- * [DATAMONGO-714] - Add latest formatter to project sources
- * [DATAMONGO-723] - Clean up test cases
- * [DATAMONGO-727] - Release 1.2.2
-
-
-Changes in version 1.3.0.M1 (2013-06-04)
---------------------------------------- 
-** Bug
- * [DATAMONGO-571] - Spring Data for MongoDb doesn't save null values when @Version is added to domain class
- * [DATAMONGO-612] - Fix PDF reference documentation name
- * [DATAMONGO-613] - Images missing from reference documentation
- * [DATAMONGO-617] - NullPointerException in MongoTemplate.initializeVersionProperty(…)
- * [DATAMONGO-620] - MongoTemplate.doSaveVersioned(…) does not consider collection handed into the method
- * [DATAMONGO-621] - MongoTemplate.initializeVersionProperty(…) does not use ConversionService
- * [DATAMONGO-622] - An unversioned object should be created using insert(…) instead of save.
- * [DATAMONGO-629] - Different results when using count and find with the same criteria with 'id' field
- * [DATAMONGO-638] - MappingContext should not create PersistentEntity instances for native maps
- * [DATAMONGO-640] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance
- * [DATAMONGO-641] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance
- * [DATAMONGO-642] - MongoChangeSetPersister does not use mapped collection name
- * [DATAMONGO-646] - Can't insert DBObjects through MongoTemplate
- * [DATAMONGO-648] - ID attributes in namespace shouldn't be XSD IDs
- * [DATAMONGO-663] - org.springframework.data.mongodb.core.query.Field needs an equals method
- * [DATAMONGO-669] - Incompatibility with Querydsl 3.1.1
- * [DATAMONGO-676] - SimpleMongoRepository fails if used with customized collection name
- * [DATAMONGO-677] - QueryMapper does not correctly handle Map with DBRef value
- * [DATAMONGO-679] - MongoTemplate.doSave(…) passed a JSON String doesn't save it.
- * [DATAMONGO-683] - QueryMapper does not handle default _id when no MappingMetadata is present - -** Improvement - * [DATAMONGO-140] - Add XML namespace element for MongoTemplate - * [DATAMONGO-545] - Add before delete and after delete events for AbstractMongoEventListener - * [DATAMONGO-554] - Add background attribute to @Indexed and @CompoundIndex - * [DATAMONGO-569] - AbstractMongoConfiguration cannot be used on CloudFoundry - * [DATAMONGO-594] - cross-store=> Define document name using annotation - * [DATAMONGO-631] - Explicitly prevent an Order instance set to ignore case from being piped into a query - * [DATAMONGO-632] - Polish namespace XSD to avoid errors in STS - * [DATAMONGO-633] - Upgrade to Querydsl 3.0.0 - * [DATAMONGO-634] - Inherit application scope from basic CDI bean of Spring Data Commons - * [DATAMONGO-635] - Fix some Sonar warnings - * [DATAMONGO-636] - Add support for countBy projections - * [DATAMONGO-637] - Typo in Query.query(…) - * [DATAMONGO-651] - WriteResult not available from thrown Exception - * [DATAMONGO-652] - Add support for elemMatch and positional operator projections - * [DATAMONGO-656] - Potential NullPointerException when debugging in MongoTemplate - * [DATAMONGO-657] - Allow to write Map value as DBRef - * [DATAMONGO-666] - Fix architecture inconsistency created by MongoDataIntegrityViolationException - * [DATAMONGO-680] - SimpleMongoRepository.exists(ID) improvement - * [DATAMONGO-681] - Expose MongoTemplate.exists() method - * [DATAMONGO-682] - Remove performance hotspots - -** New Feature - * [DATAMONGO-607] - Add an abbreviating field naming strategy - * [DATAMONGO-628] - Add XML namespace elements for MongoTemplate and GridFsTemplate - -** Task - * [DATAMONGO-597] - Website is severely out-of-date - * [DATAMONGO-658] - Minor formatting changes to README.md - * [DATAMONGO-667] - Remove deprecations and further deprecate sorting/ordering types - * [DATAMONGO-672] - Upgrade to latest Spring Data Build and Commons - * [DATAMONGO-678] - Performance improvements in CustomConversions - * [DATAMONGO-690] - Release 1.3 M1 - -Changes in version 1.2.1.GA (2013-04-17) ----------------------------------------- -** Bug - * [DATAMONGO-571] - Spring Data for MongoDb doesn't save null values when @Version is added to domain class - * [DATAMONGO-612] - Fix PDF reference documentation name - * [DATAMONGO-613] - Images missing from reference documentation - * [DATAMONGO-617] - NullPointerException in MongoTemplate.initializeVersionProperty(…) - * [DATAMONGO-620] - MongoTemplate.doSaveVersioned(…) does not consider collection handed into the method - * [DATAMONGO-621] - MongoTemplate.initializeVersionProperty(…) does not use ConversionService - * [DATAMONGO-622] - An unversioned object should be created using insert(…) instead of save. 
- * [DATAMONGO-629] - Different results when using count and find with the same criteria with 'id' field
- * [DATAMONGO-638] - MappingContext should not create PersistentEntity instances for native maps
- * [DATAMONGO-640] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance
- * [DATAMONGO-641] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance
- * [DATAMONGO-642] - MongoChangeSetPersister does not use mapped collection name
- * [DATAMONGO-646] - Can't insert DBObjects through MongoTemplate
- * [DATAMONGO-648] - ID attributes in namespace shouldn't be XSD IDs
-
-** Improvement
- * [DATAMONGO-594] - cross-store=> Define document name using annotation
- * [DATAMONGO-632] - Polish namespace XSD to avoid errors in STS
- * [DATAMONGO-635] - Fix some Sonar warnings
- * [DATAMONGO-637] - Typo in Query.query(…)
- * [DATAMONGO-651] - WriteResult not available from thrown Exception
- * [DATAMONGO-656] - Potential NullPointerException when debugging in MongoTemplate
-
-** Task
- * [DATAMONGO-597] - Website is severely out-of-date
- * [DATAMONGO-654] - Release 1.2.1
-
-Changes in version 1.2.0.GA (2013-02-08)
--------------------------------------- 
-** Bug
- * [DATAMONGO-378] - MapReduceResults ClassCastException due to raw results counts as Long
- * [DATAMONGO-568] - MongoTemplate.find(...) method causes Nullpointer if query parameter is null
- * [DATAMONGO-570] - Query methods on @DBRef field with the qualifier isNull throws Exception
- * [DATAMONGO-583] - Check if you are using for loop with a DBCursor
- * [DATAMONGO-585] - Exception during authentication in multithreaded access
- * [DATAMONGO-588] - MongoTemplate.insert does not initialize null versions to zero
- * [DATAMONGO-592] - Persisting Objects containing Objects with PersistenceConstructor causes MappingInstantiationException
- * [DATAMONGO-593] - Persisting objects containing primitive arrays with PersistenceConstructor causes MappingInstantiationException
- * [DATAMONGO-600] - Issues with polymorphism of nested types
- * [DATAMONGO-601] - CannotGetMongoDbConnectionException should not print password in logfile
- * [DATAMONGO-603] - Results of geo queries in repository don't get metrics of supplied distance applied
-
-** Improvement
- * [DATAMONGO-503] - GridFsTemplate is not setting the file Content-Type
- * [DATAMONGO-573] - Move to Logback for test logging
- * [DATAMONGO-580] - Polish BeanDefinitionParsers to avoid warnings in STS
- * [DATAMONGO-581] - Expose managed PersistentEntity in MongoRepositoryFactoryBean
- * [DATAMONGO-606] - Register converter for JodaTime types if present on classpath
-
-** New Feature
- * [DATAMONGO-577] - Add support for auditing
-
-** Task
- * [DATAMONGO-81] - Create unit tests for exception translation in MongoTemplate
- * [DATAMONGO-576] - Configure java.util.logging to reduce verbose test logging
- * [DATAMONGO-590] - Clean up code in MongoTemplate
- * [DATAMONGO-598] - Upgrade to new build infrastructure
- * [DATAMONGO-609] - Release 1.2.0
-
-Changes in version 1.1.2.GA (2013-02-08)
--------------------------------------- 
-** Bug
- * [DATAMONGO-562] - Cannot create entity with OptimisticLocking (@Version) and initial id
- * [DATAMONGO-568] - MongoTemplate.find(...)
method causes Nullpointer if query parameter is null
- * [DATAMONGO-570] - Query methods on @DBRef field with the qualifier isNull throws Exception
- * [DATAMONGO-578] - pom version issues in 1.1.x branch
- * [DATAMONGO-583] - Check if you are using for loop with a DBCursor
- * [DATAMONGO-585] - Exception during authentication in multithreaded access
- * [DATAMONGO-588] - MongoTemplate.insert does not initialize null versions to zero
- * [DATAMONGO-600] - Issues with polymorphism of nested types
- * [DATAMONGO-601] - CannotGetMongoDbConnectionException should not print password in logfile
-
-** Improvement
- * [DATAMONGO-573] - Move to Logback for test logging
- * [DATAMONGO-580] - Polish BeanDefinitionParsers to avoid warnings in STS
-
-** Task
- * [DATAMONGO-81] - Create unit tests for exception translation in MongoTemplate
- * [DATAMONGO-563] - Upgrade to MongoDB driver 2.9.2 as it fixes a serious regression introduced in 2.9.0
- * [DATAMONGO-576] - Configure java.util.logging to reduce verbose test logging
- * [DATAMONGO-590] - Clean up code in MongoTemplate
- * [DATAMONGO-608] - Release 1.1.2
-
-Changes in version 1.1.1.GA (2012-10-17)
--------------------------------------- 
-** Bug
- * [DATAMONGO-549] - MongoTemplate.save(…) suffers from potential NullPointerException
- * [DATAMONGO-550] - MongoTemplate.save(BasicDBObject, String) results in NPE (after upgrading to 1.1.0.RELEASE)
- * [DATAMONGO-551] - MongoTemplate.save(String, String) results in NPE (after upgrading to 1.1.0.RELEASE)
-
-** Task
- * [DATAMONGO-559] - Release 1.1.1.RELEASE
-
-
-Changes in version 1.1.0.GA (2012-10-10)
--------------------------------------- 
-** Bug
- * [DATAMONGO-523] - @TypeAlias annotation not used with AbstractMongoConfiguration
- * [DATAMONGO-527] - Criteria.equals(…) broken for complex criterias
- * [DATAMONGO-530] - MongoMappingContext.setApplicationContext(…) does not invoke superclass method
- * [DATAMONGO-531] - StackOverflowError when persisting Groovy beans
- * [DATAMONGO-532] - Multithreading authentication issue
- * [DATAMONGO-533] - Default MongoPersistentEntityIndexCreator not registered if ApplicationContext already contains one for different MappingContext
- * [DATAMONGO-535] - Retrieve of existing Mongo DB from Transaction is not working
- * [DATAMONGO-539] - Document remove doesn't work when giving collection name as a parameter
-
-** Improvement
- * [DATAMONGO-279] - Optimistic locking using @Version field
- * [DATAMONGO-456] - XSD incorrectly states the default value for the mongo-ref attribute of the mongo:db-factory configuration element
- * [DATAMONGO-457] - broken links "Spring Data MongoDB - Reference Documentation"
- * [DATAMONGO-526] - Polish README.md
- * [DATAMONGO-529] - Improve Querydsl setup
- * [DATAMONGO-538] - Unify usage of Sort APIs in Query API
-
-** New Feature
- * [DATAMONGO-389] - stable release spring-data-mongodb should work with stable spring spring-data-jpa
-
-** Task
- * [DATAMONGO-484] - Migrate to latest MongoDB Java driver
- * [DATAMONGO-528] - Document GridFS support
- * [DATAMONGO-536] - Fix package cycle introduced by SerializationUtils
- * [DATAMONGO-541] - Release 1.1 GA
- * [DATAMONGO-543] - Polish reference documentation
- * [DATAMONGO-548] - Upgrade to Querydsl 2.8.0
-
-
-Changes in version 1.1.0.RC1 (2012-08-24)
---------------------------------------- 
-** Bug
- * [DATAMONGO-493] - Criteria.ne() method converts all value into ObjectId
- * [DATAMONGO-494] - $or/$nor expressions do not consider entity class mapping
- * [DATAMONGO-495] - JSON
- * [DATAMONGO-497] - Reading an empty List throws a MappingInstantiationException because it returns a HashSet instead of returning an ArrayList
- * [DATAMONGO-505] - Conversion of associations doesn't work for collection values
- * [DATAMONGO-508] - DBRef can accidentally get added as PersistentProperty
- * [DATAMONGO-517] - QueryMapping incorrectly translates complex keywords
-
-** Improvement
- * [DATAMONGO-496] - AbstractMongoConfiguration.getMappingBasePackage() could default to config class' package
- * [DATAMONGO-499] - Namespace XSDs of current release version should refer to repositories XSD in version 1.0
- * [DATAMONGO-500] - Index creation reacts on events not intended for it
- * [DATAMONGO-502] - QueryMapper should transparently translate property names to field names
- * [DATAMONGO-509] - SimpleMongoRepository.exists(…) can be improved.
- * [DATAMONGO-510] - Criteria should only use BasicDBList internally
- * [DATAMONGO-511] - QueryMapper should correctly transform associations
- * [DATAMONGO-516] - Make Spring 3.1.2.RELEASE default Spring dependency version
-
-** Task
- * [DATAMONGO-513] - Release 1.1 RC1
-
-
-Changes in version 1.0.4.RELEASE MongoDB (2012-08-24)
------------------------------------------------------
-** Bug
- * [DATAMONGO-493] - Criteria.ne() method converts all values into ObjectId
- * [DATAMONGO-494] - $or/$nor expressions do not consider entity class mapping
- * [DATAMONGO-495] - JSON can't serialize Enum when printing Query in DEBUG message
-
-** Improvement
- * [DATAMONGO-499] - Namespace XSDs of current release version should refer to repositories XSD in version 1.0
-
-** Task
- * [DATAMONGO-514] - Release 1.0.4.
-
-Changes in version 1.1.0.M2 (2012-07-24)
-----------------------------------------
-** Bug
- * [DATAMONGO-378] - MapReduceResults ClassCastException due to raw results counts as Long
- * [DATAMONGO-424] - Declaring a list of DBRef in a domain class results in Null for each DBRef when reading from mongo database
- * [DATAMONGO-425] - Binding a Date to a manually defined repository query fails
- * [DATAMONGO-428] - ClassCastException when using outputDatabase option in map-reduce
- * [DATAMONGO-446] - Pageable query methods returning List are broken
- * [DATAMONGO-447] - Removal of Documents fails in debug mode for Documents with complex ids
- * [DATAMONGO-450] - enabling DEBUG causes RuntimeException
- * [DATAMONGO-454] - ServerAddressPropertyEditor fails if a hostname is unresolvable
- * [DATAMONGO-458] - When reading back empty collections unmodifiable instances of Collections.emptyList/Set are returned.
- * [DATAMONGO-462] - findAll() fails with NPE - discovering the root cause
- * [DATAMONGO-465] - Mongo inserts document with "_id" as an integer but saves with "_id" as a string.
- * [DATAMONGO-467] - String @id field is not mapped to ObjectId when using QueryDSL ".id" path
- * [DATAMONGO-469] - Query creation from method names using AND criteria does not work anymore
- * [DATAMONGO-474] - Wrong property is used for Id mapping
- * [DATAMONGO-475] - 'group' operation fails where query references non primitive property
- * [DATAMONGO-480] - The WriteResultChecking is not used in case of insert or save of documents.
- * [DATAMONGO-483] - @Indexed(unique=true, name="foo") puts name's value to the 'key' in the MongoDB
- * [DATAMONGO-489] - ClassCastException when loading Map
-
-** Improvement
- * [DATAMONGO-448] - Remove the need for Converters for complex classes that are used as IDs
- * [DATAMONGO-455] - Document how to use raw queries using BasicQuery
- * [DATAMONGO-460] - Improve Querydsl implementation internals
- * [DATAMONGO-466] - QueryMapper shouldn't map id properties of nested classes
- * [DATAMONGO-470] - Criteria and Query should have proper equals(…) and hashCode() method.
- * [DATAMONGO-477] - Change upper bound of Google Guava package import to 13
- * [DATAMONGO-482] - typo in documentation - 2 i's in usiing
- * [DATAMONGO-486] - Polish namespace implementation
- * [DATAMONGO-491] - Release 1.1.0.M2
-
-** New Feature
- * [DATAMONGO-476] - JavaConfig support for Mongo repositories
-
-** Task
- * [DATAMONGO-451] - Tweak pom.xml to let Sonar build run without Bundlor
- * [DATAMONGO-490] - Fix minor typos
-
-
-Changes in version 1.0.3.RELEASE (2012-07-24)
---------------------------------------------
-** Bug
- * [DATAMONGO-467] - String @id field is not mapped to ObjectId when using QueryDSL ".id" path
- * [DATAMONGO-469] - Query creation from method names using AND criteria does not work anymore
- * [DATAMONGO-474] - Wrong property is used for Id mapping
- * [DATAMONGO-475] - 'group' operation fails where query references non primitive property
- * [DATAMONGO-480] - The WriteResultChecking is not used in case of insert or save of documents.
- * [DATAMONGO-483] - @Indexed(unique=true, name="foo") puts name's value to the 'key' in the MongoDB
- * [DATAMONGO-489] - ClassCastException when loading Map
-
-** Improvement
- * [DATAMONGO-466] - QueryMapper shouldn't map id properties of nested classes
- * [DATAMONGO-470] - Criteria and Query should have proper equals(…) and hashCode() method.
- * [DATAMONGO-482] - typo in documentation - 2 i's in usiing
-
-** Task
- * [DATAMONGO-492] - Release 1.0.3
-
-
-Changes in version 1.0.2.RELEASE (2012-06-20)
---------------------------------------------
-** Bug
- * [DATAMONGO-360] - java.lang.ClassCastException when placing GeospatialIndex into IndexOperations and invoking IndexOperations.getIndexInfo()
- * [DATAMONGO-366] - Chapter 3.2. points to wrong bugtracker
- * [DATAMONGO-378] - MapReduceResults ClassCastException due to raw results counts as Long
- * [DATAMONGO-382] - ClassCastException: "com.mongodb.BasicDBObject cannot be cast to com.mongodb.BasicDBList" during find()
- * [DATAMONGO-411] - Potential ClassCastExceptions in MongoPersistentEntityIndexCreator
- * [DATAMONGO-412] - getUserCredentials() is called twice in AbstractMongoConfiguration::mongoDbFactory()
- * [DATAMONGO-413] - Using "Or" in repository query yields a ClassCastException
- * [DATAMONGO-422] - UUIDToBinaryConverter not compatible with mongo java driver
- * [DATAMONGO-423] - Criteria.regex should use java.util.Pattern instead of $regex
- * [DATAMONGO-425] - Binding a Date to a manually defined repository query fails
- * [DATAMONGO-428] - ClassCastException when using outputDatabase option in map-reduce
- * [DATAMONGO-429] - using @Query annotation, arrays are translated somewhere between query creation and mongo interpretation
- * [DATAMONGO-446] - Pageable query methods returning List are broken
- * [DATAMONGO-447] - Removal of Documents fails in debug mode for Documents with complex ids
- * [DATAMONGO-450] - enabling DEBUG causes RuntimeException
- * [DATAMONGO-454] - ServerAddressPropertyEditor fails if a hostname is unresolvable
- * [DATAMONGO-461] - MappedConstructor potentially throws NullPointerException
- * [DATAMONGO-462] - findAll() fails with NPE - discovering the root cause
-
-** Improvement
- * [DATAMONGO-448] - Remove the need for Converters for complex classes that are used as IDs
- * [DATAMONGO-455] - Document how to use raw queries using BasicQuery
-
-** Task
- * [DATAMONGO-463] - Release 1.0.2
-
-
-Changes in version 1.1.0.M1 (2012-05-07)
-----------------------------------------
-
-** Bug
- * [DATAMONGO-299] - Mongodb Query Does not allow for multiple query conditionals of the same time.
- * [DATAMONGO-360] - java.lang.ClassCastException when placing GeospatialIndex into IndexOperations and invoking IndexOperations.getIndexInfo()
- * [DATAMONGO-363] - Criteria.and() cannot be chained when using Criteria.gte and Criteria.lte
- * [DATAMONGO-364] - Chaining of Criteria when including a GeoSpatial field is inconsistent.
- * [DATAMONGO-366] - Chapter 3.2. points to wrong bugtracker
- * [DATAMONGO-368] - Empty values in collections are not supported
- * [DATAMONGO-369] - Wrong query created when one value is a DBObject
- * [DATAMONGO-373] - QueryMapper is getting a ClassCastException When trying to Convert an ArrayList to a BSONList
- * [DATAMONGO-376] - Fix potential NPE in SpringDataMongodbSerializer
- * [DATAMONGO-380] - maps with mongo reserved characters for keys generate on save: java.lang.IllegalArgumentException: fields stored in the db can't have . in them
- * [DATAMONGO-387] - Executing query methods with GeoPage results doesn't work
- * [DATAMONGO-401] - StringBasedMongoQuery suffers from NullPointerException in case a null parameter gets bound to a parameter placeholder
- * [DATAMONGO-402] - Inner class not supported
- * [DATAMONGO-403] - Conflicts between MongoDB and JPA
- * [DATAMONGO-411] - Potential ClassCastExceptions in MongoPersistentEntityIndexCreator
- * [DATAMONGO-412] - getUserCredentials() is called twice in AbstractMongoConfiguration::mongoDbFactory()
- * [DATAMONGO-413] - Using "Or" in repository query yields a ClassCastException
- * [DATAMONGO-423] - Criteria.regex should use java.util.Pattern instead of $regex
-
-** Defect
- * [DATAMONGO-429] - using @Query annotation, arrays are translated somewhere between query creation and mongo interpretation
-
-** Improvement
- * [DATAMONGO-347] - Repositories and DBRef
- * [DATAMONGO-375] - Polish versions of referenced XSD schemas
- * [DATAMONGO-379] - Exception when trying to instantiate an entity having a primitive constructor argument and no corresponding document field
- * [DATAMONGO-390] - Add Converter for UUID
- * [DATAMONGO-391] - Move to SLF4J for logging
- * [DATAMONGO-397] - MongoRepositoryFactoryBean should refer to MongoOperations instead of MongoTemplate
- * [DATAMONGO-441] - Improve MongoDbUtils API
-
-** New Feature
- * [DATAMONGO-6] - Integration with GridFS features
- * [DATAMONGO-36] - Validation support to MongoTemplate
- * [DATAMONGO-356] - Provide CDI integration
- * [DATAMONGO-418] - Add support for newly introduced StartingWith, EndingWith and Containing keywords
- * [DATAMONGO-427] - Support After and Before keywords for query creation
-
-** Refactoring
- * [DATAMONGO-383] - Adapt new entity instantiation API from Spring Data Commons
- * [DATAMONGO-431] - Adapt changes in CrudRepository
-
-** Task
- * [DATAMONGO-443] - Upgrade to Querydsl 2.5.0
- * [DATAMONGO-394] - Upgrade to Querydsl 2.3.2
- * [DATAMONGO-396] - Release 1.1.0.M1.
- * [DATAMONGO-432] - Upgrade to Spring Data Commons 1.3.0.RC1
- * [DATAMONGO-439] - Add performance tests
-
-Changes in version 1.0.1.RELEASE MongoDB (2012-02-11)
-----------------------------------------------------
-
-** Bug
- * [DATAMONGO-363] - Criteria.and() cannot be chained when using Criteria.gte and Criteria.lte
- * [DATAMONGO-364] - Chaining of Criteria when including a GeoSpatial field is inconsistent.
- * [DATAMONGO-368] - Empty values in collections are not supported
- * [DATAMONGO-369] - Wrong query created when one value is a DBObject
- * [DATAMONGO-376] - Fix potential NPE in SpringDataMongodbSerializer
- * [DATAMONGO-380] - maps with mongo reserved characters for keys generate on save: java.lang.IllegalArgumentException: fields stored in the db can't have . in them
- * [DATAMONGO-387] - Executing query methods with GeoPage results doesn't work
- * [DATAMONGO-401] - StringBasedMongoQuery suffers from NullPointerException in case a null parameter gets bound to a parameter placeholder
-
-** Improvement
- * [DATAMONGO-375] - Polish versions of referenced XSD schemas
- * [DATAMONGO-379] - Exception when trying to instantiate an entity having a primitive constructor argument and no corresponding document field
- * [DATAMONGO-390] - Add Converter for UUID
- * [DATAMONGO-397] - MongoRepositoryFactoryBean should refer to MongoOperations instead of MongoTemplate
-
-** Task
- * [DATAMONGO-395] - Release 1.0.1.
-
-
-Changes in version 1.0.0.RELEASE MongoDB (2011-12-22)
----------------------------------------------------- 
-
-** Bug
- * [DATAMONGO-260] - MapReduce fails when used with Long as key-type.
- * [DATAMONGO-319] - WriteConcern not parsed correctly in namespace handlers
- * [DATAMONGO-336] - MongoDB GeoNear returning null pointer exception when giving data more precision than test data
- * [DATAMONGO-343] - ServerAddressPropertyEditor disables default Spring conversion
- * [DATAMONGO-346] - MongoTemplate.remove(Object arg) not working
- * [DATAMONGO-349] - remove doesn't work in RC1 for mongo db
-
-** Improvement
- * [DATAMONGO-139] - Startup behavior should be that MongoTemplate does not eagerly try to connect to MongoDB
- * [DATAMONGO-296] - Add hook to use MongoConverter for Querydsl argument handling
- * [DATAMONGO-326] - Enums can't be used in Criteria
- * [DATAMONGO-341] - Tighten implementation of MongoTemplate's geoNear(...) methods
-
-** Task
- * [DATAMONGO-81] - Create unit tests for exception translation in MongoTemplate
- * [DATAMONGO-93] - Create integration tests for authentication
- * [DATAMONGO-257] - Document TypeMapper abstraction to control how type information is stored and retrieved from documents
- * [DATAMONGO-330] - Document classpath scanning for Converters
- * [DATAMONGO-350] - Upgrade to latest Querydsl
- * [DATAMONGO-355] - Upgrade to Spring 3.0.7
- * [DATAMONGO-357] - Release 1.0 GA
-
-
-Changes in version 1.0.0.RC1 MongoDB (2011-12-06)
-------------------------------------------------
-
-** Bug
- * [DATAMONGO-199] - Synchronisation during performance tests
- * [DATAMONGO-298] - Spring custom converters do not work for subclasses of java.lang.Number
- * [DATAMONGO-306] - NullPointerException if mongo factory created via URI without credentials
- * [DATAMONGO-309] - POJO containing a List of Maps not persisting properly
- * [DATAMONGO-312] - Cannot retrieve persisted Enum implementing an abstract method
- * [DATAMONGO-315] - MongoTemplate.findOne(query) methods ignore SortOrder on query
- * [DATAMONGO-316] - Replica Set configuration via properties file throws ArrayIndexOutOfBoundsException
- * [DATAMONGO-318] - Distinguishing write errors and writes with zero documents affected
- * [DATAMONGO-321] - An ID field of type integer is always saved as zero if not set by the user before calling save. Throw exception to indicate an int field will not be autopopulated.
- * [DATAMONGO-322] - Throw exception in a save operation if the POJO's ID field is null and field type is not String, BigInteger or ObjectId.
- * [DATAMONGO-325] - MongoTemplate fails to correctly report a js file not found on classpath while calling mapReduce
- * [DATAMONGO-328] - Fix the import statement in mongodb manifest
- * [DATAMONGO-329] - Map value not converted correctly
- * [DATAMONGO-333] - AbstractMongoEventListener throws NullPointerException if used without generic parameter
-
-** Improvement
- * [DATAMONGO-26] - Investigate performance of POJO serialization.
- * [DATAMONGO-174] - Add additional constructor to MongoTemplate that takes com.mongodb.Mongo, database name, user credentials and MongoConverter.
- * [DATAMONGO-208] - Add support for group() operation on collection in MongoOperations
- * [DATAMONGO-213] - Provide additional options for setting WriteConcern on a per operation basis
- * [DATAMONGO-234] - MongoTemplate should support the findAndModify operation to update version fields
- * [DATAMONGO-292] - Several mongo for different database names
- * [DATAMONGO-301] - Allow converters to be included through scanning
- * [DATAMONGO-305] - Remove synchronized(this) from sort() and fields() methods in the Query class
- * [DATAMONGO-310] - Allow Collections as parameters in @Query
- * [DATAMONGO-320] - Remove use of slaveOk boolean option in MongoTemplate as it is deprecated. Replace with ReadPreference
- * [DATAMONGO-323] - Using @Query and a Sort parameter on the same method should produce sorted results
- * [DATAMONGO-324] - Support for JSON in mongo template
- * [DATAMONGO-337] - The "nin" and "all" methods on Criteria should take a collection like the "in" method.
- * [DATAMONGO-338] - Add query derivation implementations for newly introduced Regex, Exists, True and False keywords
-
-** New Feature
- * [DATAMONGO-185] - Add hint to Query
- * [DATAMONGO-251] - Support getting index information on a collection or mapped class.
- * [DATAMONGO-308] - Add support for upsert methods
-
-** Refactoring
- * [DATAMONGO-304] - Change package name for Class MongoLog4jAppender
- * [DATAMONGO-313] - Use MongoOperations interface instead of MongoTemplate class
-
-** Task
- * [DATAMONGO-195] - Add description of @Field mapping annotation to reference docs
- * [DATAMONGO-262] - Ensure Cloud Foundry Runtime works with RC1
- * [DATAMONGO-263] - Ensure Cloud Foundry Examples work with RC1
- * [DATAMONGO-311] - Update MongoDB driver to v 2.7.x
- * [DATAMONGO-332] - Update reference documentation to list correct necessary dependencies
- * [DATAMONGO-334] - Use repository URLs pointing to Artifactory
- * [DATAMONGO-335] - Create hybrid Spring 3.0.6 / 3.1 build
-
-
-Changes in version 1.0.0.M5 MongoDB (2011-10-24)
------------------------------------------------
-
-** Bug
- * [DATAMONGO-259] - Maps inside collections are not written correctly
- * [DATAMONGO-268] - CustomConversions is too liberal in registering "simple types" (asymmetric conversion)
- * [DATAMONGO-269] - XML configuration for replica sets is not working
- * [DATAMONGO-275] - DBRef fields and collections are returning nulls
- * [DATAMONGO-281] - Improve handling of blank username and password when using property placeholders like ${mongo.username}
- * [DATAMONGO-282] - Cannot create a "range" query
- * [DATAMONGO-284] - Execution of Querydsl query maps id incorrectly
- * [DATAMONGO-285] - NPE in MappingMongoConverter.writeMapInternal when saving a Map with val instance of Collection
- * [DATAMONGO-288] - querying same property multiple times produces incorrect query
- * [DATAMONGO-289] - AbstractMongoEventListener will never call onAfterLoad
- * [DATAMONGO-294] - List elements nested in Map lose their type when persisted
-
-** Improvement
- * [DATAMONGO-65] - Allow Spring EL usage in collection name attribute of @Document
- * [DATAMONGO-183] - Query count() support for pagination
- * [DATAMONGO-258] - M4 documentation states SD Commons 1.1.0.M1 required but actually needs 1.2.0.M1
- * [DATAMONGO-261] - Reference documentation for geoNear queries has no stable section id
- * [DATAMONGO-270] - Approach Sonar results to improve code quality
- * [DATAMONGO-271] - Remove 'document' from cross-store package names
- * [DATAMONGO-272] - Namespace configuration file still resides in 'document' package
- * [DATAMONGO-276] - QueryUtils should be public
- * [DATAMONGO-280] - Add maxAutoConnectRetryTime for MongoOptionsFactoryBean
- * [DATAMONGO-283] - $and support
- * [DATAMONGO-286] - MongoDB Repository no query methods for $lte and $gte
- * [DATAMONGO-291] - Path expressions in repository methods should honour mapping metadata
- * [DATAMONGO-293] - Add support for new polygon based within search in Mongo 2.0.
- * [DATAMONGO-295] - Allow MongoTemplate to be configured using MongoURI
- * [DATAMONGO-300] - Re-work the Query/Criteria to better support $and, $or and $nor queries
- * [DATAMONGO-302] - Consistently handle null values given to CrudRepository implementation
-
-** New Feature
- * [DATAMONGO-230] - MongoTemplate missing method remove(Object object, String collectionName)
-
-** Refactoring
- * [DATAMONGO-274] - Split up repository package according to the structure in Spring Data JPA
-
-** Task
- * [DATAMONGO-264] - Ensure Data Document examples work
- * [DATAMONGO-265] - Create new github repository for mongodb
- * [DATAMONGO-266] - Create new github repository for CouchDB
- * [DATAMONGO-297] - Prune project directory
- * [DATAMONGO-303] - Update to QueryDsl 2.2.4
-
-
-Changes in version 1.0.0.M4 MongoDB (2011-09-01)
------------------------------------------------ 
-
-** Bug
- * [DATADOC-134] - MongoDB: No exception when saving duplicate value to an attribute annotated with @Indexed(unique=true)
- * [DATADOC-162] - Exception thrown on toString of Point class
- * [DATADOC-167] - @Document annotation is not inherited
- * [DATADOC-168] - Registering a custom converter from String to UUID causes all Strings to be converted to UUIDs
- * [DATADOC-172] - Unable to force property order when saving document
- * [DATADOC-176] - @DBRef annotation only supports ids of type ObjectId
- * [DATADOC-177] - Sorting on multiple fields does not maintain order
- * [DATADOC-181] - MongoFactoryBean does not call Mongo.close() on shutdown
- * [DATADOC-190] - SimpleMongoRepository.exists(…) returns false for existing entities with non-ObjectId id
- * [DATADOC-192] - MappingMongoConverter does not read empty Sets correctly
- * [DATADOC-199] - Synchronisation during performance tests
- * [DATADOC-207] - MappingMongoConverter fails when reading empty java.util.SortedMaps
- * [DATADOC-209] - Collections of enums not handled correctly
- * [DATADOC-210] - spring-data-mongodb requires Java 1.6, should require 1.5
- * [DATADOC-212] - NPE during MongoTemplate.update() if no ID field is defined (via field name or annotation)
- * [DATADOC-217] - Set cannot be used as a collection in a Document
- * [DATADOC-218] - Adding of custom simple types is not easy
- * [DATADOC-221] - BigDecimal values not read correctly in maps
- * [DATADOC-224] - MappingMongoConverter does not inspect value type for Object properties
- * [DATADOC-228] - NullPointerException when persisting Map with null values
- * [DATADOC-229] - When a parameterized List is used in the PersistentConstructor, conversion fails
- * [DATADOC-231] - spring-data-mongodb does not work in an OSGi server because of unresolved dependencies
- * [DATADOC-232] - MongoDB allows $inc on many fields in one query, but Update().inc(first).inc(last) only applies the last inc
- * [DATADOC-235] - Unable to map unstructured data
- * [DATADOC-236] - Repository queries do not honour order defined in method name
- * [DATADOC-237] - @Indexed annotation doesn't honor field name from @Field annotation
- * [DATADOC-240] - Update with id key is not working
- * [DATADOC-243] - mongo:mapping-converter schema does not allow db-factory-ref
- * [DATADOC-246] - Stack overflow when Update.pushAll(push)
- * [DATADOC-247] - QueryMapper does not handle BigInteger ids correctly
- * [DATADOC-248] - MongoDB Query and Collection mapping
- * [DATADOC-249] - ConcurrentModificationException when calling MongoTemplate.updateFirst
- * [DATADOC-254] - SimpleMongoDbFactory should handle dots in database names correctly
-
-** Improvement
- * [DATADOC-32] - SimpleMongoConverter could support identifying Spring EL expressions in keys
- * [DATADOC-63] - Converters to support use of a 'typeId' strategy to determine class to marshal/unmarshal from Mongo
- * [DATADOC-166] - Check for null in various template CRUD methods
- * [DATADOC-169] - Registering custom converters for a type requires treating the type as simple in mapping context
- * [DATADOC-171] - IllegalArgumentException when persisting entity with BigDecimal field
- * [DATADOC-178] - System.out.println in the in method of Criteria. Line 179.
- * [DATADOC-188] - Allow means to disable repository infrastructure creating indexes
- * [DATADOC-189] - Improve extensibility of MongoRepositoryFactoryBean
- * [DATADOC-215] - Allow configuring WriteConcern via MongoFactoryBean and thus the namespace
- * [DATADOC-223] - Registering of customSimpleTypes should be available through the mongo namespace of spring-data-mongodb
- * [DATADOC-225] - BasicMongoPersistentEntity shouldn't reject root entities without an id property
- * [DATADOC-241] - Allow Map conversion behavior overriding
- * [DATADOC-255] - Add to MongoOperations an executeCommand method with an additional integer options argument
- * [DATADOC-256] - Update to use MongoDB driver version 2.6.5
-
-** New Feature
- * [DATADOC-7] - Support for map-reduce operations in MongoTemplate
- * [DATADOC-64] - Allow defining the collections a query is run against on finder methods
- * [DATADOC-68] - Support for geoNear command
- * [DATADOC-87] - Provide @GeoSpatialIndexed annotation that mirrors GeoSpatialIndex class for use in mapping.
- * [DATADOC-100] - Provide means to externalize manually defined queries
- * [DATADOC-202] - Add a 'DocumentCallbackHandler' so that a callback can process each DBObject returned from a query
- * [DATADOC-216] - Allow configuring a WriteConcern on SimpleMongoDbFactory for DB wide WriteConcern
- * [DATADOC-226] - Add QuerydslRepositorySupport helper class similar to the one in Spring Data JPA
-
-** Refactoring
- * [DATADOC-170] - Review listener design
- * [DATADOC-191] - Remove 'document' from package names
- * [DATADOC-214] - MongoConverter refactorings
-
-** Task
- * [DATADOC-91] - Add more TestCases for the basic functionality
- * [DATADOC-152] - Investigate failing of test for repository.findbyLocationWithinBox
- * [DATADOC-175] - Review DSM matrix and remove package cycles if found.
- * [DATADOC-194] - Remove use of Class.forName to support use in an OSGi environment
- * [DATADOC-206] - Upgrade to Querydsl 2.2.0
- * [DATADOC-253] - Upgrade to Spring 3.0.6
-
-
-Changes in version 1.0.0.M3 MongoDB (2011-06-02)
------------------------------------------------
-
-General
-* [DATADOC-92] - Exception translation to catch RuntimeException instead of MongoException
-* [DATADOC-111] - Ensure all MongoTemplate methods execute within the context of a callback method.
-* [DATADOC-120] - Remove MongoReaderWriter
-* [DATADOC-160] - Review MongoOperations and make the parameter ordering more consistent across methods
-
-Core Data Access
-* [DATADOC-80] - Rename addConverters to setCustomConverters in MappingMongoConverter and SimpleMongoConverter
-* [DATADOC-89] - Support setting slaveOk in MongoTemplate per query
-* [DATADOC-108] - Add findById method to MongoTemplate.
-* [DATADOC-112] - Storing a DBRef uses the wrong collection name
-* [DATADOC-116] - Nesting DBRefs doesn't work > 1 layers
-* [DATADOC-117] - Remove the default collection name on the MongoTemplate
-* [DATADOC-118] - Remove MongoTemplate methods that take a Reader/Writer parameter
-* [DATADOC-121] - Deprecate SimpleMongoConverter
-* [DATADOC-124] - Add method to MappingContext to get the collection name used for a specific type
-* [DATADOC-127] - @Document(collection="SOME_COLLECTION_NAME") doesn't take effect
-* [DATADOC-141] - Provide a prepareCollection protected method in MongoTemplate to allow customization of behavior such as slaveOk or writeConcern via a subclass
-* [DATADOC-142] - Change constructors in MongoTemplate that take Mongo object
-* [DATADOC-143] - MappingMongoConverter should be MongoTemplate's default converter
-* [DATADOC-149] - Remove setter for MongoDbFactory from MappingMongoConverter
-* [DATADOC-157] - MongoTemplate updateFirst/updateMulti methods to take java.lang.Class parameter as last in method param list to be consistent with other usage
-* [DATADOC-158] - Change default conventions of java.lang.Class to collection name in MappingMongoConverter to be first letter lower case camel casing.
-
-Configuration
-* [DATADOC-42] - Provide option for configuring replica sets using the Mongo namespace
-* [DATADOC-88] - Create MongoDbFactory to consolidate DB, Server location, and user credentials into one location
-* [DATADOC-119] - Modify XML schema and @Configuration base class to make it easy to register custom Spring converters with the mapper.
-* [DATADOC-133] - Support Property Placeholder as MongoDB Port Number in Application Context Configuration
-* [DATADOC-135] - Attribute names should use '-' instead of camel case to be consistent with other attribute names
-* [DATADOC-138] - Expose all properties of the MongoOptions class in Mongo namespace
-
-Querying / Updating
-* [DATADOC-43] - Query creator should support Near and Within keyword
-* [DATADOC-96] - Query#or(Query) does not work
-* [DATADOC-102] - Update does not let you $set across multiple fields
-* [DATADOC-106] - Add additional mongo operators to Criteria class
-* [DATADOC-107] - Criteria "in" operator should correctly handle passing in a collection instead of an array.
-* [DATADOC-113] - NotNull/IsNotNull not implemented in MongoQueryCreator
-* [DATADOC-146] - Advanced Regexp Queries
-
-Mapping
-* [DATADOC-95] - Can not save an object that has not had any of its properties set
-* [DATADOC-97] - ID replacement not working correctly when using updateFirst/updateMulti
-* [DATADOC-98] - Collection or Object[][] doesn't save correctly
-* [DATADOC-109] - Add MappingContext to MongoConverter interface
-* [DATADOC-110] - Improve implementation of substituteMappedIdIfNecessary
-* [DATADOC-101] - Explicit Converters only registered one way.
-* [DATADOC-114] - UpdateFirst/Multi operations on MongoTemplate not properly using converter to store objects.
-* [DATADOC-122] - Use same default collection name for MappingMongoConverter and SimpleMongoConverter
-* [DATADOC-123] - Use the same id/_id mapping for MappingMongoConverter and SimpleMongoConverter
-* [DATADOC-128] - Support inheritance with Document mappings
-* [DATADOC-130] - Problem with Converters (java.util.Locale)
-* [DATADOC-144] - Add an @Key annotation to allow defining the key a property is stored under
-* [DATADOC-145] - MappingMongoConverter does not convert objects for collections of interfaces
-* [DATADOC-155] - Need to support plain POJOs with non-ObjectId-compatible ID properties
-* [DATADOC-156] - MongoOperations.find(query(where("id").in(ids))) fails where ids aren't ObjectIds
-* [DATADOC-159] - Saving the same Entity multiple times creates multiple entries in the database
-* [DATADOC-161] - MappingMongoConverter now supports nested Maps
-
-Repository
-* [DATADOC-115] - Upgrade to QueryDsl 2.2.0-beta4
-* [DATADOC-137] - Parameter values in MongoDB JSON Query are not being replaced properly
-
-Documentation
-* [DATADOC-99] - Reference documentation shows invalid field spec for @Query usage with repositories
-
-
-Changes in version 1.0.0.M2 MongoDB (2011-04-09)
------------------------------------------------ 
-
-General
-* Spring configuration support using Java based @Configuration classes
-
-Core Data Access
-* Persistence and mapping lifecycle events
-* GeoSpatial integration
-* [DATADOC-76] - Add support for findAndRemove to MongoTemplate/MongoOperations
-* [DATADOC-5] - Provide detailed mapping of Mongo errors onto Spring DAO exception
-* [DATADOC-51] - Fixed issue with exceptions thrown when authenticating multiple times for same DB instance
-
-Querying
-* [DATADOC-72] - Add support for Mongo's $elemMatch and chained Criteria
-* [DATADOC-77] - Rename "and" method in Query to "addCriteria"
-* [DATADOC-67] - Criteria API to support keywords for geo search
-
-Mapping
-* Feature Rich Object Mapping integrated with Spring's Conversion Service
-* Annotation based mapping metadata but extensible to support other metadata formats
-* [DATADOC-60] - Add namespace support to setup a MappingMongoConverter
-* [DATADOC-33] - Introduce annotation to demarcate id field in a domain object
-
-Repository
-* [DATADOC-47, DATACMNS-17] - Adapted new metamodel API
-* [DATADOC-46] - Added support for 'In' and 'NotIn' keyword
-* [DATADOC-49] - Fixed 'And' and 'Or' keywords
-* [DATADOC-41] - Added support for executing QueryDsl predicates
-* [DATADOC-69] - Let repository namespace pick up the default mapping context bean and allow configuration
-* [DATADOC-24] - Allow use of @Query annotation to define queries
-* [DATADOC-34] - Create indexes for columns that are mentioned in query methods
-
-Cross-Store
-* [DATADOC-48] - Cross-store persistence - support for JPA Entities with fields transparently persisted/retrieved using MongoDB
-
-Logging
-* [DATADOC-66] - Log4j log appender
-
-Changes in version 1.0.0.M1 MongoDB (2011-02-14)
----------------------------------------------- 
-
-General
-* BeanFactory for basic configuration of Mongo environment
-* Namespace for basic configuration of Mongo environment
-
-Core Data Access
-* Introduce MongoTemplate implementation with methods defined in MongoOperations interface
-* MongoTemplate support for insert, find, save, update, remove
-* MongoTemplate support for basic POJO serialization based on bean properties
-* Allow MongoTemplate methods to use a default collection name
-* Exception translation in MongoTemplate to Spring's DAO exception hierarchy
-* Support for update modifiers to allow for partial document updates
-* Expose WriteConcern settings on MongoTemplate used for any write operations
-* Support in MongoTemplate for enabling either logging or throwing exceptions based on the WriteResult return value.
-
-Repository
-* Introducing generic repository implementation for MongoDB
-* Automatic implementation of interface query method names on repositories.
-* Namespace support for Mongo repositories
-* Allow usage of pagination and sorting with repositories
-
diff --git a/src/main/resources/license.txt b/src/main/resources/license.txt
index 7584e2dfe2..964a55d1c3 100644
--- a/src/main/resources/license.txt
+++ b/src/main/resources/license.txt
@@ -1,6 +1,6 @@
                                  Apache License
                            Version 2.0, January 2004
-                        http://www.apache.org/licenses/
+                        https://www.apache.org/licenses/
 
    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +192,7 @@
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
 
-   http://www.apache.org/licenses/LICENSE-2.0
+   https://www.apache.org/licenses/LICENSE-2.0
 
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
@@ -207,7 +207,7 @@
 similar licenses that require the source code and/or modifications to
 source code to be made available (as would be noted above), you may obtain a
 copy of the source code corresponding to the binaries for such open source
 components and modifications thereto, if any, (the "Source Files"), by
-downloading the Source Files from http://www.springsource.org/download,
+downloading the Source Files from https://www.springsource.org/download,
 or by sending a request, with your name and address to: VMware, Inc.,
 3401 Hillview Avenue, Palo Alto, CA 94304, United States of America or
 email info@vmware.com. All such requests should clearly specify: OPEN
 SOURCE FILES REQUEST, Attention General
diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt
index 1ba8024ff6..52ee00c4f5 100644
--- a/src/main/resources/notice.txt
+++ b/src/main/resources/notice.txt
@@ -1,5 +1,5 @@
-Spring Data MongoDB 1.9 GA
-Copyright (c) [2010-2015] Pivotal Software, Inc.
+Spring Data MongoDB 4.5 RC1 (2025.0.0)
+Copyright (c) [2010-2019] Pivotal Software, Inc.
 
 This product is licensed to you under the Apache License, Version 2.0 (the "License").
 You may not use this product except in compliance with the License.
@@ -8,3 +8,57 @@
 This product may include a number of subcomponents with separate copyright
 notices and license terms. Your use of the source code for these
 subcomponents is subject to the terms and conditions of the subcomponent's
 license, as noted in the LICENSE file.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+